code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
|---|---|---|---|---|---|
## Messages for challenge scoring script.
import string
import sys
import warnings

## Module level state. You'll need to set a synapse object at least
## before using this module.
syn = None                    # Synapse client; must be assigned before sending
send_messages = True          # master switch for participant-facing messages
send_notifications = True     # switch for administrator error notifications
acknowledge_receipt = False   # also confirm successfully validated submissions
dry_run = False               # print messages instead of sending them

## Edit these URLs to point to your challenge and its support forum
defaults = dict(
    challenge_instructions_url = "https://www.synapse.org/",
    support_forum_url = "http://support.sagebase.org/sagebase")
##---------------------------------------------------------
## Message templates:
## Edit to fit your challenge.
##---------------------------------------------------------
## Templates are filled in by DefaultingFormatter, so unknown placeholders
## fall back to the module-level `defaults` dict or are left visible in
## the rendered message.

## Sent to the submitter when validation fails.
validation_failed_subject_template = "Validation error in submission to {queue_name}"
validation_failed_template = """\
Hello {username},
Sorry, but we were unable to validate your submission to the {queue_name}.
Please refer to the challenge instructions which can be found at \
{challenge_instructions_url} and to the error message below:
submission name: {submission_name}
submission ID: {submission_id}
{message}
If you have questions, please ask on the forums at {support_forum_url}.
Sincerely,
the scoring script
"""

## Sent when a submission is received and is correctly formatted.
validation_passed_subject_template = "Submission received to {queue_name}"
validation_passed_template = """\
Hello {username},
We have received your submission to the {queue_name} and confirmed that it is correctly formatted.
submission name: {submission_name}
submission ID: {submission_id}
If you have questions, please ask on the forums at {support_forum_url} or refer to the challenge \
instructions which can be found at {challenge_instructions_url}.
Sincerely,
the scoring script
"""

## Sent when a submission has been scored successfully.
scoring_succeeded_subject_template = "Scored submission to {queue_name}"
scoring_succeeded_template = """\
Hello {username},
Your submission \"{submission_name}\" (ID: {submission_id}) to the {queue_name} has been scored:
{message}
If you have questions, please ask on the forums at {support_forum_url}.
Sincerely,
the scoring script
"""

## Sent to the submitter when scoring raises an exception.
scoring_error_subject_template = "Exception while scoring submission to {queue_name}"
scoring_error_template = """\
Hello {username},
Sorry, but we were unable to process your submission to the {queue_name}.
Please refer to the challenge instructions which can be found at \
{challenge_instructions_url} and to the error message below:
submission name: {submission_name}
submission ID: {submission_id}
{message}
If you have questions, please ask on the forums at {support_forum_url}.
Sincerely,
the scoring script
"""

## Sent to challenge administrators when the scoring script itself fails.
notification_subject_template = "Exception while scoring submission to {queue_name}"
error_notification_template = """\
Hello Challenge Administrator,
The scoring script for {queue_name} encountered an error:
{message}
Sincerely,
the scoring script
"""
class DefaultingFormatter(string.Formatter):
    """
    Python's string.format has the annoying habit of raising a KeyError
    if you don't completely fill in the template. Let's do something a
    bit nicer: fall back to the module-level `defaults` dict, and leave
    unresolved placeholders visible in the output (with a warning)
    instead of crashing.

    Adapted from: http://stackoverflow.com/a/19800610/199166
    """
    def get_value(self, key, args, kwds):
        if isinstance(key, str):
            value = kwds.get(key, defaults.get(key, None))
            if value is None:
                # Keep the literal "{key}" in the rendered text so the
                # missing value is obvious to the reader.
                value = "{{{0}}}".format(key)
                warnings.warn("Missing template variable %s" % value)
            return value
        else:
            # Bug fix: the original referenced the unimported name
            # `Formatter` (NameError at runtime) and discarded the result.
            # Delegate positional (integer) keys to the base class.
            return string.Formatter.get_value(self, key, args, kwds)
## Shared formatter instance used by send_message below.
formatter = DefaultingFormatter()

##---------------------------------------------------------
## functions for sending various types of messages
##---------------------------------------------------------
def validation_failed(userIds, **kwargs):
    """Tell submitters their submission failed validation.

    No-op (returns None) when participant messaging is disabled.
    """
    if not send_messages:
        return None
    return send_message(userIds=userIds,
                        subject_template=validation_failed_subject_template,
                        message_template=validation_failed_template,
                        kwargs=kwargs)
def validation_passed(userIds, **kwargs):
    """Confirm receipt of a correctly formatted submission.

    Only sent when `acknowledge_receipt` is enabled; otherwise returns None.
    """
    if not acknowledge_receipt:
        return None
    return send_message(userIds=userIds,
                        subject_template=validation_passed_subject_template,
                        message_template=validation_passed_template,
                        kwargs=kwargs)
def scoring_succeeded(userIds, **kwargs):
    """Tell submitters their submission was scored.

    No-op (returns None) when participant messaging is disabled.
    """
    if not send_messages:
        return None
    return send_message(userIds=userIds,
                        subject_template=scoring_succeeded_subject_template,
                        message_template=scoring_succeeded_template,
                        kwargs=kwargs)
def scoring_error(userIds, **kwargs):
    """Tell submitters their submission raised an error during scoring.

    No-op (returns None) when participant messaging is disabled.
    """
    if not send_messages:
        return None
    return send_message(userIds=userIds,
                        subject_template=scoring_error_subject_template,
                        message_template=scoring_error_template,
                        kwargs=kwargs)
def error_notification(userIds, **kwargs):
    """Notify challenge administrators that the scoring script failed.

    No-op (returns None) when admin notifications are disabled.
    """
    if not send_notifications:
        return None
    return send_message(userIds=userIds,
                        subject_template=notification_subject_template,
                        message_template=error_notification_template,
                        kwargs=kwargs)
def send_message(userIds, subject_template, message_template, kwargs):
    """Fill in subject/body templates and deliver the message via Synapse.

    Returns the sendMessage response on success, None in dry-run mode,
    and None (with a note on stderr) when no Synapse client is configured.
    Note: Python 2 module (print statements, `unicode`).
    """
    print kwargs
    subject = formatter.format(subject_template, **kwargs)
    message = formatter.format(message_template, **kwargs)
    if dry_run:
        # Show what would have been sent without touching Synapse.
        print "\nDry Run: would have sent:"
        print subject
        print "-" * 60
        print message
        return None
    elif syn:
        response = syn.sendMessage(
            userIds=userIds,
            messageSubject=subject,
            messageBody=message)
        # Encode explicitly: Python 2 `print` can fail on unicode responses.
        print "sent: ", unicode(response).encode('utf-8')
        return response
    else:
        sys.stderr.write("Can't send message. No Synapse object configured\n")
|
Sage-Bionetworks/U4CChallenge
|
python/messages.py
|
Python
|
apache-2.0
| 5,891
|
from __future__ import unicode_literals
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.utils.unittest import TestCase
class PaginatorTests(TestCase):
    """
    Tests for the Paginator and Page classes.
    """

    def check_paginator(self, params, output):
        """
        Helper method that instantiates a Paginator object from the passed
        params and then checks that its attributes match the passed output.
        """
        count, num_pages, page_range = output
        paginator = Paginator(*params)
        self.check_attribute('count', paginator, count, params)
        self.check_attribute('num_pages', paginator, num_pages, params)
        self.check_attribute('page_range', paginator, page_range, params, coerce=list)

    def check_attribute(self, name, paginator, expected, params, coerce=None):
        """
        Helper method that checks a single attribute and gives a nice error
        message upon test failure.
        """
        got = getattr(paginator, name)
        # `coerce` lets callers normalize lazily-evaluated attributes
        # (e.g. page_range) before comparison.
        if coerce is not None:
            got = coerce(got)
        self.assertEqual(expected, got,
            "For '%s', expected %s but got %s. Paginator parameters were: %s"
            % (name, expected, got, params))

    def test_invalid_page_number(self):
        """
        Tests that invalid page numbers result in the correct exception being
        raised.
        """
        paginator = Paginator([1, 2, 3], 2)
        self.assertRaises(PageNotAnInteger, paginator.validate_number, None)
        self.assertRaises(PageNotAnInteger, paginator.validate_number, 'x')

    def test_paginator(self):
        """
        Tests the paginator attributes using varying inputs.
        """
        nine = [1, 2, 3, 4, 5, 6, 7, 8, 9]
        ten = nine + [10]
        eleven = ten + [11]
        tests = (
            # Each item is two tuples:
            #   First tuple is Paginator parameters - object_list, per_page,
            #   orphans, and allow_empty_first_page.
            #   Second tuple is resulting Paginator attributes - count,
            #   num_pages, and page_range.
            # Ten items, varying orphans, no empty first page.
            ((ten, 4, 0, False), (10, 3, [1, 2, 3])),
            ((ten, 4, 1, False), (10, 3, [1, 2, 3])),
            ((ten, 4, 2, False), (10, 2, [1, 2])),
            ((ten, 4, 5, False), (10, 2, [1, 2])),
            ((ten, 4, 6, False), (10, 1, [1])),
            # Ten items, varying orphans, allow empty first page.
            ((ten, 4, 0, True), (10, 3, [1, 2, 3])),
            ((ten, 4, 1, True), (10, 3, [1, 2, 3])),
            ((ten, 4, 2, True), (10, 2, [1, 2])),
            ((ten, 4, 5, True), (10, 2, [1, 2])),
            ((ten, 4, 6, True), (10, 1, [1])),
            # One item, varying orphans, no empty first page.
            (([1], 4, 0, False), (1, 1, [1])),
            (([1], 4, 1, False), (1, 1, [1])),
            (([1], 4, 2, False), (1, 1, [1])),
            # One item, varying orphans, allow empty first page.
            (([1], 4, 0, True), (1, 1, [1])),
            (([1], 4, 1, True), (1, 1, [1])),
            (([1], 4, 2, True), (1, 1, [1])),
            # Zero items, varying orphans, no empty first page.
            (([], 4, 0, False), (0, 0, [])),
            (([], 4, 1, False), (0, 0, [])),
            (([], 4, 2, False), (0, 0, [])),
            # Zero items, varying orphans, allow empty first page.
            (([], 4, 0, True), (0, 1, [1])),
            (([], 4, 1, True), (0, 1, [1])),
            (([], 4, 2, True), (0, 1, [1])),
            # Number of items one less than per_page.
            (([], 1, 0, True), (0, 1, [1])),
            (([], 1, 0, False), (0, 0, [])),
            (([1], 2, 0, True), (1, 1, [1])),
            ((nine, 10, 0, True), (9, 1, [1])),
            # Number of items equal to per_page.
            (([1], 1, 0, True), (1, 1, [1])),
            (([1, 2], 2, 0, True), (2, 1, [1])),
            ((ten, 10, 0, True), (10, 1, [1])),
            # Number of items one more than per_page.
            (([1, 2], 1, 0, True), (2, 2, [1, 2])),
            (([1, 2, 3], 2, 0, True), (3, 2, [1, 2])),
            ((eleven, 10, 0, True), (11, 2, [1, 2])),
            # Number of items one more than per_page with one orphan.
            (([1, 2], 1, 1, True), (2, 1, [1])),
            (([1, 2, 3], 2, 1, True), (3, 1, [1])),
            ((eleven, 10, 1, True), (11, 1, [1])),
            # Non-integer inputs
            ((ten, '4', 1, False), (10, 3, [1, 2, 3])),
            ((ten, '4', 1, False), (10, 3, [1, 2, 3])),
            ((ten, 4, '1', False), (10, 3, [1, 2, 3])),
            ((ten, 4, '1', False), (10, 3, [1, 2, 3])),
        )
        for params, output in tests:
            self.check_paginator(params, output)

    def check_indexes(self, params, page_num, indexes):
        """
        Helper method that instantiates a Paginator object from the passed
        params and then checks that the start and end indexes of the passed
        page_num match those given as a 2-tuple in indexes.
        """
        paginator = Paginator(*params)
        # 'first'/'last' are symbolic names resolved against the paginator.
        if page_num == 'first':
            page_num = 1
        elif page_num == 'last':
            page_num = paginator.num_pages
        page = paginator.page(page_num)
        start, end = indexes
        msg = ("For %s of page %s, expected %s but got %s."
               " Paginator parameters were: %s")
        self.assertEqual(start, page.start_index(),
            msg % ('start index', page_num, start, page.start_index(), params))
        self.assertEqual(end, page.end_index(),
            msg % ('end index', page_num, end, page.end_index(), params))

    def test_page_indexes(self):
        """
        Tests that paginator pages have the correct start and end indexes.
        """
        ten = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
        tests = (
            # Each item is three tuples:
            #   First tuple is Paginator parameters - object_list, per_page,
            #   orphans, and allow_empty_first_page.
            #   Second tuple is the start and end indexes of the first page.
            #   Third tuple is the start and end indexes of the last page.
            # Ten items, varying per_page, no orphans.
            ((ten, 1, 0, True), (1, 1), (10, 10)),
            ((ten, 2, 0, True), (1, 2), (9, 10)),
            ((ten, 3, 0, True), (1, 3), (10, 10)),
            ((ten, 5, 0, True), (1, 5), (6, 10)),
            # Ten items, varying per_page, with orphans.
            ((ten, 1, 1, True), (1, 1), (9, 10)),
            ((ten, 1, 2, True), (1, 1), (8, 10)),
            ((ten, 3, 1, True), (1, 3), (7, 10)),
            ((ten, 3, 2, True), (1, 3), (7, 10)),
            ((ten, 3, 4, True), (1, 3), (4, 10)),
            ((ten, 5, 1, True), (1, 5), (6, 10)),
            ((ten, 5, 2, True), (1, 5), (6, 10)),
            ((ten, 5, 5, True), (1, 10), (1, 10)),
            # One item, varying orphans, no empty first page.
            (([1], 4, 0, False), (1, 1), (1, 1)),
            (([1], 4, 1, False), (1, 1), (1, 1)),
            (([1], 4, 2, False), (1, 1), (1, 1)),
            # One item, varying orphans, allow empty first page.
            (([1], 4, 0, True), (1, 1), (1, 1)),
            (([1], 4, 1, True), (1, 1), (1, 1)),
            (([1], 4, 2, True), (1, 1), (1, 1)),
            # Zero items, varying orphans, allow empty first page.
            (([], 4, 0, True), (0, 0), (0, 0)),
            (([], 4, 1, True), (0, 0), (0, 0)),
            (([], 4, 2, True), (0, 0), (0, 0)),
        )
        for params, first, last in tests:
            self.check_indexes(params, 'first', first)
            self.check_indexes(params, 'last', last)

        # When no items and no empty first page, we should get EmptyPage error.
        self.assertRaises(EmptyPage, self.check_indexes, ([], 4, 0, False), 1, None)
        self.assertRaises(EmptyPage, self.check_indexes, ([], 4, 1, False), 1, None)
        self.assertRaises(EmptyPage, self.check_indexes, ([], 4, 2, False), 1, None)

    def test_page_sequence(self):
        """
        Tests that a paginator page acts like a standard sequence.
        """
        eleven = 'abcdefghijk'
        page2 = Paginator(eleven, per_page=5, orphans=1).page(2)
        self.assertEqual(len(page2), 6)
        self.assertTrue('k' in page2)
        self.assertFalse('a' in page2)
        self.assertEqual(''.join(page2), 'fghijk')
        self.assertEqual(''.join(reversed(page2)), 'kjihgf')
|
pygeek/django
|
tests/regressiontests/pagination_regress/tests.py
|
Python
|
bsd-3-clause
| 8,540
|
# -*- coding: utf-8 -*-
#
# Copyright 2014 - Intel
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
import mock
import json
from healingclient.api import client
class FakeResponse(object):
    """Fake response for testing Healing Client.

    Minimal stand-in for a requests-style HTTP response: exposes the
    status code, the raw body, and JSON decoding of that body.
    """

    def __init__(self, status_code, content=None):
        # Only the two attributes the client code actually reads.
        self.status_code = status_code
        self.content = content

    def json(self):
        """Decode the stored body as JSON (raises TypeError if body is None)."""
        return json.loads(self.content)
class BaseClientTest(unittest2.TestCase):
    """Base class for client tests: wires up a Client whose HTTP verbs can
    be replaced with MagicMocks returning canned FakeResponse objects."""

    def setUp(self):
        self._client = client.Client(project_name="test",
                                     healing_url="test")
        self.slacontract = self._client.slacontract

    def _mock_http_call(self, method, content, status_code):
        """Replace one HTTP verb on the client's http_client with a
        MagicMock returning FakeResponse(status_code, content).

        dict content is JSON-encoded first. Returns the mock so tests can
        assert on call arguments. (Factored out of the four copy-pasted
        mock_http_* builders.)
        """
        if isinstance(content, dict):
            content = json.dumps(content)
        mocked = mock.MagicMock(return_value=FakeResponse(status_code, content))
        setattr(self._client.http_client, method, mocked)
        return mocked

    def mock_http_get(self, content, status_code=200):
        return self._mock_http_call('get', content, status_code)

    def mock_http_post(self, content, status_code=201):
        return self._mock_http_call('post', content, status_code)

    def mock_http_put(self, content, status_code=200):
        return self._mock_http_call('put', content, status_code)

    def mock_http_delete(self, status_code=204):
        # DELETE carries no body, so it does not go through the helper.
        self._client.http_client.delete = \
            mock.MagicMock(return_value=FakeResponse(status_code))
        return self._client.http_client.delete
class BaseCommandTest(unittest2.TestCase):
    """Base class for CLI command tests with a fully mocked application."""

    def setUp(self):
        self.app = mock.Mock()
        self.app.client = mock.Mock()

    def call(self, command, app_args=None, prog_name=''):
        """Instantiate `command`, parse `app_args` and run its action.

        Bug fix: `app_args` previously defaulted to a shared mutable list
        (`app_args=[]`), so mutations leaked across calls; it now defaults
        to None and a fresh list is created per call. The leftover debug
        banner prints were also removed.
        """
        if app_args is None:
            app_args = []
        cmd = command(self.app, app_args)
        parsed_args = cmd.get_parser(prog_name).parse_args(app_args)
        return cmd.take_action(parsed_args)
|
sjcazzol/python-healingclient
|
healingclient/tests/base.py
|
Python
|
apache-2.0
| 2,820
|
# Copyright 2013 NetApp
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The share snapshots api."""
from oslo_log import log
import six
import webob
from webob import exc
from manila.api import common
from manila.api.openstack import wsgi
from manila.api.views import share_snapshots as snapshot_views
from manila import exception
from manila.i18n import _, _LI
from manila import share
LOG = log.getLogger(__name__)
class ShareSnapshotsController(wsgi.Controller):
    """The Share Snapshots API controller for the OpenStack API."""

    _view_builder_class = snapshot_views.ViewBuilder

    def __init__(self):
        super(ShareSnapshotsController, self).__init__()
        self.share_api = share.API()

    def show(self, req, id):
        """Return data about the given snapshot."""
        context = req.environ['manila.context']
        try:
            snapshot = self.share_api.get_snapshot(context, id)
        except exception.NotFound:
            # Internal NotFound maps to HTTP 404 at the API boundary.
            raise exc.HTTPNotFound()
        return self._view_builder.detail(req, snapshot)

    def delete(self, req, id):
        """Delete a snapshot."""
        context = req.environ['manila.context']
        LOG.info(_LI("Delete snapshot with id: %s"), id, context=context)
        try:
            snapshot = self.share_api.get_snapshot(context, id)
            self.share_api.delete_snapshot(context, snapshot)
        except exception.NotFound:
            raise exc.HTTPNotFound()
        # 202 Accepted: deletion completes asynchronously.
        return webob.Response(status_int=202)

    def index(self, req):
        """Returns a summary list of snapshots."""
        return self._get_snapshots(req, is_detail=False)

    def detail(self, req):
        """Returns a detailed list of snapshots."""
        return self._get_snapshots(req, is_detail=True)

    def _get_snapshots(self, req, is_detail):
        """Returns a list of snapshots filtered/sorted by query parameters.

        :param is_detail: select the detailed vs. summary view rendering.
        """
        context = req.environ['manila.context']
        search_opts = {}
        search_opts.update(req.GET)
        # Remove keys that are not related to share attrs
        search_opts.pop('limit', None)
        search_opts.pop('offset', None)
        sort_key = search_opts.pop('sort_key', 'created_at')
        sort_dir = search_opts.pop('sort_dir', 'desc')
        # NOTE(vponomaryov): Manila stores in DB key 'display_name', but
        # allows to use both keys 'name' and 'display_name'. It is leftover
        # from Cinder v1 and v2 APIs.
        if 'name' in search_opts:
            search_opts['display_name'] = search_opts.pop('name')
        common.remove_invalid_options(context, search_opts,
                                      self._get_snapshots_search_options())
        snapshots = self.share_api.get_all_snapshots(
            context,
            search_opts=search_opts,
            sort_key=sort_key,
            sort_dir=sort_dir,
        )
        # Pagination limits are applied after retrieval.
        limited_list = common.limited(snapshots, req)
        if is_detail:
            snapshots = self._view_builder.detail_list(req, limited_list)
        else:
            snapshots = self._view_builder.summary_list(req, limited_list)
        return snapshots

    def _get_snapshots_search_options(self):
        """Return share search options allowed by non-admin."""
        return ('display_name', 'name', 'status', 'share_id', 'size')

    def update(self, req, id, body):
        """Update a snapshot (only display_name / display_description)."""
        context = req.environ['manila.context']
        if not body or 'snapshot' not in body:
            raise exc.HTTPUnprocessableEntity()
        snapshot_data = body['snapshot']
        valid_update_keys = (
            'display_name',
            'display_description',
        )
        # Keep only whitelisted keys that are actually present in the body.
        update_dict = dict([(key, snapshot_data[key])
                            for key in valid_update_keys
                            if key in snapshot_data])
        try:
            snapshot = self.share_api.get_snapshot(context, id)
        except exception.NotFound:
            raise exc.HTTPNotFound()
        snapshot = self.share_api.snapshot_update(context, snapshot,
                                                  update_dict)
        snapshot.update(update_dict)
        return self._view_builder.detail(req, snapshot)

    @wsgi.response(202)
    def create(self, req, body):
        """Creates a new snapshot."""
        context = req.environ['manila.context']
        if not self.is_valid_body(body, 'snapshot'):
            raise exc.HTTPUnprocessableEntity()
        snapshot = body['snapshot']
        share_id = snapshot['share_id']
        share = self.share_api.get(context, share_id)
        # Verify that share can be snapshotted
        if not share['snapshot_support']:
            msg = _("Snapshot cannot be created from share '%s', because "
                    "share back end does not support it.") % share_id
            LOG.error(msg)
            raise exc.HTTPUnprocessableEntity(explanation=msg)
        LOG.info(_LI("Create snapshot from share %s"),
                 share_id, context=context)
        # NOTE(rushiagr): v2 API allows name instead of display_name
        if 'name' in snapshot:
            snapshot['display_name'] = snapshot.get('name')
            del snapshot['name']
        # NOTE(rushiagr): v2 API allows description instead of
        # display_description
        if 'description' in snapshot:
            snapshot['display_description'] = snapshot.get('description')
            del snapshot['description']
        new_snapshot = self.share_api.create_snapshot(
            context,
            share,
            snapshot.get('display_name'),
            snapshot.get('display_description'))
        return self._view_builder.detail(
            req, dict(six.iteritems(new_snapshot)))
def create_resource():
    """Build the WSGI resource wrapping the share-snapshots controller."""
    controller = ShareSnapshotsController()
    return wsgi.Resource(controller)
|
redhat-openstack/manila
|
manila/api/v1/share_snapshots.py
|
Python
|
apache-2.0
| 6,346
|
def checkio(teleports_string):
    """Depth-first search for a route that starts and ends at station '1',
    visits all stations 1-8, and uses each teleport (a two-char pair like
    "12") at most once. Returns the route string, or None if none exists.
    """
    teleports = teleports_string.split(',')
    # Each stack entry pairs the route built so far with the teleports
    # already consumed along it (replaces the original parallel lists).
    stack = [('1', [])]
    while stack:
        route, used = stack.pop()
        if set('12345678') <= set(route) and route[-1] == '1':
            return route
        here = route[-1]
        for link in teleports:
            if link not in used and here in link:
                # Hop to the other end of the teleport.
                nxt = link[1] if link[0] == here else link[0]
                stack.append((route + nxt, used + [link]))
|
Pouf/CodingCompetition
|
CiO/disposable-teleports.py
|
Python
|
mit
| 437
|
import sublime
class Settings:
    """Thin wrapper around the plugin's Sublime Text settings file."""

    def __init__(self):
        self.__settings_key = 'Format.sublime-settings'

    def add_observer(self, key, observer):
        """Invoke `observer` whenever the entry registered under `key` changes."""
        self.__load().add_on_change(self.__observer_id(key), observer)

    def remove_observer(self, key):
        """Stop notifying the observer registered under `key`."""
        self.__load().clear_on_change(self.__observer_id(key))

    def formatter(self, name):
        """Return the settings dict for one formatter, or {} if unknown."""
        return self.__formatters().get(name, {})

    def paths(self):
        """Return the configured search paths (defaults to an empty list)."""
        return self.__load().get('paths', default=[])

    def update_formatter(self, name, value):
        """Store `value` under formatter `name` and persist to disk."""
        updated = self.__formatters()
        updated[name] = value
        self.__update_formatters(updated)
        self.__save()

    def __formatters(self):
        return self.__load().get('formatters', default={})

    def __update_formatters(self, formatters):
        return self.__load().set('formatters', formatters)

    def __load(self):
        # Sublime caches the settings object; this is cheap to call often.
        return sublime.load_settings(self.__settings_key)

    def __save(self):
        sublime.save_settings(self.__settings_key)

    def __observer_id(self, key):
        # Namespace observer ids by settings file to avoid collisions.
        return '%s.%s' % (self.__settings_key, key)
class FormatterSettings:
    """Settings view for a single named formatter, with a lazily filled
    cache that is invalidated whenever the underlying settings change."""

    def __init__(self, name):
        self.__name = name
        self.__cache = None  # memoized formatter dict; None means stale
        self.__settings = Settings()
        self.__settings.add_observer(self.__name, self.__invalidate_cache)

    def __del__(self):
        self.__settings.remove_observer(self.__name)

    @property
    def format_on_save(self):
        return self.__get('format_on_save', default=False)

    @format_on_save.setter
    def format_on_save(self, value):
        return self.__set('format_on_save', value)

    @property
    def sources(self):
        return self.__get('sources', default=[])

    @property
    def options(self):
        return self.__get('options', default=[])

    def __get(self, value, default=None):
        return self.__formatter_settings().get(value, default)

    def __set(self, key, value):
        formatter_settings = self.__formatter_settings()
        formatter_settings[key] = value
        self.__settings.update_formatter(self.__name, formatter_settings)

    def __formatter_settings(self):
        # Refill the cache on first access after invalidation.
        if self.__cache is None:
            self.__cache = self.__settings.formatter(self.__name)
        return self.__cache

    def __invalidate_cache(self):
        # Bug fix: the original assigned `self.cache`, creating a brand-new
        # public attribute and leaving the real `self.__cache` stale forever
        # (settings changes were never picked up).
        self.__cache = None
|
Rypac/sublime-format
|
plugin/settings.py
|
Python
|
mit
| 2,344
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.compute import compute_service
from openstack import resource2
class ServerGroup(resource2.Resource):
    """Compute server group resource (os-server-groups API)."""
    resource_key = 'server_group'
    resources_key = 'server_groups'
    base_path = '/os-server-groups'
    service = compute_service.ComputeService()

    # Query parameters accepted when listing server groups.
    _query_mapping = resource2.QueryParameters("all_projects")

    # capabilities
    allow_create = True
    allow_get = True
    allow_delete = True
    allow_list = True

    # Properties
    #: A name identifying the server group
    name = resource2.Body('name')
    #: The list of policies supported by the server group
    policies = resource2.Body('policies')
    #: The list of members in the server group
    member_ids = resource2.Body('members')
    #: The metadata associated with the server group
    metadata = resource2.Body('metadata')
|
briancurtin/python-openstacksdk
|
openstack/compute/v2/server_group.py
|
Python
|
apache-2.0
| 1,373
|
#!/usr/bin/env python
import argparse
import greg.config
def main():
    """Command-line entry point: parse arguments, load the config file and
    dispatch the requested action. (Python 2 module: print statement.)"""
    parser = argparse.ArgumentParser(description='Integrate your build server and source control')
    parser.add_argument('--config', default='config.yaml', help='Path to config file')
    parser.add_argument('--fix-hooks', action='store_const', const='fixhooks', dest='action', help='Ensure repos alert greg on changes')
    parser.add_argument('--url', help='Base URL for greg')
    args = parser.parse_args()
    # Establish config filename
    greg.config.get_config(args.config)
    if args.action == 'fixhooks':
        fix_hooks(args)
        #TODO handle http server using greg.server as well
        #import greg.server
        #greg.server.start()
    else:
        print 'Not doing anything'
def fix_hooks(args):
    """Ensure every configured repository and build job has a webhook that
    points back at this greg instance.

    Requires args.url (the externally reachable base URL); raises otherwise.
    (Python 2 module: urlparse, filter-returns-list.)
    """
    import greg.provider
    import greg.builder
    from urlparse import urlparse
    import re
    # Reject when no url
    if not args.url:
        raise Exception('Must have URL to fix hooks')
    my_url = args.url
    # Enumerate all repo entries in config
    config = greg.config.get_config()
    for repo_conf in config.repos:
        provider = greg.provider.locate_bridge(repo_conf.provider)
        # Build proper URL: normalize to exactly one trailing slash, append
        # the /repo endpoint, and pass provider name + shared token in the
        # query string so incoming hooks can be authenticated.
        provider_url = urlparse(my_url)
        provider_url = provider_url._replace(path=re.sub('/*$','/',provider_url.path)+'repo')
        provider_url = provider_url._replace(query='provider=%s&token=%s'%(repo_conf.provider,provider.incoming_token))
        # Enumerate over all organizations
        for org in repo_conf.organizations:
            # Find all repos that match the repo config
            all_repos = provider.list_repos(org)
            repos = filter(lambda repo: repo_conf.match(repo_conf.provider, org, repo), all_repos)
            for repo in repos:
                # Ensure webhooks on that repo
                provider.ensure_webhook(org,repo,provider_url.geturl())
    # Collect all jobs and builders across every configured repo.
    jobs = set([(job.name,job.builder) for repo in config.repos for job in repo.jobs.values()])
    builders = set([job[1] for job in jobs])
    for builder_name in builders:
        builder_jobs = set([job[0] for job in jobs if job[1]==builder_name])
        builder = greg.builder.locate_bridge(builder_name)
        # Same URL construction as above, but against the /build endpoint.
        builder_url = urlparse(my_url)
        builder_url = builder_url._replace(path=re.sub('/*$','/',builder_url.path)+'build')
        builder_url = builder_url._replace(query='builder=%s&token=%s'%(builder_name,builder.incoming_token))
        for job_name in builder_jobs:
            builder.ensure_webhook(job_name,builder_url.geturl())
# Script entry point.
if __name__ == "__main__":
    main()
|
BackSlasher/greg
|
greg/__main__.py
|
Python
|
gpl-3.0
| 2,640
|
#!/usr/bin/env python
'''
ZCR Shellcoder
ZeroDay Cyber Research
Z3r0D4y.Com
Ali Razmjoo
'''
def start(type, shellcode, job):
    """Prefix `shellcode` with one 'N' for every known API-call marker that
    occurs in `job`.

    The `type` parameter is unused but kept for interface compatibility
    (existing callers pass it positionally). Behavior is identical to the
    original nine copy-pasted `if` blocks: multiple markers in `job` each
    add their own 'N' prefix.
    """
    # Note: 'download(' does not match inside 'download_execute(' because of
    # the underscore, so both markers are checked independently.
    markers = (
        'chmod(',
        'dir_create(',
        'download_execute(',
        'download(',
        'exec(',
        'file_create(',
        'script_executor(',
        'system(',
        'write(',
    )
    for marker in markers:
        if marker in job:
            shellcode = 'N' + shellcode
    return shellcode
|
firebitsbr/ZCR-Shellcoder
|
lib/encoder/windows_x86/xor_yourvalue.py
|
Python
|
gpl-3.0
| 637
|
import unittest
import code_helper
class Test0037(unittest.TestCase):
    """Project Euler 37: sum of primes that stay prime under repeated
    truncation from both the left and the right."""

    def test_problem(self):
        # All primes below one million (assumed sorted ascending, as
        # required by binary_search — verify against code_helper).
        primes = list(code_helper.range_prime(1000000))

        def trim_right_prime(n):
            # Drop the last digit repeatedly; every remainder must be prime.
            # Bug fix: use floor division (//=) — plain /= is true division
            # under Python 3, yielding floats that never equal a prime and
            # never reach 0 via division by 10. //= is identical for ints
            # under Python 2.
            n //= 10
            while n != 0:
                if code_helper.binary_search(primes, n) == -1:
                    return False
                n //= 10
            return True

        def trim_left_prime(n):
            # Drop the leading digit repeatedly by taking n modulo a
            # shrinking power of ten; every remainder must be prime.
            base = 10
            while base < n:
                base *= 10
            while base != 10:
                base //= 10
                n %= base
                if code_helper.binary_search(primes, n) == -1:
                    return False
            return True

        # (Removed the unused `s = 0` local from the original.)
        total = 0
        for prime in primes:
            # Single-digit primes are excluded by definition of the problem.
            if prime > 10 and trim_left_prime(prime) and trim_right_prime(
                    prime):
                total += prime
        self.assertEqual(total, 748317)
|
mccxj/leetcode
|
projecteuler/p0037_test.py
|
Python
|
apache-2.0
| 953
|
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: david@reciprocitylabs.com
# Maintained By: david@reciprocitylabs.com
import datetime
import factory
import random
from factory.base import BaseFactory, FactoryMetaClass, CREATE_STRATEGY
from factory.fuzzy import (
BaseFuzzyAttribute, FuzzyChoice, FuzzyDate, FuzzyDateTime, FuzzyInteger)
from factory.compat import UTC
from ggrc import models
from ggrc.models.reflection import AttributeInfo
def random_string(prefix='', no_unicode=False):
    """Return `prefix` followed by a random integer suffix, plus a trailing
    non-ASCII character (u'\\xff') unless `no_unicode` is set — the extra
    character exercises unicode handling in consumers."""
    tail = u'' if no_unicode else u'\xff'
    return u'%s%s%s' % (prefix, random.randint(0, 9999999999), tail)
def random_string_attribute(prefix=''):
    """LazyAttribute producing a fresh random string for each instance."""
    def _generate(model):
        return random_string(prefix)
    return factory.LazyAttribute(_generate)
class FuzzyEmail(BaseFuzzyAttribute):
    """Fuzzy attribute generating a random, ASCII-only email address."""
    def fuzz(self):
        user = random_string('user-', True)
        domain = random_string('domain-', True)
        return u"%s@%s.%s" % (user, domain, 'com')
class FactoryStubMarker(object):
    """Marker telling the factory machinery to create an instance of
    `class_` (via its own factory) for the marked attribute."""
    def __init__(self, class_):
        self.class_ = class_
class FactoryAttributeGenerator(object):
    """Use the SQLAlchemy ORM model to generate factory attributes.

    Dispatch is by class name: each handler below is named after the
    SQLAlchemy attribute/property/column-type class it handles and is
    looked up dynamically with getattr.
    """
    @classmethod
    def generate(cls, attrs, model_class, attr):
        """Generate a factory attribute for `attr` by inspecting the mapping
        type of the attribute in `model_class`. Add the attribute to the
        `attrs` dictionary.
        """
        if (hasattr(attr, '__call__')):
            # Callable attrs carry their own name; default to an empty list.
            attr_name = attr.attr_name
            value = []
        else:
            attr_name = attr
            class_attr = getattr(model_class, attr_name)
            #look up the class method to use to generate the attribute
            method = getattr(cls, class_attr.__class__.__name__)
            value = method(attr_name, class_attr)
        attrs[attr_name] = value

    @classmethod
    def InstrumentedAttribute(cls, attr_name, class_attr):
        # Unwrap the instrumented attribute and dispatch on its property type.
        method = getattr(cls, class_attr.property.__class__.__name__)
        return method(attr_name, class_attr)

    @classmethod
    def ColumnProperty(cls, attr_name, class_attr):
        # Dispatch on the SQL column type; unknown types get a random string.
        method = getattr(
            cls,
            class_attr.property.expression.type.__class__.__name__,
            cls.default_column_handler)
        return method(attr_name, class_attr)

    @classmethod
    def default_column_handler(cls, attr_name, class_attr):
        return random_string_attribute(attr_name)

    @classmethod
    def DateTime(cls, attr_name, class_attr):
        # Any timezone-aware datetime from 2013 to ~2 years in the future.
        return FuzzyDateTime(
            datetime.datetime(2013,1,1,tzinfo=UTC),
            datetime.datetime.now(UTC) + datetime.timedelta(days=730),
            )

    @classmethod
    def Date(cls, attr_name, class_attr):
        return FuzzyDate(
            datetime.date(2013,1,1),
            datetime.date.today() + datetime.timedelta(days=730),
            )

    @classmethod
    def Boolean(cls, attr_name, class_attr):
        return FuzzyChoice([True, False])

    @classmethod
    def Integer(cls, attr_name, class_attr):
        return FuzzyInteger(0,100000)

    @classmethod
    def RelationshipProperty(cls, attr_name, class_attr):
        if class_attr.property.uselist:
            # Collection relationship: start empty.
            return []
        else:
            columns = tuple(class_attr.property.local_columns)
            # FIXME: ? Doesn't handle multiple local columns, so won't work for
            #   polymorphic links
            if columns[0].nullable:
                # Not a required association, so skip it
                return None
            elif columns[0].primary_key:
                # This is a 'reverse' association, so skip it (primary keys are
                # not nullable, but the relationship may still be optional)
                return None
            else:
                # Required scalar association: mark it for factory creation.
                return FactoryStubMarker(class_attr.property.mapper.class_)

    @classmethod
    def AssociationProxy(cls, attr_name, class_attr):
        return []

    @classmethod
    def property(cls, attr_name, class_attr):
        # Plain Python properties are not settable factory attributes.
        return None

    @classmethod
    def simple_property(cls, attr_name, class_attr):
        return None
class ModelFactoryMetaClass(FactoryMetaClass):
    def __new__(cls, class_name, bases, attrs, extra_attrs=None):
        """Use model reflection to build up the list of factory attributes.
        The default attributes can be overridden by defining a subclass
        of `ModelFactory` and defining the attribute to be overriden.
        """
        model_class = attrs.pop('MODEL', None)
        if model_class:
            attrs['FACTORY_FOR'] = dict
            attribute_info = AttributeInfo(model_class)
            for attr in attribute_info._create_attrs:
                if hasattr(attr, '__call__'):
                    attr_name = attr.attr_name
                else:
                    attr_name = attr
                # Explicit class-level attributes win over generated ones.
                if not attr_name in attrs:
                    FactoryAttributeGenerator.generate(attrs, model_class, attr)
        return super(ModelFactoryMetaClass, cls).__new__(
            cls, class_name, bases, attrs)
# Abstract base factory: subclasses set MODEL and the metaclass above
# generates the remaining attributes from the model's SQLAlchemy mapping.
ModelFactory = ModelFactoryMetaClass(
    'ModelFactory', (BaseFactory,), {
        'ABSTRACT_FACTORY': True,
        'FACTORY_STRATEGY': CREATE_STRATEGY,
        '__doc__': """ModelFactory base with build and create support.
This class has supports SQLAlchemy ORM.
""",
    })
def factory_for(model_class):
    """Get the factory for a model by name or by class.

    If there is a factory defined for this model in globals() that factory
    will be used. Otherwise, one will be created and added to globals().
    """
    if isinstance(model_class, (str, unicode)):
        if '.' in model_class:
            # Dotted path: import the module and look the class up on it.
            import sys
            module_name, _, factory_name = model_class.rpartition('.')
            __import__(module_name)
            model_class = getattr(sys.modules[module_name], factory_name, None)
        else:
            # Bare name: resolve through the ggrc model registry.
            factory_name = model_class
            import ggrc.models
            model_class = ggrc.models.get_model(model_class)
    else:
        factory_name = model_class.__name__
    factory_name = '{0}Factory'.format(factory_name)
    factory = globals().get(factory_name, None)
    if factory is None:
        # No explicit factory defined; synthesize one and cache it so the
        # same class is returned on subsequent calls.
        class model_factory(ModelFactory):
            MODEL = model_class
        model_factory.__name__ = factory_name
        globals()[factory_name] = model_factory
        factory = model_factory
    return factory
# Simple declarative factories: each sets MODEL and overrides only the
# attributes that need explicit fuzzing; the metaclass reflects the rest.

class PersonFactory(ModelFactory):
    MODEL = models.Person
    email = FuzzyEmail()


# Governance Objects

class ProgramFactory(ModelFactory):
    MODEL = models.Program
    kind = FuzzyChoice(['Directive', 'Company Controls'])
    status = FuzzyChoice(MODEL.VALID_STATES)


class ContractFactory(ModelFactory):
    MODEL = models.Contract
    kind = FuzzyChoice(MODEL.valid_kinds)
    status = FuzzyChoice(MODEL.VALID_STATES)


class PolicyFactory(ModelFactory):
    MODEL = models.Policy
    kind = FuzzyChoice(MODEL.valid_kinds)
    status = FuzzyChoice(MODEL.VALID_STATES)


class RegulationFactory(ModelFactory):
    MODEL = models.Regulation
    kind = FuzzyChoice(MODEL.valid_kinds)
    status = FuzzyChoice(MODEL.VALID_STATES)


class StandardFactory(ModelFactory):
    MODEL = models.Standard
    kind = FuzzyChoice(MODEL.valid_kinds)
    status = FuzzyChoice(MODEL.VALID_STATES)


class SectionFactory(ModelFactory):
    MODEL = models.Section
    # Explicit `directive` factory is necessary, since it's a `nullable`
    # column, but uses @validate to maintain requirement
    directive = FactoryStubMarker(models.Regulation)


class ClauseFactory(ModelFactory):
    MODEL = models.Clause


class ObjectiveFactory(ModelFactory):
    MODEL = models.Objective
    status = FuzzyChoice(MODEL.VALID_STATES)


class ControlFactory(ModelFactory):
    MODEL = models.Control
    status = FuzzyChoice(MODEL.VALID_STATES)
# Business Objects: plain declarative factories, fuzzed status only.

class DataAssetFactory(ModelFactory):
    MODEL = models.DataAsset
    status = FuzzyChoice(MODEL.VALID_STATES)


class FacilityFactory(ModelFactory):
    MODEL = models.Facility
    status = FuzzyChoice(MODEL.VALID_STATES)


class MarketFactory(ModelFactory):
    MODEL = models.Market
    status = FuzzyChoice(MODEL.VALID_STATES)


class OrgGroupFactory(ModelFactory):
    MODEL = models.OrgGroup
    status = FuzzyChoice(MODEL.VALID_STATES)


class ProductFactory(ModelFactory):
    MODEL = models.Product
    status = FuzzyChoice(MODEL.VALID_STATES)


class ProjectFactory(ModelFactory):
    MODEL = models.Project
    status = FuzzyChoice(MODEL.VALID_STATES)


class SystemFactory(ModelFactory):
    MODEL = models.System
    status = FuzzyChoice(MODEL.VALID_STATES)


class ProcessFactory(ModelFactory):
    MODEL = models.Process
    status = FuzzyChoice(MODEL.VALID_STATES)


# Audit Objects

class AuditFactory(ModelFactory):
    MODEL = models.Audit
    status = FuzzyChoice(MODEL.VALID_STATES)


class RequestFactory(ModelFactory):
    MODEL = models.Request
    status = FuzzyChoice(MODEL.VALID_STATES)
    request_type = FuzzyChoice(MODEL.VALID_TYPES)


class ResponseFactory(ModelFactory):
    MODEL = models.Response
    status = FuzzyChoice(MODEL.VALID_STATES)


# Response subclasses pin the discriminator value for each response type.

class DocumentationResponseFactory(ResponseFactory):
    MODEL = models.DocumentationResponse
    response_type = 'documentation'


class InterviewResponseFactory(ResponseFactory):
    MODEL = models.InterviewResponse
    response_type = 'interview'


class PopulationSampleResponseFactory(ResponseFactory):
    MODEL = models.PopulationSampleResponse
    response_type = 'population sample'
# Category Objects: pin the polymorphic `type` discriminator.

class ControlCategoryFactory(ModelFactory):
    MODEL = models.ControlCategory
    type = "ControlCategory"


class ControlAssertionFactory(ModelFactory):
    MODEL = models.ControlAssertion
    type = "ControlAssertion"


# Mapping Objects: join models linking two other objects.  Polymorphic
# endpoints (controllable, documentable, ...) get a concrete stub target.

class ControlControlFactory(ModelFactory):
    MODEL = models.ControlControl
    status = FuzzyChoice(MODEL.VALID_STATES)


class ControlSectionFactory(ModelFactory):
    MODEL = models.ControlSection
    status = FuzzyChoice(MODEL.VALID_STATES)


class DirectiveControlFactory(ModelFactory):
    MODEL = models.DirectiveControl
    status = FuzzyChoice(MODEL.VALID_STATES)


class DirectiveSectionFactory(ModelFactory):
    MODEL = models.DirectiveSection
    status = FuzzyChoice(MODEL.VALID_STATES)


class ObjectControlFactory(ModelFactory):
    MODEL = models.ObjectControl
    status = FuzzyChoice(MODEL.VALID_STATES)
    controllable = FactoryStubMarker(models.Market)


class ObjectDocumentFactory(ModelFactory):
    MODEL = models.ObjectDocument
    status = FuzzyChoice(MODEL.VALID_STATES)
    documentable = FactoryStubMarker(models.Market)


class ObjectObjectiveFactory(ModelFactory):
    MODEL = models.ObjectObjective
    status = FuzzyChoice(MODEL.VALID_STATES)
    objectiveable = FactoryStubMarker(models.Market)


class ObjectOwnerFactory(ModelFactory):
    MODEL = models.ObjectOwner
    status = FuzzyChoice(MODEL.VALID_STATES)
    ownable = FactoryStubMarker(models.Market)


class ObjectPersonFactory(ModelFactory):
    MODEL = models.ObjectPerson
    status = FuzzyChoice(MODEL.VALID_STATES)
    personable = FactoryStubMarker(models.Market)


class ObjectSectionFactory(ModelFactory):
    MODEL = models.ObjectSection
    status = FuzzyChoice(MODEL.VALID_STATES)
    sectionable = FactoryStubMarker(models.Market)


class ObjectiveControlFactory(ModelFactory):
    MODEL = models.ObjectiveControl
    status = FuzzyChoice(MODEL.VALID_STATES)


class ProgramControlFactory(ModelFactory):
    MODEL = models.ProgramControl
    status = FuzzyChoice(MODEL.VALID_STATES)


class ProgramDirectiveFactory(ModelFactory):
    MODEL = models.ProgramDirective
    status = FuzzyChoice(MODEL.VALID_STATES)


class RelationshipFactory(ModelFactory):
    MODEL = models.Relationship
    status = FuzzyChoice(MODEL.VALID_STATES)
    source = FactoryStubMarker(models.Market)
    destination = FactoryStubMarker(models.Process)


class SectionObjectiveFactory(ModelFactory):
    MODEL = models.SectionObjective
    status = FuzzyChoice(MODEL.VALID_STATES)
# ggrc_basic_permissions model factories — models live in an optional
# extension module, so they are resolved by name through the registry.

class RoleFactory(ModelFactory):
    MODEL = models.get_model("Role")


class UserRoleFactory(ModelFactory):
    MODEL = models.get_model("UserRole")


class ContextImplicationFactory(ModelFactory):
    MODEL = models.get_model("ContextImplication")


# ggrc_gdrive_integration model factories

class ObjectFileFactory(ModelFactory):
    MODEL = models.get_model("ObjectFile")
    fileable = FactoryStubMarker(models.DocumentationResponse)


class ObjectFolderFactory(ModelFactory):
    MODEL = models.get_model("ObjectFolder")
    folderable = FactoryStubMarker(models.Audit)


class ObjectEventFactory(ModelFactory):
    MODEL = models.get_model("ObjectEvent")
    eventable = FactoryStubMarker(models.Meeting)
|
vladan-m/ggrc-core
|
src/tests/ggrc/behave/factories.py
|
Python
|
apache-2.0
| 12,159
|
"""
Colour Matching Functions Transformations
=========================================
Defines various educational objects for colour matching functions
transformations:
- :func:`colour.colorimetry.RGB_2_degree_cmfs_to_XYZ_2_degree_cmfs`
- :func:`colour.colorimetry.RGB_10_degree_cmfs_to_XYZ_10_degree_cmfs`
- :func:`colour.colorimetry.RGB_10_degree_cmfs_to_LMS_10_degree_cmfs`
- :func:`colour.colorimetry.LMS_2_degree_cmfs_to_XYZ_2_degree_cmfs`
- :func:`colour.colorimetry.LMS_10_degree_cmfs_to_XYZ_10_degree_cmfs`
References
----------
- :cite:`CIETC1-362006a` : CIE TC 1-36. (2006). CIE 170-1:2006 Fundamental
Chromaticity Diagram with Physiological Axes - Part 1. Commission
Internationale de l'Eclairage. ISBN:978-3-901906-46-6
- :cite:`CVRLp` : CVRL. (n.d.). CIE (2012) 10-deg XYZ
"physiologically-relevant" colour matching functions. Retrieved June 25,
2014, from http://www.cvrl.org/database/text/cienewxyz/cie2012xyz10.htm
- :cite:`CVRLv` : CVRL. (n.d.). CIE (2012) 2-deg XYZ
"physiologically-relevant" colour matching functions. Retrieved June 25,
2014, from http://www.cvrl.org/database/text/cienewxyz/cie2012xyz2.htm
- :cite:`Wyszecki2000be` : Wyszecki, Günther, & Stiles, W. S. (2000). The
CIE 1964 Standard Observer. In Color Science: Concepts and Methods,
Quantitative Data and Formulae (p. 141). Wiley. ISBN:978-0-471-39918-6
- :cite:`Wyszecki2000bg` : Wyszecki, Günther, & Stiles, W. S. (2000). Table
1(3.3.3). In Color Science: Concepts and Methods, Quantitative Data and
Formulae (pp. 138-139). Wiley. ISBN:978-0-471-39918-6
"""
from __future__ import annotations
import numpy as np
from colour.algebra import vector_dot
from colour.colorimetry import (
MSDS_CMFS_LMS,
MSDS_CMFS_RGB,
SDS_LEFS_PHOTOPIC,
reshape_sd,
)
from colour.hints import FloatingOrArrayLike, NDArray
from colour.utilities import tstack
__author__ = "Colour Developers"
__copyright__ = "Copyright 2013 Colour Developers"
__license__ = "New BSD License - https://opensource.org/licenses/BSD-3-Clause"
__maintainer__ = "Colour Developers"
__email__ = "colour-developers@colour-science.org"
__status__ = "Production"
__all__ = [
"RGB_2_degree_cmfs_to_XYZ_2_degree_cmfs",
"RGB_10_degree_cmfs_to_XYZ_10_degree_cmfs",
"RGB_10_degree_cmfs_to_LMS_10_degree_cmfs",
"LMS_2_degree_cmfs_to_XYZ_2_degree_cmfs",
"LMS_10_degree_cmfs_to_XYZ_10_degree_cmfs",
]
def RGB_2_degree_cmfs_to_XYZ_2_degree_cmfs(
    wavelength: FloatingOrArrayLike,
) -> NDArray:
    """
    Convert *Wright & Guild 1931 2 Degree RGB CMFs* colour matching functions
    into the *CIE 1931 2 Degree Standard Observer* colour matching functions.

    Parameters
    ----------
    wavelength
        Wavelength :math:`\\lambda` in nm.

    Returns
    -------
    :class:`numpy.ndarray`
        *CIE 1931 2 Degree Standard Observer* spectral tristimulus values.

    Notes
    -----
    -   Data for the *CIE 1931 2 Degree Standard Observer* already exists,
        this definition is intended for educational purpose.

    References
    ----------
    :cite:`Wyszecki2000bg`

    Examples
    --------
    >>> from colour.utilities import numpy_print_options
    >>> with numpy_print_options(suppress=True):
    ...     RGB_2_degree_cmfs_to_XYZ_2_degree_cmfs(700)  # doctest: +ELLIPSIS
    array([ 0.0113577...,  0.004102 ,  0. ])
    """
    cmfs = MSDS_CMFS_RGB["Wright & Guild 1931 2 Degree RGB CMFs"]
    rgb_bar = cmfs[wavelength]

    # Normalise the tristimulus values to chromaticity-like coordinates.
    rgb = rgb_bar / np.sum(rgb_bar)

    M_numerator = np.array(
        [
            [0.49000, 0.31000, 0.20000],
            [0.17697, 0.81240, 0.01063],
            [0.00000, 0.01000, 0.99000],
        ]
    )
    M_denominator = np.array(
        [
            [0.66697, 1.13240, 1.20063],
            [0.66697, 1.13240, 1.20063],
            [0.66697, 1.13240, 1.20063],
        ]
    )

    xyz = vector_dot(M_numerator, rgb) / vector_dot(M_denominator, rgb)
    x, y, z = xyz[..., 0], xyz[..., 1], xyz[..., 2]

    # Scale by the photopic luminous efficiency so that y_bar matches V.
    V = reshape_sd(
        SDS_LEFS_PHOTOPIC["CIE 1924 Photopic Standard Observer"], cmfs.shape
    )
    L = V[wavelength]

    x_bar = x / y * L
    y_bar = L
    z_bar = z / y * L

    return tstack([x_bar, y_bar, z_bar])
def RGB_10_degree_cmfs_to_XYZ_10_degree_cmfs(
    wavelength: FloatingOrArrayLike,
) -> NDArray:
    """
    Convert *Stiles & Burch 1959 10 Degree RGB CMFs* colour matching
    functions into the *CIE 1964 10 Degree Standard Observer* colour matching
    functions.

    Parameters
    ----------
    wavelength
        Wavelength :math:`\\lambda` in nm.

    Returns
    -------
    :class:`numpy.ndarray`
        *CIE 1964 10 Degree Standard Observer* spectral tristimulus values.

    Notes
    -----
    -   Data for the *CIE 1964 10 Degree Standard Observer* already exists,
        this definition is intended for educational purpose.

    References
    ----------
    :cite:`Wyszecki2000be`

    Examples
    --------
    >>> from colour.utilities import numpy_print_options
    >>> with numpy_print_options(suppress=True):
    ...     RGB_10_degree_cmfs_to_XYZ_10_degree_cmfs(700)  # doctest: +ELLIPSIS
    array([ 0.0096432...,  0.0037526..., -0.0000041...])
    """
    cmfs = MSDS_CMFS_RGB["Stiles & Burch 1959 10 Degree RGB CMFs"]
    rgb_bar = cmfs[wavelength]

    # Linear transformation from RGB to XYZ tristimulus values.
    M_rgb_to_xyz = np.array(
        [
            [0.341080, 0.189145, 0.387529],
            [0.139058, 0.837460, 0.073316],
            [0.000000, 0.039553, 2.026200],
        ]
    )

    return vector_dot(M_rgb_to_xyz, rgb_bar)
def RGB_10_degree_cmfs_to_LMS_10_degree_cmfs(
    wavelength: FloatingOrArrayLike,
) -> NDArray:
    """
    Convert *Stiles & Burch 1959 10 Degree RGB CMFs* colour matching
    functions into the *Stockman & Sharpe 10 Degree Cone Fundamentals*
    spectral sensitivity functions.

    Parameters
    ----------
    wavelength
        Wavelength :math:`\\lambda` in nm.

    Returns
    -------
    :class:`numpy.ndarray`
        *Stockman & Sharpe 10 Degree Cone Fundamentals* spectral tristimulus
        values.

    Notes
    -----
    -   Data for the *Stockman & Sharpe 10 Degree Cone Fundamentals* already
        exists, this definition is intended for educational purpose.

    References
    ----------
    :cite:`CIETC1-362006a`

    Examples
    --------
    >>> from colour.utilities import numpy_print_options
    >>> with numpy_print_options(suppress=True):
    ...     RGB_10_degree_cmfs_to_LMS_10_degree_cmfs(700)  # doctest: +ELLIPSIS
    array([ 0.0052860...,  0.0003252...,  0. ])
    """
    cmfs = MSDS_CMFS_RGB["Stiles & Burch 1959 10 Degree RGB CMFs"]
    rgb_bar = cmfs[wavelength]

    M = np.array(
        [
            [0.1923252690, 0.749548882, 0.0675726702],
            [0.0192290085, 0.940908496, 0.113830196],
            [0.0000000000, 0.0105107859, 0.991427669],
        ]
    )

    lms_bar = vector_dot(M, rgb_bar)
    # The S cone fundamental is defined as zero above 505 nm; zero out those
    # samples.  A single np.asarray suffices here: the comparison already
    # yields a boolean mask (the original wrapped it in asarray twice).
    lms_bar[..., -1][np.asarray(wavelength) > 505] = 0

    return lms_bar
def LMS_2_degree_cmfs_to_XYZ_2_degree_cmfs(
    wavelength: FloatingOrArrayLike,
) -> NDArray:
    """
    Convert *Stockman & Sharpe 2 Degree Cone Fundamentals* colour matching
    functions into the *CIE 2012 2 Degree Standard Observer* colour matching
    functions.

    Parameters
    ----------
    wavelength
        Wavelength :math:`\\lambda` in nm.

    Returns
    -------
    :class:`numpy.ndarray`
        *CIE 2012 2 Degree Standard Observer* spectral tristimulus values.

    Notes
    -----
    -   Data for the *CIE 2012 2 Degree Standard Observer* already exists,
        this definition is intended for educational purpose.

    References
    ----------
    :cite:`CVRLv`

    Examples
    --------
    >>> from colour.utilities import numpy_print_options
    >>> with numpy_print_options(suppress=True):
    ...     LMS_2_degree_cmfs_to_XYZ_2_degree_cmfs(700)  # doctest: +ELLIPSIS
    array([ 0.0109677...,  0.0041959...,  0. ])
    """
    cmfs = MSDS_CMFS_LMS["Stockman & Sharpe 2 Degree Cone Fundamentals"]
    lms_bar = cmfs[wavelength]

    # Linear transformation from LMS cone fundamentals to XYZ.
    M_lms_to_xyz = np.array(
        [
            [1.94735469, -1.41445123, 0.36476327],
            [0.68990272, 0.34832189, 0.00000000],
            [0.00000000, 0.00000000, 1.93485343],
        ]
    )

    return vector_dot(M_lms_to_xyz, lms_bar)
def LMS_10_degree_cmfs_to_XYZ_10_degree_cmfs(
    wavelength: FloatingOrArrayLike,
) -> NDArray:
    """
    Convert *Stockman & Sharpe 10 Degree Cone Fundamentals* colour matching
    functions into the *CIE 2012 10 Degree Standard Observer* colour matching
    functions.

    Parameters
    ----------
    wavelength
        Wavelength :math:`\\lambda` in nm.

    Returns
    -------
    :class:`numpy.ndarray`
        *CIE 2012 10 Degree Standard Observer* spectral tristimulus values.

    Notes
    -----
    -   Data for the *CIE 2012 10 Degree Standard Observer* already exists,
        this definition is intended for educational purpose.

    References
    ----------
    :cite:`CVRLp`

    Examples
    --------
    >>> from colour.utilities import numpy_print_options
    >>> with numpy_print_options(suppress=True):
    ...     LMS_10_degree_cmfs_to_XYZ_10_degree_cmfs(700)  # doctest: +ELLIPSIS
    array([ 0.0098162...,  0.0037761...,  0. ])
    """
    cmfs = MSDS_CMFS_LMS["Stockman & Sharpe 10 Degree Cone Fundamentals"]
    lms_bar = cmfs[wavelength]

    # Linear transformation from LMS cone fundamentals to XYZ.
    M_lms_to_xyz = np.array(
        [
            [1.93986443, -1.34664359, 0.43044935],
            [0.69283932, 0.34967567, 0.00000000],
            [0.00000000, 0.00000000, 2.14687945],
        ]
    )

    return vector_dot(M_lms_to_xyz, lms_bar)
|
colour-science/colour
|
colour/colorimetry/transformations.py
|
Python
|
bsd-3-clause
| 9,677
|
#!/usr/bin/env python3
# -*- Mode:python; c-file-style:"gnu"; indent-tabs-mode:nil -*- */
#
# Copyright (c) 2015, Colorado State University.
#
# This file is part of ndn-atmos.
#
# ndn-atmos is free software: you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later version.
#
# ndn-atmos is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
#
# You should have received copies of the GNU General Public License and GNU Lesser
# General Public License along with ndn-atmos, e.g., in COPYING.md file. If not, see
# <http://www.gnu.org/licenses/>.
#
# See AUTHORS.md for complete list of ndn-atmos authors and contributors.
'''This is the config file parser module.
Input = object with command line parameters.
Output = list of components for different config sections'''
import configparser
import sys, traceback
class ParseConf(object):
    '''Parse the name schema file and return name mappings for translated
    output.

    Raises ValueError from __init__ when the required mappings cannot be
    read from the config file.
    '''

    def __init__(self, confName):
        # Path to the schema/config file.
        self.confName = confName
        if __debug__:
            print("Config file name: %s" % (self.confName))
        self.filenameMap = []
        self.ndnNameMap = []
        self.seperatorsMap = []
        self.userDefinedConfDir = {}
        self.translator = []
        # Initialize the parser.  SafeConfigParser was deprecated in
        # Python 3.2 and removed in 3.12; ConfigParser is the equivalent.
        # optionxform=str keeps option names case-sensitive.
        self.parser = configparser.ConfigParser()
        self.parser.optionxform = str
        self.parser.read(self.confName)
        self.fullConf = {}
        # Do the mapping.
        res = self.getMappings(confName)
        if res is False:
            # The original code raised via an undefined name `error`, which
            # produced a NameError; raise a meaningful exception instead.
            raise ValueError(
                "Error getting values from config file %s" % self.confName)

    def _parseConf(self):
        '''Copy every section of the config file into self.fullConf.

        Returns self.fullConf (possibly empty on error).
        '''
        name = None  # track the last option seen, for error reporting
        try:
            for sectionName in self.parser.sections():
                self.conf = {}
                for name, value in self.parser.items(sectionName):
                    self.conf[name] = value
                self.fullConf[sectionName] = self.conf
            if __debug__:
                print(self.fullConf)
        except KeyError:
            print("Key %s is not found in config file" % (name))
            traceback.print_exc()
        except TypeError:
            print("TypeError while parsing config file")
            traceback.print_exc()
        return self.fullConf

    def _doParsing(self):
        '''Split the 'Name' and 'Translator' sections into component lists.'''
        try:
            self.filenameMap = self.fullConf['Name']['filenameMapping'].replace(" ", "").split(',')
            self.ndnNameMap = self.fullConf['Name']['ndnMapping'].replace(" ", "").split(',')
            # user defined components look like this:
            # activity:cmip5, subactivity:atmos, organization:csu, ensemble:r3i1p1
            userDefinedConf = self.fullConf['Name']['userDefinedComps'].replace(" ", "").split(',')
            for item in userDefinedConf:
                key, value = item.split(":")
                self.userDefinedConfDir[key] = [value]
            self.seperatorsMap = self.fullConf['Name']['seperators'].replace(" ", "").split(',')
            # Which translator module to use.
            self.translator = self.fullConf['Translator']['translator'].replace(" ", "")
        except KeyError as exc:
            # The original printed an undefined name `name` here (NameError);
            # report the missing key from the exception instead.
            print("Key %s is not found in config file" % (exc,))
            traceback.print_exc()
        except TypeError:
            print("TypeError while parsing config file")
            traceback.print_exc()

    def getMappings(self, confName):
        '''Parse the schema file and provide name mappings.

        Returns True when all required mappings were found, else False.
        '''
        fullConf = self._parseConf()
        # Only attempt the detailed parsing if the dict is not empty.
        if fullConf:
            self._doParsing()
            if len(self.filenameMap) == 0 or len(self.ndnNameMap) == 0 or len(self.translator) == 0:
                return False
            else:
                return True
        else:
            return False
|
named-data/ndn-atmos
|
lib/ndn_cmmap_translators/atmos2ndn_parser/conf_file_parser.py
|
Python
|
gpl-3.0
| 4,328
|
from django import template
register = template.Library()
from blog.models import Article, Category, ArticleTranslation
from blog.utils import Year, Month
from blog import settings as blog_settings
# django-tagging is optional; fall back to Tag = None when it is not
# installed.  Catch only ImportError — the original bare `except:` would
# also have hidden genuine errors raised while importing tagging.
try:
    from tagging.models import Tag
except ImportError:
    Tag = None
@register.inclusion_tag('blog/includes/_nav.html')
def blog_nav():
    """Render the blog navigation with all live articles, newest first."""
    published = Article.objects.get_published_live().order_by('-publish_date')
    return {'articles': published}
@register.inclusion_tag('blog/includes/_categories.html')
def blog_categories():
    """Render the list of published blog categories."""
    return {'categories': Category.objects.filter(published=True)}
@register.inclusion_tag('blog/includes/_archive.html', takes_context=True)
def blog_archive(context, show_empty=blog_settings.ARCHIVE_SHOW_EMPTY, show_articles=blog_settings.ARCHIVE_SHOW_ARTICLES, show_count=blog_settings.ARCHIVE_SHOW_COUNT):
    """Render the yearly archive navigation.

    Builds a list of ``Year`` helpers spanning the publish dates of all
    live articles, newest year first.  (Removed the unused local
    ``archive = {}`` from the original.)
    """
    year_list = []
    articles = Article.objects.get_published_live().order_by('-publish_date')
    request = context['request']
    if articles.count() > 0:
        # Oldest and newest live articles bound the year range.
        first = articles[len(articles) - 1]
        last = articles[0]
        earliest_year = first.publish_date.year
        latest_year = last.publish_date.year
        # Build the year list from latest down to earliest, inclusive.
        for i in range(latest_year - earliest_year + 1):
            year_list.append(Year(latest_year - i))
    return {'archive': year_list, 'request': request, 'show_empty': show_empty, 'show_articles': show_articles, 'show_count': show_count}
@register.assignment_tag
def get_latest_articles(limit=3, exclude=None):
    """Return up to ``limit`` latest live articles, optionally excluding one."""
    queryset = Article.objects.get_published_live()
    if exclude:
        queryset = queryset.exclude(id=exclude.id)
    return queryset.order_by('-publish_date')[:int(limit)]
@register.assignment_tag
def get_latest_articles_per_language(language, limit=False):
    # Latest live article translations for the currently-active language.
    # NOTE(review): the `language` parameter is ignored; get_language() is
    # used instead — confirm whether this is intended.
    from django.utils.translation import get_language
    articles = Article.objects.get_published_translation_live(language_code=get_language()).order_by('-parent__publish_date')
    if limit:
        articles = articles[:int(limit)]
    return articles
@register.assignment_tag
def get_latest_by_author(author, limit=3, exclude=False):
    """Return up to ``limit`` latest live articles authored by ``author``."""
    queryset = Article.objects.get_published_live().filter(published_from__authors=author)
    if exclude:
        queryset = queryset.exclude(id=exclude.id)
    return queryset.order_by('-publish_date')[:int(limit)]
@register.assignment_tag
def tag_cloud_per_language(language_code):
    # Tag cloud (tags with usage counts) for the active language's live
    # article translations.
    # NOTE(review): `language_code` is ignored; get_language() is used
    # instead — confirm whether this is intended.
    from django.utils.translation import get_language
    if Tag:
        queryset = Article.objects.get_published_translation_live(language_code=get_language())
        tags = Tag.objects.usage_for_queryset(queryset, counts=True)
        return tags
    else:
        # django-tagging not installed (Tag is None at module level).
        return []
|
Cotidia/cotidia-blog
|
blog/templatetags/blog_tags.py
|
Python
|
mit
| 3,067
|
# Miro - an RSS based video player application
# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
# Participatory Culture Foundation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
#
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
"""miro.plat.frontends.widgets.window -- Top-level Window class. """
import logging
from AppKit import *
from Foundation import *
from objc import YES, NO, nil
from PyObjCTools import AppHelper
from miro import signals
from miro import app
from miro import prefs
from miro.frontends.widgets import widgetconst
from miro.plat.frontends.widgets import osxmenus
from miro.plat.frontends.widgets import wrappermap
from miro.plat.frontends.widgets.helpers import NotificationForwarder
from miro.plat.frontends.widgets.base import Widget, FlippedView
from miro.plat.frontends.widgets.layout import VBox, HBox, Alignment
from miro.plat.frontends.widgets.control import Button
from miro.plat.frontends.widgets.simple import Label
from miro.plat.frontends.widgets.rect import Rect, NSRectWrapper
from miro.plat.utils import filename_to_unicode
# Tracks all windows that haven't been destroyed.  This makes sure these
# objects stay alive as long as the window is alive.
alive_windows = set()
class MiroResponderInterceptor(NSResponder):
    """Intercepts cocoa events and gives our wrappers and chance to handle
    them first.
    """
    def initWithResponder_(self, responder):
        """Initialize a MiroResponderInterceptor

        We will give the wrapper for responder a chance to handle the event,
        then pass it along to responder.
        """
        self = super(MiroResponderInterceptor, self).init()
        self.responder = responder
        return self

    def keyDown_(self, event):
        # The wrapper gets first crack at the event.
        if self.sendKeyDownToWrapper_(event):
            return  # signal handler returned True, stop processing
        # If our responder is the last in the chain, we can stop intercepting
        if self.responder.nextResponder() is None:
            self.responder.keyDown_(event)
            return
        # Here's the tricky part, we want to call keyDown_ on our responder,
        # but if it doesn't handle the event, then it will pass it along to
        # it's next responder.  We need to set things up so that we will
        # intercept that call.
        #
        # Make a new MiroResponderInterceptor whose responder is the next
        # responder down the chain.
        next_intercepter = MiroResponderInterceptor.alloc().initWithResponder_(
            self.responder.nextResponder())
        # Install the interceptor
        self.responder.setNextResponder_(next_intercepter)
        # Send event along
        self.responder.keyDown_(event)
        # Restore old nextResponder value
        self.responder.setNextResponder_(next_intercepter.responder)

    def sendKeyDownToWrapper_(self, event):
        """Give a keyDown event to the wrapper for our responder

        Return True if the wrapper handled the event
        """
        key = event.charactersIgnoringModifiers()
        # Multi-character or non-alphanumeric keys (arrows, function keys,
        # ...) are translated through the menu key map.
        if len(key) != 1 or not key.isalnum():
            key = osxmenus.REVERSE_KEYS_MAP.get(key)
        mods = osxmenus.translate_event_modifiers(event)
        wrapper = wrappermap.wrapper(self.responder)
        if isinstance(wrapper, Widget) or isinstance(wrapper, Window):
            if wrapper.emit('key-press', key, mods):
                return True
        return False
class MiroWindow(NSWindow):
    # NSWindow subclass that routes key events through the widget wrappers
    # and implements Tab-based focus cycling.

    def initWithContentRect_styleMask_backing_defer_(self, rect, mask,
            backing, defer):
        self = NSWindow.initWithContentRect_styleMask_backing_defer_(self,
                rect, mask, backing, defer)
        self._last_focus_chain = None
        return self

    def handleKeyDown_(self, event):
        # Tab navigation first; otherwise give the wrappers a chance via the
        # responder interceptor before normal Cocoa dispatch.
        if self.handle_tab_navigation(event):
            return
        interceptor = MiroResponderInterceptor.alloc().initWithResponder_(
            self.firstResponder())
        interceptor.keyDown_(event)

    def handle_tab_navigation(self, event):
        """Handle tab navigation through the window.

        :returns: True if we handled the event
        """
        keystr = event.charactersIgnoringModifiers()
        if keystr[0] == NSTabCharacter:
            # handle cycling through views with Tab.
            self.focusNextKeyView_(True)
            return True
        elif keystr[0] == NSBackTabCharacter:
            self.focusNextKeyView_(False)
            return True
        return False

    def acceptsMouseMovedEvents(self):
        # HACK: for some reason calling setAcceptsMouseMovedEvents_() doesn't
        # work, we have to forcefully override this method.
        return NO

    def sendEvent_(self, event):
        # Key presses go through our custom handler; everything else takes
        # the normal Cocoa path.
        if event.type() == NSKeyDown:
            self.handleKeyDown_(event)
        else:
            NSWindow.sendEvent_(self, event)

    def _calc_current_focus_wrapper(self):
        # Walk up from the first responder until a view with a widget
        # wrapper is found; None when nothing in the chain is wrapped.
        responder = self.firstResponder()
        while responder:
            wrapper = wrappermap.wrapper(responder)
            # check if we have a wrapper for the view, if not try the parent
            # view
            if wrapper is not None:
                return wrapper
            responder = responder.superview()
        return None

    def focusNextKeyView_(self, is_forward):
        # Ask our Window wrapper which widget should get keyboard focus next.
        current_focus = self._calc_current_focus_wrapper()
        my_wrapper = wrappermap.wrapper(self)
        next_focus = my_wrapper.get_next_tab_focus(current_focus, is_forward)
        if next_focus is not None:
            next_focus.focus()
class MainMiroWindow(MiroWindow):
    # The main window can be dragged by clicking anywhere in its background.
    def isMovableByWindowBackground(self):
        return YES
class Window(signals.SignalEmitter):
"""See https://develop.participatoryculture.org/index.php/WidgetAPI for a description of the API for this class."""
def __init__(self, title, rect):
signals.SignalEmitter.__init__(self)
self.create_signal('active-change')
self.create_signal('will-close')
self.create_signal('did-move')
self.create_signal('key-press')
self.create_signal('show')
self.create_signal('hide')
self.create_signal('on-shown')
self.nswindow = MainMiroWindow.alloc().initWithContentRect_styleMask_backing_defer_(
rect.nsrect,
self.get_style_mask(),
NSBackingStoreBuffered,
NO)
self.nswindow.setTitle_(title)
self.nswindow.setMinSize_(NSSize(800, 600))
self.nswindow.setReleasedWhenClosed_(NO)
self.content_view = FlippedView.alloc().initWithFrame_(rect.nsrect)
self.content_view.setAutoresizesSubviews_(NO)
self.nswindow.setContentView_(self.content_view)
self.content_widget = None
self.view_notifications = NotificationForwarder.create(self.content_view)
self.view_notifications.connect(self.on_frame_change, 'NSViewFrameDidChangeNotification')
self.window_notifications = NotificationForwarder.create(self.nswindow)
self.window_notifications.connect(self.on_activate, 'NSWindowDidBecomeMainNotification')
self.window_notifications.connect(self.on_deactivate, 'NSWindowDidResignMainNotification')
self.window_notifications.connect(self.on_did_move, 'NSWindowDidMoveNotification')
self.window_notifications.connect(self.on_will_close, 'NSWindowWillCloseNotification')
wrappermap.add(self.nswindow, self)
alive_windows.add(self)
def get_next_tab_focus(self, current, is_forward):
"""Return the next widget to cycle through for keyboard focus
Subclasses can override this to for find-grained control of keyboard
focus.
:param current: currently-focused widget
:param is_forward: are we tabbing forward?
"""
return None
def get_style_mask(self):
return (NSTitledWindowMask | NSClosableWindowMask |
NSMiniaturizableWindowMask | NSResizableWindowMask)
def set_title(self, title):
self.nswindow.setTitle_(title)
def get_title(self):
return self.nswindow.title()
def on_frame_change(self, notification):
self.place_child()
def on_activate(self, notification):
self.emit('active-change')
def on_deactivate(self, notification):
self.emit('active-change')
def on_did_move(self, notification):
self.emit('did-move')
def on_will_close(self, notification):
# unset the first responder. This allows text entry widgets to get
# the NSControlTextDidEndEditingNotification
self.nswindow.makeFirstResponder_(nil)
self.emit('will-close')
self.emit('hide')
def is_active(self):
return self.nswindow.isMainWindow()
def is_visible(self):
return self.nswindow.isVisible()
def show(self):
if self not in alive_windows:
raise ValueError("Window destroyed")
self.nswindow.makeKeyAndOrderFront_(nil)
self.nswindow.makeMainWindow()
self.emit('show')
# Cocoa doesn't apply default selections as forcefully as GTK, so
# currently there's no need for on-shown to actually wait until the
# window has been shown here
self.emit('on-shown')
def close(self):
self.nswindow.close()
def destroy(self):
self.close()
self.window_notifications.disconnect()
self.view_notifications.disconnect()
self.nswindow.setContentView_(nil)
wrappermap.remove(self.nswindow)
alive_windows.discard(self)
self.nswindow = None
def place_child(self):
rect = self.nswindow.contentRectForFrameRect_(self.nswindow.frame())
self.content_widget.place(NSRect(NSPoint(0, 0), rect.size),
self.content_view)
def hookup_content_widget_signals(self):
self.size_req_handler = self.content_widget.connect('size-request-changed',
self.on_content_widget_size_request_change)
def unhook_content_widget_signals(self):
self.content_widget.disconnect(self.size_req_handler)
self.size_req_handler = None
def on_content_widget_size_request_change(self, widget, old_size):
self.update_size_constraints()
self.place_child()
def set_content_widget(self, widget):
if self.content_widget:
self.content_widget.remove_viewport()
self.unhook_content_widget_signals()
self.content_widget = widget
self.hookup_content_widget_signals()
self.place_child()
self.update_size_constraints()
def update_size_constraints(self):
    """Propagate the content widget's minimum size to the NSWindow."""
    width, height = self.content_widget.get_size_request()
    # It is possible the window is torn down between the size invalidate
    # request and the actual size invalidation invocation. So check
    # to see if nswindow is there if not then do not do anything.
    if self.nswindow:
        # FIXME: I'm not sure that this code does what we want it to do.
        # It enforces the min-size when the user drags the window, but I
        # think it should also call setContentSize_ if the window is
        # currently too small to fit the content - BDK
        self.nswindow.setContentMinSize_(NSSize(width, height))
        rect = self.nswindow.contentRectForFrameRect_(self.nswindow.frame())
        if rect.size.width < width or rect.size.height < height:
            # logging.warn() is a deprecated alias; logging.warning() is
            # the documented spelling.
            logging.warning("Content widget too large for this window "
                            "size available: %dx%d widget size: %dx%d",
                            rect.size.width, rect.size.height, width, height)
def get_content_widget(self):
    # Accessor for the widget installed via set_content_widget().
    return self.content_widget
def get_frame(self):
    """Return the window frame as a portable rect wrapper."""
    frame = self.nswindow.frame()
    # 22 is presumably the title-bar height being excluded from the
    # reported frame -- TODO confirm against callers.
    frame.size.height -= 22
    return NSRectWrapper(frame)
def connect_menu_keyboard_shortcuts(self):
    """No-op on OS X; see comment below."""
    # All OS X windows are connected to the menu shortcuts
    pass
class MainWindow(Window):
    """Top-level application window.

    setReleasedWhenClosed_(NO) keeps Cocoa from deallocating the window
    on close, and close() only hides it so it can be re-shown later.
    """
    def __init__(self, title, rect):
        Window.__init__(self, title, rect)
        self.nswindow.setReleasedWhenClosed_(NO)

    def close(self):
        # Hide instead of destroying so the window can be shown again.
        self.nswindow.orderOut_(nil)
class DialogBase(object):
    """Base class for dialogs; tracks an optional sheet parent window."""
    def __init__(self):
        # When set, subclasses present the dialog as a sheet attached to
        # this window instead of an application-modal window.
        self.sheet_parent = None

    def set_transient_for(self, window):
        self.sheet_parent = window
class MiroPanel(NSPanel):
    """NSPanel subclass that maps the Escape key to dialog cancel."""
    def cancelOperation_(self, event):
        # Esc pressed: end the owning Dialog with the cancel code (-1).
        wrappermap.wrapper(self).end_with_code(-1)
class Dialog(DialogBase):
    """Modal dialog built from portable widgets.

    Runs either application-modal or, when a transient parent is set via
    set_transient_for(), as a sheet attached to that parent window.
    run() returns the index of the button that was clicked, or -1 on
    cancel.
    """
    def __init__(self, title, description=None):
        DialogBase.__init__(self)
        self.title = title
        self.description = description
        self.buttons = []
        self.extra_widget = None
        self.window = None
        self.running = False

    def add_button(self, text):
        # Buttons are numbered in the order added; that index becomes the
        # dialog's return code.
        button = Button(text)
        button.set_size(widgetconst.SIZE_NORMAL)
        button.connect('clicked', self.on_button_clicked, len(self.buttons))
        self.buttons.append(button)

    def on_button_clicked(self, button, code):
        self.end_with_code(code)

    def end_with_code(self, code):
        # Sheets and app-modal dialogs are ended through different APIs.
        if self.sheet_parent is not None:
            NSApp().endSheet_returnCode_(self.window, code)
        else:
            NSApp().stopModalWithCode_(code)

    def build_text(self):
        # Build the (optional) description label area.
        vbox = VBox(spacing=6)
        if self.description is not None:
            description_label = Label(self.description, wrap=True)
            description_label.set_bold(True)
            description_label.set_size_request(360, -1)
            vbox.pack_start(description_label)
        return vbox

    def build_buttons(self):
        # Reversed so the first button added ends up rightmost, the
        # conventional OS X arrangement.
        hbox = HBox(spacing=12)
        for button in reversed(self.buttons):
            hbox.pack_start(button)
        alignment = Alignment(xalign=1.0, yscale=1.0)
        alignment.add(hbox)
        return alignment

    def build_content(self):
        # Stack text, optional extra widget, and buttons with padding.
        vbox = VBox(spacing=12)
        vbox.pack_start(self.build_text())
        if self.extra_widget:
            vbox.pack_start(self.extra_widget)
        vbox.pack_start(self.build_buttons())
        alignment = Alignment(xscale=1.0, yscale=1.0)
        alignment.set_padding(12, 12, 17, 17)
        alignment.add(vbox)
        return alignment

    def build_window(self):
        """Create and populate the MiroPanel holding the dialog content."""
        self.content_widget = self.build_content()
        width, height = self.content_widget.get_size_request()
        width = max(width, 400)
        window = MiroPanel.alloc()
        window.initWithContentRect_styleMask_backing_defer_(
            NSMakeRect(400, 400, width, height),
            NSTitledWindowMask, NSBackingStoreBuffered, NO)
        view = FlippedView.alloc().initWithFrame_(NSMakeRect(0, 0, width,
            height))
        window.setContentView_(view)
        window.setTitle_(self.title)
        self.content_widget.place(view.frame(), view)
        if self.buttons:
            # The first button added is the default (Return key) button.
            self.buttons[0].make_default()
        return window

    def hookup_content_widget_signals(self):
        # Track the handler id so unhook_content_widget_signals() can
        # disconnect it.
        self.size_req_handler = self.content_widget.connect(
            'size-request-changed',
            self.on_content_widget_size_request_change)

    def unhook_content_widget_signals(self):
        self.content_widget.disconnect(self.size_req_handler)
        self.size_req_handler = None

    def on_content_widget_size_request_change(self, widget, old_size):
        width, height = self.content_widget.get_size_request()
        self.change_content_size(width, height)

    def change_content_size(self, width, height):
        """Resize the dialog to fit new content, keeping the top fixed."""
        content_rect = self.window.contentRectForFrameRect_(
            self.window.frame())
        # Cocoa's coordinate system is funky, adjust y so that the top stays
        # in place
        content_rect.origin.y += (content_rect.size.height - height)
        # change our frame to fit the new content. It would be nice to
        # animate the change, but timers don't work when we are displaying a
        # modal dialog
        content_rect.size = NSSize(width, height)
        new_frame = self.window.frameRectForContentRect_(content_rect)
        self.window.setFrame_display_(new_frame, NO)
        # Need to call place() again, since our window has changed size
        contentView = self.window.contentView()
        self.content_widget.place(contentView.frame(), contentView)

    def run(self):
        """Run the dialog modally; return the clicked button index or -1."""
        self.window = self.build_window()
        wrappermap.add(self.window, self)
        self.hookup_content_widget_signals()
        self.running = True
        if self.sheet_parent is None:
            response = NSApp().runModalForWindow_(self.window)
            if self.window:
                self.window.close()
        else:
            # Present as a sheet; SheetDelegate stops the modal loop when
            # the sheet ends.
            delegate = SheetDelegate.alloc().init()
            NSApp().beginSheet_modalForWindow_modalDelegate_didEndSelector_contextInfo_(
                self.window, self.sheet_parent.nswindow,
                delegate, 'sheetDidEnd:returnCode:contextInfo:', 0)
            response = NSApp().runModalForWindow_(self.window)
            if self.window:
                # self.window won't be around if we call destroy() to cancel
                # the dialog
                self.window.orderOut_(nil)
        self.running = False
        self.unhook_content_widget_signals()
        if response < 0:
            return -1
        return response

    def destroy(self):
        """Cancel the dialog (if running) and release its resources."""
        if self.running:
            NSApp().stopModalWithCode_(-1)
        if self.window is not None:
            self.window.setContentView_(None)
            self.window.close()
            self.window = None
        self.buttons = None
        self.extra_widget = None

    def set_extra_widget(self, widget):
        # Widget shown between the description and the button row.
        self.extra_widget = widget

    def get_extra_widget(self):
        return self.extra_widget
class SheetDelegate(NSObject):
    """Delegate whose only job is stopping the modal loop when a sheet ends."""
    @AppHelper.endSheetMethod
    def sheetDidEnd_returnCode_contextInfo_(self, sheet, return_code, info):
        NSApp().stopModalWithCode_(return_code)
class FileDialogBase(DialogBase):
    """Shared machinery for NSSavePanel/NSOpenPanel based dialogs.

    Subclasses must set ``_panel`` and may set ``_types``, ``_filename``
    and ``_directory`` before run() is called.
    """
    def __init__(self):
        DialogBase.__init__(self)
        self._types = None
        self._filename = None
        self._directory = None
        # When True, run() uses the panel variants that take a file-type
        # filter list (NSOpenPanel); save panels set this to False.
        self._filter_on_run = True

    def run(self):
        """Run the panel modally; return the raw NSPanel response code."""
        self._panel.setAllowedFileTypes_(self._types)
        if self.sheet_parent is None:
            if self._filter_on_run:
                response = self._panel.runModalForDirectory_file_types_(self._directory, self._filename, self._types)
            else:
                response = self._panel.runModalForDirectory_file_(self._directory, self._filename)
        else:
            # Present as a sheet on the parent window instead.
            delegate = SheetDelegate.alloc().init()
            if self._filter_on_run:
                self._panel.beginSheetForDirectory_file_types_modalForWindow_modalDelegate_didEndSelector_contextInfo_(
                    self._directory, self._filename, self._types,
                    self.sheet_parent.nswindow, delegate, 'sheetDidEnd:returnCode:contextInfo:', 0)
            else:
                self._panel.beginSheetForDirectory_file_modalForWindow_modalDelegate_didEndSelector_contextInfo_(
                    self._directory, self._filename,
                    self.sheet_parent.nswindow, delegate, 'sheetDidEnd:returnCode:contextInfo:', 0)
            response = NSApp().runModalForWindow_(self._panel)
        self._panel.orderOut_(nil)
        return response
class FileSaveDialog(FileDialogBase):
    """Standard "save file" dialog backed by NSSavePanel."""
    def __init__(self, title):
        FileDialogBase.__init__(self)
        self._title = title
        self._panel = NSSavePanel.savePanel()
        # NOTE(review): setCanChooseFiles_/setCanChooseDirectories_ are
        # NSOpenPanel API; confirm NSSavePanel actually responds to them.
        self._panel.setCanChooseFiles_(YES)
        self._panel.setCanChooseDirectories_(NO)
        self._filename = None
        # NSSavePanel's run variants don't take a type-filter list.
        self._filter_on_run = False

    def set_filename(self, s):
        self._filename = filename_to_unicode(s)

    def get_filename(self):
        # Use encode('utf-8') instead of unicode_to_filename, because
        # unicode_to_filename has code to make sure nextFilename works, but it's
        # more important here to not change the filename.
        return self._filename.encode('utf-8')

    def run(self):
        """Run the panel; return 0 on OK (path stored), None on cancel."""
        response = FileDialogBase.run(self)
        if response == NSFileHandlingPanelOKButton:
            self._filename = self._panel.filename()
            return 0
        self._filename = ""

    def destroy(self):
        self._panel = None

    # Generic aliases used by platform-independent callers.
    set_path = set_filename
    get_path = get_filename
class FileOpenDialog(FileDialogBase):
    """Standard "open file(s)" dialog backed by NSOpenPanel."""
    def __init__(self, title):
        FileDialogBase.__init__(self)
        self._title = title
        self._panel = NSOpenPanel.openPanel()
        self._panel.setCanChooseFiles_(YES)
        self._panel.setCanChooseDirectories_(NO)
        self._filenames = None

    def set_select_multiple(self, value):
        if value:
            self._panel.setAllowsMultipleSelection_(YES)
        else:
            self._panel.setAllowsMultipleSelection_(NO)

    def set_directory(self, d):
        self._directory = filename_to_unicode(d)

    def set_filename(self, s):
        self._filename = filename_to_unicode(s)

    def add_filters(self, filters):
        # filters: sequence of (label, [extension, ...]); only the
        # extensions matter to the panel's type filter.
        self._types = []
        for _, t in filters:
            self._types += t

    def get_filename(self):
        if self._filenames is None:
            # canceled
            return None
        return self.get_filenames()[0]

    def get_filenames(self):
        if self._filenames is None:
            # canceled
            return []
        # Use encode('utf-8') instead of unicode_to_filename, because
        # unicode_to_filename has code to make sure nextFilename works, but it's
        # more important here to not change the filename.
        return [f.encode('utf-8') for f in self._filenames]

    def run(self):
        """Run the panel; return 0 on OK (paths stored), None on cancel."""
        response = FileDialogBase.run(self)
        if response == NSFileHandlingPanelOKButton:
            self._filenames = self._panel.filenames()
            return 0
        self._filename = ''
        self._filenames = None

    def destroy(self):
        self._panel = None

    # Generic aliases used by platform-independent callers.
    set_path = set_filename
    get_path = get_filename
class DirectorySelectDialog(FileDialogBase):
    """Standard "choose a directory" dialog backed by NSOpenPanel."""
    def __init__(self, title):
        FileDialogBase.__init__(self)
        self._title = title
        self._panel = NSOpenPanel.openPanel()
        # Directories only; plain files cannot be selected.
        self._panel.setCanChooseFiles_(NO)
        self._panel.setCanChooseDirectories_(YES)
        self._directory = None

    def set_directory(self, d):
        self._directory = filename_to_unicode(d)

    def get_directory(self):
        # Use encode('utf-8') instead of unicode_to_filename, because
        # unicode_to_filename has code to make sure nextFilename
        # works, but it's more important here to not change the
        # filename.
        return self._directory.encode('utf-8')

    def run(self):
        """Run the panel; return 0 on OK (directory stored), None on cancel."""
        response = FileDialogBase.run(self)
        if response == NSFileHandlingPanelOKButton:
            self._directory = self._panel.filenames()[0]
            return 0
        self._directory = ""

    def destroy(self):
        self._panel = None

    # Generic aliases used by platform-independent callers.
    set_path = set_directory
    get_path = get_directory
class AboutDialog(DialogBase):
    """Wrapper around Cocoa's standard About panel."""
    def run(self):
        optionsDictionary = dict()
        # Show the app revision as the "Version" field when configured.
        revision = app.config.get(prefs.APP_REVISION_NUM)
        if revision:
            optionsDictionary['Version'] = revision
        NSApplication.sharedApplication().orderFrontStandardAboutPanelWithOptions_(optionsDictionary)

    def destroy(self):
        # Nothing to clean up; the panel is managed by Cocoa.
        pass
class AlertDialog(DialogBase):
    """Simple NSAlert-backed message dialog.

    :param title: bold message text
    :param message: informative (smaller) text
    :param alert_type: an NSAlertStyle constant
    """
    def __init__(self, title, message, alert_type):
        DialogBase.__init__(self)
        # (dropped a stray trailing semicolon from the original)
        self._nsalert = NSAlert.alloc().init()
        self._nsalert.setMessageText_(title)
        self._nsalert.setInformativeText_(message)
        self._nsalert.setAlertStyle_(alert_type)

    def add_button(self, text):
        self._nsalert.addButtonWithTitle_(text)

    def run(self):
        # Return the NSAlert response code so callers can tell which
        # button was pressed (previously the code was silently discarded).
        return self._nsalert.runModal()

    def destroy(self):
        self._nsalert = nil
class PreferenceItem(NSToolbarItem):
    """Toolbar item that remembers which preference panel it opens."""
    def setPanel_(self, panel):
        # Stored so switchPreferenceView_ can retrieve it from the sender.
        self.panel = panel
class PreferenceToolbarDelegate(NSObject):
    """NSToolbar delegate mapping toolbar items to preference panels."""
    def initWithPanels_identifiers_window_(self, panels, identifiers, window):
        # panels: {identifier: (panel_widget, title)}
        # identifiers: ordered list of panel identifiers
        # window: the PreferencesWindow that owns the toolbar
        self = super(PreferenceToolbarDelegate, self).init()
        self.panels = panels
        self.identifiers = identifiers
        self.window = window
        return self

    def toolbarAllowedItemIdentifiers_(self, toolbar):
        return self.identifiers

    def toolbarDefaultItemIdentifiers_(self, toolbar):
        return self.identifiers

    def toolbarSelectableItemIdentifiers_(self, toolbar):
        return self.identifiers

    def toolbar_itemForItemIdentifier_willBeInsertedIntoToolbar_(self, toolbar,
                                                                 itemIdentifier,
                                                                 flag):
        # Build a toolbar item whose icon is "pref_tab_<identifier>" and
        # whose action switches the visible preference panel.
        panel = self.panels[itemIdentifier]
        item = PreferenceItem.alloc().initWithItemIdentifier_(itemIdentifier)
        item.setLabel_(unicode(panel[1]))
        item.setImage_(NSImage.imageNamed_(u"pref_tab_%s" % itemIdentifier))
        item.setAction_("switchPreferenceView:")
        item.setTarget_(self)
        item.setPanel_(panel[0])
        return item

    def validateToolbarItem_(self, item):
        # All items are always enabled.
        return YES

    def switchPreferenceView_(self, sender):
        # sender is a PreferenceItem; animate the panel switch.
        self.window.do_select_panel(sender.panel, YES)
class DialogWindow(Window):
    """Titled, closable window used for dialog-like windows.

    :param allow_miniaturize: when True, also show the minimize button.
    """
    def __init__(self, title, rect, allow_miniaturize=False):
        # Set before Window.__init__ -- presumably the base constructor
        # reads the style via get_style_mask(); TODO confirm.
        self.allow_miniaturize = allow_miniaturize
        Window.__init__(self, title, rect)
        self.nswindow.setShowsToolbarButton_(NO)

    def get_style_mask(self):
        mask = (NSTitledWindowMask | NSClosableWindowMask)
        if self.allow_miniaturize:
            mask |= NSMiniaturizableWindowMask
        return mask
class DonateWindow(Window):
    """Window for the donation pitch.

    Kept alive on close (setReleasedWhenClosed_(NO)) so it can be
    re-shown, and closed automatically when the application quits.
    """
    def __init__(self, title):
        Window.__init__(self, title, Rect(0, 0, 640, 440))
        # NOTE(review): panels/identifiers look like leftovers copied from
        # PreferencesWindow; nothing in this class uses them. Kept in case
        # external code pokes at them -- verify before removing.
        self.panels = dict()
        self.identifiers = list()
        self.first_show = True
        self.nswindow.setShowsToolbarButton_(NO)
        self.nswindow.setReleasedWhenClosed_(NO)
        self.app_notifications = NotificationForwarder.create(NSApp())
        self.app_notifications.connect(self.on_app_quit,
                                       'NSApplicationWillTerminateNotification')

    def destroy(self):
        # BUG FIX: this called super(PreferencesWindow, self).destroy(),
        # which raises TypeError because DonateWindow is not a
        # PreferencesWindow subclass.
        super(DonateWindow, self).destroy()
        self.app_notifications.disconnect()

    def get_style_mask(self):
        return (NSTitledWindowMask | NSClosableWindowMask |
                NSMiniaturizableWindowMask)

    def show(self):
        # Center only the first time; afterwards keep the user's position.
        if self.first_show:
            self.nswindow.center()
            self.first_show = False
        Window.show(self)

    def on_app_quit(self, notification):
        self.close()
class PreferencesWindow(Window):
    """Preferences window with a toolbar of selectable panels."""
    def __init__(self, title):
        Window.__init__(self, title, Rect(0, 0, 640, 440))
        # panels: {identifier: (panel_widget, title)}; identifiers keeps
        # the insertion order for the toolbar.
        self.panels = dict()
        self.identifiers = list()
        self.first_show = True
        self.nswindow.setShowsToolbarButton_(NO)
        self.nswindow.setReleasedWhenClosed_(NO)
        self.app_notifications = NotificationForwarder.create(NSApp())
        self.app_notifications.connect(self.on_app_quit,
                                       'NSApplicationWillTerminateNotification')

    def destroy(self):
        super(PreferencesWindow, self).destroy()
        self.app_notifications.disconnect()

    def get_style_mask(self):
        return (NSTitledWindowMask | NSClosableWindowMask |
                NSMiniaturizableWindowMask)

    def append_panel(self, name, panel, title, image_name):
        # NOTE(review): image_name is accepted but unused here; the
        # toolbar delegate derives the icon from the identifier instead.
        self.panels[name] = (panel, title)
        self.identifiers.append(name)

    def finish_panels(self):
        """Build the toolbar once all panels have been appended."""
        self.tbdelegate = PreferenceToolbarDelegate.alloc().initWithPanels_identifiers_window_(self.panels, self.identifiers, self)
        toolbar = NSToolbar.alloc().initWithIdentifier_(u"Preferences")
        toolbar.setAllowsUserCustomization_(NO)
        toolbar.setDelegate_(self.tbdelegate)
        self.nswindow.setToolbar_(toolbar)

    def select_panel(self, index):
        # Select by position in the order panels were appended.
        panel = self.identifiers[index]
        self.nswindow.toolbar().setSelectedItemIdentifier_(panel)
        self.do_select_panel(self.panels[panel][0], NO)

    def do_select_panel(self, panel, animate):
        """Swap in *panel* and resize the window to fit it.

        Enforces a minimum content size of 650x200 and keeps the top
        edge of the window fixed while resizing.
        """
        wframe = self.nswindow.frame()
        vsize = list(panel.get_size_request())
        if vsize[0] < 650:
            vsize[0] = 650
        if vsize[1] < 200:
            vsize[1] = 200
        # Height of the window chrome (title bar + toolbar) above the
        # content view.
        toolbarHeight = wframe.size.height - self.nswindow.contentView().frame().size.height
        # Cocoa origin is bottom-left: shift y so the top stays in place.
        wframe.origin.y += wframe.size.height - vsize[1] - toolbarHeight
        wframe.size = (vsize[0], vsize[1] + toolbarHeight)
        self.set_content_widget(panel)
        self.nswindow.setFrame_display_animate_(wframe, YES, animate)

    def show(self):
        # Center only on first show; keep the user's position afterwards.
        if self.first_show:
            self.nswindow.center()
            self.first_show = False
        Window.show(self)

    def on_app_quit(self, notification):
        self.close()
def get_first_time_dialog_coordinates(width, height):
    """Returns the coordinates for the first time dialog.

    On first run there is no saved window position, so the dialog is
    simply centered on the main screen.
    """
    mainscreen = NSScreen.mainScreen()
    rect = mainscreen.frame()
    x = (rect.size.width - width) / 2
    y = (rect.size.height - height) / 2
    return x, y
|
debugger06/MiroX
|
tv/osx/plat/frontends/widgets/window.py
|
Python
|
gpl-2.0
| 30,683
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Add creation/last-update timestamps to the Entry model.

    The hard-coded datetime defaults are one-off values used only to
    backfill existing rows (preserve_default=False); new rows get
    stamped via auto_now_add/auto_now.
    """

    dependencies = [
        ('thedirectory', '0015_entry_creation_user'),
    ]

    operations = [
        migrations.AddField(
            model_name='entry',
            name='creation_date',
            field=models.DateTimeField(default=datetime.datetime(2015, 7, 28, 18, 57, 25, 658326, tzinfo=utc), verbose_name='Created on', auto_now_add=True),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='entry',
            name='lastupdate_date',
            field=models.DateTimeField(default=datetime.datetime(2015, 7, 28, 18, 57, 40, 566441, tzinfo=utc), verbose_name='Last modification', auto_now=True),
            preserve_default=False,
        ),
    ]
|
owatte/thecaribfos
|
apps/thedirectory/migrations/0016_auto_20150728_1457.py
|
Python
|
gpl-3.0
| 914
|
#!/usr/bin/env python
# import os, sys
# sys.path.append(os.path.abspath('infomap'))
# from __future__ import print_function # Python 3 print function in Python 2
from infomap import infomap

# Configure a two-level Infomap run.
conf = infomap.init("--two-level -v -N2")
# Add output directory (and output name) to automatically write result to file
# conf = infomap.init("--two-level -v -N2 . --out-name test")

print("Creating network...")
network = infomap.Network(conf)

network.addNodes(list("ABCDEF"))

# Directed links: two triangles (0-1-2 and 3-4-5) joined through 0 <-> 3.
_links = (
    (0, 1), (0, 2), (0, 3),
    (1, 0), (1, 2),
    (2, 1), (2, 0),
    (3, 0), (3, 4), (3, 5),
    (4, 3), (4, 5),
    (5, 4), (5, 3),
)
for _source, _target in _links:
    network.addLink(_source, _target)

print("Num links: %d" % network.numLinks())

network.finalizeAndCheckNetwork()

# Partition the network and collect the hierarchical module tree.
tree = infomap.HierarchicalNetwork(conf)
infomap.run(network, tree)

print("Found %d top modules with codelength: %f" % (tree.numTopModules(), tree.codelength()))

communities = {}
clusterIndexLevel = 1  # 1, 2, ... or -1 for top, second, ... or lowest cluster level
print("Tree:")
for node in tree.treeIter(clusterIndexLevel):
    print("%d %s %f %s" % (node.clusterIndex(), " " * node.depth(), node.data.flow, node.data.name))
    if node.isLeafNode():
        communities[node.originalLeafIndex] = node.clusterIndex()

print("Communities: %s" % communities)
print("Done!")
|
GraphProcessor/CommunityDetectionCodes
|
NonOverlappingCodes/2009-Community-Infomap-MapEquation/examples/python/Infomap.py
|
Python
|
gpl-2.0
| 1,445
|
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from abc import ABCMeta, abstractmethod
import six
from neutron.common import exceptions as exc
from neutron.common import topics
from neutron.openstack.common import log
from neutron.plugins.ml2 import driver_api as api
LOG = log.getLogger(__name__)
TUNNEL = 'tunnel'
@six.add_metaclass(ABCMeta)
class TunnelTypeDriver(api.TypeDriver):
    """Define stable abstract interface for ML2 type drivers.

    tunnel type networks rely on tunnel endpoints. This class defines abstract
    methods to manage these endpoints.
    """

    @abstractmethod
    def add_endpoint(self, ip):
        """Register the endpoint in the type_driver database.

        param ip: the ip of the endpoint
        """
        pass

    @abstractmethod
    def get_endpoints(self):
        """Get every endpoint managed by the type_driver

        :returns a list of dict [{id:endpoint_id, ip_address:endpoint_ip},..]
        """
        pass

    def _parse_tunnel_ranges(self, tunnel_ranges, current_range, tunnel_type):
        """Parse "min:max" range strings into (min, max) int tuples.

        Appends parsed tuples to current_range in place; malformed
        entries are logged and skipped.
        """
        for entry in tunnel_ranges:
            entry = entry.strip()
            try:
                tun_min, tun_max = entry.split(':')
                tun_min = tun_min.strip()
                tun_max = tun_max.strip()
                current_range.append((int(tun_min), int(tun_max)))
            except ValueError as ex:
                LOG.error(_("Invalid tunnel ID range: '%(range)s' - %(e)s. "
                            "Agent terminated!"),
                          {'range': tunnel_ranges, 'e': ex})
        LOG.info(_("%(type)s ID ranges: %(range)s"),
                 {'type': tunnel_type, 'range': current_range})

    def validate_provider_segment(self, segment):
        """Reject provider segments invalid for tunnel network types."""
        physical_network = segment.get(api.PHYSICAL_NETWORK)
        if physical_network:
            msg = _("provider:physical_network specified for %s "
                    "network") % segment.get(api.NETWORK_TYPE)
            raise exc.InvalidInput(error_message=msg)

        segmentation_id = segment.get(api.SEGMENTATION_ID)
        if not segmentation_id:
            msg = _("segmentation_id required for %s provider "
                    "network") % segment.get(api.NETWORK_TYPE)
            raise exc.InvalidInput(error_message=msg)

        for key, value in segment.items():
            if value and key not in [api.NETWORK_TYPE,
                                     api.SEGMENTATION_ID]:
                # BUG FIX: msg used to be built as a (format, args) tuple
                # instead of being %-interpolated, so InvalidInput carried
                # a garbled message.
                msg = (_("%(key)s prohibited for %(tunnel)s provider "
                         "network") %
                       {'key': key, 'tunnel': segment.get(api.NETWORK_TYPE)})
                raise exc.InvalidInput(error_message=msg)
class TunnelRpcCallbackMixin(object):
    """Server-side RPC handler for tunnel endpoint synchronization."""

    def __init__(self, notifier, type_manager):
        # notifier: agent notification API (e.g. TunnelAgentRpcApiMixin)
        # type_manager: ML2 type manager holding the loaded type drivers
        self.notifier = notifier
        self.type_manager = type_manager

    def tunnel_sync(self, rpc_context, **kwargs):
        """Update new tunnel.

        Updates the database with the tunnel IP. All listening agents will also
        be notified about the new tunnel IP.

        :raises exc.InvalidInput: if tunnel_type is missing or has no
            registered type driver.
        """
        tunnel_ip = kwargs.get('tunnel_ip')
        tunnel_type = kwargs.get('tunnel_type')
        if not tunnel_type:
            msg = _("Network_type value needed by the ML2 plugin")
            raise exc.InvalidInput(error_message=msg)
        driver = self.type_manager.drivers.get(tunnel_type)
        if driver:
            tunnel = driver.obj.add_endpoint(tunnel_ip)
            tunnels = driver.obj.get_endpoints()
            entry = {'tunnels': tunnels}
            # Notify all other listening agents
            self.notifier.tunnel_update(rpc_context, tunnel.ip_address,
                                        tunnel_type)
            # Return the list of tunnels IP's to the agent
            return entry
        else:
            msg = _("network_type value '%s' not supported") % tunnel_type
            raise exc.InvalidInput(error_message=msg)
class TunnelAgentRpcApiMixin(object):
    """Agent-side RPC API for broadcasting tunnel endpoint updates."""

    def _get_tunnel_update_topic(self):
        # Fanout topic shared by all agents listening for tunnel updates.
        return topics.get_topic_name(self.client.target.topic,
                                     TUNNEL,
                                     topics.UPDATE)

    def tunnel_update(self, context, tunnel_ip, tunnel_type):
        # Broadcast (fanout cast) the new endpoint to every listening agent.
        cctxt = self.client.prepare(fanout=True,
                                    topic=self._get_tunnel_update_topic())
        cctxt.cast(context, 'tunnel_update',
                   tunnel_ip=tunnel_ip,
                   tunnel_type=tunnel_type)
|
beagles/neutron_hacking
|
neutron/plugins/ml2/drivers/type_tunnel.py
|
Python
|
apache-2.0
| 5,052
|
import os
import sys
from distutils.sysconfig import get_python_lib
from setuptools import find_packages, setup, Command
# Dynamically calculate the version based on VERSION.
# (Delegates to oneway.get_version() so setup.py and the package agree.)
version = __import__('oneway').get_version()
class PyTest(Command):
    """``python setup.py test`` command that delegates to runtests.py."""

    user_options = []

    def initialize_options(self):
        # No options to set up.
        pass

    def finalize_options(self):
        # No options to validate.
        pass

    def run(self):
        # Run the test suite in a subprocess with the same interpreter and
        # propagate its exit status. (One import per line per PEP 8; the
        # redundant function-level ``import sys`` was dropped -- sys is
        # already imported at module level.)
        import subprocess
        errno = subprocess.call([sys.executable, 'runtests.py'])
        raise SystemExit(errno)
# Package metadata. 'test' is wired to the custom PyTest command so that
# ``python setup.py test`` runs runtests.py.
setup(
    name='oneway',
    version=version,
    url='https://github.com/green-latte/oneway',
    author='Green Latte',
    author_email='k.takeuchi@warrantee.co.jp',
    description=('[Todo: description]'),
    license='MIT',
    include_package_data=True,
    zip_safe=False,
    classifiers=[
        'Development Status :: 1 - Planning',
        'Intended Audience :: Developers',
        'Environment :: Console',
        'License :: OSI Approved :: MIT License',
        'Operating System :: Unix',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    cmdclass = {'test': PyTest},
)
|
green-latte/oneway
|
setup.py
|
Python
|
mit
| 1,413
|
# -*- coding: utf-8 -*-
from django import template
from django.utils.safestring import mark_safe
from django_js_reverse.core import generate_js
try:
from django.urls import get_resolver
except ImportError:
from django.core.urlresolvers import get_resolver
register = template.Library()
# Template variable used to look up a per-request ``request.urlconf``.
urlconf = template.Variable('request.urlconf')
def _get_urlconf(context):
try:
return context.request.urlconf
except AttributeError:
pass
try:
return urlconf.resolve(context)
except template.VariableDoesNotExist:
pass
@register.simple_tag(takes_context=True)
def js_reverse_inline(context):
    """
    Outputs a string of javascript that can generate URLs via the use
    of the names given to those URLs.
    """
    # mark_safe: the generated javascript must not be HTML-autoescaped.
    return mark_safe(generate_js(get_resolver(_get_urlconf(context))))
|
ierror/django-js-reverse
|
django_js_reverse/templatetags/js_reverse.py
|
Python
|
mit
| 838
|
from mkidreadout.channelizer.Roach2Controls import Roach2Controls
import sys, time, os, datetime, calendar
import numpy as np
if __name__=='__main__':
    # Path to the ROACH2 firmware parameter file.
    paramFile='/mnt/data0/neelay/MkidDigitalReadout/DataReadout/ChannelizerControls/DarknessFpga_V2.param'
    # argv[1]: polling interval in seconds; remaining args: last octet of
    # each board's 10.0.0.x address.
    timeInterval = sys.argv[1]
    timeReg = 'timekeeper_sec_now'
    roachList = []
    for ip in sys.argv[2:]:
        roachList.append(Roach2Controls('10.0.0.'+str(ip), paramFile, True, False))
    for roach in roachList:
        roach.connect()
    # Poll forever: compare each board's timestamp register against the
    # host clock, expressed as seconds since the start of the current
    # UTC year.
    while True:
        for roach in roachList:
            curYr = datetime.datetime.utcnow().year
            yrStart = datetime.date(curYr, 1, 1)
            tsOffs = calendar.timegm(yrStart.timetuple())
            curTime = int(time.time())- tsOffs
            roachTime = roach.fpga.read_int(timeReg)
            print roachTime
            if not roachTime==curTime: #make sure mismatch is not due to sync issue
                # Re-read after a short delay before declaring a drift.
                time.sleep(0.3)
                curTime = int(time.time())-tsOffs
                roachTime = roach.fpga.read_int(timeReg)
                print 'one misalignment'
                if not roachTime==curTime:
                    print 'Correcting timestamp for roach', roach.ip
                    roach.loadCurTimestamp()
        time.sleep(float(timeInterval))
|
bmazin/SDR
|
Projects/FirmwareTests/darkDebug/tsChecker.py
|
Python
|
gpl-2.0
| 1,311
|
# _*_ coding:utf-8 _*_
import logging
from collections import defaultdict
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.authentication import SessionAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework import status
from seahub.api2.authentication import TokenAuthentication
from seahub.api2.throttling import UserRateThrottle
from seahub.repo_tags.models import RepoTags
from seahub.file_tags.models import FileTags
from seahub.api2.utils import api_error, to_python_boolean
from seahub.views import check_folder_permission
from seahub.constants import PERMISSION_READ_WRITE
from seaserv import seafile_api
logger = logging.getLogger(__name__)
class RepoTagsView(APIView):
    """Collection endpoint: list (GET) and create (POST) tags of a library."""

    authentication_classes = (TokenAuthentication, SessionAuthentication)
    permission_classes = (IsAuthenticated,)
    throttle_classes = (UserRateThrottle,)

    def get(self, request, repo_id):
        """list all repo_tags by repo_id.
        """
        # argument check
        include_file_count = request.GET.get('include_file_count', 'true')
        if include_file_count not in ['true', 'false']:
            error_msg = 'include_file_count invalid.'
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
        include_file_count = to_python_boolean(include_file_count)

        # resource check
        repo = seafile_api.get_repo(repo_id)
        if not repo:
            error_msg = 'Library %s not found.' % repo_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # permission check
        # Listing only requires any folder permission (read is enough).
        if not check_folder_permission(request, repo_id, '/'):
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        # get files tags
        # Count tagged files per tag with a single query up front instead
        # of one query per tag.
        files_count = defaultdict(int)
        if include_file_count:
            try:
                files_tags = FileTags.objects.select_related('repo_tag').filter(repo_tag__repo_id=repo_id)
            except Exception as e:
                logger.error(e)
                error_msg = 'Internal Server Error'
                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

            for file_tag in files_tags:
                files_count[file_tag.repo_tag_id] += 1

        repo_tags = []
        try:
            repo_tag_list = RepoTags.objects.get_all_by_repo_id(repo_id)
        except Exception as e:
            logger.error(e)
            error_msg = 'Internal Server Error'
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

        for repo_tag in repo_tag_list:
            res = repo_tag.to_dict()
            repo_tag_id = res["repo_tag_id"]
            if repo_tag_id in files_count:
                res["files_count"] = files_count[repo_tag_id]
            else:
                res["files_count"] = 0
            repo_tags.append(res)

        return Response({"repo_tags": repo_tags}, status=status.HTTP_200_OK)

    def post(self, request, repo_id):
        """add one repo_tag.
        """
        # argument check
        tag_name = request.data.get('name')
        if not tag_name:
            error_msg = 'name invalid.'
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        tag_color = request.data.get('color')
        if not tag_color:
            error_msg = 'color invalid.'
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        # resource check
        repo = seafile_api.get_repo(repo_id)
        if not repo:
            error_msg = 'Library %s not found.' % repo_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # Reject duplicate tag names within the same library.
        repo_tag = RepoTags.objects.get_repo_tag_by_name(repo_id, tag_name)
        if repo_tag:
            error_msg = 'repo tag %s already exist.' % tag_name
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        # permission check
        # Creating a tag requires read-write access (stricter than GET).
        if check_folder_permission(request, repo_id, '/') != PERMISSION_READ_WRITE:
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        try:
            repo_tag = RepoTags.objects.create_repo_tag(repo_id, tag_name, tag_color)
        except Exception as e:
            logger.error(e)
            error_msg = 'Internal Server Error'
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

        return Response({"repo_tag": repo_tag.to_dict()}, status=status.HTTP_201_CREATED)
class RepoTagView(APIView):
    """Item endpoint: update (PUT) and delete (DELETE) a single repo tag."""

    authentication_classes = (TokenAuthentication, SessionAuthentication)
    permission_classes = (IsAuthenticated,)
    throttle_classes = (UserRateThrottle,)

    def put(self, request, repo_id, repo_tag_id):
        """update one repo_tag
        """
        # argument check
        tag_name = request.data.get('name')
        if not tag_name:
            error_msg = 'name invalid.'
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        tag_color = request.data.get('color')
        if not tag_color:
            error_msg = 'color invalid.'
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        # resource check
        repo_tag = RepoTags.objects.get_repo_tag_by_id(repo_tag_id)
        if not repo_tag:
            error_msg = 'repo_tag not found.'
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # permission check
        # Editing a tag requires read-write access to the library.
        if check_folder_permission(request, repo_id, '/') != PERMISSION_READ_WRITE:
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        try:
            repo_tag.name = tag_name
            repo_tag.color = tag_color
            repo_tag.save()
        except Exception as e:
            logger.error(e)
            error_msg = 'Internal Server Error'
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

        return Response({"repo_tag": repo_tag.to_dict()}, status=status.HTTP_200_OK)

    def delete(self, request, repo_id, repo_tag_id):
        """delete one repo_tag
        """
        # resource check
        repo_tag = RepoTags.objects.get_repo_tag_by_id(repo_tag_id)
        if not repo_tag:
            error_msg = 'repo_tag not found.'
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # permission check
        if check_folder_permission(request, repo_id, '/') != PERMISSION_READ_WRITE:
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        try:
            RepoTags.objects.delete_repo_tag(repo_tag_id)
        except Exception as e:
            logger.error(e)
            error_msg = 'Internal Server Error'
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

        return Response({"success": "true"}, status=status.HTTP_200_OK)
|
miurahr/seahub
|
seahub/api2/endpoints/repo_tags.py
|
Python
|
apache-2.0
| 6,920
|
#
# This file is part of Bluepass. Bluepass is Copyright (c) 2012-2013
# Geert Jansen.
#
# Bluepass is free software available under the GNU General Public License,
# version 3. See the file LICENSE distributed with this file for the exact
# licensing terms.
from __future__ import absolute_import, print_function
import os
import sys
import time
import signal
import stat
from gruvi.socketpair import socketpair
from support import *
from bluepass import platform
class TestPlatform(UnitTest):
def test_lock_basic(self):
    # Ensure that a basic lock + unlock works.
    fname = self.tempname()
    lock = platform.lock_file(fname)
    platform.unlock_file(lock)
def test_lock_multiple(self):
    # Lock + unlock a lock multiple times.
    # Verifies that unlocking fully releases so a second acquire works.
    fname = self.tempname()
    lock = platform.lock_file(fname)
    platform.unlock_file(lock)
    lock = platform.lock_file(fname)
    platform.unlock_file(lock)
@unix_only
def test_lock_locked_unix(self):
    # Lock a lock that is already locked. This should raise an OSError.
    fname = self.tempname()
    pid = os.fork()
    # The locks are per process. So need to fork here.
    if pid == 0:
        # child: hold the lock until interrupted by the parent
        lock = platform.lock_file(fname)
        try:
            time.sleep(1)
        except KeyboardInterrupt:
            pass
        platform.unlock_file(lock)
        os._exit(0)
    # parent: give the child a moment to acquire the lock first
    time.sleep(0.1)
    self.assertRaises(OSError, platform.lock_file, fname)
    # Exit the child now, which will release the lock
    os.kill(pid, signal.SIGINT)
    os.waitpid(pid, 0)
    # The lock must be acquirable again once the child has exited.
    lock = platform.lock_file(fname)
    platform.unlock_file(lock)
@unix_only
def test_get_homedir_unix(self):
# Ensure that get_homedir() works with and without $HOME
oldhome = os.environ.pop('HOME')
os.environ['HOME'] = self.tempdir
self.assertEqual(platform.get_homedir(), self.tempdir)
del os.environ['HOME']
import pwd
self.assertEqual(platform.get_homedir(), pwd.getpwuid(os.getuid()).pw_dir)
if oldhome is not None:
os.environ['HOME'] = oldhome
@unix_only
def test_get_appdir_unix(self):
# Ensure that get_appdir() returns something that exists and is below
# get_homedir()
oldhome = os.environ.pop('HOME')
os.environ['HOME'] = self.tempdir
appdir = platform.get_appdir('foo')
self.assertTrue(appdir.startswith(platform.get_homedir()))
st = os.stat(appdir)
self.assertTrue(stat.S_ISDIR(st.st_mode))
if oldhome is not None:
os.environ['HOME'] = oldhome
@unix_only
def test_disable_debugging(self):
# Ensure that disable_debugging() works.
pid = os.fork()
if pid == 0:
# child
platform.disable_debugging()
try:
time.sleep(1)
except KeyboardInterrupt:
pass
os._exit(0)
time.sleep(0.1)
if sys.platform.startswith('linux'):
st = os.stat('/proc/{0}/mem'.format(pid))
self.assertEqual(st.st_uid, 0)
else:
raise AssertionError('test not implemented for {!r}'.format(sys.platform))
os.kill(pid, signal.SIGINT)
os.waitpid(pid, 0)
def test_get_process_info(self):
# Ensure that get_process_info() called for oud pid returns information
# about ourselves.
pinfo = platform.get_process_info(os.getpid())
self.assertIsInstance(pinfo, tuple)
self.assertEqual(pinfo.exe, os.path.realpath(sys.executable))
self.assertEqual(pinfo.cmdline[1:], sys.argv)
if hasattr(os, 'getuid'):
self.assertEqual(pinfo.uid, os.getuid())
if hasattr(os, 'getgid'):
self.assertEqual(pinfo.gid, os.getgid())
def test_get_peer_info(self):
# Ensure that get_peer_info() when called for a connected socket that
# we created returns information about ourselves.
s1, s2 = socketpair()
pinfo = platform.get_peer_info(s1.getsockname(), s1.getpeername())
self.assertIsInstance(pinfo, tuple)
self.assertEqual(pinfo.exe, os.path.realpath(sys.executable))
self.assertEqual(pinfo.cmdline[1:], sys.argv)
if hasattr(os, 'getuid'):
self.assertEqual(pinfo.uid, os.getuid())
if hasattr(os, 'getgid'):
self.assertEqual(pinfo.gid, os.getgid())
s1.close()
s2.close()
if __name__ == '__main__':
    # NOTE(review): `unittest` is not imported in this module directly; it is
    # presumably re-exported by `from support import *` — confirm.
    unittest.main()
|
geertj/bluepass
|
tests/test_platform.py
|
Python
|
gpl-3.0
| 4,626
|
import urllib2
import urllib
import os
from core import directorio
def descargar(items, direct=None):
    """Download each URL in *items* into directory *direct*.

    :param items: iterable of URLs to fetch.
    :param direct: target directory; when None a temporary directory is
        created via ``directorio.crear()``.
    :returns: the directory that received the downloads.
    """
    agent = "Mozilla/5.0 (Windows NT 6.2; Win64; x64; rv:16.0.1) Gecko/20121011 Firefox/16.0.1"
    if not direct:
        # Create a temporary directory for the downloads.
        direct = directorio.crear()
    for url in items:
        try:
            (server, nombre) = os.path.split(url)
            opener = urllib2.Request(url)
            opener.add_header("User-Agent", agent)
            opener.add_header("Host", opener.get_host())
            opener.add_header("Referer", opener.get_host())
            ruta = os.path.join(direct, nombre)
            data = urllib2.urlopen(opener)
            descargado = open(ruta, "wb")
            try:
                for byte in data:
                    descargado.write(byte)
            finally:
                # BUGFIX: always close the output file, even on write errors.
                descargado.close()
        # BUGFIX: the original caught NameError, which download code never
        # raises; network/IO failures aborted the whole loop instead of
        # skipping the failed URL as intended.
        except (urllib2.URLError, IOError, OSError):
            # Best-effort: skip URLs that fail to download.
            pass
    return direct
|
GrampusTeam/Grampus
|
core/descargas.py
|
Python
|
bsd-3-clause
| 1,002
|
# coding=utf-8
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import collections
import fnmatch
import itertools
import logging
import os
import xml.etree.ElementTree as ET
from abc import abstractmethod
from functools import total_ordering
from pants.base.mustache import MustacheRenderer
from pants.util.dirutil import safe_mkdir_for, safe_walk
from pants.util.memo import memoized_property
from pants.util.meta import AbstractClass
from pants.util.objects import datatype
from pants.util.strutil import ensure_binary
_LOGGER = logging.getLogger(__name__)
@total_ordering
class ReportTestSuite(object):
  """Data object for a JUnit test suite.

  Orders suites with errors/failures first (see `__lt__`), then by name.
  """

  class MergeError(Exception):
    """Raised when like-named suites hold conflicting duplicate test cases."""

    def __init__(self, suites, test_cases):
      error_message = ('Refusing to merge duplicate test cases in suite {!r} from files {}:'
                       '\n {}').format(suites[0].name,
                                       ', '.join(s.file for s in suites),
                                       '\n '.join(map(str, test_cases)))
      super(ReportTestSuite.MergeError, self).__init__(error_message)

  @classmethod
  def merged(cls, report_test_suites, error_on_conflict=True, logger=None):
    """Merges any like-named test suites into one test suite encompasing all the suite's test cases.

    :param report_test_suites: A sequence of test suites to merge results from.
    :type report_test_suites: :class:`collections.Iterable` of :class:`ReportTestSuite`
    :param bool error_on_conflict: `True` to raise when two or more test cases in a given test suite
                                   have the same name; otherwise the conflict is logged and the 1st
                                   encountered duplicate is used.
    :param logger: An optional logger to use for logging merge conflicts.
    :type logger: :class:`logging.Logger`
    :raises: :class:`ReportTestSuite.MergeError` if configured to do so on merge errors.
    :yields: One test suite per unique test suite name in `report_test_suites` with the results of
             all like-named test suites merged.
    :rtype: iter of :class:`ReportTestSuite`
    """
    logger = logger or _LOGGER
    suites_by_name = collections.defaultdict(list)
    for report_test_suite in report_test_suites:
      suites_by_name[report_test_suite.name].append(report_test_suite)
    for suite_name, suites in suites_by_name.items():
      # Group all cases across the like-named suites by case name.
      cases_by_name = collections.defaultdict(list)
      for case in itertools.chain.from_iterable(s.testcases for s in suites):
        cases_by_name[case.name].append(case)
      test_cases = []
      tests, errors, failures, skipped, time = 0, 0, 0, 0, 0
      for cases in cases_by_name.values():
        if len(cases) > 1:
          if error_on_conflict:
            raise cls.MergeError(suites, cases)
          else:
            logger.warning('Found duplicate test case results in suite {!r} from files: {}, '
                           'using first result:\n -> {}'.format(suite_name,
                                                                ', '.join(s.file for s in suites),
                                                                '\n '.join(map(str, cases))))
        # BUGFIX: was `iter(cases).next()`, which is Python-2-only syntax;
        # indexing the (list) group works on both Python 2 and 3.
        case = cases[0]
        tests += 1
        time += case.time
        if case.error:
          errors += 1
        elif case.failure:
          failures += 1
        elif case.skipped:
          skipped += 1
        test_cases.append(case)
      yield cls(name=suite_name,
                tests=tests,
                errors=errors,
                failures=failures,
                skipped=skipped,
                time=time,
                testcases=test_cases)

  def __init__(self, name, tests, errors, failures, skipped, time, testcases, file=None):
    self.name = name
    self.tests = int(tests)
    self.errors = int(errors)
    self.failures = int(failures)
    self.skipped = int(skipped)
    self.time = float(time)
    self.testcases = testcases
    self.file = file

  def __lt__(self, other):
    # Suites with more errors/failures sort first; ties break by name.
    if (self.errors, self.failures) > (other.errors, other.failures):
      return True
    elif (self.errors, self.failures) < (other.errors, other.failures):
      return False
    else:
      return self.name.lower() < other.name.lower()

  @staticmethod
  def success_rate(test_count, error_count, failure_count, skipped_count):
    """Return the percentage of fully successful tests as a string like '80.00%'."""
    if test_count:
      unsuccessful_count = error_count + failure_count + skipped_count
      return '{:.2f}%'.format((test_count - unsuccessful_count) * 100.0 / test_count)
    return '0.00%'

  @staticmethod
  def icon_class(test_count, error_count, failure_count, skipped_count):
    """Return the CSS icon class summarizing the given result counts."""
    icon_class = 'test-passed'
    if test_count == skipped_count:
      icon_class = 'test-skipped'
    elif error_count > 0:
      icon_class = 'test-error'
    elif failure_count > 0:
      icon_class = 'test-failure'
    return icon_class

  def as_dict(self):
    """Render this suite as a plain dict for the mustache report template."""
    d = dict(name=self.name,
             tests=self.tests,
             errors=self.errors,
             failures=self.failures,
             skipped=self.skipped,
             time=self.time)
    d['success'] = ReportTestSuite.success_rate(self.tests, self.errors, self.failures,
                                                self.skipped)
    d['icon_class'] = ReportTestSuite.icon_class(self.tests, self.errors, self.failures,
                                                 self.skipped)
    # List comprehension (not `map`) so the value is a list on Python 3 too.
    d['testcases'] = [tc.as_dict() for tc in self.testcases]
    return d
class ReportTestCase(datatype('ReportTestCase', ['name', 'time', 'failure', 'error', 'skipped'])):
  """Data object for a single JUnit test case result."""

  def __new__(cls, name, time, failure=None, error=None, skipped=False):
    # Normalize `time` to a float so duration arithmetic is consistent.
    return super(ReportTestCase, cls).__new__(cls, name, float(time), failure, error, skipped)

  @memoized_property
  def icon_class(self):
    """CSS icon class reflecting this case's outcome."""
    if self.skipped:
      return 'test-skipped'
    if self.error:
      return 'test-error'
    if self.failure:
      return 'test-failure'
    return 'test-passed'

  def as_dict(self):
    """Render this case as a plain dict for the report template."""
    rendition = dict(name=self.name,
                     time=self.time,
                     icon_class=self.icon_class)
    # An error takes precedence over a failure as the displayed message.
    message = self.error or self.failure
    if message:
      rendition['message'] = message
    return rendition
class JUnitHtmlReportInterface(AbstractClass):
  """The interface JUnit html reporters must support."""

  @abstractmethod
  def report(self, output_dir):
    """Generate the junit test result report

    :param output_dir: The directory to generate the report under.
    :returns: The generated report path iff it should be opened for the user.
    :rtype: str
    """
class NoJunitHtmlReport(JUnitHtmlReportInterface):
  """JUnit html reporter that never produces a report."""

  def report(self, output_dir):
    # Null-object implementation: nothing generated, nothing to open.
    return None
class JUnitHtmlReport(JUnitHtmlReportInterface):
  """Generates an HTML report from JUnit TEST-*.xml files"""

  @classmethod
  def create(cls, xml_dir, open_report=False, logger=None, error_on_conflict=True):
    """Convenience factory mirroring the constructor arguments."""
    return cls(xml_dir=xml_dir,
               open_report=open_report,
               logger=logger,
               error_on_conflict=error_on_conflict)

  def __init__(self, xml_dir, open_report=False, logger=None, error_on_conflict=True):
    # Directory scanned recursively for TEST-*.xml result files.
    self._xml_dir = xml_dir
    # When True, report() returns the report path so the caller can open it.
    self._open_report = open_report
    self._logger = logger or _LOGGER
    # When True, duplicate test cases across files raise; otherwise warn.
    self._error_on_conflict = error_on_conflict

  def report(self, output_dir):
    """Parse the XML results and write reports/junit-report.html under *output_dir*.

    :returns: The report path if configured to open it, else None.
    """
    self._logger.debug('Generating JUnit HTML report...')
    testsuites = self._parse_xml_files()
    report_file_path = os.path.join(output_dir, 'reports', 'junit-report.html')
    safe_mkdir_for(report_file_path)
    with open(report_file_path, 'wb') as fp:
      fp.write(ensure_binary(self._generate_html(testsuites)))
    self._logger.debug('JUnit HTML report generated to {}'.format(report_file_path))
    if self._open_report:
      return report_file_path

  def _parse_xml_files(self):
    # Gather suites from every TEST-*.xml file, then merge like-named suites.
    testsuites = []
    for root, dirs, files in safe_walk(self._xml_dir, topdown=True):
      dirs.sort()  # Ensures a consistent gathering order.
      for xml_file in sorted(fnmatch.filter(files, 'TEST-*.xml')):
        testsuites += self._parse_xml_file(os.path.join(root, xml_file))
    merged_suites = ReportTestSuite.merged(testsuites,
                                           logger=self._logger,
                                           error_on_conflict=self._error_on_conflict)
    return sorted(merged_suites)

  @staticmethod
  def _parse_xml_file(xml_file):
    # Parse one JUnit XML file into ReportTestSuite objects.
    # NOTE(review): every <testcase> in the file is collected into a single
    # list that is attached to *each* <testsuite> element found. That is fine
    # for the common one-suite-per-file layout, but looks wrong for files
    # holding multiple suites — confirm before reusing elsewhere.
    testsuites = []
    root = ET.parse(xml_file).getroot()
    testcases = []
    for testcase in root.iter('testcase'):
      failure = None
      for f in testcase.iter('failure'):
        failure = f.text
      error = None
      for e in testcase.iter('error'):
        error = e.text
      skipped = False
      for _s in testcase.iter('skipped'):
        skipped = True
      testcases.append(ReportTestCase(
        testcase.attrib['name'],
        testcase.attrib.get('time', 0),
        failure,
        error,
        skipped
      ))
    for testsuite in root.iter('testsuite'):
      testsuites.append(ReportTestSuite(
        testsuite.attrib['name'],
        testsuite.attrib['tests'],
        testsuite.attrib['errors'],
        testsuite.attrib['failures'],
        testsuite.attrib.get('skipped', 0),
        testsuite.attrib['time'],
        testcases,
        file=xml_file,
      ))
    return testsuites

  @staticmethod
  def _generate_html(testsuites):
    # Accumulate summary totals, then render through the mustache template.
    values = {
      'total_tests': 0,
      'total_errors': 0,
      'total_failures': 0,
      'total_skipped': 0,
      'total_time': 0.0
    }
    for testsuite in testsuites:
      values['total_tests'] += testsuite.tests
      values['total_errors'] += testsuite.errors
      values['total_failures'] += testsuite.failures
      values['total_skipped'] += testsuite.skipped
      values['total_time'] += testsuite.time
    values['total_success'] = ReportTestSuite.success_rate(values['total_tests'],
                                                           values['total_errors'],
                                                           values['total_failures'],
                                                           values['total_skipped'])
    values['summary_icon_class'] = ReportTestSuite.icon_class(values['total_tests'],
                                                              values['total_errors'],
                                                              values['total_failures'],
                                                              values['total_skipped'])
    values['testsuites'] = map(lambda ts: ts.as_dict(), testsuites)
    package_name, _, _ = __name__.rpartition('.')
    renderer = MustacheRenderer(package_name=package_name)
    html = renderer.render_name('junit_report.html', values)
    return html
|
UnrememberMe/pants
|
src/python/pants/backend/jvm/tasks/reports/junit_html_report.py
|
Python
|
apache-2.0
| 11,048
|
from Tools.Profile import profile
profile("LOAD:GUISkin")
from Components.GUISkin import GUISkin
profile("LOAD:Source")
from Components.Sources.Source import Source
profile("LOAD:GUIComponent")
from Components.GUIComponent import GUIComponent
class Screen(dict, GUISkin):
    """Base class for all screens/dialogs.

    A Screen is a dict mapping component names to component instances,
    combined with GUISkin for skin handling. The session drives the
    exec/show/hide/close life cycle via the methods below.
    """

    # Suspend behaviour constants.
    # NOTE(review): the first constant is literally named `False`, shadowing
    # the builtin inside the class namespace with the value 0 — it looks like
    # it was meant to be a proper name (e.g. SUSPEND_NONE); confirm.
    False, SUSPEND_STOPS, SUSPEND_PAUSES = range(3)
    ALLOW_SUSPEND = False

    # Screen shared globally across sessions (assigned externally).
    global_screen = None

    def __init__(self, session, parent = None):
        dict.__init__(self)
        self.skinName = self.__class__.__name__
        self.session = session
        self.parent = parent
        GUISkin.__init__(self)
        # Life-cycle callback lists; clients append callables to these.
        self.onClose = [ ]
        self.onFirstExecBegin = [ ]
        self.onExecBegin = [ ]
        self.onShown = [ ]
        self.onShow = [ ]
        self.onHide = [ ]
        self.execing = False
        self.shown = True
        # already shown is false until the screen is really shown (after creation)
        self.already_shown = False
        self.renderer = [ ]
        # in order to support screens *without* a help,
        # we need the list in every screen. how ironic.
        self.helpList = [ ]
        self.close_on_next_exec = None
        # stand alone screens (for example web screens)
        # don't care about having or not having focus.
        self.stand_alone = False

    def execBegin(self):
        # Called by the session when this screen becomes the active dialog.
        self.active_components = [ ]
        if self.close_on_next_exec is not None:
            # A close() was requested while not execing; honour it now.
            tmp = self.close_on_next_exec
            self.close_on_next_exec = None
            self.execing = True
            self.close(*tmp)
        else:
            single = self.onFirstExecBegin
            self.onFirstExecBegin = []
            for x in self.onExecBegin + single:
                x()
                # A callback may have switched the current dialog away; stop.
                if not self.stand_alone and self.session.current_dialog != self:
                    return
            # assert self.session == None, "a screen can only exec once per time"
            # self.session = session
            for val in self.values() + self.renderer:
                val.execBegin()
                if not self.stand_alone and self.session.current_dialog != self:
                    return
                self.active_components.append(val)
            self.execing = True
            for x in self.onShown:
                x()

    def execEnd(self):
        # Called by the session when this screen stops being active.
        active_components = self.active_components
        # for (name, val) in self.items():
        self.active_components = None
        for val in active_components:
            val.execEnd()
        # assert self.session != None, "execEnd on non-execing screen!"
        # self.session = None
        self.execing = False

    # never call this directly - it will be called from the session!
    def doClose(self):
        self.hide()
        for x in self.onClose:
            x()
        # fixup circular references
        del self.helpList
        GUISkin.close(self)
        # first disconnect all render from their sources.
        # we might split this out into a "unskin"-call,
        # but currently we destroy the screen afterwards
        # anyway.
        for val in self.renderer:
            val.disconnectAll() # disconnected converter/sources and probably destroy them. Sources will not be destroyed.
        del self.session
        for (name, val) in self.items():
            val.destroy()
            del self[name]
        self.renderer = [ ]
        # really delete all elements now
        self.__dict__.clear()

    def close(self, *retval):
        # When not execing, defer the close until the next execBegin().
        if not self.execing:
            self.close_on_next_exec = retval
        else:
            self.session.close(self, *retval)

    def setFocus(self, o):
        # Forward focus to the underlying GUI instance of component `o`.
        self.instance.setFocus(o.instance)

    def show(self):
        # Show the screen and notify components; no-op if already visible
        # or if there is no GUI instance yet.
        if (self.shown and self.already_shown) or not self.instance:
            return
        self.shown = True
        self.already_shown = True
        self.instance.show()
        for x in self.onShow:
            x()
        for val in self.values() + self.renderer:
            if isinstance(val, GUIComponent) or isinstance(val, Source):
                val.onShow()

    def hide(self):
        # Hide the screen and notify components; no-op if already hidden.
        if not self.shown or not self.instance:
            return
        self.shown = False
        self.instance.hide()
        for x in self.onHide:
            x()
        for val in self.values() + self.renderer:
            if isinstance(val, GUIComponent) or isinstance(val, Source):
                val.onHide()

    def __repr__(self):
        return str(type(self))

    def getRelatedScreen(self, name):
        # Resolve the named related screen used by the skin system.
        if name == "session":
            return self.session.screen
        elif name == "parent":
            return self.parent
        elif name == "global":
            return self.global_screen
        else:
            return None
|
kakunbsc/enigma2.1
|
lib/python/Screens/Screen.py
|
Python
|
gpl-2.0
| 3,934
|
"""This modules is used to capture the screen
"""
import pyautogui
import time
import Globals
PATH = './Captures/'
def capture_area(area):
    """
    Captures area of the screen

    Args:
        area (Tuple (x,y,width,height)): Area to capture
    Returns:
        Image : Image of the area captured
    """
    return pyautogui.screenshot(region=area)
def save_area(area, filename=None):
    """
    Saves area of the screen to file

    Args:
        area (Tuple (x,y,width,height)): Area to capture save
        filename (String): File name to save; derived from the area when omitted
    """
    snapshot = capture_area(area=area)
    if filename is None:
        # Derive a name from the area tuple, replacing parentheses.
        filename = ('area_snap_' + str(area).replace('(', ' ').replace(')', ' '))
    save_img(snapshot, filename)
def get_game_screen():
    """
    Get game screen image

    Returns:
        Image : Image of screen area
    """
    # The game region is configured globally in Globals.GAME_REGION.
    game_img = capture_area(area=Globals.GAME_REGION)
    return game_img
def save_game_screen(filename=None):
    """
    Saves game area screen shot to file

    Args:
        filename (String): Name of file to save to; defaults to a
            timestamped name generated at call time.
    """
    # BUGFIX: the old default `('full_snap_' + str(time.time()))` was
    # evaluated once at import, so every default call reused the same
    # timestamp; compute it per call instead.
    if filename is None:
        filename = 'full_snap_' + str(time.time())
    save_img(get_game_screen(), filename)
def save_img(img, filename):
    """
    Saves image to file

    Args:
        img (Image): Image to save
        filename (String): Image save name (saved under PATH as a .png)
    """
    destination = PATH + filename + '.png'
    img.save(destination)
|
DavidBarishev/DDtankFarmingBot
|
Ddtank_farm_bot/Framework/Capture.py
|
Python
|
gpl-3.0
| 1,336
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import base.models
import django.utils.timezone
from django.conf import settings
import model_utils.fields
class Migration(migrations.Migration):
    # Initial schema: a generic-FK `Asset` attachment model and a per-user
    # `Profile`. Declarative migration data — field order and options must
    # stay exactly as generated.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('contenttypes', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Asset',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)),
                ('name', models.CharField(max_length=255)),
                ('data_orig_name', models.CharField(max_length=255)),
                ('data', models.FileField(upload_to=base.models.UPLOAD_TO_HANDLER)),
                ('kind', models.CharField(default='default', max_length=255)),
                # Generic foreign key target (content_type + object_id).
                ('object_id', models.PositiveIntegerField(null=True)),
                ('content_type', models.ForeignKey(to='contenttypes.ContentType', null=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(default='', max_length=255)),
                ('affiliation', models.CharField(default='', max_length=255)),
                ('research_area', models.TextField(default='')),
                ('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
|
pmeier82/spike_gnode
|
base/migrations/0001_initial.py
|
Python
|
bsd-3-clause
| 2,046
|
"""Legacy URLs."""
# This module reflects the URLs and behavior of the former Django
# application.
import logging
import os
import time
from google.appengine.api import memcache
from flask import Blueprint, make_response, render_template, Response, request
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import SqlLexer, PythonLexer, PhpLexer
import simplejson as json
import sqlparse
legacy = Blueprint('', 'legacy')
EXAMPLES_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../examples'))
@legacy.route('/', methods=['POST', 'GET'])
def index():
    """Main page: render the form and, on POST, format the submitted SQL."""
    data = {'examples': _get_examples()}
    extra = {'highlight': True, 'comments': False,
             'keywords': 'upper', 'idcase': '',
             'n_indents': '2',
             'lang': 'sql'}
    sql_orig = 'select * from foo join bar on val1 = val2 where id = 123;'
    if request.method == 'POST':
        form = request.form
        oformat = form.get('format', 'html')
        # Remember the submitted options so the form can be re-rendered.
        extra.update(highlight='highlight' in form,
                     comments='remove_comments' in form,
                     keywords=form.get('keyword_case', ''),
                     idcase=form.get('identifier_case', ''),
                     n_indents=form.get('n_indents', '2'),
                     lang=form.get('output_format', 'sql'))
        sql_orig = _get_sql(form, request.files)
        start = time.time()
        data['output'] = _format_sql(sql_orig, form, format=oformat)
        data['proc_time'] = '%.3f' % (time.time()-start)
        if oformat == 'json':
            data['errors'] = ''
            return make_response(Response(json.dumps(data),
                                          content_type='text/x-json'))
        elif oformat == 'text':
            return make_response(Response(data['output'], content_type='text/plain'))
    data['sql_orig'] = sql_orig
    data['extra'] = extra
    return render_template('index.html', **data)
@legacy.route('/source/')
def source():
    """Render the static page about the source code."""
    return render_template('source.html')
@legacy.route('/about/')
def about():
    """Render the static about page."""
    return render_template('about.html')
@legacy.route('/api/')
def api():
    """Render the static API documentation page."""
    return render_template('api.html')
@legacy.route('/format/', methods=['GET', 'POST'])
@legacy.route('/format', methods=['GET', 'POST'])
def format_():
    """API endpoint: format SQL from form (POST) or query args (GET) as plain text."""
    is_post = request.method == 'POST'
    params = request.form if is_post else request.args
    uploads = request.files if is_post else None
    sql = _get_sql(params, uploads)
    formatted = _format_sql(sql, params, format='text')
    return make_response(Response(formatted, content_type='text/plain'))
@legacy.route('/load_example', methods=['GET', 'POST'])
def load_example():
    """Return the content of a bundled example file as a JSON answer.

    The filename is validated against the known example listing, which also
    prevents reading files outside EXAMPLES_DIR.
    """
    fname = request.form.get('fname')
    if fname is None:
        answer = 'Uups, I\'ve got no filename...'
    elif fname not in _get_examples():
        answer = 'Hmm, I think you don\'t want to do that.'
    else:
        # BUGFIX: close the example file instead of leaking the handle.
        with open(os.path.join(EXAMPLES_DIR, fname)) as fp:
            answer = fp.read()
    data = json.dumps({'answer': answer})
    return make_response(Response(data, content_type='text/x-json'))
def _get_examples():
    """Return the list of example filenames, cached in memcache."""
    cached = memcache.get('legacy_examples')
    if cached is not None:
        return cached
    examples = os.listdir(EXAMPLES_DIR)
    memcache.set('legacy_examples', examples)
    return examples
def _get_sql(data, files=None):
    """Extract the SQL text from an uploaded file or form/query data.

    :param data: mapping (form or query args) that may carry SQL under 'data'.
    :param files: optional mapping of uploads; 'datafile' takes precedence.
    :returns: the SQL as unicode, or an empty string when none was supplied.
    """
    sql = None
    if files is not None and 'datafile' in files:
        raw = files['datafile'].read()
        try:
            sql = raw.decode('utf-8')
        # BUGFIX: `except UnicodeDecodeError, err` is Python-2-only syntax;
        # `as err` works on Python 2.6+ and Python 3.
        except UnicodeDecodeError as err:
            logging.error(err)
            logging.debug(repr(raw))
            sql = (u'-- UnicodeDecodeError: %s\n'
                   u'-- Please make sure to upload UTF-8 encoded data for now.\n'
                   u'-- If you want to help improving this part of the application\n'
                   u'-- please file a bug with some demo data at:\n'
                   u'-- http://code.google.com/p/python-sqlparse/issues/entry\n'
                   u'-- Thanks!\n' % err)
    if not sql:
        sql = data.get('data')
    return sql or ''
def _format_sql(sql, data, format='html'):
    """Format *sql* using sqlparse options drawn from the *data* mapping.

    :param sql: the SQL text to format.
    :param data: mapping of user options (form or query args).
    :param format: 'html'/'json' wrap or highlight the output; anything else
        returns plain formatted SQL.
    :returns: the formatted (and possibly HTML-wrapped) SQL string.
    """
    popts = {}
    if data.get('remove_comments'):
        popts['strip_comments'] = True
    if data.get('keyword_case', 'undefined') not in ('undefined', ''):
        popts['keyword_case'] = data.get('keyword_case')
    if data.get('identifier_case', 'undefined') not in ('undefined', ''):
        popts['identifier_case'] = data.get('identifier_case')
    if data.get('n_indents', None) is not None:
        val = data.get('n_indents')
        try:
            # Clamp the requested indent width to a sane range.
            popts['indent_width'] = max(1, min(1000, int(val)))
            popts['reindent'] = True
        except (ValueError, TypeError):
            pass
    if ('indent_width' not in popts and
            data.get('reindent', '').lower() in ('1', 'true', 't')):
        popts['indent_width'] = 2
        popts['reindent'] = True
    if data.get('output_format', None) is not None:
        popts['output_format'] = data.get('output_format')
    logging.debug('Format: %s, POPTS: %r', format, popts)
    logging.debug(sql)
    sql = sqlparse.format(sql, **popts)
    if format in ('html', 'json'):
        if data.get('highlight', False):
            # BUGFIX: use .get() — 'output_format' may be absent from popts,
            # which previously raised KeyError when only highlighting was on.
            output_format = popts.get('output_format')
            if output_format == 'python':
                lexer = PythonLexer()
            elif output_format == 'php':
                lexer = PhpLexer()
            else:
                lexer = SqlLexer()
            sql = highlight(sql, lexer, HtmlFormatter())
        else:
            sql = ('<textarea class="resizable" '
                   'style="height: 350px; margin-top: 1em;">%s</textarea>'
                   % sql)
    return sql
|
sriniiyer/codenn
|
src/sqlparse/extras/appengine/sqlformat/legacy.py
|
Python
|
mit
| 5,743
|
#!/bin/env python3
"""
Convert a Material Design Icons Codepoints file to QML.
This script creates a QML component which defines
constants for all icons listed in the file
`MaterialIcons-Regular.codepoints` in the FontAwesome package.
"""
import fire
from typing import Optional, NamedTuple, List
from pathlib import Path
from textwrap import dedent
class Icon(NamedTuple):
    """A parsed codepoint entry: QML property name and unicode escape string."""
    name: str
    code: str
class CodepointsConverter:
    """Converts a Material Design Icons codepoints file into a QML singleton.

    The codepoints file holds one `<name> <hex-codepoint>` pair per line; the
    generated QML exposes each icon as a readonly string property.
    """

    def __init__(self):
        super().__init__()
        # Path of the codepoints file to read (set in convert()).
        self._input_file: Optional[str] = None
        # Path of the QML file to write; None means print to stdout.
        self._output_file: Optional[str] = None
        # Parsed icons, in file order.
        self._icons: List[Icon] = []

    def convert(
        self, input_file: Optional[str] = None, output_file: Optional[str] = None
    ):
        """
        Convert Code Points file to QML.

        This commands converts the Material Design Icons codepoints file
        to a QML file, which provides a nice way to access these icons
        in QML.
        """
        if input_file is None:
            # Default to the codepoints file bundled with the app sources.
            input_file = str(
                Path(__file__).parent.parent
                / "app"
                / "Fonts"
                / "material-design-icons"
                / "font"
                / "MaterialIconsOutlined-Regular.codepoints"
            )
        self._input_file = input_file
        self._output_file = output_file
        self._parse()
        self._write()

    def _parse(self):
        # Read `<name> <codepoint>` pairs; skip lines without both fields.
        icons = self._icons
        with open(self._input_file) as file:
            for line in file:
                chunks = line.split()
                if len(chunks) < 2:
                    continue
                name = chunks[0]
                code_point = chunks[1]
                icons.append(Icon(self._fix_name(name), f"\\u{code_point}"))

    def _fix_name(self, name):
        # Turn "foo_bar" into camel case with an "mdi" prefix: "mdiFooBar".
        # Each pass removes the first "_" and capitalizes the character that
        # follows it, until no underscore remains (index() raises ValueError).
        name = "mdi_" + name
        try:
            while True:
                idx = name.index("_")
                pre = name[0:idx]
                suf = name[idx + 2 :]
                cap = name[idx + 1 : idx + 2].capitalize()
                name = pre + cap + suf
        except ValueError:
            pass
        return name

    def _write(self):
        # Render one readonly property per icon and wrap them in a QtObject
        # singleton; "{{"/"}}" are literal braces in the .format() template.
        icons = self._icons
        lines = "\n".join(
            [
                f'    readonly property string {icon.name}: "{icon.code}"'
                for icon in icons
            ]
        )
        content = dedent("""\
        pragma Singleton
        import QtQuick 2.0
        QtObject {{
        {lines}
        }}
        """).format(lines=lines)
        output_file = self._output_file
        if output_file is not None:
            with open(output_file, "w") as file:
                file.write(content)
        else:
            print(content)
if __name__ == "__main__":
    # Expose the converter's public methods as a CLI via python-fire.
    fire.Fire(CodepointsConverter())
# import sys
# def _dash2cap(name):
# try:
# while True:
# idx = name.index("-")
# pre = name[0:idx]
# suf = name[idx + 2:]
# cap = name[idx + 1:idx + 2].capitalize()
# name = pre + cap + suf
# except ValueError:
# pass
# return name
# def _main(argv):
# import json
# if len(argv) != 3:
# print("Usage:")
# msg = " {} path/to/icons.json path/to/Icons.qml"
# msg = msg.format(argv[0])
# print(msg)
# sys.exit(1)
# with open(argv[1], "r") as file:
# icons = json.load(file)
# lines = []
# lines.append("pragma Singleton")
# lines.append("import QtQuick 2.0")
# lines.append("")
# lines.append("QtObject {")
# prop = ' readonly property string {}: "{}"'
# for key in icons:
# name = "fa-" + key
# name = _dash2cap(name)
# code = "\\u" + icons[key]["unicode"]
# line = prop.format(name, code)
# lines.append(line)
# lines.append("}")
# with open(argv[2], "w") as file:
# file.write("\n".join(lines))
# if __name__ == '__main__':
# _main(sys.argv)
|
mhoeher/opentodolist
|
bin/material_design_icons_codepoints2qml.py
|
Python
|
gpl-3.0
| 3,970
|
#
#
# This source file is part of ELINA (ETH LIbrary for Numerical Analysis).
# ELINA is Copyright © 2019 Department of Computer Science, ETH Zurich
# This software is distributed under GNU Lesser General Public License Version 3.0.
# For more information, see the ELINA project website at:
# http://elina.ethz.ch
#
# THE SOFTWARE IS PROVIDED "AS-IS" WITHOUT ANY WARRANTY OF ANY KIND, EITHER
# EXPRESS, IMPLIED OR STATUTORY, INCLUDING BUT NOT LIMITED TO ANY WARRANTY
# THAT THE SOFTWARE WILL CONFORM TO SPECIFICATIONS OR BE ERROR-FREE AND ANY
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE,
# TITLE, OR NON-INFRINGEMENT. IN NO EVENT SHALL ETH ZURICH BE LIABLE FOR ANY
# DAMAGES, INCLUDING BUT NOT LIMITED TO DIRECT, INDIRECT,
# SPECIAL OR CONSEQUENTIAL DAMAGES, ARISING OUT OF, RESULTING FROM, OR IN
# ANY WAY CONNECTED WITH THIS SOFTWARE (WHETHER OR NOT BASED UPON WARRANTY,
# CONTRACT, TORT OR OTHERWISE).
#
#
from elina_auxiliary_imports import *
from elina_linexpr0 import *
from tests.test_imports import *
def test_set_linexpr_scalar_int(linexpr):
    """Exercise the scalar-int setters on *linexpr*.

    Shrinks the expression to half its size (minimum 1), sets the constant
    and every coefficient to random C ints, then prints the expression with
    its linearity/integrality classifications.
    """
    size = elina_linexpr0_size(linexpr)
    new_size = size // 2
    if not new_size:
        # Keep at least one coefficient slot.
        new_size = 1
    new_size = c_size_t(new_size)
    elina_linexpr0_realloc(linexpr, new_size)
    num = c_int(random.randint(0, 99))
    elina_linexpr0_set_cst_scalar_int(linexpr, num)
    for i in range(new_size.value):
        num = c_int(random.randint(0, 99))
        elina_linexpr0_set_coeff_scalar_int(linexpr, ElinaDim(i), num)
    print_c('set scalar int size : {} linexpr: '.format(new_size))
    elina_linexpr0_print(linexpr, None)
    print_c(' is linear: {} is quasilinear: {}\n'.format(elina_linexpr0_is_linear(linexpr),
                                                         elina_linexpr0_is_quasilinear(linexpr)))
    # Probe integrality/realness of two randomly chosen dimensions.
    intdim = c_size_t(random.randint(0, new_size.value-1))
    realdim = c_size_t(random.randint(0, new_size.value-1))
    print_c('dim: {} is integer: {} dim: {} is real: {}\n'.format(intdim, elina_linexpr0_is_integer(linexpr, intdim),
                                                                  realdim, elina_linexpr0_is_real(linexpr, realdim)))
def test_set_linexpr_scalar_frac(linexpr):
    """Exercise the scalar-fraction setters on *linexpr*.

    Shrinks the expression to half its size (minimum 1), sets the constant
    and every coefficient to random fractions p/q, then prints the expression
    with its linearity/integrality classifications.
    """
    size = elina_linexpr0_size(linexpr)
    new_size = size // 2
    if not new_size:
        # BUGFIX: was `new_size = c_size_t(1)`, which the c_size_t() call
        # below would wrap a second time (TypeError: an integer is required).
        # Use a plain int, matching the sibling test_set_linexpr_* functions.
        new_size = 1
    new_size = c_size_t(new_size)
    elina_linexpr0_realloc(linexpr, new_size)
    p = c_long(random.randint(0, 999))
    q = c_ulong(random.randint(1, 20))
    elina_linexpr0_set_cst_scalar_frac(linexpr, p, q)
    for i in range(new_size.value):
        p = c_long(random.randint(0, 999))
        q = c_ulong(random.randint(1, 20))
        elina_linexpr0_set_coeff_scalar_frac(linexpr, ElinaDim(i), p, q)
    print_c('set scalar frac size: {} linexpr: '.format(new_size))
    elina_linexpr0_print(linexpr, None)
    print_c(' is linear: {} is quasilinear: {}\n'.format(elina_linexpr0_is_linear(linexpr),
                                                         elina_linexpr0_is_quasilinear(linexpr)))
    # Probe integrality/realness of two randomly chosen dimensions.
    intdim = c_size_t(random.randint(0, new_size.value-1))
    realdim = c_size_t(random.randint(0, new_size.value-1))
    print_c('dim: {} is integer: {} dim: {} is real: {}\n'.format(intdim, elina_linexpr0_is_integer(linexpr, intdim),
                                                                  realdim, elina_linexpr0_is_real(linexpr, realdim)))
def test_set_linexpr_scalar_double(linexpr):
    """Exercise the scalar-double setters on *linexpr*.

    Shrinks the expression to half its size (minimum 1), sets the constant
    and every coefficient to random doubles in [-1, 1], then prints the
    expression with its linearity/integrality classifications.
    """
    size = elina_linexpr0_size(linexpr)
    new_size = size // 2
    if not new_size:
        # Keep at least one coefficient slot.
        new_size = 1
    new_size = c_size_t(new_size)
    elina_linexpr0_realloc(linexpr, new_size)
    d = c_double(random.uniform(-1, 1))
    elina_linexpr0_set_cst_scalar_double(linexpr, d)
    for i in range(new_size.value):
        d = c_double(random.uniform(-1, 1))
        elina_linexpr0_set_coeff_scalar_double(linexpr, ElinaDim(i), d)
    print_c('set scalar double size : {} linexpr: '.format(new_size))
    elina_linexpr0_print(linexpr, None)
    print_c(' is linear: {} is quasilinear: {}\n'.format(elina_linexpr0_is_linear(linexpr),
                                                         elina_linexpr0_is_quasilinear(linexpr)))
    # Probe integrality/realness of two randomly chosen dimensions.
    intdim = c_size_t(random.randint(0, new_size.value-1))
    realdim = c_size_t(random.randint(0, new_size.value-1))
    print_c('dim: {} is integer: {} dim: {} is real: {}\n'.format(intdim, elina_linexpr0_is_integer(linexpr, intdim),
                                                                  realdim, elina_linexpr0_is_real(linexpr, realdim)))
def test_set_linexpr_interval_int(linexpr):
    """Exercise the interval-int setters on *linexpr*.

    Shrinks the expression to half its size (minimum 1), sets the constant
    and every coefficient to random integer intervals [inf, sup] with
    sup >= inf, then prints the expression with its classifications.
    """
    size = elina_linexpr0_size(linexpr)
    new_size = size // 2
    if not new_size:
        # Keep at least one coefficient slot.
        new_size = 1
    new_size = c_size_t(new_size)
    elina_linexpr0_realloc(linexpr, new_size)
    inf = c_int(random.randint(0, 99))
    sup = c_int(random.randint(inf.value, 99 + inf.value))
    elina_linexpr0_set_cst_interval_int(linexpr, inf, sup)
    for i in range(new_size.value):
        inf = c_int(random.randint(0, 99))
        sup = c_int(random.randint(inf.value, 99 + inf.value))
        elina_linexpr0_set_coeff_interval_int(linexpr, ElinaDim(i), inf, sup)
    print_c('set interval int size : {} linexpr: '.format(new_size))
    elina_linexpr0_print(linexpr, None)
    print_c(' is linear: {} is quasilinear: {}\n'.format(elina_linexpr0_is_linear(linexpr),
                                                         elina_linexpr0_is_quasilinear(linexpr)))
    # Probe integrality/realness of two randomly chosen dimensions.
    intdim = c_size_t(random.randint(0, new_size.value-1))
    realdim = c_size_t(random.randint(0, new_size.value-1))
    print_c('dim: {} is integer: {} dim: {} is real: {}\n'.format(intdim, elina_linexpr0_is_integer(linexpr, intdim),
                                                                  realdim, elina_linexpr0_is_real(linexpr, realdim)))
def test_set_linexpr_interval_frac(linexpr):
    """Halve *linexpr*, fill the constant and every coefficient with random
    rational intervals (num/den pairs), then print the expression and its
    linearity/dimension predicates."""
    size = elina_linexpr0_size(linexpr)
    new_size = size // 2
    if not new_size:
        new_size = 1
    new_size = c_size_t(new_size)
    elina_linexpr0_realloc(linexpr, new_size)
    # both bounds share a single denominator; numsup >= numinf keeps the
    # interval non-empty
    numinf = c_long(random.randint(0, 99))
    deninf = c_ulong(random.randint(1, 20))
    numsup = c_long(numinf.value + random.randint(0, 99))
    densup = deninf
    elina_linexpr0_set_cst_interval_frac(linexpr, numinf, deninf, numsup, densup)
    for i in range(new_size.value):
        numinf = c_long(random.randint(0, 99))
        deninf = c_ulong(random.randint(1, 20))
        numsup = c_long(numinf.value + random.randint(0, 99))
        densup = deninf
        elina_linexpr0_set_coeff_interval_frac(linexpr, ElinaDim(i), numinf, deninf, numsup, densup)
    print_c('set interval frac size : {} linexpr: '.format(new_size))
    elina_linexpr0_print(linexpr, None)
    print_c(' is linear: {} is quasilinear: {}\n'.format(elina_linexpr0_is_linear(linexpr),
                                                         elina_linexpr0_is_quasilinear(linexpr)))
    intdim = c_size_t(random.randint(0, new_size.value-1))
    realdim = c_size_t(random.randint(0, new_size.value-1))
    print_c('dim: {} is integer: {} dim: {} is real: {}\n'.format(intdim, elina_linexpr0_is_integer(linexpr, intdim),
                                                                  realdim, elina_linexpr0_is_real(linexpr, realdim)))
def test_set_linexpr_interval_double(linexpr):
    """Halve *linexpr*, fill the constant and every coefficient with random
    double intervals, then print the expression and its linearity/dimension
    predicates."""
    size = elina_linexpr0_size(linexpr)
    new_size = size // 2
    if not new_size:
        new_size = 1
    new_size = c_size_t(new_size)
    elina_linexpr0_realloc(linexpr, new_size)
    # NOTE(review): sup = inf + uniform(-1, 1) can fall below inf, producing
    # an empty interval -- confirm that exercising this case is intended.
    inf = c_double(random.uniform(-1, 1))
    sup = c_double(inf.value + random.uniform(-1, 1))
    elina_linexpr0_set_cst_interval_double(linexpr, inf, sup)
    for i in range(new_size.value):
        inf = c_double(random.uniform(-1, 1))
        sup = c_double(inf.value + random.uniform(-1, 1))
        elina_linexpr0_set_coeff_interval_double(linexpr, ElinaDim(i), inf, sup)
    print_c('set interval double size : {} linexpr: '.format(new_size))
    elina_linexpr0_print(linexpr, None)
    print_c(' is linear: {} is quasilinear: {}\n'.format(elina_linexpr0_is_linear(linexpr),
                                                         elina_linexpr0_is_quasilinear(linexpr)))
    intdim = c_size_t(random.randint(0, new_size.value-1))
    realdim = c_size_t(random.randint(0, new_size.value-1))
    print_c('dim: {} is integer: {} dim: {} is real: {}\n'.format(intdim, elina_linexpr0_is_integer(linexpr, intdim),
                                                                  realdim, elina_linexpr0_is_real(linexpr, realdim)))
def test_linexpr_compare(linexpr1, linexpr2, linexpr3, linexpr4):
    """Print all four expressions, then elina_linexpr0_compare for every
    ordered pair (both directions of each of the six combinations)."""
    print_c('linexpr1: ')
    elina_linexpr0_print(linexpr1, None)
    print_c('\nlinexpr2: ')
    elina_linexpr0_print(linexpr2, None)
    print_c('\nlinexpr3: ')
    elina_linexpr0_print(linexpr3, None)
    print_c('\nlinexpr4: ')
    elina_linexpr0_print(linexpr4, None)
    print_c('\nlinexpr1 <= linexpr2: {} linexpr2 <= linexpr1: {}'.format(elina_linexpr0_compare(linexpr1, linexpr2),
                                                                         elina_linexpr0_compare(linexpr2, linexpr1)))
    print_c('\nlinexpr1 <= linexpr3: {} linexpr3 <= linexpr1: {}'.format(elina_linexpr0_compare(linexpr1, linexpr3),
                                                                         elina_linexpr0_compare(linexpr3, linexpr1)))
    print_c('\nlinexpr1 <= linexpr4: {} linexpr4 <= linexpr1: {}'.format(elina_linexpr0_compare(linexpr1, linexpr4),
                                                                         elina_linexpr0_compare(linexpr4, linexpr1)))
    print_c('\nlinexpr2 <= linexpr3: {} linexpr3 <= linexpr2: {}'.format(elina_linexpr0_compare(linexpr2, linexpr3),
                                                                         elina_linexpr0_compare(linexpr3, linexpr2)))
    print_c('\nlinexpr2 <= linexpr4: {} linexpr4 <= linexpr2: {}'.format(elina_linexpr0_compare(linexpr2, linexpr4),
                                                                         elina_linexpr0_compare(linexpr4, linexpr2)))
    print_c('\nlinexpr3 <= linexpr4: {} linexpr4 <= linexpr3: {}\n'.format(elina_linexpr0_compare(linexpr3, linexpr4),
                                                                           elina_linexpr0_compare(linexpr4, linexpr3)))
def test_linexpr_equality(linexpr1, linexpr2, linexpr3, linexpr4):
    """Print all four expressions, then the result of each pairwise
    elina_linexpr0_equal test (six unordered pairs)."""
    exprs = [linexpr1, linexpr2, linexpr3, linexpr4]
    for idx, expr in enumerate(exprs, start=1):
        # first label has no leading newline, matching the legacy output
        print_c(('linexpr{}: ' if idx == 1 else '\nlinexpr{}: ').format(idx))
        elina_linexpr0_print(expr, None)
    pairs = [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]
    for a, b in pairs:
        # trailing newline only after the final pair
        tail = '\n' if (a, b) == pairs[-1] else ''
        print_c('\nlinexpr{} == linexpr{}: {}{}'.format(
            a + 1, b + 1, elina_linexpr0_equal(exprs[a], exprs[b]), tail))
# Driver: build four expressions of one random size -- linexpr1 (sparse) and
# linexpr2 (dense) get scalar coefficients; linexpr3/linexpr4 (both sparse)
# get interval coefficients -- then exercise comparison and equality.
size = c_size_t(random.randint(3, 22))
linexpr1 = elina_linexpr0_alloc(ElinaLinexprDiscr.ELINA_LINEXPR_SPARSE, size)
linexpr2 = elina_linexpr0_alloc(ElinaLinexprDiscr.ELINA_LINEXPR_DENSE, size)
linexpr3 = elina_linexpr0_alloc(ElinaLinexprDiscr.ELINA_LINEXPR_SPARSE, size)
linexpr4 = elina_linexpr0_alloc(ElinaLinexprDiscr.ELINA_LINEXPR_SPARSE, size)
# each setter test halves the expression it receives before refilling it
test_set_linexpr_scalar_int(linexpr1)
test_set_linexpr_scalar_int(linexpr2)
test_set_linexpr_scalar_frac(linexpr1)
test_set_linexpr_scalar_frac(linexpr2)
test_set_linexpr_scalar_double(linexpr1)
test_set_linexpr_scalar_double(linexpr2)
test_set_linexpr_interval_int(linexpr3)
test_set_linexpr_interval_int(linexpr4)
test_set_linexpr_interval_frac(linexpr3)
test_set_linexpr_interval_frac(linexpr4)
test_set_linexpr_interval_double(linexpr3)
test_set_linexpr_interval_double(linexpr4)
test_linexpr_compare(linexpr1, linexpr2, linexpr3, linexpr4)
test_linexpr_equality(linexpr1, linexpr2, linexpr3, linexpr4)
|
eth-srl/OptOctagon
|
python_interface/tests/linexpr0_test.py
|
Python
|
apache-2.0
| 12,187
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import copy
import re
import reportlab
import reportlab.lib.units
from openerp.tools.safe_eval import safe_eval as eval
_regex = re.compile('\[\[(.+?)\]\]')
def _child_get(node, self=None, tagname=None):
    """Yield children of *node* whose tag matches *tagname* (all when None).

    When a report object *self* with a localcontext is given, RML control
    attributes on a child are honoured:
      - rml_loop:   expression yielding context dicts; the child is re-emitted
                    once per iteration with localcontext updated in place.
      - rml_except: expression whose evaluation failure suppresses the child.
      - rml_tag:    expression returning (tag, attrib); a copy of the child is
                    re-tagged before being yielded, falling back to the
                    original child on any error.
    Expressions are evaluated with OpenERP's safe_eval (imported as ``eval``).
    """
    for n in node:
        if self and self.localcontext and n.get('rml_loop', False):
            # remember the context so it can be restored after the loop
            oldctx = self.localcontext
            for ctx in eval(n.get('rml_loop'),{}, self.localcontext):
                self.localcontext.update(ctx)
                if (tagname is None) or (n.tag==tagname):
                    if n.get('rml_except', False):
                        try:
                            eval(n.get('rml_except'), {}, self.localcontext)
                        except Exception:
                            # rml_except failed for this iteration: skip it
                            continue
                    if n.get('rml_tag'):
                        try:
                            (tag,attr) = eval(n.get('rml_tag'),{}, self.localcontext)
                            n2 = copy.copy(n)
                            n2.tag = tag
                            n2.attrib.update(attr)
                            yield n2
                        except Exception:
                            # bad rml_tag expression: fall back to the raw node
                            yield n
                    else:
                        yield n
            self.localcontext = oldctx
            continue
        if self and self.localcontext and n.get('rml_except', False):
            try:
                eval(n.get('rml_except'), {}, self.localcontext)
            except Exception:
                continue
        if (tagname is None) or (n.tag==tagname):
            yield n
def _process_text(self, txt):
    """Expand every ``[[ expression ]]`` placeholder in *txt*.

    Literal segments are routed through the optional 'translate' callable
    found in localcontext; expressions are evaluated against localcontext,
    with any failure rendered as the empty string.  Non-string results of
    int/float type are stringified; anything else is dropped.
    (Python 2 code: note the use of ``basestring``.)
    """
    if not self.localcontext:
        return txt
    if not txt:
        return ''
    result = ''
    # split() on the capturing regex alternates literal text and expressions
    sps = _regex.split(txt)
    while sps:
        # This is a simple text to translate
        result += self.localcontext.get('translate', lambda x:x)(sps.pop(0))
        if sps:
            try:
                txt2 = eval(sps.pop(0),self.localcontext)
            except Exception:
                txt2 = ''
            if isinstance(txt2, (int, float)):
                txt2 = str(txt2)
            if isinstance(txt2, basestring):
                result += txt2
    return result
def text_get(node):
    """Concatenate the text of *node*'s direct children.

    Children whose ``.text`` is None (e.g. empty elements) are skipped;
    the old ``rc + node.text`` raised TypeError on such children.  The old
    loop also shadowed the *node* argument, which is fixed here.
    """
    parts = []
    for child in node.getchildren():
        if child.text:
            parts.append(child.text)
    return ''.join(parts)
# Size-suffix parsers: (pattern, factor) pairs mapping "<number><unit>"
# strings to reportlab point factors; a bare number means points already.
# Raw strings replace the old literals' invalid "\." escape sequences.
units = [
    (re.compile(r'^(-?[0-9\.]+)\s*in$'), reportlab.lib.units.inch),
    (re.compile(r'^(-?[0-9\.]+)\s*cm$'), reportlab.lib.units.cm),
    (re.compile(r'^(-?[0-9\.]+)\s*mm$'), reportlab.lib.units.mm),
    (re.compile(r'^(-?[0-9\.]+)\s*$'), 1)
]
def unit_get(size):
    """Convert a size string such as '2cm' or '10 mm' into reportlab points.

    Returns False when *size* is empty/None or matches no known unit pattern.
    """
    if not size:
        return False
    for pattern, factor in units:
        match = pattern.search(size, 0)
        if match:
            return factor * float(match.group(1))
    return False
def tuple_int_get(node, attr_name, default=None):
    """Parse the comma-separated attribute *attr_name* of *node* into a list
    of ints, returning *default* when the attribute is missing or empty."""
    raw = node.get(attr_name)
    if not raw:
        return default
    return [int(part) for part in raw.split(',')]
def bool_get(value):
    """Interpret an RML boolean attribute: "1" or "yes" (any case) are true.

    The value is stringified first; the old code called ``value.lower()``
    directly and raised AttributeError for non-string input.
    """
    text = str(value)
    return text == "1" or text.lower() == 'yes'
def attr_get(node, attrs, dict=None):
    """Collect converted attribute values from *node*.

    Names in *attrs* are parsed as unit sizes; *dict* maps extra attribute
    names to a conversion kind ('str', 'bool', 'int' or 'unit').  The
    parameter name ``dict`` shadows the builtin but is kept for API
    compatibility with existing callers.
    """
    typed = {} if dict is None else dict
    converters = {'str': str, 'bool': bool_get, 'int': int, 'unit': unit_get}
    res = {}
    for name in attrs:
        value = node.get(name)
        if value:
            res[name] = unit_get(value)
    for key in typed:
        value = node.get(key)
        if value:
            convert = converters.get(typed[key])
            # unknown conversion kinds are silently ignored, as before
            if convert is not None:
                res[key] = convert(value)
    return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
diogocs1/comps
|
web/openerp/report/render/rml2txt/utils.py
|
Python
|
apache-2.0
| 4,710
|
# ***************************************************************************
# * Copyright (c) 2020 Bernd Hahnebach <bernd@bimstatik.org> *
# * Copyright (c) 2020 Sudhanshu Dubey <sudhanshu.thethunder@gmail.com> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
# to run the example use:
"""
from femexamples.constraint_contact_solid_solid import setup
setup()
"""
# constraint contact for solid to solid mesh
# https://forum.freecadweb.org/viewtopic.php?f=18&t=20276
import FreeCAD
from FreeCAD import Rotation
from FreeCAD import Vector
import Fem
import ObjectsFem
import Part
mesh_name = "Mesh" # needs to be Mesh to work with unit tests
def init_doc(doc=None):
    """Return *doc* unchanged, creating a fresh FreeCAD document when None."""
    return FreeCAD.newDocument() if doc is None else doc
def get_information():
    """Describe this example (name, mesh, constraints, solver, material,
    equation) for the FEM examples framework."""
    return {
        "name": "Constraint Contact Solid Solid",
        "meshtype": "solid",
        "meshelement": "Tet10",
        "constraints": ["fixed", "pressure", "contact"],
        "solvers": ["calculix"],
        "material": "solid",
        "equation": "mechanical",
    }
def setup(doc=None, solvertype="ccxtools"):
    """Build the contact example: a half-cylinder pressed onto a tall box.

    Creates the fused geometry, a CalculiX solver (variant chosen by
    *solvertype*), a generic steel material, fixed/pressure/contact
    constraints, and loads a pre-built Tet10 mesh shipped with the tests.
    Returns the recomputed document.
    """
    # setup model
    if doc is None:
        doc = init_doc()
    # geometry objects
    # bottom box
    bottom_box_obj = doc.addObject("Part::Box", "BottomBox")
    bottom_box_obj.Length = 100
    bottom_box_obj.Width = 25
    bottom_box_obj.Height = 500
    bottom_box_obj.Placement = FreeCAD.Placement(
        Vector(186, 0, -247),
        Rotation(0, 0, 0),
        Vector(0, 0, 0),
    )
    doc.recompute()
    # top half cylinder, https://forum.freecadweb.org/viewtopic.php?f=18&t=43001#p366111
    top_halfcyl_obj = doc.addObject("Part::Cylinder", "TopHalfCylinder")
    top_halfcyl_obj.Radius = 30
    top_halfcyl_obj.Height = 500
    top_halfcyl_obj.Angle = 180
    top_halfcyl_sh = Part.getShape(top_halfcyl_obj, '', needSubElement=False, refine=True)
    top_halfcyl_obj.Shape = top_halfcyl_sh
    top_halfcyl_obj.Placement = FreeCAD.Placement(
        Vector(0, -42, 0),
        Rotation(0, 90, 0),
        Vector(0, 0, 0),
    )
    doc.recompute()
    # all geom fusion
    geom_obj = doc.addObject("Part::MultiFuse", "AllGeomFusion")
    geom_obj.Shapes = [bottom_box_obj, top_halfcyl_obj]
    if FreeCAD.GuiUp:
        bottom_box_obj.ViewObject.hide()
        top_halfcyl_obj.ViewObject.hide()
    doc.recompute()
    if FreeCAD.GuiUp:
        geom_obj.ViewObject.Document.activeView().viewAxonometric()
        geom_obj.ViewObject.Document.activeView().fitAll()
    # analysis
    analysis = ObjectsFem.makeAnalysis(doc, "Analysis")
    # solver
    if solvertype == "calculix":
        solver_object = analysis.addObject(
            ObjectsFem.makeSolverCalculix(doc, "SolverCalculiX")
        )[0]
    elif solvertype == "ccxtools":
        solver_object = analysis.addObject(
            ObjectsFem.makeSolverCalculixCcxTools(doc, "CalculiXccxTools")
        )[0]
        solver_object.WorkingDir = u""
    else:
        # unknown solver: warn and continue without a solver object
        FreeCAD.Console.PrintWarning(
            "Not known or not supported solver type: {}. "
            "No solver object was created.\n".format(solvertype)
        )
    if solvertype == "calculix" or solvertype == "ccxtools":
        solver_object.AnalysisType = "static"
        solver_object.GeometricalNonlinearity = "linear"
        solver_object.ThermoMechSteadyState = False
        solver_object.MatrixSolverType = "default"
        solver_object.IterationsControlParameterTimeUse = False
        solver_object.SplitInputWriter = False
    """
    # solver parameter from fandaL, but they are not needed (see forum topic)
    solver_object.IterationsControlParameterTimeUse = True
    solver_object.IterationsControlParameterCutb = '0.25,0.5,0.75,0.85,,,1.5,'
    solver_object.IterationsControlParameterIter = '4,8,9,200,10,400,,200,,'
    solver_object.IterationsUserDefinedTimeStepLength = True
    solver_object.TimeInitialStep = 0.1
    solver_object.TimeEnd = 1.0
    solver_object.IterationsUserDefinedIncrementations = True # parameter DIRECT
    """
    # material
    material_obj = analysis.addObject(
        ObjectsFem.makeMaterialSolid(doc, "MechanicalMaterial")
    )[0]
    mat = material_obj.Material
    mat["Name"] = "Steel-Generic"
    mat["YoungsModulus"] = "200000 MPa"
    mat["PoissonRatio"] = "0.30"
    material_obj.Material = mat
    analysis.addObject(material_obj)
    # constraint fixed
    con_fixed = analysis.addObject(
        ObjectsFem.makeConstraintFixed(doc, "ConstraintFixed")
    )[0]
    con_fixed.References = [
        (geom_obj, "Face5"),
        (geom_obj, "Face6"),
        (geom_obj, "Face8"),
        (geom_obj, "Face9"),
    ]
    # constraint pressure
    con_pressure = analysis.addObject(
        ObjectsFem.makeConstraintPressure(doc, name="ConstraintPressure")
    )[0]
    con_pressure.References = [(geom_obj, "Face10")]
    con_pressure.Pressure = 100.0  # Pa ? = 100 Mpa ?
    con_pressure.Reversed = False
    # constraint contact
    con_contact = doc.Analysis.addObject(
        ObjectsFem.makeConstraintContact(doc, name="ConstraintContact")
    )[0]
    con_contact.References = [
        (geom_obj, "Face7"),  # first seams slave face, TODO proof in writer code!
        (geom_obj, "Face3"),  # second seams master face, TODO proof in writer code!
    ]
    con_contact.Friction = 0.0
    con_contact.Slope = 1000000.0  # contact stiffness 1000000.0 kg/(mm*s^2)
    # mesh
    from .meshes.mesh_contact_box_halfcylinder_tetra10 import create_nodes, create_elements
    fem_mesh = Fem.FemMesh()
    control = create_nodes(fem_mesh)
    if not control:
        FreeCAD.Console.PrintError("Error on creating nodes.\n")
    control = create_elements(fem_mesh)
    if not control:
        FreeCAD.Console.PrintError("Error on creating elements.\n")
    femmesh_obj = analysis.addObject(
        ObjectsFem.makeMeshGmsh(doc, mesh_name)
    )[0]
    femmesh_obj.FemMesh = fem_mesh
    femmesh_obj.Part = geom_obj
    femmesh_obj.SecondOrderLinear = False
    doc.recompute()
    return doc
|
Fat-Zer/FreeCAD_sf_master
|
src/Mod/Fem/femexamples/constraint_contact_solid_solid.py
|
Python
|
lgpl-2.1
| 7,624
|
# Copyright 2002, 2004 John T. Reese.
# email: jtr at ofb.net
#
# This file is part of Yammer.
#
# Yammer is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Yammer is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Yammer; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys, os, string, time
from InstallSettings import settings
import YOS, glob, Gale
from pygale.pygale import *
import pygale.authcache
def havePrivateFor(recp):
    # True when the auth cache already holds a private key for *recp*.
    # NOTE(review): `authcache` is used unqualified although only
    # `pygale.authcache` is imported directly -- presumably it arrives via
    # the star import from pygale.pygale; confirm.
    return authcache.have_a_privkey([recp])
def privRoot():
    """Directory holding the user's private Gale keys (with trailing slash)."""
    return '%s/.gale/auth/private/' % os.environ['HOME']
def privPath(recp):
    """Return the path of *recp*'s private key file, or None when neither the
    bare name nor the ".gpri"-suffixed variant exists."""
    for candidate in (privRoot() + recp, privRoot() + recp + ".gpri"):
        if os.path.exists(candidate):
            return candidate
    return None
def getFullname(keyname):
    """Look up *keyname*'s key (no AKD) and return its comment as the full
    name; malformed lookup results yield a '?? ... ??' placeholder string."""
    n, k= lookup_location(keyname, do_akd=False)
    # defensive: only trust a tuple/list whose first entry has a comment()
    if not isinstance(k, (tuple, list)) or not hasattr(k[0], 'comment'):
        return '?? %r ??' % k
    return k[0].comment()
def userFromKey(key):
    """Derive the Gale user id from a private-key path.

    Strips the directory part and a trailing ".gpri" extension.  The
    historical ``len(key) > 5`` guard is preserved so that a bare ".gpri"
    basename is returned unchanged.
    """
    key = os.path.basename(key)
    if len(key) > 5 and key.endswith('.gpri'):
        key = key[:-5]
    return key
# NOTE(review): the old comment here claimed a (signed time, expire time)
# tuple "or None for old keys"; the code below actually returns the key
# file's modification time (st_mtime) as a float, and raises if privPath
# returns None.  Confirm which contract callers rely on.
def getKeyTime(key):
    """Return the mtime of *key*'s private key file."""
    p= privPath(key)
    return os.stat(p).st_mtime
def listPrivateUsers():
    """List user ids for every private key under privRoot() whose domain is
    in the space-separated 'galeDomains' setting.

    Python 2 code: ``map`` returns a list here.
    """
    privs= []
    for domain in settings['galeDomains'].split():
        # both bare keys and ".gpri"-suffixed key files count
        privs += glob.glob(privRoot() + "*@" + domain)
        privs += glob.glob(privRoot() + "*@" + domain \
                + ".gpri")
    return map(userFromKey, privs)
def pubPath(recp):
    """Return the path of *recp*'s public key file, or None if absent.

    Checks the local public-key directory first, then a ".gpub" file next
    to the private keys.
    """
    local = os.environ['HOME'] + "/.gale/auth/local/" + recp
    if os.path.exists(local):
        return local
    bundled = privRoot() + recp + ".gpub"
    if os.path.exists(bundled):
        return bundled
    return None
def gkgen(id, name, source=None):
    """Generate a Gale key pair for *id* via the external ``gkgen`` tool.

    On success, best-effort broadcasts a notice on ``_gale.notice.<id>`` and
    returns None; on failure, returns the tool's accumulated output.

    Raises:
        ValueError: if *id* lacks an @domain part.  (Was the string
        exception ``raise 'nodomain'``, which modern Python rejects with a
        TypeError at raise time.)
    """
    # validate before any imports/subprocess work so the error path is cheap
    if '@' not in id:
        raise ValueError('nodomain')
    from YGaleClient import YGaleClient
    error = ''
    gkf = YOS.psafepopen(['gkgen', id, '/' + name], 'r')
    # drain the tool's output; it doubles as the error report on failure
    while 1:
        line = gkf.readline()
        if not line:
            break
        error = error + line
    if gkf.close() == 0:
        try:
            yg = YGaleClient()
            msg = 'new key: %(id)s %(name)s' % locals()
            if source is not None:
                msg += ' (via %s)' % source
            yg.gsend(id, ['_gale.notice.' + id], msg)
        except Exception:
            # notification is best effort; the old bare `except:` also
            # swallowed KeyboardInterrupt/SystemExit, which we no longer catch
            pass
        return
    else:
        return error
|
nandub/yammer
|
lib/KeyStore.py
|
Python
|
gpl-2.0
| 2,728
|
from tendrl.commons.utils import ini2json
PATH = "tendrl/node_agent/tests/samples/gluster_state_sample.yaml"
class TendrlGlusterfsMonitoringBase(object):
    """Shared test fixture carrying a canned monitoring CONFIG and a parser
    for the sample gluster get-state file.

    ``gluster_state`` was previously declared without ``self`` and raised
    TypeError when invoked on an instance; it is now a staticmethod so both
    class-level and instance-level calls work.
    """

    def __init__(self):
        # Static configuration mimicking what the monitoring plugin receives.
        self.CONFIG = dict(
            {
                "integration_id": "7bccda8c-8c85-45a8-8be0-3f71f4db7db7",
                "graphite_host": "localhost",
                "graphite_port": 8080,
                "peer_name": "10.70.41.169"
            }
        )

    @staticmethod
    def gluster_state():
        # Parse the sample gluster state shipped with the tests into a dict.
        return ini2json.ini_to_dict(PATH)
|
r0h4n/node-agent
|
tendrl/node_agent/tests/mock_gluster_state.py
|
Python
|
lgpl-2.1
| 515
|
import unittest
import numpy as np
import os
import EXOSIMS
from EXOSIMS.Prototypes import PostProcessing
import numpy as np
r"""PostProcessing module unit tests
Paul Nunez, JPL, Aug. 2016
"""
# need a dummy BackgroundSources
specs = {'modules':{'BackgroundSources':' '}}
class Test_PostProcessing_prototype(unittest.TestCase):
    """Unit tests for the PostProcessing prototype's false-alarm behaviour."""
    def setUp(self):
        # minimal stand-ins for the target list and observing mode
        self.TL = {}
        self.mode = {'SNR':5.0}
    def test_nontrivialFAP(self):
        """Check that det_occur produces false alarms at roughly the FAP rate."""
        obj = PostProcessing.PostProcessing(FAP=0.1,MDP=0.0,**specs)
        #Test a case for which the false alarm prob is 0.1
        FAs = np.zeros(1000,dtype=bool)
        for j in np.arange(len(FAs)):
            FA,_ = obj.det_occur(np.array([5]),self.mode,self.TL,0,0)
            FAs[j] = FA
        #~0.3% of the time this test should fail! due to random number gen.
        np.testing.assert_allclose(len(FAs)*obj.FAP, len(np.where(FAs)[0]), rtol=0.3, atol=0.)
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__':
    unittest.main()
|
dsavransky/EXOSIMS
|
tests/Prototypes/test_PostProcessing.py
|
Python
|
bsd-3-clause
| 976
|
import os
import unittest
import nbformat
from nbconvert.preprocessors import ExecutePreprocessor
import pytest
import itertools as itt
DIRECTORY = os.path.dirname(os.path.realpath(__file__))
# CHECK THIS http://stackoverflow.com/questions/1676269/writing-a-re-usable-parametrized-unittest-testcase-method
def get_all_ipynb(directory):
    """Yield the paths of all notebooks under *directory*, skipping any that
    live inside a checkpoints directory.

    The old loop variable shadowed the *directory* parameter -- os.walk
    actually yields (root, dirnames, filenames) -- so it is renamed here.
    """
    for root, _dirnames, filenames in os.walk(directory):
        for filename in filenames:
            if filename.endswith('.ipynb') and 'checkpoints' not in root:
                yield os.path.join(root, filename)
# Parametrized at import time: one test case per notebook found in the repo.
@pytest.mark.parametrize('path', get_all_ipynb(DIRECTORY))
def test_notebook(path):
    """Execute *path* end-to-end and fail on any cell error."""
    # NOTE(review): get_all_ipynb already yields absolute paths, so the
    # join below is presumably a no-op -- confirm.
    with open(os.path.join(DIRECTORY, path)) as f:
        nb = nbformat.read(f, as_version=4)
    ep = ExecutePreprocessor(timeout=600, kernel_name='python3')
    ep.preprocess(nb, {'metadata': {'path': DIRECTORY}})
|
pybel/pybel-notebooks
|
test.py
|
Python
|
apache-2.0
| 849
|
# -*- coding: utf-8
"""Clustering operations and helper functions"""
import os
import hcluster
import numpy as np
from sklearn import manifold
from sklearn import preprocessing
import anvio
import anvio.utils as utils
import anvio.terminal as terminal
import anvio.filesnpaths as filesnpaths
from anvio.errors import ConfigError
with terminal.SuppressAllOutput():
from ete2 import Tree
__author__ = "A. Murat Eren"
__copyright__ = "Copyright 2015, The anvio Project"
__credits__ = []
__license__ = "GPL 3.0"
__version__ = anvio.__version__
__maintainer__ = "A. Murat Eren"
__email__ = "a.murat.eren@gmail.com"
__status__ = "Development"
run = terminal.Run()
progress = terminal.Progress()
progress.verbose = False
def set_num_components_for_each_matrix(config):
    """Distribute config.num_components across matrices in proportion to
    each matrix's 'ratio' entry; stores the share as 'num_components'."""
    total_ratio = float(sum(entry['ratio'] for entry in config.matrices_dict.values()))
    for name in config.matrices:
        entry = config.matrices_dict[name]
        entry['num_components'] = int(round(config.num_components * (entry['ratio'] / total_ratio)))
    return config
def set_null_ratios_for_matrices(config):
    """Give every matrix equal weight by forcing all 'ratio' entries to 1."""
    matrices_dict = config.matrices_dict
    for name in config.matrices:
        matrices_dict[name]['ratio'] = 1
    return config
def depth(current_root):
    """Return the branch-length distance from *current_root* to its
    (topologically) farthest descendant.

    current_root: the node treated as local root for this measurement.
    diff: distance between farthest_node and current_root (the result).
    first_dist: running maximum topological distance during the traversal.
    dist: topological distance of the node currently being visited.
    """
    diff = 0
    if current_root.get_sisters():
        node_sister = current_root.get_sisters()[0]
    # NOTE(review): when current_root has no sister, `node_sister` stays
    # unbound and the comparison inside the traversal below would raise
    # NameError -- confirm callers only pass nodes that have a sister.
    farthest_node = current_root
    if current_root.get_children():
        farthest_node = current_root.get_children()[0]
    first_dist = farthest_node.get_distance(current_root, topology_only=True)
    # preorder walk from current_root, tracking the farthest node by edge count
    for cr_node in current_root.traverse("preorder"):
        # hitting the sister means all of current_root's descendants are done
        if cr_node == node_sister:
            break
        # otherwise keep the topologically farthest node seen so far
        else:
            dist = cr_node.get_distance(current_root, topology_only=True)
            if dist > first_dist:
                farthest_node = cr_node
                first_dist = dist
    # the returned value uses real branch lengths, not edge counts
    diff = farthest_node.get_distance(current_root)
    return diff
def synchronize(current_root, control):
    """Equalize the depths of *current_root* and its sister node.

    The shallower side's branch length (.dist) is padded by the depth
    difference.  *control* is a marker suffix appended to both node names
    so an already-adjusted pair is not processed a second time.
    """
    # Skip nodes already handled when their sister was visited earlier.
    if not current_root.name.endswith(control):
        # A node without sisters (e.g. the root) needs no synchronization.
        if current_root.get_sisters():
            new_root_sister = current_root.get_sisters()[0]
            current_node_depth = depth(current_root)
            sister_node_depth = depth(new_root_sister)
            if current_node_depth > sister_node_depth:
                diff = current_node_depth - sister_node_depth
                new_root_sister.dist += diff
            elif current_node_depth < sister_node_depth:
                diff = sister_node_depth - current_node_depth
                current_root.dist += diff
            # "_!$!" is appended to both names: this function adjusts the
            # pair together, but the caller's loop visits nodes one at a
            # time, so the sister must be recognizably done.
            current_root.name += control
            new_root_sister.name += control
def get_normalized_newick(root):
    """A function written by Doğan Can Kilment. It converts this:
             /-C
            |
            |--D
            |
   /--------|                    /-r4
  |         |           /--------|
  |         |  /--------|         \-r3
  |         | |         |
  |         | |          \-r5
  |          \--------|
---------|            |          /-r6
  |           |           /--------|
  |            \--------|         \-r2
  |                     |
  |                      \-r1
  |
   \-B
    into this, for visualization purposes:
         /--------------------------------C
        |
        |---------------------------------D
        |
   /--------|                    /-r4
  |         |           /--------|
  |         |  /--------|         \-r3
  |         | |         |
  |         | |          \-----------r5
  |          \--------|
---------|            |          /-r6
  |           |           /--------|
  |            \--------|         \-r2
  |                     |
  |                      \-----------r1
  |
   \------------------------------------------B
    """
    # radius of the "circle": distance from root to its farthest node
    farthest_node = root.get_farthest_node()
    circle_radius = farthest_node[0].get_distance(root)
    control = '_!$!' # this is to keep track of what is edited
    for cr_node in root.traverse("levelorder"):
        if not cr_node.is_leaf():
            # pad internal branches so every subtree reaches the circle edge
            farthest_leaf = cr_node.get_farthest_leaf()[0]
            dist_to_root = farthest_leaf.get_distance(root)
            diff = circle_radius - dist_to_root
            cr_node.dist = cr_node.dist + diff
        else:
            # leaves are padded pairwise through their sisters
            synchronize(cr_node, control)
    # final traverse to clean 'control' strings.
    for cr_node in root.traverse("levelorder"):
        if cr_node.name.endswith(control):
            cr_node.name = cr_node.name[:-len(control)]
    return root
def get_newick_tree_data(observation_matrix_path, output_file_name = None, clustering_distance='euclidean',
                         clustering_method = 'complete', norm = 'l1', progress = progress):
    """Cluster a TAB-delimited observation matrix and return the newick tree.

    Rows are read from *observation_matrix_path*, normalized with *norm*,
    then clustered using *clustering_distance* / *clustering_method*.
    The newick string is returned, and also written to *output_file_name*
    when one is given.
    """
    filesnpaths.is_file_exists(observation_matrix_path)
    filesnpaths.is_file_tab_delimited(observation_matrix_path)
    if output_file_name:
        output_file_name = os.path.abspath(output_file_name)
        output_directory = os.path.dirname(output_file_name)
        if not os.access(output_directory, os.W_OK):
            raise ConfigError("You do not have write permission for the output directory: '%s'" % output_directory)
    id_to_sample_dict, sample_to_id_dict, header, vectors = utils.get_vectors_from_TAB_delim_matrix(observation_matrix_path)
    vectors = np.array(vectors)
    # normalize vectors:
    vectors = get_normalized_vectors(vectors, norm=norm, progress=progress)
    # BUGFIX: the old positional call get_clustering_as_tree(vectors,
    # clustering_distance, clustering_method, progress) bound the distance
    # string to that function's `ward` flag -- any truthy distance silently
    # forced Ward linkage and discarded both requested parameters.
    tree = get_clustering_as_tree(vectors, ward=False, clustering_distance=clustering_distance,
                                  clustering_method=clustering_method, progress=progress)
    newick = get_tree_object_in_newick(tree, id_to_sample_dict)
    if output_file_name:
        # `with` guarantees the handle is closed (the old open().write leaked it)
        with open(output_file_name, 'w') as output:
            output.write(newick.strip() + '\n')
    return newick
def get_scaled_vectors(vectors, user_seed = None, n_components = 12, normalize=True, progress = progress):
    """Reduce *vectors* to *n_components* dimensions via metric MDS on their
    pairwise euclidean distances; optionally L1-normalize first.

    Returns the embedded coordinates as a numpy array.
    """
    # NOTE(review): truthiness means user_seed=0 is treated like "no seed"
    # and yields a non-deterministic RandomState -- confirm intended.
    if user_seed:
        seed = np.random.RandomState(seed=user_seed)
    else:
        seed = np.random.RandomState()
    # FIXME: Make this optional:
    from sklearn.metrics.pairwise import euclidean_distances as d
    vectors = get_normalized_vectors(np.array(vectors)) if normalize else np.array(vectors)
    # compute similarities based on d
    progress.update('Computing similarity matrix')
    similarities = d(vectors)
    progress.update('Scaling using %d components' % n_components)
    mds = manifold.MDS(n_components=n_components, max_iter=300, eps=1e-10, random_state=seed,
                       dissimilarity="precomputed", n_jobs=1)
    progress.update('Fitting')
    scaled_vectors = mds.fit(similarities).embedding_
    return scaled_vectors
def get_normalized_vectors(vectors, norm='l1', progress = progress, pad_zeros = True):
    """Normalize each row of *vectors* with the given norm ('l1'/'l2'...).

    When *pad_zeros* is set, a tiny pseudocount is added first so that
    all-zero rows do not break the normalization.
    """
    progress.update('Normalizing vectors using "%s" norm' % norm)
    data = np.array(vectors, dtype=np.float64)
    if pad_zeros:
        data = data + 0.0000001
    return preprocessing.Normalizer(norm=norm).fit_transform(data)
def get_clustering_as_tree(vectors, ward = True, clustering_distance='euclidean', clustering_method = 'complete', progress = progress):
    """Hierarchically cluster *vectors* and return the hcluster tree object.

    When *ward* is true (the default), Ward linkage on euclidean distances
    is used and the clustering_distance/clustering_method arguments are
    ignored entirely; otherwise a pdist/linkage pipeline honors them.
    """
    if ward:
        progress.update('Clustering data with Ward linkage and euclidean distances')
        clustering_result = hcluster.ward(vectors)
    else:
        progress.update('Computing distance matrix using "%s" distance' % clustering_distance)
        distance_matrix = hcluster.pdist(vectors, clustering_distance)
        progress.update('Clustering data with "%s" linkage' % clustering_method)
        clustering_result = hcluster.linkage(distance_matrix, method = clustering_method)
    progress.update('Returning results')
    return hcluster.to_tree(clustering_result)
def get_tree_object_in_newick(tree, id_to_sample_dict, normalize_branches = False):
    """Convert an hcluster ClusterNode (i.e., tree = hcluster.to_tree(c_res))
    into a newick string.

    Leaves are named through *id_to_sample_dict*; internal nodes get
    synthetic 'Int<id>' names.  With *normalize_branches*, branch lengths
    are equalized for display via get_normalized_newick.
    """
    root = Tree()
    root.dist = 0
    root.name = "root"
    # map each hcluster node to its ete2 counterpart while walking the tree
    item2node = {tree: root}
    to_visit = [tree]
    while to_visit:
        node = to_visit.pop()
        # each child inherits half of the merge distance as its branch length
        cl_dist = node.dist / 2.0
        for ch_node in [node.left, node.right]:
            if ch_node:
                ch = Tree()
                ch.dist = cl_dist
                if ch_node.is_leaf():
                    ch.name = id_to_sample_dict[ch_node.id]
                else:
                    ch.name = 'Int' + str(ch_node.id)
                item2node[node].add_child(ch)
                item2node[ch_node] = ch
                to_visit.append(ch_node)
    if normalize_branches:
        root = get_normalized_newick(root)
    return root.write(format=1)
def order_contigs_simple(config, progress = progress, run = run, debug = False):
    """Order contigs by concatenating every matrix's (optionally normalized
    and log-transformed) raw vectors per row and clustering the result.

    Returns the newick string, also written to config.output_file_path when
    one is set.  Unlike order_contigs_experimental, no MDS scaling is done.
    """
    # missing ratios mean every matrix gets equal weight
    if not config.matrices_dict[config.matrices[0]]['ratio']:
        config = set_null_ratios_for_matrices(config)
    if debug:
        run.info_single('Peak at the first 5 items in the first 5 rows in matrices:', mc='green', nl_before=2)
    for matrix in config.matrices:
        m = config.matrices_dict[matrix]
        m['scaled_vectors'] = np.array(m['vectors'], dtype=np.float64)
        if m['normalize']:
            m['scaled_vectors'] = get_normalized_vectors(m['scaled_vectors'])
        if m['log']:
            # +1 keeps zero entries finite under log10
            m['scaled_vectors'] = np.log10(m['scaled_vectors'] + 1)
        if debug:
            summary = '\n'.join(['%s (...)' % m['scaled_vectors'][i][0:5] for i in range(0, 5)])
            run.warning(summary, 'Vectors for "%s" (%d by %d)' % (matrix, len(m['scaled_vectors']), len(m['scaled_vectors'][0])), lc='crimson', raw=True)
    progress.new('Vectors from %d matrices' % config.num_matrices)
    progress.update('Combining ...')
    config.combined_vectors = []
    config.combined_id_to_sample = {}
    for i in range(0, len(config.master_rows)):
        row = config.master_rows[i]
        config.combined_id_to_sample[i] = config.master_rows[i]
        # one concatenated vector per master row, spanning all matrices
        combined_scaled_vectors_for_row = [m['scaled_vectors'][m['sample_to_id'][row]] for m in config.matrices_dict.values()]
        config.combined_vectors.append(np.concatenate(combined_scaled_vectors_for_row))
    progress.update('Clustering ...')
    tree = get_clustering_as_tree(config.combined_vectors, progress = progress)
    newick = get_tree_object_in_newick(tree, config.combined_id_to_sample)
    progress.end()
    if config.output_file_path:
        open(config.output_file_path, 'w').write(newick + '\n')
    return newick
def order_contigs_experimental(config, progress = progress, run = run, debug = False):
    """Order contigs by MDS-scaling each matrix to its share of
    config.num_components, concatenating the scaled vectors per row, and
    clustering the combined matrix.

    With a single matrix, scaling is skipped entirely.  Returns the newick
    string, also written to config.output_file_path when one is set.
    """
    if not config.multiple_matrices:
        # there is one matrix. could be coverage, could be tnf. we don't care.
        # we do what we gotta do: skip scaling and perform clustering using all
        # dimensions.
        m = config.matrices_dict[config.matrices[0]]
        progress.new('Single matrix (%s)' % m['alias'])
        progress.update('Performing cluster analysis ...')
        tree = get_clustering_as_tree(m['vectors'], progress = progress)
        newick = get_tree_object_in_newick(tree, m['id_to_sample'])
        progress.end()
        if config.output_file_path:
            open(config.output_file_path, 'w').write(newick + '\n')
        return newick
    else:
        # FIXME: this part needs to be parallelized.
        # ok. there is more than one matrix, so there will be a mixture of scaled vectors prior to clustering.
        # we first will determine whether ratios were set in the config file. if ratios were not set the simplest
        # thing to do is to equally distributing num_components across all matrices; so we will set ratios to 1.
        # a heuristic that handles the initial config file before calling this function can determine what ratios
        # would be appropriate considering the number of samples in the experiment and/or other experiment-specific
        # properties
        if not config.matrices_dict[config.matrices[0]]['ratio']:
            config = set_null_ratios_for_matrices(config)
        # at this point the ratios are set one way or another. it is time to find out about the distribution of
        # components across matrices. note here we introduce a new member that was not in the original config class,
        # "num_components" per matrix.
        config = set_num_components_for_each_matrix(config)
        # now we know the exact number of components for each matrix. we can scale them to the expected number of
        # dimensions now.
        for matrix in config.matrices:
            m = config.matrices_dict[matrix]
            progress.new('Scaling matrix %d of %d (%s), for %d components' % (config.matrices.index(matrix) + 1,
                                                                              config.num_matrices,
                                                                              m['alias'],
                                                                              m['num_components']))
            m['scaled_vectors'] = get_scaled_vectors(m['vectors'],
                                                     user_seed = config.seed,
                                                     n_components = m['num_components'],
                                                     normalize = m['normalize'],
                                                     progress=progress)
            progress.update('Normalizing scaled vectors ...')
            m['scaled_vectors'] = get_normalized_vectors(m['scaled_vectors'])
            progress.end()
        # scaled vectors are in place. it is time to combine them to generate the input for final clustering
        progress.new('Scaled vectors for %d matrices' % config.num_matrices)
        progress.update('Combining ...')
        config.combined_vectors = []
        config.combined_id_to_sample = {}
        for i in range(0, len(config.master_rows)):
            row = config.master_rows[i]
            config.combined_id_to_sample[i] = config.master_rows[i]
            combined_scaled_vectors_for_row = [m['scaled_vectors'][m['sample_to_id'][row]] for m in config.matrices_dict.values()]
            config.combined_vectors.append(np.concatenate(combined_scaled_vectors_for_row))
        progress.update('Clustering ...')
        tree = get_clustering_as_tree(config.combined_vectors, progress = progress)
        newick = get_tree_object_in_newick(tree, config.combined_id_to_sample)
        progress.end()
        if config.output_file_path:
            open(config.output_file_path, 'w').write(newick + '\n')
        return newick
|
caglar10ur/anvio
|
anvio/clustering.py
|
Python
|
gpl-3.0
| 15,883
|
import datetime
import os
# Test fixture: schedules two daily MongoInsert jobs at the wall-clock time
# derived from the MRQ_TEST_SCHEDULER_TIME environment variable (a Unix
# timestamp).  NOTE: the variable must be set before this module is imported,
# otherwise float(None) raises TypeError at import time.
SCHEDULER_TASKS = [
    {
        # dotted path of the task class the scheduler enqueues
        "path": "tests.tasks.general.MongoInsert",
        "params": {
            "a": 1
        },
        # run once per day at the (local) time-of-day of the test timestamp
        "dailytime": datetime.datetime.fromtimestamp(float(os.environ.get("MRQ_TEST_SCHEDULER_TIME"))).time()
    },
    {
        # same task with different params -> treated as a distinct scheduled job
        "path": "tests.tasks.general.MongoInsert",
        "params": {
            "a": 1,
            "b": "test",
            "c": 3.0
        },
        "dailytime": datetime.datetime.fromtimestamp(float(os.environ.get("MRQ_TEST_SCHEDULER_TIME"))).time()
    }
]
# Seconds between scheduler polls for due tasks.
SCHEDULER_INTERVAL = 1
|
IAlwaysBeCoding/mrq
|
tests/fixtures/config-scheduler3.py
|
Python
|
mit
| 561
|
from django.conf import settings
import boto3
import os
def upload_file_to_s3(filename_and_path):
    '''
    Copy a local media file up to the configured S3 bucket.

    The path (relative to MEDIA_ROOT) doubles as the S3 object key.
    An empty/falsy path is silently ignored.
    '''
    key = str(filename_and_path)
    if not key:
        return
    local_path = settings.MEDIA_ROOT + key
    s3 = boto3.client('s3')
    s3.upload_file(local_path, settings.AWS_STORAGE_BUCKET_NAME, key)
def remove_file_from_s3(filename_and_path):
    '''
    Removes file from S3.

    Deletion is deliberately best-effort: any S3 failure (missing key,
    credentials, connectivity) is swallowed so callers are never interrupted.
    '''
    file = str(filename_and_path)
    if file:
        try:
            client = boto3.client('s3')
            client.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=str(file))
        # Was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt.  Narrowed to Exception: best-effort semantics
        # are preserved without masking interpreter-level signals.
        except Exception:
            pass
def get_file_from_s3(filename_and_path):
    '''
    Download an object from the configured S3 bucket into MEDIA_ROOT,
    creating any missing parent directories first.
    '''
    key = str(filename_and_path)
    local_path = settings.MEDIA_ROOT + key
    # Ensure the destination directory tree exists before writing
    target_dir = os.path.split(local_path)[0]
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    s3 = boto3.client('s3')
    s3.download_file(settings.AWS_STORAGE_BUCKET_NAME, key, local_path)
|
JustinWingChungHui/MyFamilyRoot
|
common/s3_synch.py
|
Python
|
gpl-2.0
| 1,056
|
# - coding: utf-8 -
# Copyright (C) 2008-2010 Toms Bauģis <toms.baugis at gmail.com>
# This file is part of Project Hamster.
# Project Hamster is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Project Hamster is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Project Hamster. If not, see <http://www.gnu.org/licenses/>.
import gtk, gobject
import pango
import datetime as dt
import time
import graphics
import locale
def escape_pango(text):
    """Escape ``&``, ``<`` and ``>`` so *text* is safe to embed in pango markup.

    Falsy input (None or "") is returned unchanged.  ``&`` is replaced first
    so the entities produced for ``<``/``>`` are not double-escaped.

    Bug fixed: the replacement targets had been garbled into identity
    replacements (e.g. ``replace("&", "&")``), so the function escaped nothing.
    """
    if not text:
        return text
    text = text.replace("&", "&amp;")
    text = text.replace("<", "&lt;")
    text = text.replace(">", "&gt;")
    return text
class Bar(graphics.Sprite):
    """One horizontal bar of a bar chart, including its value label.

    ``key`` identifies the row, ``value`` is the pre-formatted label string,
    and ``normalized`` (0..1) is the fraction of the available width the
    colored bar should occupy.  Width/height and fill color are assigned by
    the owning Chart on every frame.
    """
    def __init__(self, key, value, normalized, label_color):
        # cache_as_bitmap avoids re-rasterizing the bar every frame
        graphics.Sprite.__init__(self, cache_as_bitmap=True)
        self.key, self.value, self.normalized = key, value, normalized
        self.height = 0
        self.width = 20
        self.interactive = True
        self.fill = None  # chosen later by the Chart (hover/selection aware)
        self.label = graphics.Label(value, size=8, color=label_color)
        # backdrop behind the label; only made visible for selected zero-value bars
        self.label_background = graphics.Rectangle(self.label.width + 4, self.label.height + 4, 4, visible=False)
        self.add_child(self.label_background)
        self.add_child(self.label)
        self.connect("on-render", self.on_render)
    def on_render(self, sprite):
        """Draw the bar and place the value label inside or beside it."""
        # invisible rectangle for the mouse, covering whole area
        self.graphics.rectangle(0, 0, self.width, self.height)
        self.graphics.fill("#000", 0)
        size = round(self.width * self.normalized)
        # rounded bar plus a squared-off strip at the left edge
        self.graphics.rectangle(0, 0, size, self.height, 3)
        self.graphics.rectangle(0, 0, min(size, 3), self.height)
        self.graphics.fill(self.fill)
        # vertically center the label within the bar
        self.label.y = (self.height - self.label.height) / 2
        horiz_offset = min(10, self.label.y * 2)
        if self.label.width < size - horiz_offset * 2:
            #if it fits in the bar
            self.label.x = size - self.label.width - horiz_offset
        else:
            # otherwise place it just past the bar's right edge
            self.label.x = size + 3
        self.label_background.x = self.label.x - 2
        self.label_background.y = self.label.y - 2
class Chart(graphics.Scene):
    """Horizontal bar chart scene with a text legend on the left.

    Emits "bar-clicked" with the bar's key when an interactive bar is
    clicked.  Selection state lives in ``selected_keys``; layout and colors
    are recomputed on every "on-enter-frame".
    """
    __gsignals__ = {
        "bar-clicked": (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, (gobject.TYPE_PYOBJECT, )),
    }
    def __init__(self, max_bar_width = 20, legend_width = 70, value_format = "%.2f", interactive = True):
        graphics.Scene.__init__(self)
        self.selected_keys = [] # keys of selected bars
        self.bars = []
        self.labels = []
        self.data = None
        self.key_colors = {}  # optional per-key fill overrides
        self.max_width = max_bar_width
        self.legend_width = legend_width
        self.value_format = value_format  # locale format string for bar labels
        self.graph_interactive = interactive
        self.plot_area = graphics.Sprite(interactive = False)
        self.add_child(self.plot_area)
        # resolved lazily from the GTK style in find_colors()
        self.bar_color, self.label_color = None, None
        self.connect("on-enter-frame", self.on_enter_frame)
        if self.graph_interactive:
            self.connect("on-mouse-over", self.on_mouse_over)
            self.connect("on-mouse-out", self.on_mouse_out)
            self.connect("on-click", self.on_click)
    def find_colors(self):
        """Derive bar and label colors from the current GTK theme."""
        bg_color = self.get_style().bg[gtk.STATE_NORMAL].to_string()
        self.bar_color = self.colors.contrast(bg_color, 30)
        # now for the text - we want reduced contrast for relaxed visuals
        fg_color = self.get_style().fg[gtk.STATE_NORMAL].to_string()
        self.label_color = self.colors.contrast(fg_color, 80)
    def on_mouse_over(self, scene, bar):
        # highlight hovered bar unless it is already selected
        if bar.key not in self.selected_keys:
            bar.fill = self.get_style().base[gtk.STATE_PRELIGHT].to_string()
    def on_mouse_out(self, scene, bar):
        # restore the default fill when the pointer leaves an unselected bar
        if bar.key not in self.selected_keys:
            bar.fill = self.bar_color
    def on_click(self, scene, event, clicked_bar):
        if not clicked_bar: return
        self.emit("bar-clicked", clicked_bar.key)
    def plot(self, keys, data):
        """Replace the chart contents with (key, value) pairs and redraw.

        Bars whose key was already plotted keep their previous normalized
        size and are tweened to the new value.
        """
        self.data = data
        # remember previous normalized sizes so existing bars animate
        bars = dict([(bar.key, bar.normalized) for bar in self.bars])
        max_val = float(max(data or [0]))
        new_bars, new_labels = [], []
        for key, value in zip(keys, data):
            if max_val:
                normalized = value / max_val
            else:
                normalized = 0
            bar = Bar(key, locale.format(self.value_format, value), normalized, self.label_color)
            bar.interactive = self.graph_interactive
            if key in bars:
                # start from the old size and tween toward the new one
                bar.normalized = bars[key]
                self.tweener.add_tween(bar, normalized=normalized)
            new_bars.append(bar)
            label = graphics.Label(escape_pango(key), size = 8, alignment = pango.ALIGN_RIGHT)
            new_labels.append(label)
        self.plot_area.remove_child(*self.bars)
        self.remove_child(*self.labels)
        self.bars, self.labels = new_bars, new_labels
        self.add_child(*self.labels)
        self.plot_area.add_child(*self.bars)
        self.show()
        self.redraw()
    def on_enter_frame(self, scene, context):
        """Per-frame layout: size the plot area, then position bars/labels."""
        # adjust sizes and positions on redraw
        legend_width = self.legend_width
        if legend_width < 1: # allow fractions
            legend_width = int(self.width * legend_width)
        self.find_colors()
        self.plot_area.y = 0
        self.plot_area.height = self.height - self.plot_area.y
        self.plot_area.x = legend_width + 8
        self.plot_area.width = self.width - self.plot_area.x
        y = 0
        for i, (label, bar) in enumerate(zip(self.labels, self.bars)):
            # distribute remaining height over remaining bars, capped at max_width
            bar_width = min(round((self.plot_area.height - y) / (len(self.bars) - i)), self.max_width)
            bar.y = y
            bar.height = bar_width
            bar.width = self.plot_area.width
            if bar.key in self.selected_keys:
                bar.fill = self.key_colors.get(bar.key, self.get_style().bg[gtk.STATE_SELECTED].to_string())
                if bar.normalized == 0:
                    # zero-size bar: give the label a selected-colored backdrop
                    bar.label.color = self.get_style().fg[gtk.STATE_SELECTED].to_string()
                    bar.label_background.fill = self.get_style().bg[gtk.STATE_SELECTED].to_string()
                    bar.label_background.visible = True
                else:
                    bar.label_background.visible = False
                    # label sitting on the bar gets the selected text color
                    if bar.label.x < round(bar.width * bar.normalized):
                        bar.label.color = self.get_style().fg[gtk.STATE_SELECTED].to_string()
                    else:
                        bar.label.color = self.label_color
            if not bar.fill:
                bar.fill = bar.fill or self.key_colors.get(bar.key, self.bar_color)
                bar.label.color = self.label_color
                bar.label_background.fill = None
            label.y = y + (bar_width - label.height) / 2 + self.plot_area.y
            label.width = legend_width
            if not label.color:
                label.color = self.label_color
            y += bar_width + 1
class HorizontalDayChart(graphics.Scene):
    """Pretty much a horizontal bar chart, except for values it expects tuple
    of start and end time, and the whole thing hangs in air"""
    def __init__(self, max_bar_width, legend_width):
        graphics.Scene.__init__(self)
        self.max_bar_width = max_bar_width
        self.legend_width = legend_width
        self.start_time, self.end_time = None, None
        self.connect("on-enter-frame", self.on_enter_frame)
    def plot_day(self, keys, data, start_time = None, end_time = None):
        """Store keys/data (per-key lists of (start, end) minute tuples —
        presumably minutes since midnight; confirm against callers) and
        trigger a redraw.  start_time/end_time clamp the visible range."""
        self.keys, self.data = keys, data
        self.start_time, self.end_time = start_time, end_time
        self.show()
        self.redraw()
    def on_enter_frame(self, scene, context):
        """Render legend, bars and the hour grid directly with cairo/pango.

        NOTE(review): this code relies on Python 2 integer division (e.g. the
        ``pace`` computation and ``dt.time(minutes / 60, ...)``); under
        Python 3 those expressions would produce floats and fail.
        """
        g = graphics.Graphics(context)
        rowcount, keys = len(self.keys), self.keys
        # visible range in minutes; defaults to a full day
        start_hour = 0
        if self.start_time:
            start_hour = self.start_time
        end_hour = 24 * 60
        if self.end_time:
            end_hour = self.end_time
        # push graph to the right, so it doesn't overlap
        legend_width = self.legend_width or self.longest_label(keys)
        self.graph_x = legend_width
        self.graph_x += 8 #add another 8 pixes of padding
        self.graph_width = self.width - self.graph_x
        # TODO - should handle the layout business in graphics
        self.layout = context.create_layout()
        default_font = pango.FontDescription(self.get_style().font_desc.to_string())
        default_font.set_size(8 * pango.SCALE)
        self.layout.set_font_description(default_font)
        #on the botttom leave some space for label
        self.layout.set_text("1234567890:")
        label_w, label_h = self.layout.get_pixel_size()
        self.graph_y, self.graph_height = 0, self.height - label_h - 4
        if not self.data: #if we have nothing, let's go home
            return
        # row layout: key -> (top y, bar height)
        positions = {}
        y = 0
        bar_width = min(self.graph_height / float(len(self.keys)), self.max_bar_width)
        for i, key in enumerate(self.keys):
            positions[key] = (y + self.graph_y, round(bar_width - 1))
            y = y + round(bar_width)
            # re-spread the remaining height over the remaining rows
            bar_width = min(self.max_bar_width,
                            (self.graph_height - y) / float(max(1, len(self.keys) - i - 1)))
        max_bar_size = self.graph_width - 15
        # now for the text - we want reduced contrast for relaxed visuals
        fg_color = self.get_style().fg[gtk.STATE_NORMAL].to_string()
        label_color = self.colors.contrast(fg_color, 80)
        self.layout.set_alignment(pango.ALIGN_RIGHT)
        self.layout.set_ellipsize(pango.ELLIPSIZE_END)
        # bars and labels
        self.layout.set_width(legend_width * pango.SCALE)
        # pixels per minute of the visible range
        factor = max_bar_size / float(end_hour - start_hour)
        # determine bar color
        bg_color = self.get_style().bg[gtk.STATE_NORMAL].to_string()
        base_color = self.colors.contrast(bg_color, 30)
        for i, label in enumerate(keys):
            g.set_color(label_color)
            self.layout.set_text(label)
            label_w, label_h = self.layout.get_pixel_size()
            context.move_to(0, positions[label][0] + (positions[label][1] - label_h) / 2)
            context.show_layout(self.layout)
            # normalize a bare (start, end) tuple into a one-element list
            if isinstance(self.data[i], list) == False:
                self.data[i] = [self.data[i]]
            for row in self.data[i]:
                bar_x = round((row[0]- start_hour) * factor)
                bar_size = round((row[1] - start_hour) * factor - bar_x)
                g.fill_area(round(self.graph_x + bar_x),
                            positions[label][0],
                            bar_size,
                            positions[label][1],
                            base_color)
        #white grid and scale values
        self.layout.set_width(-1)
        context.set_line_width(1)
        # grid spacing: a third of the range, rounded down to whole hours
        pace = ((end_hour - start_hour) / 3) / 60 * 60
        last_position = positions[keys[-1]]
        grid_color = self.get_style().bg[gtk.STATE_NORMAL].to_string()
        for i in range(start_hour + 60, end_hour, pace):
            x = round((i - start_hour) * factor)
            minutes = i % (24 * 60)
            # HH with superscript MM tick label under the grid line
            self.layout.set_markup(dt.time(minutes / 60, minutes % 60).strftime("%H<small><sup>%M</sup></small>"))
            label_w, label_h = self.layout.get_pixel_size()
            context.move_to(self.graph_x + x - label_w / 2,
                            last_position[0] + last_position[1] + 4)
            g.set_color(label_color)
            context.show_layout(self.layout)
            g.set_color(grid_color)
            g.move_to(round(self.graph_x + x) + 0.5, self.graph_y)
            g.line_to(round(self.graph_x + x) + 0.5,
                      last_position[0] + last_position[1])
            context.stroke()
|
omaciel/billreminder
|
src/gui/widgets/charting.py
|
Python
|
gpl-3.0
| 12,341
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .._hasattributebase import HasAttributeBase
#-------------------------------------------------------------------------
#
# HasAttribute
#
#-------------------------------------------------------------------------
class HasAttribute(HasAttributeBase):
    """Filter rule selecting families that carry a given family attribute.

    The attribute name and the value to match come from the two user-editable
    fields declared in ``labels``; the matching itself is inherited from
    HasAttributeBase.
    """
    labels = [_('Family attribute:'), _('Value:')]
    name = _('Families with the family <attribute>')
    description = _("Matches families with the family attribute "
                    "of a particular value")
|
SNoiraud/gramps
|
gramps/gen/filters/rules/family/_hasattribute.py
|
Python
|
gpl-2.0
| 1,789
|
# -*- encoding: utf-8 -*-
"""
Rapids expressions. These are helper classes for H2OFrame.
:copyright: (c) 2016 H2O.ai
:license: Apache License Version 2.0 (see LICENSE for details)
"""
from __future__ import division, print_function, absolute_import, unicode_literals
import collections
import copy
import gc
import math
import sys
import time
import numbers
import tabulate
import h2o
from h2o.backend.connection import H2OConnectionError
from h2o.utils.compatibility import * # NOQA
from h2o.utils.compatibility import repr2, viewitems, viewvalues
from h2o.utils.shared_utils import _is_fr, _py_tmp_key
from h2o.model.model_base import ModelBase
from h2o.expr_optimizer import optimize
class ExprNode(object):
    """
    Composable Expressions: This module contains code for the lazy expression DAG.
    Execution Overview
    ------------------
    The job of ExprNode is to provide a layer of indirection to H2OFrame instances that
    are built of arbitrarily large, lazy expression DAGs. In order to do this job well,
    ExprNode must also track top-level entry points to the such DAGs, maintain a sane
    amount of state to know which H2OFrame instances are temporary (or not), and maintain
    a cache of H2OFrame properties (nrows, ncols, types, names, few rows of data).
    Top-Level Entry Points
    ----------------------
    An expression is declared top-level if it
    A) Computes and returns an H2OFrame to some on-demand call from somewhere
    B) An H2OFrame instance has more referrers than the 4 needed for the usual flow
    of python execution (see MAGIC_REF_COUNT below for more details).
    Sane Amount of State
    --------------------
    Instances of H2OFrame live and die by the state contained in the _ex field. The three
    pieces of state -- _op, _children, _cache -- are the fewest pieces of state (and no
    fewer) needed to unambiguously track temporary H2OFrame instances and prune
    them according to the usual scoping laws of python.
    If _cache._id is None, then this DAG has never been sent over to H2O, and there's
    nothing more to do when the object goes out of scope.
    If _cache._id is not None, then there has been some work done by H2O to compute the
    big data object sitting in H2O to which _id points. At the time that __del__ is
    called on this object, a determination to throw out the corresponding data in H2O or
    to keep that data is made by the None'ness of _children.
    tl;dr:
    If _cache._id is not None and _children is None, then do not delete in H2O cluster
    If _cache._id is not None and _children is not None, then do delete in H2O cluster
    H2OCache
    --------
    To prevent several unnecessary REST calls and unnecessary execution, a few of the
    oft-needed attributes of the H2OFrame are cached for convenience. The primary
    consumers of these cached values are __getitem__, __setitem__, and a few other
    H2OFrame ops that do argument validation or exchange (e.g. colnames for indices).
    There are more details available under the H2OCache class declaration.
    """
    # Magical count-of-5: (get 2 more when looking at it in debug mode)
    # 2 for _get_ast_str frame, 2 for _get_ast_str local dictionary list, 1 for parent
    MAGIC_REF_COUNT = 5 if sys.gettrace() is None else 7 # M = debug ? 7 : 5
    # Flag to control application of local expression tree optimizations
    __ENABLE_EXPR_OPTIMIZATIONS__ = True
    def __init__(self, op="", *args):
        # assert isinstance(op, str), op
        self._op = op # Base opcode string
        # H2OFrame args are unwrapped to their underlying ExprNode (_ex)
        self._children = tuple(
            a._ex if _is_fr(a) else a for a in args) # ast children; if not None and _cache._id is not None then tmp
        self._cache = H2OCache() # ncols, nrows, names, types
        # try to fuse/simplify expression
        if self.__ENABLE_EXPR_OPTIMIZATIONS__:
            self._optimize()
    def _eager_frame(self):
        # Force evaluation of this DAG as a frame; no-op if already cached/computed.
        if not self._cache.is_empty(): return
        if self._cache._id is not None: return # Data already computed under ID, but not cached locally
        self._eval_driver(True)
    def _eager_scalar(self): # returns a scalar (or a list of scalars)
        if not self._cache.is_empty():
            assert self._cache.is_scalar()
            return self
        assert self._cache._id is None
        self._eval_driver(False)
        assert self._cache._id is None
        assert self._cache.is_scalar()
        return self._cache._data
    def _eager_map_frame(self): # returns a scalar (or a list of scalars)
        self._eval_driver(False)
        return self._cache
    def _eval_driver(self, top):
        # Build the Rapids string, execute it on the cluster, and fold the
        # response (scalar / string / frame key / map) into the local cache.
        exec_str = self._get_ast_str(top)
        res = ExprNode.rapids(exec_str)
        if 'scalar' in res:
            if isinstance(res['scalar'], list):
                self._cache._data = [float(x) for x in res['scalar']]
            else:
                self._cache._data = None if res['scalar'] is None else float(res['scalar'])
        if 'string' in res: self._cache._data = res['string']
        if 'funstr' in res: raise NotImplementedError
        if 'key' in res:
            self._cache.nrows = res['num_rows']
            self._cache.ncols = res['num_cols']
        if 'map_keys' in res:
            self._cache.map_keys = res['map_keys']
            self._cache.frames = res['frames']
        return self
    def _optimize(self):
        # Repeatedly apply local expression-tree optimizations until none apply.
        while True:
            opt = optimize(self)
            if opt is not None:
                opt(ctx=None)
            else:
                break
    # Recursively build a rapids execution string. Any object with more than
    # MAGIC_REF_COUNT referrers will be cached as a temp until the next client GC
    # cycle - consuming memory. Do Not Call This except when you need to do some
    # other cluster operation on the evaluated object. Examples might be: lazy
    # dataset time parse vs changing the global timezone. Global timezone change
    # is eager, so the time parse as to occur in the correct order relative to
    # the timezone change, so cannot be lazy.
    #
    def _get_ast_str(self, top):
        if not self._cache.is_empty(): # Data already computed and cached; could a "false-like" cached value
            return str(self._cache._data) if self._cache.is_scalar() else self._cache._id
        if self._cache._id is not None:
            return self._cache._id # Data already computed under ID, but not cached
        assert isinstance(self._children,tuple)
        exec_str = "({} {})".format(self._op, " ".join([ExprNode._arg_to_expr(ast) for ast in self._children]))
        # referrer count decides whether this subexpression must be pinned
        # as a cluster-side temp (tmp=) so it survives for later reuse
        gc_ref_cnt = len(gc.get_referrers(self))
        if top or gc_ref_cnt >= ExprNode.MAGIC_REF_COUNT:
            self._cache._id = _py_tmp_key(append=h2o.connection().session_id)
            exec_str = "(tmp= {} {})".format(self._cache._id, exec_str)
        return exec_str
    @staticmethod
    def _arg_to_expr(arg):
        # Convert a single AST child into its Rapids textual form.
        if arg is None:
            return "[]" # empty list
        if isinstance(arg, ExprNode):
            return arg._get_ast_str(False)
        if isinstance(arg, ASTId):
            return str(arg)
        if isinstance(arg, (list, tuple, range)):
            return "[%s]" % " ".join(repr2(x) for x in arg)
        if isinstance(arg, slice):
            start = 0 if arg.start is None else arg.start
            stop = float("nan") if arg.stop is None else arg.stop
            step = 1 if arg.step is None else arg.step
            assert start >= 0 and step >= 1 and (math.isnan(stop) or stop >= start + step)
            if step == 1:
                return "[%d:%s]" % (start, str(stop - start))
            else:
                return "[%d:%s:%d]" % (start, str((stop - start + step - 1) // step), step)
        if isinstance(arg, ModelBase):
            return arg.model_id
        # Number representation without Py2 L suffix enforced
        if isinstance(arg, numbers.Integral):
            return repr2(arg).strip('L')
        return repr2(arg)
    def __del__(self):
        # Temporary frames (_children is not None) are removed from the
        # cluster when the client-side object dies; see class docstring.
        try:
            if self._cache._id is not None and self._children is not None:
                ExprNode.rapids("(rm {})".format(self._cache._id))
        except (AttributeError, H2OConnectionError):
            pass
    def arg(self, idx):
        # The idx-th AST child.
        return self._children[idx]
    def args(self):
        # All AST children.
        return self._children
    def narg(self):
        # Number of AST children.
        return len(self._children)
    @staticmethod
    def _collapse_sb(sb):
        # Collapse a pretty-printed string-buffer into a single-line form.
        return ' '.join("".join(sb).replace("\n", "").split()).replace(" )", ")")
    def _debug_print(self, pprint=True):
        # Human-readable dump of the DAG, multi-line (pprint) or collapsed.
        return "".join(self._2_string(sb=[])) if pprint else ExprNode._collapse_sb(self._2_string(sb=[]))
    def _to_string(self):
        # One-line Rapids-like rendering of this node and its children.
        return ' '.join(["(" + self._op] + [ExprNode._arg_to_expr(a) for a in self._children] + [")"])
    def _2_string(self, depth=0, sb=None):
        # Recursive pretty-printer: appends indented lines to string-buffer sb.
        sb += ['\n', " " * depth, "(" + self._op, " "]
        if self._children is not None:
            for child in self._children:
                if _is_fr(child) and child._ex._cache._id is None:
                    child._ex._2_string(depth + 2, sb)
                elif _is_fr(child):
                    sb += ['\n', ' ' * (depth + 2), child._ex._cache._id]
                elif isinstance(child, ExprNode):
                    child._2_string(depth + 2, sb)
                else:
                    sb += ['\n', ' ' * (depth + 2), str(child)]
        sb += ['\n', ' ' * depth + ") "] + ['\n'] * (depth == 0) # add a \n if depth == 0
        return sb
    def __repr__(self):
        return "<Expr(%s)%s%s>" % (
            " ".join([self._op] + [repr(x) for x in (self._children or [])]),
            "#%s" % self._cache._id if self._cache._id else "",
            "; scalar" if self._cache.is_scalar() else "",
        )
    @staticmethod
    def rapids(expr):
        """
        Execute a Rapids expression.
        :param expr: The rapids expression (ascii string).
        :returns: The JSON response (as a python dictionary) of the Rapids execution
        """
        return h2o.api("POST /99/Rapids", data={"ast": expr, "session_id": h2o.connection().session_id})
class ASTId:
    """Wrapper for a raw identifier that is emitted verbatim into a Rapids AST."""

    def __init__(self, name=None):
        # A nameless id is a programming error, not a valid sentinel.
        if name is None:
            raise ValueError("Attempted to make ASTId with no name.")
        self.name = name

    def __repr__(self):
        # The identifier itself is its textual AST form.
        return self.name
class H2OCache(object):
    """Client-side cache of an H2O frame's metadata and a few rows of data.

    Avoids repeated REST calls for nrows/ncols/names/types and a preview of
    the data.  ``_data`` is either an ordered dict of columns (a frame
    preview), a scalar, or None (empty cache).
    """
    def __init__(self):
        self._id = None
        self._nrows = -1
        self._ncols = -1
        self._types = None # col types
        self._names = None # col names
        self._data = None # ordered dict of cached rows, or a scalar
        self._l = 0 # nrows cached
    @property
    def nrows(self):
        return self._nrows
    @nrows.setter
    def nrows(self, value):
        self._nrows = value
    def nrows_valid(self):
        # -1 is the "not yet fetched" sentinel
        return self._nrows >= 0
    @property
    def ncols(self):
        return self._ncols
    @ncols.setter
    def ncols(self, value):
        self._ncols = value
    def ncols_valid(self):
        return self._ncols >= 0
    @property
    def names(self):
        return self._names
    @names.setter
    def names(self, value):
        self._names = value
    def names_valid(self):
        return self._names is not None
    @property
    def types(self):
        return self._types
    @types.setter
    def types(self, value):
        self._types = value
    def types_valid(self):
        return self._types is not None
    @property
    def scalar(self):
        return self._data if self.is_scalar() else None
    @scalar.setter
    def scalar(self, value):
        self._data = value
    def __len__(self):
        # number of cached rows, not the frame's nrows
        return self._l
    def is_empty(self):
        return self._data is None
    def is_scalar(self):
        # anything that is not a column dict counts as a scalar
        return not isinstance(self._data, dict)
    def is_valid(self):
        return ( # self._id is not None and
            not self.is_empty() and
            self.nrows_valid() and
            self.ncols_valid() and
            self.names_valid() and
            self.types_valid())
    def fill(self, rows=10, rows_offset=0, cols=-1, full_cols=-1, cols_offset=0, light=False):
        """Fetch frame metadata plus a window of rows/cols from the cluster.

        No-op when at least ``rows`` rows are already cached.  ``light`` uses
        the lighter /3/Frames/{id}/light endpoint.
        """
        assert self._id is not None
        if self._data is not None:
            if rows <= len(self):
                return
        req_params = {
            "row_count": rows,
            "row_offset": rows_offset,
            "column_count" : cols,
            "full_column_count" : full_cols,
            "column_offset" : cols_offset
        }
        if light:
            endpoint = "/3/Frames/%s/light"
        else:
            endpoint = "/3/Frames/%s"
        res = h2o.api("GET " + endpoint % self._id, data=req_params)["frames"][0]
        self._l = rows
        self._nrows = res["rows"]
        self._ncols = res["total_column_count"]
        self._names = [c["label"] for c in res["columns"]]
        self._types = dict(zip(self._names, [c["type"] for c in res["columns"]]))
        self._fill_data(res)
    def _fill_data(self, json):
        """Convert the raw /3/Frames response columns into the _data dict."""
        self._data = collections.OrderedDict()
        for c in json["columns"]:
            c.pop('__meta') # Redundant description ColV3
            c.pop('domain_cardinality') # Same as len(c['domain'])
            sdata = c.pop('string_data')
            if sdata:
                c['data'] = sdata # Only use data field; may contain either [str] or [real]
            # Data (not string) columns should not have a string in them. However,
            # our NaNs are encoded as string literals "NaN" as opposed to the bare
            # token NaN, so the default python json decoder does not convert them
            # to math.nan. Do that now.
            else:
                if c['data'] and (len(c['data']) > 0): # orc file parse can return frame with zero rows
                    c['data'] = [float('nan') if x == "NaN" else x for x in c['data']]
            if c['data']:
                self._data[c.pop('label')] = c # Label used as the Key
        return self
    #---- pretty printing ----
    def _tabulate(self, tablefmt="simple", rollups=False, rows=10):
        """Pretty tabulated string of all the cached data, and column names"""
        if not self.is_valid(): self.fill(rows=rows)
        # Pretty print cached data
        d = collections.OrderedDict()
        # If also printing the rollup stats, build a full row-header
        if rollups:
            col = next(iter(viewvalues(self._data))) # Get a sample column
            lrows = len(col['data']) # Cached rows being displayed
            d[""] = ["type", "mins", "mean", "maxs", "sigma", "zeros", "missing"] + list(map(str, range(lrows)))
        # For all columns...
        for k, v in viewitems(self._data):
            x = v['data'] # Data to display
            t = v["type"] # Column type
            if t == "enum":
                domain = v['domain'] # Map to cat strings as needed
                x = ["" if math.isnan(idx) else domain[int(idx)] for idx in x]
            elif t == "time":
                x = ["" if math.isnan(z) else time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(z / 1000)) for z in x]
            if rollups: # Rollups, if requested
                mins = v['mins'][0] if v['mins'] and v["type"] != "enum" else None
                maxs = v['maxs'][0] if v['maxs'] and v["type"] != "enum" else None
                #Cross check type with mean and sigma. Set to None if of type enum.
                if v['type'] == "enum":
                    v['mean'] = v['sigma'] = v['zero_count'] = None
                x = [v['type'], mins, v['mean'], maxs, v['sigma'], v['zero_count'], v['missing_count']] + x
            d[k] = x # Insert into ordered-dict
        return tabulate.tabulate(d, headers="keys", tablefmt=tablefmt)
    def flush(self): # flush everything but the frame_id
        fr_id = self._id
        self.__dict__ = H2OCache().__dict__.copy()
        self._id = fr_id
        return self
    def fill_from(self, cache):
        """Adopt another cache's metadata while keeping this cache's id;
        the row preview is dropped (set to None)."""
        assert isinstance(cache, H2OCache)
        cur_id = self._id
        self.__dict__ = copy.copy(cache.__dict__) # copy.deepcopy is buggy :( https://bugs.python.org/issue16251
        self._data = None
        self._id = cur_id
    def dummy_fill(self):
        # Mark the cache as "filled" with an empty 0x0 frame (used as a stub).
        self._id = "dummy"
        self._nrows = 0
        self._ncols = 0
        self._names = []
        self._types = {}
        self._data = {}
|
h2oai/h2o-dev
|
h2o-py/h2o/expr.py
|
Python
|
apache-2.0
| 16,556
|
import string
import random
import math
import itertools
import re
import datetime
from django.utils import timezone
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import models
from django.db import IntegrityError
from django.db.models import ObjectDoesNotExist
from django.template.defaultfilters import slugify
from django.utils.translation import ugettext_lazy as _
import django.utils.simplejson as json
from jsonfield import JSONField
from django_extensions.db.models import TimeStampedModel
from organization.models import Organization
from history.models import History
# from protocols.helpers import settify, unify
# from protocols.settify import settify
# from protocols.utils import VERB_FORM_DICT
from protocols.utils import MACHINE_VERBS, COMPONENT_VERBS, THERMOCYCLER_VERBS, MANUAL_LAYER, MANUAL_VERBS, settify, labeler, get_timeunit, eval_time, ProtocolChangeLog, DataDiffer
COMPONENT_KEY = "components"
#MACHINE_VERBS = ['heat', 'chill', 'centrifuge', 'agitate', 'collect', 'cook', 'cool', 'electrophorese', 'incubate', 'shake', 'vortex']
REFERENCE_TYPES = [('pmid',"PMID"), ('doi',"DOI")]
class Protocol(TimeStampedModel):
# STATUS_DRAFT = "draft"
# STATUS_PUBLISHED = "published"
# STATUS = (
# (STATUS_DRAFT, _(STATUS_DRAFT)),
# (STATUS_PUBLISHED, _(STATUS_PUBLISHED)),
# )
parent = models.ForeignKey("self", blank=True, null=True)
author = models.ForeignKey(User, blank=True, null=True)
owner = models.ForeignKey(Organization)
name = models.CharField(_("Name"), max_length=255, unique=True)
slug = models.SlugField(_("Slug"), blank=True, null=True, max_length=255)
duration_in_seconds = models.IntegerField(_("old Duration in seconds"), blank=True, null=True)
duration = models.CharField(_("Duration in seconds"), blank=True, null=True, max_length=30)
raw = models.TextField(blank=True, null=True)
data = JSONField(blank=True, null=True)
description = models.TextField(_("Description"), blank=True, null=True)
note = models.TextField(_("Notes"), blank=True, null=True)
# protocol_input = models.CharField(_("Input"), max_length=255, unique=True)
# protocol_output = models.CharField(_("Output"), max_length=255, unique=True)
published = models.BooleanField(_("Published"), default=False)
public = models.BooleanField(_("Public"), default=False)
# status = models.CharField(_("Status"), max_length=30, default=STATUS_DRAFT, choices=STATUS)
# version = models.CharField(_("Version"), max_length=100, blank=True, null=True)
# reference fields -> MOVING TO NEW MODEL
# url = models.URLField(_("URL"), max_length=255, null=True, blank=True)
# pmid = models.CharField(_("PMID"), max_length=255, null=True, blank=True)
# doi_id = models.CharField(_("DOI"), max_length=255, null=True, blank=True)
# document_id = models.CharField(_("Document ID"), max_length=255, null=True, blank=True)
def __init__(self, *args, **kwargs):
self.data = {}
super(Protocol, self).__init__(*args, **kwargs)
self.rebuild_steps()
def __unicode__(self):
if self.name:
return self.name
else:
return None
def clone(self, name=None, owner=None, author=None):
'''Turns the current instance into a clone of the previous.
This instance still need to be saved to be committed.'''
# CAPTURE PK VALUE, SET PARENT TO PK
parentid = self.pk
# SET PK TO None
self.pk = None
if name:
self.name = self.generate_name(name)
else:
# self.name = self.generate_name(self.owner.name + " " + self.name)
self.name = self.generate_name(self.name)
self.slug = self.generate_slug()
self.published = False
self.private = True
self.created = timezone.now()
self.modified = timezone.now()
# NEED TO SET THE ORGANIZATION
if owner:
self.owner = owner
if author:
self.author = author
self.parent = Protocol.objects.get(pk=parentid)
def save(self, *args, **kwargs):
if not self.name:
if self.data['Name']:
self.name = self.data['Name']
self.update_duration()
if 'editor' in kwargs:
user = kwargs.pop('editor')
else:
user = self.author
# DIFF DATA
# print 'determine old'
if not self.pk and not self.parent_id: # protocol is new
old_state = None
elif not self.pk and self.slug: # protocol is cloned
old_state = Protocol.objects.get(pk = self.parent_id)
else:
old_state = Protocol.objects.get(pk = self.pk) # JUST A PROTOCOL
# print old_state
super(Protocol, self).save(*args, **kwargs) # Method may need to be changed to handle giving it a new name.
# print 'triggered first save'
new_slug = self.generate_slug()
if not new_slug == self.slug: # Triggered when its a clone method
self.slug = new_slug
super(Protocol, self).save(*args, **kwargs) # Method may need to be changed to handle giving it a new name.
# print 'triggered second save'
# print 'determine new'
new_state = self
diff = None
diff = ProtocolChangeLog(old_state, new_state)
# LOG THIS HISTORY OBJECT HERE IF THERE IS A DIFF, CURRENTLY MAKE ASSUMPTION THAT THE AUTHOR IS MAKING THE EDITS
if diff.hdf:
History.objects.create(org=self.owner, user=user, protocol=self, htype="EDIT", data=diff.hdf)
def user_has_access(self, user):
    """Return True when *user* is allowed to view this protocol.

    Rules, in order: public+published protocols are visible to everyone
    (including anonymous users); otherwise anonymous users are refused;
    the author always has access; members of the owning organization can
    see published protocols; everyone else is refused.
    """
    # Public, published protocols are world-readable.
    if self.published and self.public:
        return True
    user_pk = getattr(user, "pk", None)
    if not user_pk:
        # Anonymous users only get the public/published case above.
        return False
    if self.author and user_pk == self.author.pk:
        return True
    if self.published:
        # Published: visible to members of the owning organization.
        return bool(user.organization_set.filter(pk=self.owner.pk))
    return False
##########
# URLs
def get_absolute_url(self):
    # Canonical detail page for this protocol.
    return reverse("protocol_detail", kwargs={'owner_slug':self.owner.slug, 'protocol_slug': self.slug})
def protocol_update_url(self):
    return reverse("protocol_update", kwargs={'protocol_slug':self.slug, 'owner_slug':self.owner.slug})
def step_create_url(self):
    return reverse("step_create", kwargs={'protocol_slug':self.slug, 'owner_slug':self.owner.slug})
def protocol_publish_url(self):
    return reverse("protocol_publish", kwargs={'protocol_slug':self.slug, 'owner_slug':self.owner.slug})
def protocol_public_url(self):
    return reverse("protocol_public", kwargs={'protocol_slug':self.slug, 'owner_slug':self.owner.slug})
def protocol_duplicate_url(self):
    return reverse("protocol_duplicate", kwargs={'protocol_slug':self.slug, 'owner_slug':self.owner.slug})
def protocol_clone_url(self):
    return reverse("clone_layout_single_view", kwargs={'protocol_a_slug':self.slug})
def protocol_outline_url(self):
    # NOTE(review): "layout_sinlge_view" looks misspelled, but it must match
    # the URL pattern name in urls.py -- change both together or not at all.
    return reverse("layout_sinlge_view", kwargs={'protocol_a_slug':self.slug})
##########
# Generators
def as_dict(self, compressed=False):
    '''
    Serialize the instance to a plain dict.

    Like ``__dict__`` but datetimes become ISO-8601 strings and private
    ("_"-prefixed) attributes are dropped.  With ``compressed=True``,
    root-level fields holding falsy values are omitted entirely.
    '''
    serialized = {}
    for field, value in self.__dict__.items():
        if compressed and not value:
            continue  # drop empty root-level fields
        if isinstance(value, datetime.datetime):
            serialized[field] = value.isoformat()
        elif not field.startswith("_"):
            serialized[field] = value
    return serialized
def generate_name(self, name, count=0):
    """Return *name* (suffixed with "-<count>" when needed) unused by any Protocol.

    Recursively probes the database, incrementing the numeric suffix until
    a free name is found.
    NOTE(review): check-then-use is racy under concurrency, and the lookup
    raises MultipleObjectsReturned if duplicates already exist -- confirm
    this is acceptable.
    """
    if count:
        new_name = "%s-%d" % (name, count)
    else:
        new_name = "%s" % (name)
    try:
        Protocol.objects.get(name=new_name)
        # A protocol already has this name; try the next numeric suffix.
        return self.generate_name(name, count=count + 1)
    except ObjectDoesNotExist:
        return new_name
def generate_slug(self):
    """Slugified name, prefixed with the pk ("<pk>-<slug>") once one exists.

    Unsaved protocols (no pk yet) get the bare slug; save() regenerates the
    slug after the first insert so the pk prefix gets added.
    """
    base = slugify(self.name)
    if not self.pk:
        return base
    return "%d-%s" % (self.pk, base)
def get_hash_id(self, size=6, chars=string.ascii_lowercase + string.digits):
    '''Return a random id of *size* chars that is unique within the protocol.

    BUG FIX: the previous implementation tested ``hasattr(node, 'objectid')``
    on dict-like payloads, which is always False for plain item storage (as
    the old inline comment on the reagent branch already noted), so existing
    ids were never collected and collisions were possible.  All lookups now
    use consistent item access.
    '''
    used_ids = set()
    uid = ''.join(random.choice(chars) for _ in range(size))
    # Collect every objectid already present in the step/action/reagent tree.
    for step in self.data['steps']:
        if step.get('objectid'):
            used_ids.add(step['objectid'])
        for action in step['actions']:
            if action.get('objectid'):
                used_ids.add(action['objectid'])
            if COMPONENT_KEY in action:
                for reagent in action[COMPONENT_KEY]:
                    if reagent.get('objectid'):
                        used_ids.add(reagent['objectid'])
    if uid not in used_ids:
        return uid
    # Collision: draw again.
    return self.get_hash_id(size, chars)
def rebuild_steps(self):
    """Re-wrap the raw step payload into Step node objects.

    With no payload at all, initialise an empty step list; with a payload
    lacking a 'steps' key, leave it untouched.
    """
    if not self.data:
        # Nothing stored yet -- start from an empty step list.
        self.data = {'steps': []}
        return
    if 'steps' in self.data:
        self.data['steps'] = [Step(protocol=self, data=raw) for raw in self.data['steps']]
# def add_step(self, step):
# if not step['objectid'] in [ s['objectid'] for s in self.data['steps'] ]:
# print "STEP NOT THERE, ADDING"
# #print type(step)
# print "IS STEP: %s" % isinstance(step, Step)
# self.data['steps'].append(step)
# self.rebuild_steps()
# else:
# print "ALREADY THERE"
def add_node(self, node):
    '''
    Registration hook: every node registers itself with the protocol here.
    Steps are additionally appended to the protocol's step list (followed by
    a rebuild); the structure leaves room for other node types that need
    special handling on registration.
    '''
    if node['objectid'] in self.nodes:
        return  # already registered -- nothing to do
    if isinstance(node, Step):
        # Steps get the extra bookkeeping of joining the raw step list.
        self.data['steps'].append(node)
        self.rebuild_steps()
###########
# Validators
# def has_changed(self, field):
# if not self.pk:
# return False
# old_value = self.__class__._default_manager.filter(pk=self.pk).values(field).get()[field]
# return not getattr(self, field) == old_value
###########
# Properties
@property
def title(self):
    # Display title for the protocol -- an alias for ``name``.
    return self.name
# NEED TO CREATE add AND delete METHODS FOR THE PROPERTY
@property
def steps(self):
    # List of step nodes from the JSON payload.
    # NOTE(review): rebuild_steps() only runs when ``data`` is falsy, so a
    # truthy payload without a 'steps' key raises KeyError here -- confirm
    # that can never happen upstream.
    if not self.data:
        self.rebuild_steps()
    # (historical variants kept for reference)
    # if not 'steps' in self.data:
    #     return []
    # if not 'steps' in self.data or not self.data['steps']:
    #     self.rebuild_steps()
    # self.steps_data = self.data['steps']
    # return self.steps_data
    return self.data['steps']
@property
def status(self):
    """Human-readable publication state, e.g. "Public - Draft"."""
    visibility = "Public - " if self.public else "Private - "
    stage = "Published" if self.published else "Draft"
    return visibility + stage
# NEED TO CREATE add AND delete METHODS FOR THE PROPERTY
@property
def nodes(self):
    ''' Returns a dictionary mapping each node's objectid to the node itself,
    covering steps, actions, thermocycle phases, components and machines. '''
    result = {}
    for step in self.steps:
        result[step['objectid']] = step
        for action in step['actions']:
            result[action['objectid']] = action
            # Flatten list-valued child collections into the same namespace.
            for key in ['thermocycle', 'components']:
                if key in action:
                    for item in action[key]:
                        result[item['objectid']] = item
            # An action holds at most one machine (single object, not a list).
            if 'machine' in action:
                result[action['machine']['objectid']] = action['machine']
    return result
def get_machines(self):
    # Objectids of actions whose verb implies a machine.
    return [r for r in self.get_actions() if self.nodes[r].has_machine()]
def get_actions(self):
    # Objectids of every action, in step/action order.
    return [r[2] for r in self.get_action_tree('objectid')]
def get_steps(self):
    # Objectids of every step, in order.
    return [r['objectid'] for r in self.steps]
def get_action_durations(self):
    # NOTE(review): only the FIRST action of each step is consulted here and
    # in the two helpers below -- confirm that is intended.
    return [a['actions'][0]['duration'] for a in self.steps]
def get_action_verbs(self):
    return [a['actions'][0]['verb'] for a in self.steps]
def get_action_names(self):
    return [a['actions'][0]['name'] for a in self.steps]
def get_components(self):
    # Objectids of actions whose verb implies reagent components.
    return [r for r in self.get_actions() if self.nodes[r].has_components()]
def get_thermocycle(self):
    # Objectids of actions whose verb implies a thermocycle program.
    return [r for r in self.get_actions() if self.nodes[r].has_thermocycler()]
def get_manual(self):
    # Objectids of manual actions (the nodes[...] indexing is redundant:
    # ``r`` already IS the objectid).
    return [self.nodes[r]['objectid'] for r in self.get_actions() if self.nodes[r].has_manual()]
###########
# delete node properties:
def delete_node(self, node_id):
    """ This will remove a child node from a hierarchy """
    # Look up the node, then delegate removal to its parent so each
    # container type can prune its own child list.
    node = self.nodes[node_id]
    #print node.__class__.__str__
    parent = node.parent
    parent.delete_child_node(node_id)
    #self.save()
def delete_child_node(self, node_id):
    """ Removes the step whose objectid equals *node_id* from the raw step data """
    remaining = []
    for step in self.data['steps']:
        if step['objectid'] != node_id:
            remaining.append(step)
    self.data['steps'] = remaining
# def levels()
###########
# Methods
@property
def get_num_steps(self):
    # Number of steps.  NOTE: despite the "get_" name this is a property,
    # so call sites use ``self.get_num_steps`` without parentheses.
    return len(self.steps)
def get_num_actions(self):
    """Return the number of actions in each step, in step order."""
    counts = []
    for step in self.steps:
        counts.append(len(step['actions']))
    return counts
def get_actions_by_step(self):
    """Return the action verbs grouped per step: [[verb, ...], ...].

    Fix: ``self.get_num_actions()`` was previously re-evaluated on every
    inner-loop iteration; it is now computed once up front.
    """
    counts = self.get_num_actions()  # hoisted loop invariant
    steps = self.data['steps']
    return [
        [steps[stepnum]['actions'][r]['verb'] for r in range(counts[stepnum])]
        for stepnum in range(0, self.get_num_steps)
    ]
def get_action_tree(self, display = None):
    """Flatten the protocol into [step_index, action_index, label] triples.

    The label is each action's objectid when ``display == 'objectid'``,
    otherwise its verb.
    """
    label_key = 'objectid' if display == 'objectid' else 'verb'
    tree = []
    for step_index in range(self.get_num_steps):
        step_actions = self.steps[step_index]['actions']
        for action_index in range(len(step_actions)):
            tree.append([step_index, action_index, step_actions[action_index][label_key]])
    return tree
def update_duration_actions(self):
    """Sum per-action time estimates into a "min" or "min-max" string (seconds).

    'store' actions are excluded from the total.
    NOTE(review): get_children_times() returns at most a 3-tuple, so the
    ``len(action_time) > 3`` branch below can never run and delta_time stays
    empty -- confirm whether a 4-tuple (min, max, units, units) was intended.
    """
    min_time = []
    delta_time = []
    for item in self.get_actions():
        if self.nodes[item]['name'] =='store':
            continue
        action_time = self.nodes[item].get_children_times()
        min_time.append(action_time[0])
        if len(action_time) >3:
            delta_time.append(action_time[1]-action_time[0])
    min_duration = sum(min_time)
    delta_duration = sum(delta_time)
    if delta_duration == 0:
        return str(min_duration)
    else:
        return str(min_duration) + '-' + str(min_duration + delta_duration)
def update_duration_steps(self):
    """Aggregate per-step durations into a total "min" or "min-max" string.

    Each step's update_duration() returns "X" or "X-Y"; the deltas (Y - X)
    are summed separately, so the result is either "sum(min)" or
    "sum(min)-(sum(min)+sum(delta))".
    """
    min_time = []
    delta_time = []
    total = []  # NOTE(review): unused accumulator -- left in place
    for step in self.steps:
        value = step.update_duration()
        if '-' in value:
            # Ranged duration: split "min-max" around the dash.
            min_time_temp = float(value[:value.index('-')])
            min_time.append(min_time_temp)
            max_time_temp = float(value[value.index('-')+1:])
            # max_time.append(float(temp))
            delta_time.append(max_time_temp - min_time_temp)
        else:
            min_time.append(float(value))
            # max_time.append(float(value))
    min_duration = sum(min_time)
    delta_duration = sum(delta_time)
    # if min_duration == max_duration:
    if delta_duration == 0:
        # self.duration = str(min_duration)
        return str(min_duration)
    else:
        # self.duration = str(min_duration) + '-' + str(max_duration)
        return str(min_duration) + '-' + str(min_duration + delta_duration)
def update_duration(self, debug = False):
    """Recompute cached duration strings ("min-max", seconds) bottom-up.

    Walks every step and action, estimating each action's time from its verb
    category (manual / components / thermocycle / machine), writes
    action['duration'] and step['duration'] in place, and finally sets
    self.duration.  'store' actions are skipped.  Manually entered durations
    are preserved unless the action is auto-updating.
    """
    min_time = 0
    max_time = 0
    for step in self.steps:
        step_min_time = 0
        step_max_time = 0
        if debug:
            print "Step: %s" % step['name']
        for action in step['actions']:
            if action['name'] == 'store':
                continue
            action_min_time = 0
            action_max_time = 0
            auto_update = False
            if not 'duration' in action:
                action['duration'] = ""
            if action['verb'] in MANUAL_VERBS: # if it should be a manual action, update
                print action['verb']
                # NOTE(review): the substring test ``'min_time' in action['verb']``
                # looks suspicious (a verb containing "min_time"?) -- confirm.
                if 'duration' in action and 'min_time' in action['verb']:
                    time = action['duration'].split('-')
                    if time and time[0]:
                        # Reuse the previously stored "min-max" duration.
                        action_min_time = float(time[0])
                        action_max_time = float(time[1])
                        print '\t input time before method %s-%s' %(action_min_time, action_max_time)
                else:
                    # Derive the times from the action's own fields.
                    action_min_time = eval_time(action, value = 'min_time')
                    action_max_time = eval_time(action, value = 'max_time')
                    print '\t input time after method %d' %action_min_time
                # debuggin Clause:
                # if action_max_time ==0:
                #     print action['name'], action['objectid']
                print "MANUAL TRIGGERED"
                auto_update = True
            else:
                if 'components' in action and action['verb'] in COMPONENT_VERBS: # if it should have components, update
                    # Heuristic: 30-60 seconds of handling per component.
                    action_min_time = float(len(action['components']) * 30 )
                    action_max_time = float(len(action['components']) * 60 )
                    print "COMPONENTS TRIGGERED"
                    auto_update = True
                if 'thermocycle' in action and action['verb'] in THERMOCYCLER_VERBS: # if it should have a thermocycle, update
                    min_time_temp = []
                    max_time_temp = []
                    cycles = [r['cycles'] for r in action['thermocycle']]
                    cycle_back_to = [r['cycle_back_to'] for r in action['thermocycle']]
                    for cnt, (C, B) in enumerate(zip(cycles, cycle_back_to)):
                        # Append times of single-phase cycles
                        if C and not B:
                            min_time_temp.append(eval_time(action['thermocycle'][cnt], value = 'min_time'))
                            max_time_temp.append(eval_time(action['thermocycle'][cnt], value = 'max_time'))
                        # Append times of multi-phased cycles
                        if C and B:
                            phases_in_cycle_min = [eval_time(r, value='min_time') for r in action['thermocycle'][int(B)-1:int(cnt)+1]]
                            phases_in_cycle_max = [eval_time(r, value='max_time') for r in action['thermocycle'][int(B)-1:int(cnt)+1]]
                            # Multiply the cycle number for multi-phased cycle:
                            sum_of_cycles_min = sum(phases_in_cycle_min) * C
                            sum_of_cycles_max = sum(phases_in_cycle_max) * C
                            # append repeating cycle to single cycle phases:
                            min_time_temp.append(sum_of_cycles_min)
                            max_time_temp.append(sum_of_cycles_max)
                    action_min_time = float(sum(min_time_temp))
                    action_max_time = float(sum(max_time_temp))
                    auto_update = True
                if 'machine' in action and 'verb' in action and action['verb'] in MACHINE_VERBS: # Make sure this action is supposed to have a "machine" attribute
                    action_min_time = eval_time(action['machine'], value = 'min_time')
                    action_max_time = eval_time(action['machine'], value = 'max_time')
                    # Debuggin Clause
                    # if debug:
                    #     if action_max_time ==0:
                    #         print action['name'], action['objectid']
                    print "MACHINE TRIGGERED"
                    auto_update = True
            if auto_update or not action['duration']: # If this is an autoupdating action or there is no previous manually entered value...
                action['duration'] = "%d-%d" % ( action_min_time, action_max_time )
            if debug:
                print "\t\tAction Duration: %s, %s" % (action['verb'], action['duration'])
            step_min_time += action_min_time
            step_max_time += action_max_time
        step['duration'] = "%d-%d" % ( step_min_time, step_max_time )
        if debug:
            print "\tStep Duration: %s" % (step['duration'])
        min_time += step_min_time
        max_time += step_max_time
    self.duration = "%d-%d" % ( min_time, max_time)
def get_item(self, objectid, item, return_default = None, **kwargs):
    """Fetch *item* from the node identified by *objectid*.

    Lookup order: mapping key first, then a callable attribute, then a plain
    attribute.  Unknown objectids yield None.  When the attribute is missing
    entirely the result is None if ``return_default`` is truthy, else [].
    NOTE(review): those ``return_default`` semantics look inverted -- confirm.
    """
    out = None
    call = False
    try:
        call = self.nodes[objectid]
    except KeyError:
        return None
    if call and item in call.keys():
        out = call[item]
    if item not in call.keys():
        try:
            out = getattr(call, item)()
        except TypeError:
            # Attribute exists but is not callable -- use its value directly.
            out = getattr(call, item)
        except AttributeError:
            if return_default:
                out = None
            else:
                out = []
    return out
def action_children_json(self, select = None, **kwargs):
    """Map each action id to the objectids of its children (or None).

    *select* optionally restricts the action set to 'components', 'machine',
    'manual' or 'thermocycle'.
    NOTE(review): the 'manual' entry returns STEP ids (get_steps) rather than
    manual action ids -- confirm that is intended.  All switch entries are
    computed eagerly even when ``select`` is None.
    """
    out = []
    switch = {
        'components': self.get_components(),
        'machine': self.get_machines(),
        'manual': self.get_steps(),
        'thermocycle': self.get_thermocycle()
    }
    selection = self.get_actions()
    if select:
        selection = switch[select]
    for action in selection:
        children = self.nodes[action].children
        if children:
            temp = []
            for child in children:
                temp.append(child['objectid'])
            out.append({action: temp})
        else:
            out.append({action: None})
    return out
def protocol_tree_json(self):
    """Nested outline of the protocol.

    Shape: ``[{step_id: [{action_id: [child_ids] or None}, ...]}, ...]``.
    """
    outline = []
    for step_id in self.get_steps():
        action_ids = [child['objectid'] for child in self.nodes[step_id].children]
        entries = []
        for action_id in action_ids:
            kids = self.nodes[action_id].children
            entries.append({action_id: [k['objectid'] for k in kids] if kids else None})
        outline.append({step_id: entries})
    return outline
def get_verbatim_text(self, numbers=False):
    '''Return the per-step verbatim text as a list (one entry per step).

    Steps without a ``verbatim_text`` key contribute an empty string.  With
    ``numbers=True`` each entry is prefixed "1. ", "2. ", ...  Returns None
    only if the collected list does not line up with the steps.
    '''
    if numbers:
        plain = self.get_verbatim_text()
        # BUG FIX: a None result from the recursive call used to be fed
        # straight into enumerate(), raising TypeError.
        if plain is None:
            return None
        return ["%d. " % (cnt + 1) + item for cnt, item in enumerate(plain)]
    verbatim = []
    for step in self.steps:
        if 'verbatim_text' in step:
            verbatim.append(step['verbatim_text'])
        else:
            verbatim.append("")
    if len(verbatim) == len(self.steps):
        return verbatim
    return None
class Reference(models.Model):
    """An external citation/reference attached to one or more protocols."""
    protocol = models.ManyToManyField(Protocol)
    # Free-form reference payload; the "#NDF" default appears to be a
    # "no data found" sentinel -- confirm before relying on it.
    data = models.CharField(_("Data"), max_length=255, default="#NDF")
    # Kind of reference, constrained to REFERENCE_TYPES choices.
    typ = models.CharField(_("Type"), max_length=255, choices=REFERENCE_TYPES)
################
# NODES
################
class NodeBase(dict):
"""Base class for the protocol components"""
parent_key_name = None # NAME THE PARENT OBJECT USES TO HOLD THIS OBJECT
parent_key_plural = True # WETHER OR NOT THERE ARE MULTIPLE OF THESE OBJECTS UNDER THE PARENT (string vs list)
# keylist = ['name','objectid'] # <- REQUIRED OBJECTS FOR ALL NODES
# ADD _meta CLASS TO USE SOME EXTRA DB-LIKE FUNCTIONALITY
default_attrs = ['name', 'objectid']
class Meta:
def __init__(self, node):
self.node = node
def get_all_field_names(self):
result = self.node.keys() #[x for x in self.node.keys() if x not in ['components', 'machine', 'termocycler'] ]
result.sort()
return result
def __init__(self, protocol, parent=None, data={}, **kwargs):
super(NodeBase, self).__init__(**kwargs)
if not self.parent_key_name:
self.parent_key_name = self.__class__.__name__.lower()
self.protocol = protocol
if parent:
self.parent = parent
else:
self.parent = self.protocol
data = self.clean_data(data)
self._meta = NodeBase.Meta(self)
# IT SHOULD APPEND IT'S SELF TO THE PARENT
# for item in self.keylist: # REQUIRED ATTRIBUTES
# self[item] = None
self.update_data(data)
# self.set_defaults()
def register_with_parent(self):
if self.parent_key_name in self.parent and self.parent[self.parent_key_name]: # CHECK TO SEE IF THE KEY EXISTS
if self.parent_key_plural: # FALL THROUGH IF NOT PLURAL
if self['objectid'] not in [x['objectid'] for x in self.parent[self.parent_key_name]]: # CHECK IF THIS IS ALREADY A CHILD OF THE PARENT
self.parent[self.parent_key_name].append(self) # IF NOT APPEND
return # RETURN
# elif parent[self.parent_key_name]['objectid'] == self['objectid']: # IF IT IS ALREADY THE CHILD, RETURN
# return
if self.parent_key_plural:
self.parent[self.parent_key_name] = [self] # ANY OTHER CASE, MAKE SURE THIS IS REGISTERED WITH THE PARENT
else:
self.parent[self.parent_key_name] = self # NO HARM IN RE-ASSIGNING IF IT'S ALREADY THE CHILD?
def clean_data(self, data):
# OBJECT KEY GENERATOR IF MISSING
# if not self['objectid']:
# self['objectid'] = self.protocol.get_hash_id()
if data == None:
data = {}
if not 'objectid' in data or not data['objectid']:
data['objectid'] = self.protocol.get_hash_id()
if not 'name' in data or not data['name']:
data['name'] = data['objectid']
if not 'slug' in data or not data['slug']:
data['slug'] = slugify(data['objectid'])
return data
@property
def pk(self):
return "%d-%s" % (self.protocol.pk, self['objectid'])
@property
def id(self):
return self['objectid']
@property
def slug(self):
#if not self['slug']:
# self['slug'] = slugify(self['name'])
return self['slug']
@property
def graph_label(self):
return self['name']
@property
def node_type(self):
return self.__class__.__name__
def update_data(self, data={}, **kwargs):
if data:
for key in data:
self[key] = data[key]
if not 'name' in self or not self['name']:
self['name'] = self['slug']
def __unicode__(self):
return self['slug']
@property
def title(self):
if self.parent:
return "%s - %s" % (self.parent.title, self['name'])
else:
return "%s - %s" % (self.protocol.name, self['name'])
# @property
# def parent(self):
# return self.protocol
def delete_child_node(self, node_id):
""" Removes a Child Node with the given name from the list of nodes """
print "NOT YET IMPLETMENTED FOR %s (%s): REMOVING -> %s" % (self.__class__, self['objectid'], node_id)
@property
def children(self):
print 'object does not have children'
# def update_duration(self):
# pass
class Component(NodeBase):
    """A reagent/ingredient attached to an Action (parent key ``components``)."""
    parent_key_name = "components"
    def __init__(self, protocol, parent=None, data=None, **kwargs):
        super(Component, self).__init__(protocol, parent=parent, data=data, **kwargs)
        # BUG FIX: this condition used ``not['name']`` (parsed as
        # ``not ['name']``, which is always False), so legacy payloads using
        # ``reagent_name`` were never migrated onto ``name``.
        # NOTE(review): clean_data() backfills a falsy name with the objectid,
        # so this may additionally need a ``name == objectid`` check, as done
        # in Action.update_data -- confirm.
        if 'name' in self and not self['name'] and 'reagent_name' in self:
            self['name'] = self.pop("reagent_name")
        self.register_with_parent()
    def get_absolute_url(self):
        return reverse("component_detail", kwargs={'owner_slug':self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug':self.parent.parent.slug, 'action_slug':self.parent.slug, 'component_slug':self.slug })
    def get_update_url(self):
        return reverse('component_edit', kwargs={'owner_slug':self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug':self.parent.parent.slug, 'action_slug':self.parent.slug, 'component_slug':self.slug })
    def get_delete_url(self):
        return reverse('component_delete', kwargs={'owner_slug':self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug':self.parent.parent.slug, 'action_slug':self.parent.slug, 'component_slug':self.slug })
    @property
    def label(self):
        # Long-form rendering of the component's settings via settify().
        return settify(self, summary = False)
    @property
    def summary(self):
        ''' takes self.label as a list and turns it into a dict:
        u'25 degrees Celsius', u'2 minutes' ->
        {temp: '25C', time: '2 min'}'''
        result = settify(self, shorthand = True, summary = True)
        result['name'] = self['name']
        return result
class Machine(NodeBase):
    """A machine/instrument setting attached to an Action.

    Unlike the other node types, an Action holds at most ONE machine, hence
    ``parent_key_plural = False``: the parent slot stores the object itself
    rather than a list.
    """
    parent_key_name = "machine"
    parent_key_plural = False
    # Fields a machine node is expected to carry (time/temp/speed ranges).
    default_attrs = ['name', 'objectid', 'min_time', 'max_time', 'time_comment', 'time_units', 'min_temp', 'max_temp', 'temp_comment', 'temp_units', 'min_speed', 'max_speed', 'speed_comment', 'speed_units']
    def __init__(self, protocol, parent=None, data=None, **kwargs):
        super(Machine, self).__init__(protocol, parent=parent, data=data, **kwargs) # Method may need to be changed to handle giving it a new name.
        self.register_with_parent() # REPLACE THE ABOVE WITH THIS
    def get_absolute_url(self):
        return reverse('machine_detail', kwargs={'owner_slug':self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug':self.parent.parent.slug, 'action_slug':self.parent.slug, 'machine_slug':self.slug })
        #return reverse("machine_detail", kwargs={'protocol_slug': self.protocol.slug, 'step_slug':self.action.step.slug, 'action_slug':self.action.slug, 'machine_slug':self.slug })
    def get_update_url(self):
        return reverse('machine_edit', kwargs={'owner_slug':self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug':self.parent.parent.slug, 'action_slug':self.parent.slug, 'machine_slug':self.slug })
    def get_delete_url(self):
        return reverse('machine_delete', kwargs={'owner_slug':self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug':self.parent.parent.slug, 'action_slug':self.parent.slug, 'machine_slug':self.slug })
    @property
    def label(self):
        # Shorthand rendering of the machine's settings via settify().
        return settify(self, shorthand = True)
    @property
    def summary(self):
        ''' takes self.label as a list and turns it into a dict:
        u'25 degrees Celsius', u'2 minutes' ->
        {temp: '25C', time: '2 min'}'''
        result = settify(self, shorthand = True, summary = True)
        result['name'] = self['name']
        return result
class Thermocycle(NodeBase):
    """A thermocycler phase attached to an Action (parent key ``thermocycle``).

    Actions may hold several thermocycle entries; their 'cycles' and
    'cycle_back_to' fields (consumed by Protocol.update_duration) describe
    how phases repeat.
    """
    parent_key_name = "thermocycle"
    def __init__(self, protocol, parent=None, data=None, **kwargs):
        #self.parent = parent
        super(Thermocycle, self).__init__(protocol, parent=parent, data=data, **kwargs) # Method may need to be changed to handle giving it a new name.
        self.register_with_parent() # REPLACE THE ABOVE WITH THIS
    def get_absolute_url(self):
        return reverse("thermocycle_detail", kwargs={'owner_slug':self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug':self.parent.parent.slug, 'action_slug':self.parent.slug, 'thermocycle_slug':self.slug })
    def get_update_url(self):
        return reverse('thermocycle_update', kwargs={'owner_slug':self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug':self.parent.parent.slug, 'action_slug':self.parent.slug, 'thermocycle_slug':self.slug })
    def get_delete_url(self):
        # NOTE(review): this URL passes the thermocycle slug as
        # 'component_slug', unlike the sibling methods above -- confirm it
        # matches the 'thermocycle_delete' pattern in urls.py.
        return reverse('thermocycle_delete', kwargs={'owner_slug':self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug':self.parent.parent.slug, 'action_slug':self.parent.slug, 'component_slug':self.slug })
    # def update_data(self, data={}, **kwargs):
    #     super(Thermocycle, self).update_data(data=data, **kwargs)
    #     if 'phases' in data:
    #         self['phases'] = [ Phase(self.protocol, parent=self, data=a) for a in data['settings'] ]
    #     else:
    #         self['phases'] = []
    @property
    def label(self):
        # Shorthand rendering of the phase's settings via settify().
        return settify(self, shorthand = True)
    @property
    def summary(self):
        # Dict form of the settings (via settify), plus the node's name.
        tmp = settify(self, shorthand = True, summary = True)
        tmp['name'] = self['name']
        return tmp
class Action(NodeBase):
parent_key_name = "actions"
def __init__(self, protocol, parent=None, data=None, **kwargs):
#self.step = step
# self.parent = parent
super(Action, self).__init__(protocol, parent=parent, data=data, **kwargs) # Method may need to be changed to handle giving it a new name.
# REGISTER SELF WITH PARENT?
self.register_with_parent()
def update_data(self, data={}, **kwargs):
super(Action, self).update_data(data=data, **kwargs) # Method may need to be changed to handle giving it a new name.
if 'component - list' in data: # rename "componet - list" to "components"
data['components'] = data.pop("component - list")
if 'components' in data: # Convert dictionaries into Component Objects
self['components'] = [ Component(self.protocol, parent=self, data=c) for c in data['components'] ]
if 'thermocycle' in data: # Convert dictionaries into Thermocycle Objects
self['thermocycle'] = [ Thermocycle(self.protocol, parent=self, data=c) for c in data['thermocycle'] ]
if 'machine' in data and 'verb' in data and data['verb'] in MACHINE_VERBS: # Make sure this action is supposed to have a "machine" attribute
self['machine'] = Machine(self.protocol, parent=self, data=data['machine'])
if not self['name']: # Action default name should be the same as the verb
self['name'] = self['verb']
if self['name'] == self['objectid']: # CORRECT THIS DATA
self['name'] = self['verb']
# NEEDS APPEND TO THE PARENT LIKE STEP DOES
# if self.parent and not self['objectid'] in self.protocol.nodes: # THIS WORKS BUT COMMENTED OUT FOR TESTING AGAINST EXISTING CODE
# self.parent.add_child_node(self) # SOMETHING SIMILAR SHOULD WORK FOR OTHER NODES, CAN MAKE MORE GENERIC
def get_absolute_url(self):
return reverse("action_detail", kwargs={'owner_slug':self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug':self.parent.slug, 'action_slug':self.slug })
def action_update_url(self):
return reverse("action_update", kwargs={'owner_slug':self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug':self.parent.slug, 'action_slug':self.slug })
def action_delete_url(self):
return reverse("action_delete", kwargs={'owner_slug':self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug':self.parent.slug, 'action_slug':self.slug })
def machine_create_url(self):
return reverse("machine_create", kwargs={'owner_slug':self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug':self.parent.slug, 'action_slug':self.slug })
def thermocycle_create_url(self):
return reverse("thermocycle_create", kwargs={'owner_slug':self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug':self.parent.slug, 'action_slug':self.slug })
def component_create_url(self):
return reverse("component_create", kwargs={'owner_slug':self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug':self.parent.slug, 'action_slug':self.slug })
# def machine_update_url(self):
# return reverse('machine_edit', kwargs={'owner_slug':self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug':self.parent.slug, 'action_slug':self.slug, 'machine_slug':self.machine.slug })
# @property
# def title(self):
# return "%s - %s - %s" % (self.protocol.name, self.step['name'], self['name'])
# @property
# def parent(self):
# return self.step
@property
def components(self):
if 'components' in self:
return self['components']
else:
return None
@property
def machine(self):
if 'machine' in self:
return self['machine']
else:
return None
@machine.setter
def machine(self, value):
if value.__class__ == Machine:
self['machine'] = value
else:
raise ValueError("Action's machine attribute can only accept a Machine object")
@property
def thermocycle(self):
if 'thermocycle' in self:
return self['thermocycle']
else:
return None
@property
def summary(self):
''' returns a summary for manual objects'''
return labeler(self)
@property
def children(self):
if type(self.components) == 'list' or 'machine' in self:
return [self['machine']]
if type(self.machine) == 'NoneType' and 'components' in self:
return self['components']
if 'components' in self:
return self['components']
if 'machine' in self:
return [self['machine']]
if 'thermocycle' in self:
return self['thermocycle']
else:
return None
def delete_child_node(self, node_id):
"""
Removes a Child Node with the given name from the list of nodes
Though it can be called directly it is meant to be called from the protocol and trickle down
"""
#print "%s (%s): REMOVING -> %s" % (self.__class__.__name__, self['objectid'], node_id)
print "ACTION DELETE"
if 'machine' in self:
print "HAS MACHINE"
if self['machine']['objectid'] == node_id:
print "REMOVE MACHINE"
del( self['machine'] )
return
if 'thermocycle' in self and node_id in [r['objectid'] for r in self['thermocycle']]:
self['thermocycle'] = [ x for x in self['thermocycle'] if x['objectid'] is not node_id ]
return
if 'components' in self and node_id in [r['objectid'] for r in self['components']]:
self['components'] = [ x for x in self['components'] if x['objectid'] is not node_id ]
def has_components(self):
if 'verb' in self:
return self['verb'] in COMPONENT_VERBS
return False
def has_machine(self):
if 'verb' in self:
return self['verb'] in MACHINE_VERBS
return False
def has_thermocycler(self):
if 'verb' in self:
return self['verb'] in THERMOCYCLER_VERBS
return False
def has_manual(self):
if 'verb' in self:
return self['verb'] in MANUAL_VERBS
return False
def get_children_times(self, desired_unit = 'sec'):
''' method returns a tuple for each action:
(float(min_time), [,float(max_time)], output_untis, input_units)
In further versions the time related items will be integrated into a get_time object.
'''
if not self.children and not self.childtype()== 'manual':
return (0, 'sec', 'sec')
# get children times:
children_time = 0
if self.childtype() == "components":
if self.children:
children_time = (len(self.children) * 30, 'sec', 'sec')
if self.childtype() == "manual":
children_time = get_timeunit(self.summary['time'])
if self.childtype() == "machine":
children_time = get_timeunit(self.children[0].summary['time'])
if self.childtype() == "thermocycle":
tmp_time =[0, 'sec']
cycles = [r.summary['cycles'] for r in self.children]
cycle_back_to = [r.summary['cycle_back_to'] for r in self.children]
for cnt, (cycle, cycle_back_to) in enumerate(zip(cycles, cycle_back_to)):
if cycle and not cycle_back_to:
tmp = get_timeunit(self.children[cnt].summary['time'])
tmp_time[0] = tmp_time[0] + tmp[0]
if cycle and cycle_back_to:
phases_in_cycle = [get_timeunit(r.summary['time']) for r in self.children[int(cycle_back_to)-1:int(cnt)]]
sum_of_cycles = sum(t[0] for t in phases_in_cycle)
tmp_time[0] = tmp_time[0] + (float(sum_of_cycles) * float(cycle))
children_time = tuple(tmp_time)
return children_time
def update_duration(self):
max_duration = None
value = self.get_children_times()
min_duration = str(value[0])
if len(value) >3:
max_duration = str(value[1])
if max_duration:
# self['duration'] = str(min_duration) + '-' + str(max_duration)
return str(min_duration) + '-' + str(max_duration)
else:
# self['duration'] = str(min_duration)
return str(min_duration)
def childtype(self):
if 'verb' in self:
if self['verb'] in COMPONENT_VERBS:
return 'components'
if self['verb'] in MACHINE_VERBS:
return 'machine'
if self['verb'] in THERMOCYCLER_VERBS:
return 'thermocycle'
if self['verb'] in MANUAL_VERBS:
return 'manual'
return None
class Step(NodeBase):
    """A protocol step: a named, dict-like container (via NodeBase) holding
    an ordered list of child Action nodes under the 'actions' key."""

    parent_key_name = "steps"

    # NEED TO TEST BELOW AND REMOVE THE self.protocol.add_node FROM THE update_data METHOD
    # def __init__(self, protocol, parent=None, data=None, **kwargs):
    #     super(Step, self).__init__(protocol, parent=parent, data=data, **kwargs)
    #     self.register_with_parent()

    def update_data(self, data=None, **kwargs):
        """Merge ``data`` into this step and rebuild its child Action list.

        ``data`` defaults to ``None`` instead of a mutable ``{}`` so the
        default cannot be shared between calls (mutable-default pitfall);
        passing no argument behaves exactly as before.
        """
        data = {} if data is None else data
        super(Step, self).update_data(data=data, **kwargs)
        if 'actions' in data:
            self['actions'] = [Action(self.protocol, parent=self, data=a)
                               for a in data['actions']]
        else:
            self['actions'] = []
        # NOTE(review): registration also happens in register_with_parent();
        # confirm whether calling add_node() here as well is intended
        # (see the commented-out __init__ above).
        self.protocol.add_node(self)

    def register_with_parent(self):
        """Register this step in the protocol's node index."""
        self.protocol.add_node(self)

    def get_absolute_url(self):
        """URL of this step's detail page."""
        return reverse("step_detail", kwargs={'owner_slug': self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug': self.slug})

    def step_update_url(self):
        """URL of this step's edit form."""
        return reverse("step_update", kwargs={'owner_slug': self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug': self.slug})

    def add_action_url(self):
        """URL for creating a new action under this step."""
        return reverse("action_create", kwargs={'owner_slug': self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug': self.slug})

    def action_verb_list_url(self):
        """URL of the verb picker for new actions on this step."""
        return reverse("action_verb_list", kwargs={'owner_slug': self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug': self.slug})

    def step_delete_url(self):
        """URL for deleting this step."""
        return reverse("step_delete", kwargs={'owner_slug': self.protocol.owner.slug, 'protocol_slug': self.protocol.slug, 'step_slug': self.slug})

    def add_child_node(self, action):
        """Append ``action`` to this step's child list."""
        self['actions'].append(action)

    def delete_child_node(self, node_id):
        """
        Removes a Child Node with the given name from the list of nodes
        Though it can be called directly it is meant to be called from the protocol and trickle down
        """
        self['actions'] = [x for x in self['actions'] if not x['objectid'] == node_id]

    @property
    def actions(self):
        """The list of child Action nodes, or None before update_data ran."""
        if 'actions' in self:
            return self['actions']
        return None

    @property
    def children(self):
        """Alias of ``actions`` for the generic node-traversal API."""
        if 'actions' in self:
            return self['actions']
        return None

    def update_duration(self):
        """Return the summed duration of the child actions as a display
        string ("min" or "min-max")."""
        min_times = []
        deltas = []
        for action in self.children:
            if action['name'] == 'store':
                continue  # open-ended 'store' actions are excluded from totals
            action_time = action.get_children_times()
            min_times.append(action_time[0])
            # NOTE(review): get_children_times() appears to return tuples of
            # length <= 3, so this branch looks unreachable — confirm.
            if len(action_time) > 3:
                deltas.append(action_time[1] - action_time[0])
        min_duration = sum(min_times)
        delta_duration = sum(deltas)
        if delta_duration == 0:
            return str(min_duration)
        return str(min_duration) + '-' + str(min_duration + delta_duration)
class ProtocolHistoryDiffer(object):
    """Accumulates the differences between protocol revisions as three
    change lists, e.g.::

        [
            {'id': "XXXXXX", 'event': "add",    'data': {}},
            {'id': "XXXXXX", 'event': "update", 'data': {}},
            {'id': "XXXXXX", 'event': "delete"},
        ]
    """

    def __init__(self):
        # These used to be *class* attributes; Python shares class-level
        # mutable lists across every instance, so two differs would have
        # appended into the same lists.  Per-instance lists fix that.
        self.add = []
        self.update = []
        self.delete = []

    def parse_changes(self, protocol):
        """Diff the given protocol into add/update/delete parts
        (not yet implemented)."""
        pass
        # PSEUDO CODE
        # for node in parsed_changes:
        #     if add:
        #         self.add.append({'id': node.node_id, 'data': node.new_data_dict})
        #     elif delete:
        #         self.delete.append({'id': node.node_id, 'data': node.new_data_dict})
        #     else:
        #         self.update.append({'id': node.node_id, 'data': node.new_data_dict})
|
Bionetbook/bionetbook
|
bnbapp/protocols/models.py
|
Python
|
mit
| 52,610
|
import unittest
import os.path
import pandas as pd
from multiplate import multiplateIO
TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), "test_data")
class TestIsInstance(unittest.TestCase):

    def test_enspire_csv_parser(self):
        """Check that parsed EnSpire csv matches expected data"""
        enspire_dir = os.path.join(TEST_DATA_DIR, "EnSpire")
        raw_csv = os.path.join(enspire_dir, "raw_plate_data_enspire.csv")
        parsed_sets = multiplateIO.parse_csv(raw_csv, "enspire")
        expected = pd.read_pickle(os.path.join(enspire_dir, "expected_plate_data.pkl"))
        # parse_csv yields its parsed frames one at a time; this fixture
        # produces a single frame, which must equal the pickled reference.
        for frame in parsed_sets:
            self.assertTrue(frame.equals(expected))
if __name__ == '__main__':
unittest.main()
|
campenr/multiplate
|
multiplate/test/test.py
|
Python
|
bsd-3-clause
| 758
|
# decay learning rate test
import tensorflow as tf # neural network for function approximation
import gym # environment
import numpy as np # matrix operation and math functions
from gym import wrappers
import gym_morph # customized environment for cart-pole
import matplotlib.pyplot as plt
import time
for test_num in range(1,11):
# Hyperparameters
RANDOM_NUMBER_SEED = test_num
ENVIRONMENT1 = "morph-v0"
MAX_EPISODES = 8000 # number of episodes
EPISODE_LENGTH = 500 # single episode length
HIDDEN_SIZE = 24
DISPLAY_WEIGHTS = False # Help debug weight update
gamma = 0.99 # Discount per step
RENDER = False # Render the cart-pole system
VIDEO_INTERVAL = 100 # Generate a video at this interval
CONSECUTIVE_TARGET = 100 # Including previous 100 rewards
CONST_LR = False # Constant or decaying learing rate
# Constant learning rate
const_learning_rate_in = 0.003
# Decay learning rate
start_learning_rate_in = 0.003
decay_steps_in = 100
decay_rate_in = 0.95
DIR_PATH_SAVEFIG = "/home/yh/cartpole_mc_ann/"
if CONST_LR:
learning_rate = const_learning_rate_in
file_name_savefig = "el" + str(EPISODE_LENGTH) \
+ "_hn" + str(HIDDEN_SIZE) \
+ "_clr" + str(learning_rate).replace(".", "p") \
+ "_test" + str(test_num) \
+ ".png"
else:
start_learning_rate = start_learning_rate_in
decay_steps = decay_steps_in
decay_rate = decay_rate_in
file_name_savefig = "el" + str(EPISODE_LENGTH) \
+ "_hn" + str(HIDDEN_SIZE) \
+ "_dlr_slr" + str(start_learning_rate).replace(".", "p") \
+ "_ds" + str(decay_steps) \
+ "_dr" + str(decay_rate).replace(".", "p") \
+ "_test" + str(test_num) \
+ ".png"
env = gym.make(ENVIRONMENT1)
env.seed(RANDOM_NUMBER_SEED)
np.random.seed(RANDOM_NUMBER_SEED)
tf.set_random_seed(RANDOM_NUMBER_SEED)
# Input and output sizes
input_size = env.observation_space.shape[0]
try:
output_size = env.action_space.shape[0]
except AttributeError:
output_size = env.action_space.n
# Tensorflow network setup
x = tf.placeholder(tf.float32, shape=(None, input_size))
y = tf.placeholder(tf.float32, shape=(None, 1))
if not CONST_LR:
# decay learning rate
global_step = tf.Variable(0, trainable=False)
learning_rate = tf.train.exponential_decay(start_learning_rate, global_step, decay_steps, decay_rate, staircase=False)
expected_returns = tf.placeholder(tf.float32, shape=(None, 1))
# Xavier (2010) weights initializer for uniform distribution:
# x = sqrt(6. / (in + out)); [-x, x]
w_init = tf.contrib.layers.xavier_initializer()
hidden_W = tf.get_variable("W1", shape=[input_size, HIDDEN_SIZE],
initializer=w_init)
hidden_B = tf.Variable(tf.zeros(HIDDEN_SIZE))
dist_W = tf.get_variable("W2", shape=[HIDDEN_SIZE, output_size],
initializer=w_init)
dist_B = tf.Variable(tf.zeros(output_size))
hidden = tf.nn.elu(tf.matmul(x, hidden_W) + hidden_B)
dist = tf.tanh(tf.matmul(hidden, dist_W) + dist_B)
dist_soft = tf.nn.log_softmax(dist)
dist_in = tf.matmul(dist_soft, tf.Variable([[1.], [0.]]))
pi = tf.contrib.distributions.Bernoulli(dist_in)
pi_sample = pi.sample()
log_pi = pi.log_prob(y)
if CONST_LR:
optimizer = tf.train.RMSPropOptimizer(learning_rate)
train = optimizer.minimize(-1.0 * expected_returns * log_pi)
else:
optimizer = tf.train.RMSPropOptimizer(learning_rate)
train = optimizer.minimize(-1.0 * expected_returns * log_pi, global_step=global_step)
# saver = tf.train.Saver()
# Create and initialize a session
sess = tf.Session()
sess.run(tf.global_variables_initializer())
def run_episode(environment, ep, render=False):
    """Roll out one episode with the current policy.

    Returns (raw_reward, discounted_reward, cumulative_reward, states,
    actions), where cumulative_reward[i] is the discounted return
    accumulated *before* step i (used as a per-step baseline when forming
    the advantage in the training loop).
    """
    raw_reward = 0
    discounted_reward = 0
    cumulative_reward = []
    discount = 1.0
    states = []
    actions = []
    obs = environment.reset()
    done = False
    while not done:
        states.append(obs)
        cumulative_reward.append(discounted_reward)
        if render and ((ep % VIDEO_INTERVAL) == 0):
            environment.render()
        # Sample an action from the Bernoulli policy head.
        action = sess.run(pi_sample, feed_dict={x: [obs]})[0]
        actions.append(action)
        # NOTE(review): steps the global ``env`` rather than the
        # ``environment`` argument — confirm this is intentional.
        obs, reward, done, info = env.step(action[0])
        raw_reward += reward
        # NOTE(review): only positive rewards are discounted; negative
        # rewards are added undiscounted — confirm this asymmetry is intended.
        if reward > 0:
            discounted_reward += reward * discount
        else:
            discounted_reward += reward
        discount *= gamma
    return raw_reward, discounted_reward, cumulative_reward, states, actions
def display_weights(session):
    """Print the current network weights and biases (debugging aid,
    enabled via DISPLAY_WEIGHTS)."""
    w1 = session.run(hidden_W)
    b1 = session.run(hidden_B)
    w2 = session.run(dist_W)
    b2 = session.run(dist_B)
    print(w1, b1, w2, b2)
returns = []
mean_returns = []
for ep in range(MAX_EPISODES):
raw_G, discounted_G, cumulative_G, ep_states, ep_actions = \
run_episode(env, ep, RENDER)
expected_R = np.transpose([discounted_G - np.array(cumulative_G)])
sess.run(train, feed_dict={x: ep_states, y: ep_actions,
expected_returns: expected_R})
if DISPLAY_WEIGHTS:
display_weights(sess)
returns.append(raw_G)
running_returns = returns[max(0, ep-CONSECUTIVE_TARGET):(ep+1)]
mean_return = np.mean(running_returns)
mean_returns.append(mean_return)
if CONST_LR:
msg = "Test: {}, Episode: {}, Learning rate: {}, Return: {}, Last {} returns mean: {}"
msg = msg.format(test_num, ep+1, learning_rate, raw_G, CONSECUTIVE_TARGET, mean_return)
print(msg)
else:
msg = "Test: {}, Episode: {}, Learning rate: {}, Return: {}, Last {} returns mean: {}"
msg = msg.format(test_num, ep+1, sess.run(learning_rate), raw_G, CONSECUTIVE_TARGET, mean_return)
print(msg)
env.close() # close openai gym environment
tf.reset_default_graph() # clear tensorflow graph
# Plot
# plt.style.use('ggplot')
plt.style.use('dark_background')
episodes_plot = np.arange(MAX_EPISODES)
fig = plt.figure()
ax = fig.add_subplot(111)
fig.subplots_adjust(top=0.85)
if CONST_LR:
ax.set_title("The Cart-Pole Problem Test %i \n \
Episode Length: %i \
Discount Factor: %.2f \n \
Number of Hidden Neuron: %i \
Constant Learning Rate: %.5f" % (test_num, EPISODE_LENGTH, gamma, HIDDEN_SIZE, learning_rate))
else:
ax.set_title("The Cart-Pole Problem Test %i \n \
EpisodeLength: %i DiscountFactor: %.2f NumHiddenNeuron: %i \n \
Decay Learning Rate: (start: %.5f, steps: %i, rate: %.2f)" % (test_num, EPISODE_LENGTH, gamma, HIDDEN_SIZE, start_learning_rate, decay_steps, decay_rate))
ax.set_xlabel("Episode")
ax.set_ylabel("Return")
ax.set_ylim((0, EPISODE_LENGTH))
ax.grid(linestyle='--')
ax.plot(episodes_plot, returns, label='Instant return')
ax.plot(episodes_plot, mean_returns, label='Averaged return')
legend = ax.legend(loc='best', shadow=True)
fig.savefig(DIR_PATH_SAVEFIG + file_name_savefig, dpi=500)
# plt.show()
|
GitYiheng/reinforcement_learning_test
|
test03_monte_carlo/t21_cartpole_dnn_local.py
|
Python
|
mit
| 7,348
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
DEFAULT_HOST, DEFAULT_PORT = "localhost", 37100
import sys, json
sys.path.append('./gen-py')
from DataAccess import QueryProcessorService
from thrift import Thrift
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TServer
from hbase import Hbase
from hbase.ttypes import *
try:
    # Connect to the query processor over a buffered thrift socket.
    transport = TTransport.TBufferedTransport(TSocket.TSocket(DEFAULT_HOST, DEFAULT_PORT))
    client = QueryProcessorService.Client(TBinaryProtocol.TBinaryProtocol(transport))
    transport.open()
    # Send one JSON-encoded query and print each row of the decoded result
    # (Python 2 print statements throughout this script).
    for i in json.loads(client.Process(json.dumps({
        'type' : 'abc',
        'version' : '1.0',
        'parameters' : {
            'area' : '20',
            'channel' : ['1000', '2000', '3000'],
            'dayslot' : ['20130913', '20130916'],
            'timeslot' : ['082700', '082799'],
        },
    })))['Result']['data']: print i
    transport.close()
except Thrift.TException, ex:
    # Report any thrift-level failure (connection, protocol, server error).
    print "%s" % (ex.message)
|
573719929/Common-Processor
|
Client.py
|
Python
|
apache-2.0
| 1,058
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'lkdconfigdetailsform.ui'
#
# Created: Mon Jun 8 21:00:01 2009
# by: PyQt4 UI code generator 4.4.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_Dialog(object):
    """Layout for the member-details dialog.

    Generated from lkdconfigdetailsform.ui by pyuic4 (see the file header);
    manual edits will be lost on regeneration.
    """

    def setupUi(self, Dialog):
        # Build a form of label pairs (field name / "--" value placeholder)
        # inside a group box, followed by an expanding spacer.
        Dialog.setObjectName("Dialog")
        Dialog.resize(488, 739)
        self.verticalLayout = QtGui.QVBoxLayout(Dialog)
        self.verticalLayout.setObjectName("verticalLayout")
        self.groupBox = QtGui.QGroupBox(Dialog)
        self.groupBox.setObjectName("groupBox")
        self.formLayout = QtGui.QFormLayout(self.groupBox)
        self.formLayout.setObjectName("formLayout")
        self.label = QtGui.QLabel(self.groupBox)
        self.label.setObjectName("label")
        self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.label)
        self.label_2 = QtGui.QLabel(self.groupBox)
        self.label_2.setObjectName("label_2")
        self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.label_2)
        self.label_3 = QtGui.QLabel(self.groupBox)
        self.label_3.setObjectName("label_3")
        self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.label_3)
        self.label_4 = QtGui.QLabel(self.groupBox)
        self.label_4.setObjectName("label_4")
        self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.label_4)
        self.label_5 = QtGui.QLabel(self.groupBox)
        self.label_5.setObjectName("label_5")
        self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.label_5)
        self.label_6 = QtGui.QLabel(self.groupBox)
        self.label_6.setObjectName("label_6")
        self.formLayout.setWidget(2, QtGui.QFormLayout.FieldRole, self.label_6)
        self.label_7 = QtGui.QLabel(self.groupBox)
        self.label_7.setObjectName("label_7")
        self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.label_7)
        self.label_8 = QtGui.QLabel(self.groupBox)
        self.label_8.setObjectName("label_8")
        self.formLayout.setWidget(3, QtGui.QFormLayout.FieldRole, self.label_8)
        self.label_9 = QtGui.QLabel(self.groupBox)
        self.label_9.setObjectName("label_9")
        self.formLayout.setWidget(4, QtGui.QFormLayout.LabelRole, self.label_9)
        self.label_10 = QtGui.QLabel(self.groupBox)
        self.label_10.setObjectName("label_10")
        self.formLayout.setWidget(4, QtGui.QFormLayout.FieldRole, self.label_10)
        self.label_11 = QtGui.QLabel(self.groupBox)
        self.label_11.setObjectName("label_11")
        self.formLayout.setWidget(5, QtGui.QFormLayout.LabelRole, self.label_11)
        self.label_12 = QtGui.QLabel(self.groupBox)
        self.label_12.setObjectName("label_12")
        self.formLayout.setWidget(5, QtGui.QFormLayout.FieldRole, self.label_12)
        self.verticalLayout.addWidget(self.groupBox)
        spacerItem = QtGui.QSpacerItem(20, 228, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.verticalLayout.addItem(spacerItem)

        self.retranslateUi(Dialog)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        # Install the (Turkish) display strings; the "--" entries are
        # value placeholders filled in at runtime.
        Dialog.setWindowTitle(QtGui.QApplication.translate("Dialog", "Dialog", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox.setTitle(QtGui.QApplication.translate("Dialog", "Üye Detayları", None, QtGui.QApplication.UnicodeUTF8))
        self.label.setText(QtGui.QApplication.translate("Dialog", "Üye Numarası:", None, QtGui.QApplication.UnicodeUTF8))
        self.label_2.setText(QtGui.QApplication.translate("Dialog", "--", None, QtGui.QApplication.UnicodeUTF8))
        self.label_3.setText(QtGui.QApplication.translate("Dialog", "Adı Soyadı:", None, QtGui.QApplication.UnicodeUTF8))
        self.label_4.setText(QtGui.QApplication.translate("Dialog", "--", None, QtGui.QApplication.UnicodeUTF8))
        self.label_5.setText(QtGui.QApplication.translate("Dialog", "E-posta Adresi:", None, QtGui.QApplication.UnicodeUTF8))
        self.label_6.setText(QtGui.QApplication.translate("Dialog", "--", None, QtGui.QApplication.UnicodeUTF8))
        self.label_7.setText(QtGui.QApplication.translate("Dialog", "Yaşadığı Şehir:", None, QtGui.QApplication.UnicodeUTF8))
        self.label_8.setText(QtGui.QApplication.translate("Dialog", "--", None, QtGui.QApplication.UnicodeUTF8))
        self.label_9.setText(QtGui.QApplication.translate("Dialog", "Üyelik Başlangıç Yılı:", None, QtGui.QApplication.UnicodeUTF8))
        self.label_10.setText(QtGui.QApplication.translate("Dialog", "--", None, QtGui.QApplication.UnicodeUTF8))
        self.label_11.setText(QtGui.QApplication.translate("Dialog", "Aidat Borcu:", None, QtGui.QApplication.UnicodeUTF8))
        self.label_12.setText(QtGui.QApplication.translate("Dialog", "--", None, QtGui.QApplication.UnicodeUTF8))
|
efeciftci/lkd-uye-plasmoid
|
contents/code/lkdconfigdetailsform.py
|
Python
|
gpl-2.0
| 4,880
|
from nltk.tokenize import RegexpTokenizer

# Sample text used to exercise punctuation removal.
concept = 'A firewall is a part of a computer system or network that is designed to block unauthorized access while permitting authorized communications. It is a device or set of devices which is configured to permit or deny computer based application upon a set of rules and other criteria.'
# Length before stripping punctuation (Python 2 print statement).
print len(concept)
# \w+ keeps only word-character runs, dropping punctuation entirely.
tokenizer = RegexpTokenizer(r'\w+')
words = tokenizer.tokenize(concept)
# Rejoin the word tokens with single spaces.
newString = " ".join(words)
#for word in words:
##newString += word
#newString += " "
# Length after punctuation removal, for comparison with the original.
print len(newString)
|
tjohn351/Termediator
|
Tests/removePunctuation.py
|
Python
|
mit
| 548
|
import pytest
import numpy as np
from context import Runner, ExecutionType, get_configs, docker_available
class MockContext():
    """Minimal test double exposing an empty ``obj`` mapping, mirroring
    the context object the runner expects."""

    def __init__(self):
        self.obj = {}
@pytest.mark.skipif(not docker_available(), reason='Docker is not available')
def test_runner_langermann():
    """Run an example optimization end-to-end inside Docker and check it
    converges close to 0.

    NOTE(review): the function name says "langermann" but the fixture
    files are the ackley example — confirm which objective this is meant
    to cover.
    """
    internal_conf = get_configs('csaopt/internal/csaopt-internal.conf')
    ctx = {}
    ctx['internal_conf'] = internal_conf
    runner = Runner(['examples/ackley/ackley_opt.py'], ['examples/ackley/ackley.conf'], ctx)
    runner.run()
    if len(runner.failures) > 0:
        raise Exception('Runner had failures: %s' % runner.failures)
    assert runner.best_value == pytest.approx(0, abs=0.2)
|
d53dave/cgopt
|
tests/test_runner.py
|
Python
|
mit
| 685
|
import asyncio
import functools
from django.core.exceptions import SynchronousOnlyOperation
def async_unsafe(message):
    """
    Decorator to mark functions as async-unsafe. Someone trying to access
    the function while in an async context will get an error message.

    Usable either bare (``@async_unsafe``) or with a custom message
    (``@async_unsafe("...")``).
    """
    def decorator(func):
        @functools.wraps(func)
        def inner(*args, **kwargs):
            # Detect a running event loop in this thread.
            # get_running_loop() raises RuntimeError when no loop is
            # running and, unlike the deprecated get_event_loop(), never
            # creates a loop as a side effect.
            try:
                asyncio.get_running_loop()
            except RuntimeError:
                pass
            else:
                raise SynchronousOnlyOperation(message)
            # No event loop is running in this thread; pass onwards.
            return func(*args, **kwargs)
        return inner
    # If the message is actually a function, then be a no-arguments decorator.
    if callable(message):
        func = message
        message = 'You cannot call this from an async context - use a thread or sync_to_async.'
        return decorator(func)
    else:
        return decorator
|
georgemarshall/django
|
django/utils/asyncio.py
|
Python
|
bsd-3-clause
| 1,059
|
#
# Copyright 2015, 2017, 2021 Lars Pastewka (U. Freiburg)
# 2020 Johannes Hoermann (U. Freiburg)
# 2014 James Kermode (Warwick U.)
#
# matscipy - Materials science with Python at the atomic-scale
# https://github.com/libAtoms/matscipy
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ======================================================================
# matscipy - Python materials science tools
# https://github.com/libAtoms/matscipy
#
# Copyright (2014) James Kermode, King's College London
# Lars Pastewka, Karlsruhe Institute of Technology
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ======================================================================
import unittest
import logging
from io import StringIO
import numpy as np
def string_to_array(s):
    """Parse a whitespace-separated numeric table from ``s`` and return
    its transpose as a numpy array."""
    table = np.loadtxt(StringIO(s))
    return table.T
class MatSciPyTestCase(unittest.TestCase):
    """
    Subclass of unittest.TestCase with extra methods for comparing arrays and dictionaries
    """

    def assertDictionariesEqual(self, d1, d2, skip_keys=(), ignore_case=True):
        """Fail unless ``d1`` and ``d2`` have the same keys and values.

        Keys listed in ``skip_keys`` are ignored; when ``ignore_case`` is
        true, keys are compared case-insensitively.  numpy array values
        are compared with assertArrayAlmostEqual, everything else with !=.
        """
        def normalise_key(k):
            # Case-insensitive key comparison when requested.
            return k.lower() if ignore_case else k

        # dict.iteritems() is Python 2 only and raises AttributeError on
        # Python 3; items() works everywhere (the rest of this file already
        # uses Python 3 print()).
        d1 = dict((normalise_key(k), v) for (k, v) in d1.items() if k not in skip_keys)
        d2 = dict((normalise_key(k), v) for (k, v) in d2.items() if k not in skip_keys)

        if sorted(d1.keys()) != sorted(d2.keys()):
            self.fail('Dictionaries differ: d1.keys() (%r) != d2.keys() (%r)' % (d1.keys(), d2.keys()))
        for key in d1:
            v1, v2 = d1[key], d2[key]
            if isinstance(v1, np.ndarray):
                try:
                    self.assertArrayAlmostEqual(v1, v2)
                except AssertionError:
                    print(key, v1, v2)
                    raise
            else:
                if v1 != v2:
                    self.fail('Dictionaries differ: key=%s value1=%r value2=%r' % (key, v1, v2))

    def assertEqual(self, a, b):
        """Like TestCase.assertEqual, but on failure re-runs the comparison
        with DEBUG-level logging so custom __eq__ implementations can
        report why they differ.

        NOTE(review): this override drops the optional ``msg`` argument of
        TestCase.assertEqual — confirm no callers rely on it.
        """
        if a == b:
            return
        # Repeat comparison with debug-level logging enabled.
        level = logging.root.level
        logging.root.setLevel(logging.DEBUG)
        a == b
        logging.root.setLevel(level)
        self.fail('%s != %s' % (a, b))

    def assertArrayAlmostEqual(self, a, b, tol=1e-7):
        """Fail unless arrays ``a`` and ``b`` have identical shape and
        agree elementwise: exactly for non-float dtypes, within absolute
        tolerance ``tol`` for floats.  Any NaN in either array fails."""
        a = np.array(a)
        b = np.array(b)
        self.assertEqual(a.shape, b.shape)

        if np.isnan(a).any() or np.isnan(b).any():
            print('a')
            print(a)
            print('b')
            print(b)
            self.fail('Not a number (NaN) found in array')

        if a.dtype.kind != 'f':
            self.assertTrue((a == b).all())
        else:
            absdiff = abs(a - b)
            if absdiff.max() > tol:
                loc = np.unravel_index(absdiff.argmax(), absdiff.shape)
                print('a')
                print(a)
                print()
                print('b')
                print(b)
                print()
                print('Absolute difference')
                print(absdiff)
                self.fail('Maximum abs difference between array elements is %e at location %r' %
                          (absdiff.max(), loc))

    def assertAtomsAlmostEqual(self, a, b, tol=1e-7):
        """Fail unless two atoms-like objects (exposing positions, numbers,
        _cell and _pbc attributes) match; positions within ``tol``."""
        self.assertArrayAlmostEqual(a.positions, b.positions, tol)
        self.assertArrayAlmostEqual(a.numbers, b.numbers)
        self.assertArrayAlmostEqual(a._cell, b._cell)
        self.assertArrayAlmostEqual(a._pbc, b._pbc)
def skip(f):
    """
    Decorator which can be used to skip unit tests

    The wrapped test body is discarded entirely; calling the replacement
    only logs a warning naming the skipped test and returns None.
    """
    def _skipped(self):
        logging.warning('skipping test %s' % f.__name__)
    return _skipped
|
libAtoms/matscipy
|
tests/matscipytest.py
|
Python
|
lgpl-2.1
| 4,902
|
import __builtin__
import abc
import math
from natsort import natsorted, ns
import os
import subprocess
import re
import threading
import time
import xml.etree.ElementTree as ET
"""
This is the abstract class for all PlaybackModules.
All PlaybackModules need to implement the @abstractmethods in order to function.
"""
class PlaybackModule(object):
__metaclass__ = abc.ABCMeta
# self.track alias to __builtin__.Status['TrackInfo']
def track_get(self):
return __builtin__.Status['TrackInfo']
def track_set(self, value):
__builtin__.Status['TrackInfo'] = value
track = property(track_get, track_set)
# self.playlist alias to __builtin__.Status['Playlist']
def playlist_get(self):
return __builtin__.Status['Playlist']
def playlist_set(self, value):
__builtin__.Status['Playlist'] = value
playlist = property(playlist_get, playlist_set)
def __init__(self, *args):
pass
# Cleanly stop and exit the module
# Note: child functions should call this at the end
@abc.abstractmethod
def Exit(self):
# Reset track and playlist information
self.track = self.FormatInfo({})
self.playlist = []
# Add an item to the current playlist
# Note: child functions should probably call self.RefreshPlaylist() as a result of this
@abc.abstractmethod
def Add(self, filename):
pass
# Add a list of items to the current playlist and refresh playlist information
def AddList(self, items):
for item in items:
self.Add(item)
self.RefreshPlaylist()
# Remove all items in the current playlist
@abc.abstractmethod
def RemoveAll(self):
pass
# Play/resume playback of the current track or specified track
@abc.abstractmethod
def Play(self, index=None):
pass
# Pause playback of the current track
@abc.abstractmethod
def Pause(self):
pass
# Toggle play/pause playback of the current track
def Toggle(self):
if self.IsPlaying():
self.Pause()
else:
self.Play()
# Stop playback
@abc.abstractmethod
def Stop(self):
pass
# Play the previous song in the playlist
@abc.abstractmethod
def Prev(self):
pass
# Play the next song in the playlist
@abc.abstractmethod
def Next(self):
pass
# Seek to a specific point in the current track (in seconds)
@abc.abstractmethod
def Seek(self, sec):
pass
# Set the playback volume
@abc.abstractmethod
def SetVol(self, vol):
pass
# Refresh metadata for the current track
def RefreshTrack(self):
if self.IsLoaded():
self.track = self.QueryTrack()
# Refresh metadata for the current playlist
def RefreshPlaylist(self):
if self.IsLoaded():
self.playlist = self.QueryPlaylist(self)
# Query metadata for the current track
@abc.abstractmethod
def QueryTrack(self):
pass
# Query metadata for the current playlist
@abc.abstractmethod
def QueryPlaylist(self):
pass
# Format/force/clamp metadata for a track
def FormatInfo(self, info):
return PlaybackModule._FormatInfo(info)
@staticmethod
def _FormatInfo(info):
def FormatTime(sec):
sec = int(sec)
if sec < 0: sec = 0
out = ""
if sec >= 3600: # >=1 hour
out += str(sec/3600)
sec -= (sec/3600) * 3600
out += ":" + str(sec/60).zfill(2)
else: # <1 hour
out += str(sec/60)
out += ":" + str(sec%60).zfill(2)
return out
if not 'playing' in info or info['playing'] == None: info['playing'] = False
if not 'artist' in info or info['artist'] == None: info['artist'] = 'Unknown Artist'
if not 'title' in info or info['title'] == None: info['title'] = 'Unknown Track'
if not 'album' in info or info['album'] == None: info['album'] = 'Unknown Album'
if not 'index' in info or info['index'] == None: info['index'] = -1
if not 'length' in info or info['length'] == None: info['length'] = 0
if info['length'] < 0: info['length'] = 0
info['length_display'] = FormatTime(info['length'])
if not 'elapsed' in info or info['elapsed'] == None: info['elapsed'] = 0
if info['elapsed'] < 0: info['elapsed'] = 0
if info['length'] > 0 and info['elapsed'] > info['length']: info['elapsed'] = info['length']
info['elapsed_display'] = FormatTime(info['elapsed'])
return info
# Is the module currently playing a track
@abc.abstractmethod
def IsPlaying(self):
pass
# Are there tracks loaded in the current playlist
@abc.abstractmethod
def IsLoaded(self):
pass
"""
This is the PlaybackModule for VLC/libvlc (via vlc.py).
This module should support the majority of common file formats.
"""
from lib import vlc
class VLCPlayback(PlaybackModule):
def __init__(self, *args):
# Note: After a clean install of pulseaudio on raspbian VLC stops working. Force ALSA to potentially fix.
self.vlc_instance = vlc.Instance('--aout alsa')
# vlc.MediaListPlayer used for Play()/Pause()/Stop()/Prev()/Next()
self.vlc_list_player = self.vlc_instance.media_list_player_new()
# vlc.MediaPlayer used for SetVol()
self.vlc_player = self.vlc_instance.media_player_new()
self.vlc_list_player.set_media_player(self.vlc_player)
# vlc.MediaList used for Add()
self.vlc_playlist = self.vlc_instance.media_list_new()
self.vlc_list_player.set_media_list(self.vlc_playlist)
# vlc.MediaPlayer vlc.EventManager used for events
self.vlc_player__events = self.vlc_player.event_manager()
self.vlc_player__events.event_attach(vlc.EventType.MediaPlayerLengthChanged, self.OnLengthChanged, None)
self.vlc_player__events.event_attach(vlc.EventType.MediaPlayerMediaChanged, self.OnRefreshTrack, None)
self.vlc_player__events.event_attach(vlc.EventType.MediaPlayerPositionChanged, self.OnRefreshTrack, None)
self.vlc_player__events.event_attach(vlc.EventType.MediaPlayerPlaying, self.OnRefreshTrack, None)
self.vlc_player__events.event_attach(vlc.EventType.MediaPlayerPaused, self.OnRefreshTrack, None)
super(VLCPlayback, self).__init__(*args)
def Exit(self):
self.Stop()
# Unregister vlc.MediaPlayer events
self.vlc_player__events.event_detach(vlc.EventType.MediaPlayerLengthChanged)
self.vlc_player__events.event_detach(vlc.EventType.MediaPlayerMediaChanged)
self.vlc_player__events.event_detach(vlc.EventType.MediaPlayerPositionChanged)
self.vlc_player__events.event_detach(vlc.EventType.MediaPlayerPlaying)
self.vlc_player__events.event_detach(vlc.EventType.MediaPlayerPaused)
super(VLCPlayback, self).Exit()
def OnLengthChanged(self, *args, **kwds):
self.RefreshPlaylist() # a length changed, update full playlist
self.RefreshTrack()
def OnRefreshTrack(self, *args, **kwds):
self.RefreshTrack()
# Add an item to the current playlist
def Add(self, mrl):
media = self.vlc_instance.media_new(mrl)
self.vlc_playlist.add_media(media)
# Remove all items in the current playlist
def RemoveAll(self):
self.vlc_playlist.lock()
for i in range(self.vlc_playlist.count()):
self.vlc_playlist.remove_index(0)
self.vlc_playlist.unlock()
# Play/resume playback of the current track or specified track
def Play(self, index=None):
if index == None:
self.vlc_list_player.play()
else:
# Note: Can't use play_item_at_index() because it doesn't take playlists into account
self.vlc_list_player.play_item( self.GetMediaList()[int(index)] )
# Pause playback of the current track
def Pause(self):
if self.IsPlaying(): # pause() toggles playback, check is necessary
self.vlc_list_player.pause()
# Stop playback
def Stop(self):
self.vlc_list_player.stop()
# Play the previous song in the playlist, stop if fail
def Prev(self):
if self.vlc_list_player.previous() == -1:
self.Stop()
# Play the next song in the playlist, stop if fail
def Next(self):
if self.vlc_list_player.next() == -1:
self.Stop()
# Seek to a specific point in the current track (in seconds)
def Seek(self, sec):
self.vlc_player.set_time( int(math.floor(sec*1000)) )
# Set the playback volume (0-100)
def SetVol(self, vol):
self.vlc_player.audio_set_volume(vol)
# Query metadata for the current track
def QueryTrack(self):
media = self.vlc_player.get_media()
info = self.GetMeta(media)
if self.vlc_player != None:
info['elapsed'] = int(math.floor(self.vlc_player.get_time() / 1000))
info['index'] = self.vlc_playlist.index_of_item(media)
if info['index'] == -1: # media is probaly a subitem, get index based on MRL
items = self.GetMediaList()
for idx, item in enumerate(items):
if item.get_mrl() == media.get_mrl():
info['index'] = idx
break
return self.FormatInfo(info)
# Query metadata for the given track
def GetMeta(self, media):
info = {}
if media != None:
if not media.is_parsed():
media.parse()
info['playing'] = (media.get_state() == vlc.State.Playing)
info['artist'] = None
info['title'] = None
info['album'] = None
# Parse vlc.Meta.NowPlaying (if active song)
if media.get_state() != vlc.State.NothingSpecial:
now_playing = media.get_meta(vlc.Meta.NowPlaying)
if now_playing != None:
now_playing_split = now_playing.split(' - ')
if len(now_playing_split) >= 2:
info['artist'] = now_playing_split[0]
info['title'] = ' - '.join(now_playing_split[1:])
now_playing = None
else:
info['title'] = now_playing
# Parse other meta tags
info['artist'] = info['artist'] or media.get_meta(vlc.Meta.Artist)
info['title'] = info['title'] or media.get_meta(vlc.Meta.Title)
info['album'] = media.get_meta(vlc.Meta.Album)
info['length'] = int(math.floor(media.get_duration() / 1000))
return self.FormatInfo(info)
# Call GetMeta() for all playlist items
def QueryPlaylist(self, playlist=None):
items = self.GetMediaList()
for idx, media in enumerate(items):
if not media.is_parsed():
media.parse()
items[idx] = self.GetMeta(media)
return items
# Get all items in the playlist
def GetMediaList(self, playlist=None):
if playlist == None: # start processing with current playlist
playlist = self.vlc_playlist
items = []
for i in range(0, playlist.count()):
media = playlist.item_at_index(i)
if media.subitems() != None: # item has subitems (playlist?) (doesn't get parsed until played)
items.extend( self.GetMediaList(media.subitems()) )
else: # item is standalone
items.append( media )
return items
    # Is the module currently playing a track
    def IsPlaying(self):
        """Return True while the VLC list player is actively playing."""
        return self.vlc_list_player.is_playing()
    # Are there tracks loaded in the current playlist
    def IsLoaded(self):
        """Return True when the current playlist holds at least one item."""
        count = self.vlc_playlist.count() # should be .lock()ed first, seems to cause infinite hang
        return (count > 0)
    # Current directory of the local file browser (class-level, shared state)
    browse_path = '/'
    # Display a list of: root folder, attached USB drives
    def Menu_Local(item):
        """Menu callback (no self): browse '/' or a mounted USB drive for media."""
        # A folder/file was selected
        def Menu_Browse(item):
            if item[:2] == '..': return 1 # quit menu (go up a level)
            item_path = os.path.abspath(os.path.join(VLCPlayback.browse_path, item))
            # A folder was selected, list its contents
            if os.path.isdir(item_path):
                VLCPlayback.browse_path = item_path
                menu = [('../',Menu_Browse)]
                for dirpath, dirnames, filenames in os.walk(VLCPlayback.browse_path):
                    files = dirnames + filenames
                    files = natsorted(files,alg=ns.PATH)
                    for file in files:
                        if file[:1] == '.': continue # hidden file
                        if os.path.isdir(os.path.join(dirpath,file)):
                            menu.append( (file+'/',Menu_Browse) )
                        else:
                            if re.search('(?i).+\.((3gp|aiff|aac|au|flac|m4a|m4p|mid|mka|mp3|mpc|oga|ogg|ra|rm|snd|tta|wav|wma|wv)|(asx|m3u8?|pls|sa?mi|wpl|xspf))$', file) == None: continue # regex: ((files)|(playlists))
                            menu.append( (file,Menu_Browse) )
                    break # only walk the top level of the directory
                __builtin__.OutputDisplay.DisplayMenu(menu, 1)
                # Menu dismissed: pop back up one directory level
                VLCPlayback.browse_path = os.path.abspath(os.path.join(VLCPlayback.browse_path, '..'))
            # A file was selected, start playback of it
            else:
                if not type(__builtin__.PlaybackModule) is VLCPlayback:
                    # Switch global playback module
                    if __builtin__.PlaybackModule != None:
                        __builtin__.PlaybackModule.Exit()
                    __builtin__.PlaybackModule = VLCPlayback()
                else:
                    # Stop and clear the current playlist
                    __builtin__.PlaybackModule.Stop()
                    __builtin__.PlaybackModule.RemoveAll()
                # Find all other media files in the same directory
                files = []
                for (dirpath, dirnames, filenames) in os.walk(VLCPlayback.browse_path):
                    for file in filenames:
                        if file[:1] == '.': continue
                        if re.search('(?i).+\.(3gp|aiff|aac|au|flac|m4a|m4p|mid|mka|mp3|mpc|oga|ogg|ra|rm|snd|tta|wav|wma|wv)$', file) == None: continue # regex: (files)
                        files.append(os.path.abspath(os.path.join(VLCPlayback.browse_path, file)))
                    break
                files = natsorted(files,alg=ns.PATH)
                if item_path in files: # file selected, add all media files to current playlist
                    __builtin__.PlaybackModule.AddList(files)
                    __builtin__.PlaybackModule.Play(files.index(item_path))
                else: # playlist selected, add only it to the current playlist
                    __builtin__.PlaybackModule.AddList([item_path])
                    __builtin__.PlaybackModule.Play()
                __builtin__.OutputDisplay.DisplayTrack()
        # A USB device was selected, mount it and browse into it
        def Menu_USB(dev):
            """Menu callback: (re)mount a /dev/sd* partition under /mnt and browse it."""
            # Get 'mounted' status
            # NOTE(review): 'mounted' is computed but never used afterwards
            mounted = False
            try:
                mounted = (subprocess.check_output('mount | grep ^'+dev, shell=True) != '')
            except subprocess.CalledProcessError:
                pass
            mount = os.path.join('/mnt',os.path.basename(dev))
            # Unmount
            if os.path.exists(mount):
                try:
                    subprocess.check_output('sudo umount '+mount, shell=True)
                except subprocess.CalledProcessError:
                    pass
            else:
                os.makedirs(mount)
            # Mount
            try:
                subprocess.check_output('mount '+dev+' '+mount, shell=True)
            except subprocess.CalledProcessError:
                pass
            # Browse
            return Menu_Browse(mount)
        menu = [('/',Menu_Browse)]
        # --- Enumerate /dev/sd* USB drives ---
        def cat(file):
            # Read a whole (sysfs) file and strip the trailing newline
            f = open(file, 'r')
            s = f.read().rstrip()
            f.close()
            return s
        def sizeof_fmt(num, suffix='B'): # http://stackoverflow.com/a/1094933
            # Human-readable byte count, e.g. 15998320640 -> '14.9GB'
            for unit in ['','K','M','G','T','P','E','Z']:
                if abs(num) < 1024.0:
                    return "%3.1f%s%s" % (num, unit, suffix)
                num /= 1024.0
            return "%.1f%s%s" % (num, 'Yi', suffix)
        # Find /sys/block/sd* devices
        for syspath, blocks, null in os.walk('/sys/block'):
            for block in sorted(blocks):
                if block[:2] == 'sd':
                    vendor = cat(os.path.join(syspath,block,'device/vendor'))
                    model = cat(os.path.join(syspath,block,'device/model'))
                    # Find /sys/block/sd*/sd* partitions
                    for blockpath, parts, null in os.walk(os.path.join(syspath,block)):
                        for part in sorted(parts):
                            if part[:len(block)] == block:
                                # sysfs 'size' is in 512-byte sectors
                                size = sizeof_fmt(float(cat(os.path.join(blockpath,part,'size')))*512)
                                label = vendor+' '+model+' ('+size+')'
                                menu.append( (label,Menu_USB,os.path.join('/dev',part)) )
        __builtin__.OutputDisplay.DisplayMenu(menu)
    Menu = [('Local Media',Menu_Local)]
"""
This is the PlaybackModule for Pandora (via python-pianobar).
This module supports playback of saved stations for all types of accounts (including free).
"""
from lib import pianobar
class PandoraPlayback(PlaybackModule):
    """PlaybackModule backed by the pianobar CLI client for Pandora.

    Pianobar emits no events, so a background thread polls for track and
    playlist changes.  Several transport operations (stop, seek, prev,
    add/remove) do not apply to Pandora's station model and are no-ops.
    """
    def __init__(self, *args):
        # Start pianobar (launch process)
        self.pianobar = pianobar.pianobar()
        self.pianobar.Start()
        # Pianobar does not provide events, this thread polls for changed track and playlist
        self.stop_refresh = threading.Event()
        class Refresh(threading.Thread):
            def __init__(self, pandora_playback):
                self.pandora_playback = pandora_playback
                threading.Thread.__init__(self)
            def run(self):
                track_old = self.pandora_playback.track
                while not self.pandora_playback.stop_refresh.is_set():
                    self.pandora_playback.RefreshTrack()
                    if self.pandora_playback.CmpTrack(self.pandora_playback.track, track_old): # refresh playlist on track change
                        self.pandora_playback.RefreshPlaylist()
                        track_old = self.pandora_playback.track
                    time.sleep(0.1)
        # NOTE(review): the poll thread starts before super().__init__() runs;
        # assumes PlaybackModule pre-sets self.track -- confirm
        refresh = Refresh(self)
        refresh.start()
        super(PandoraPlayback, self).__init__(*args)
    # Cleanly stop and exit the module
    def Exit(self):
        # Stop the track/playlist polling thread
        self.stop_refresh.set()
        # Clean exit pianobar
        self.pianobar.Exit()
        super(PandoraPlayback, self).Exit()
    # Add an item to the current playlist (N/A for Pandora)
    def Add(self, item):
        pass
    # Remove all items in the current playlist (N/A for Pandora)
    def RemoveAll(self):
        pass
    # Play/resume playback of the current track
    def Play(self, index=None):
        self.pianobar.Play()
    # Pause playback of the current track
    def Pause(self):
        self.pianobar.Pause()
    # Stop playback (not supported by pianobar)
    def Stop(self):
        pass
    # Play the previous song in the playlist (N/A for Pandora)
    def Prev(self):
        pass
    # Play the next song in the playlist
    def Next(self):
        self.pianobar.Next()
    # Seek to a specific point in the current track (in seconds) (N/A for Pandora)
    def Seek(self, sec):
        pass
    # Set the playback volume (not implemented)
    def SetVol(self, vol):
        pass
    # Compare two tracks for artist/title/album
    def CmpTrack(self, t1, t2):
        """Return True when t1/t2 differ (or either is missing a key)."""
        if not 'artist' in t1 or not 'artist' in t2: return True
        if not 'title' in t1 or not 'title' in t2: return True
        if not 'album' in t1 or not 'album' in t2: return True
        return not (t1['artist'] == t2['artist'] and t1['title'] == t2['title'] and t1['album'] == t2['album'])
    # Query metadata for the current track
    def QueryTrack(self):
        info = self.pianobar.GetInfo()
        # Locate the current track's position in the cached playlist
        for idx, item in enumerate(self.playlist):
            if not self.CmpTrack(info, item):
                info['index'] = idx
                break
        return self.FormatInfo(info)
    # Call GetMeta() for all playlist items
    def QueryPlaylist(self, playlist=None):
        items = self.pianobar.GetPlaylist()
        for idx, item in enumerate(items):
            items[idx] = self.FormatInfo(item)
        return items
    # Is the module currently playing a track
    def IsPlaying(self):
        return self.pianobar.IsPlaying()
    # Are there tracks loaded in the current playlist
    def IsLoaded(self):
        return self.pianobar.IsLoaded()
    # Log in and display user's stations
    def Menu_Login(item):
        """Menu callback (no self): log into Pandora and list saved stations."""
        # A station was selected, start playback of it
        def Menu_Station(station_id):
            # LCD status message
            __builtin__.OutputDisplay.Clear()
            __builtin__.OutputDisplay.PrintLine(0, 'Starting...')
            # Start station
            if not __builtin__.PlaybackModule.pianobar.ChangeStation(station_id):
                __builtin__.OutputDisplay.DisplayMenu( [('Station error!',None)] ) # menu so it has to be dismissed
                return
            # Display track
            __builtin__.OutputDisplay.DisplayTrack()
        if not type(__builtin__.PlaybackModule) is PandoraPlayback:
            # Start pianobar
            __builtin__.OutputDisplay.Clear()
            __builtin__.OutputDisplay.PrintLine(0, 'Logging in...')
            module = PandoraPlayback()
            # Login to pianobar
            xml_pandora = __builtin__.Config.findall('./playback_modules/pandora')[0]
            if not module.pianobar.Login(xml_pandora.findall('email')[0].text, xml_pandora.findall('password')[0].text):
                module = None
                __builtin__.OutputDisplay.DisplayMenu( [('Login failed!',None)] ) # menu so it has to be dismissed
                return
            # Switch global playback module
            if __builtin__.PlaybackModule != None:
                __builtin__.PlaybackModule.Exit()
            __builtin__.PlaybackModule = module
        # Print Pandora station list
        if type(__builtin__.PlaybackModule) is PandoraPlayback:
            menu = []
            stations = __builtin__.PlaybackModule.pianobar.ListStations()
            for station in stations:
                menu.append( (station[1],Menu_Station,station[0]) )
            __builtin__.OutputDisplay.DisplayMenu(menu)
    Menu = [('Pandora',Menu_Login)]
"""
This is the PlaybackModule for Spotify/libspotify (via pyspotify).
This module supports playback of playlists for premium accounts.
"""
import spotify
class SpotifyPlayback(PlaybackModule):
    """PlaybackModule backed by libspotify (pyspotify); needs a premium account.

    Maintains its own queue of spotify.Track objects and tracks elapsed
    time locally via time_started/time_paused because libspotify does not
    report playback position.
    """
    def __init__(self, *args):
        self.queue_index = -1
        self.queue = []
        # To keep track of elapsed time because libspotify doesn't
        self.time_started = 0
        self.time_paused = 0
        # libspotify config
        self.timeout_short = 20
        self.timeout_long = 60
        self.config = spotify.Config()
        for root, dirs, files in os.walk(__builtin__.Directory): # find spotify_appkey.key in subdirectories
            if 'spotify_appkey.key' in files:
                self.config.load_application_key_file( os.path.join(root,'spotify_appkey.key') )
                break
        # Start libspotify session / audio sink
        # Only one Session can be created during the entire lifetime of this app, store it in __builtin__
        if not hasattr(__builtin__, 'SpotifySession'):
            __builtin__.SpotifySession = spotify.Session(config=self.config)
            self.audio = spotify.AlsaSink(__builtin__.SpotifySession)
        self.session = __builtin__.SpotifySession
        # Register libspotify event handlers
        self.session.on(spotify.SessionEvent.PLAY_TOKEN_LOST, self.OnTokenLost)
        self.session.on(spotify.SessionEvent.END_OF_TRACK, self.OnTrackEnd)
        self.event_loop = spotify.EventLoop(self.session)
        self.event_loop.start()
        # spotify.SessionEvent.METADATA_UPDATED doesn't update on playback time, have to use a polling thread
        self.stop_refresh = threading.Event()
        class Refresh(threading.Thread):
            def __init__(self, spotify_playback):
                self.spotify_playback = spotify_playback
                threading.Thread.__init__(self)
            def run(self):
                while not self.spotify_playback.stop_refresh.is_set():
                    self.spotify_playback.RefreshTrack()
                    time.sleep(0.1)
        refresh = Refresh(self)
        refresh.start()
        super(SpotifyPlayback, self).__init__(*args)
    # Cleanly stop and exit the module
    def Exit(self):
        # Stop the elapsed-time polling thread
        self.stop_refresh.set()
        # Unregister libspotify event handlers
        self.event_loop.stop()
        self.session.off(spotify.SessionEvent.PLAY_TOKEN_LOST)
        self.session.off(spotify.SessionEvent.END_OF_TRACK)
        # Clean logout (libspotify does disk things at logout)
        self.session.logout()
        super(SpotifyPlayback, self).Exit()
    # libspotify event handlers
    def OnTokenLost(self, session):
        # Another client started playing on this account; pause here
        self.Pause()
    def OnTrackEnd(self, session):
        self.Next()
    # Add an item to the current playlist
    # Types accepted: track, album, playlist
    def Add(self, item):
        if type(item) == spotify.Track:
            self.queue += [item]
        elif type(item) == spotify.Album:
            browser = item.browse()
            # NOTE(review): when the album browser is already loaded its tracks
            # are never queued -- looks like a bug; verify
            if browser.is_loaded == False:
                try:
                    browser.load(self.timeout_short)
                    self.queue += browser.tracks
                except spotify.error.Timeout:
                    pass
        elif type(item) == spotify.Playlist:
            self.queue += item.tracks
    # Remove all items in the current playlist
    def RemoveAll(self):
        self.queue_index = -1
        self.queue = []
        self.RefreshPlaylist()
    # Play/resume playback of the current track or specified track
    def Play(self, index=None):
        """Resume playback, or start the queue entry at *index*.

        Returns True on success, False when the track is unavailable, and
        None when delegating to Next() or after a load timeout.
        """
        # Current queue has not been started yet, "next" to first track
        if index is None and self.queue_index == -1 and len(self.queue) > 0:
            return self.Next()
        # Load selected track
        if not index is None: index = int(index)
        if not index is None and 0 <= index and index < len(self.queue):
            self.queue_index = index
            track = self.queue[self.queue_index]
            if track.is_loaded == False:
                try:
                    track.load(self.timeout_long)
                except spotify.error.Timeout:
                    __builtin__.OutputDisplay.DisplayMenu( [('Error playing!',None)] ) # menu so it has to be dismissed
                    return
                self.session.flush_caches()
            if not track.availability is spotify.TrackAvailability.AVAILABLE:
                return False # playback will fail
            self.session.player.load(track)
        # Play, handle paused elapsed time
        self.session.player.play()
        if self.time_paused > 0:
            self.time_started = time.time() - (self.time_paused - self.time_started)
            self.time_paused = 0
        else:
            self.time_started = time.time()
        return True
    # Pause playback of the current track
    def Pause(self):
        self.session.player.pause()
        self.time_paused = time.time()
    # Stop playback
    def Stop(self):
        self.session.player.unload()
    # Play the previous song in the playlist (until it doesn't fail)
    def Prev(self):
        self.time_started = 0
        self.time_paused = 0
        if self.queue_index > 0:
            if not self.Play(self.queue_index - 1):
                self.Prev()
        else:
            self.Stop()
    # Play the next song in the playlist (until it doesn't fail)
    def Next(self):
        self.time_started = 0
        self.time_paused = 0
        if self.queue_index < len(self.queue) - 1:
            if not self.Play(self.queue_index + 1):
                self.Next()
        else:
            self.Stop()
    # Seek to a specific point in the current track (in seconds)
    def Seek(self, sec):
        self.session.player.seek( int(math.floor(sec*1000)) ) # libspotify seeks in milliseconds
        # Elapsed time calculations
        # NOTE(review): the paused branch subtracts the old paused-elapsed
        # offset as well -- confirm the displayed elapsed time after a
        # seek-while-paused is intended
        if self.time_paused > 0:
            self.time_started = time.time() - sec - (self.time_paused - self.time_started)
        else:
            self.time_started = time.time() - sec
    # Set the playback volume
    def SetVol(self, vol):
        pass
    # Query metadata for the current track
    def QueryTrack(self):
        info = {}
        if 0 <= self.queue_index and self.queue_index < len(self.queue):
            track = self.queue[self.queue_index]
            info = self.GetMeta(track)
            info['index'] = self.queue_index
        return self.FormatInfo(info)
    # Query metadata for the current track
    # WARNING: This function can get expensive when called frequently, that's why GetPlaylist() caches metadata
    def GetMeta(self, track):
        """Return a FormatInfo()-normalized metadata dict for one spotify.Track."""
        info = {}
        if track.is_loaded:
            # Only the current queue entry gets playing/elapsed fields
            if track == self.queue[self.queue_index]:
                info['playing'] = self.IsPlaying()
                if self.time_paused > 0:
                    info['elapsed'] = int(math.floor(self.time_paused - self.time_started))
                else:
                    info['elapsed'] = int(math.floor(time.time() - self.time_started))
            if len(track.artists) > 0:
                artist = track.artists[0]
                if artist.is_loaded:
                    info['artist'] = artist.name
            info['title'] = track.name
            album = track.album
            if album.is_loaded:
                info['album'] = album.name
            # duration is reported in milliseconds; expose whole seconds
            info['length'] = int(math.floor(track.duration / 1000))
        return self.FormatInfo(info)
    # Call GetMeta() for all playlist items
    def QueryPlaylist(self, playlist=None):
        items = []
        for track in self.queue:
            items.append( self.GetMeta(track) )
        return items
    # Get all folders/playlists within a certain folder ID (root if not provided)
    def GetPlaylistFolder(self, folder_id=None):
        """Return [(name, spotify.Playlist|PlaylistFolder), ...] for one folder level."""
        collection = []
        # Load user playlists if not loaded
        if self.session.playlist_container.is_loaded == False:
            try:
                self.session.playlist_container.load(self.timeout_short)
            except spotify.error.Timeout:
                pass
        # Find start position if folder ID given
        start = 0
        if folder_id != None:
            for idx, playlist in enumerate(self.session.playlist_container):
                if type(playlist) is spotify.PlaylistFolder and playlist.type is spotify.PlaylistType.START_FOLDER and playlist.id == folder_id:
                    start = idx + 1
                    break
        # Iterate playlists, don't recurse folders
        depth = 0
        for idx in range(start, len(self.session.playlist_container)):
            playlist = self.session.playlist_container[idx]
            if type(playlist) is spotify.PlaylistFolder:
                if playlist.type is spotify.PlaylistType.START_FOLDER:
                    # Folder within the current folder
                    if depth == 0:
                        collection.append( (playlist.name,playlist) )
                    depth += 1
                if playlist.type is spotify.PlaylistType.END_FOLDER:
                    depth -= 1
                    if depth < 0: break # exiting given folder
            # Playlist within the current folder
            elif type(playlist) is spotify.Playlist and depth == 0:
                if playlist.is_loaded == False:
                    try:
                        playlist.load(self.timeout_short)
                    except spotify.error.Timeout:
                        pass
                if playlist.is_loaded == True:
                    collection.append( (playlist.name,playlist) )
        self.session.flush_caches() # write cache to disk immediately
        return collection
    # Is the module currently playing a track
    def IsPlaying(self):
        return self.session.player.state is spotify.PlayerState.PLAYING
    # Are there tracks loaded in the current playlist
    def IsLoaded(self):
        return len(self.queue) > 0
    # Log in and display user's playlists
    def Menu_Login(item):
        """Menu callback (no self): log into Spotify and show the playlist library."""
        # A folder was selected, list all items inside it
        def Menu_Folder(playlist_folder=None):
            if type(playlist_folder) is str and playlist_folder[:2] == '..': return 1 # quit menu (go up a level)
            __builtin__.OutputDisplay.Clear()
            __builtin__.OutputDisplay.PrintLine(0, 'Loading...')
            if not playlist_folder is None:
                playlists = __builtin__.PlaybackModule.GetPlaylistFolder(playlist_folder.id)
            else:
                playlists = __builtin__.PlaybackModule.GetPlaylistFolder()
            menu = []
            if not playlist_folder is None: # currently in a sub-folder
                menu.append( ('../',Menu_Folder) )
            for playlist in playlists:
                if type(playlist[1]) is spotify.Playlist:
                    menu.append( (playlist[0],Play_Playlist,playlist[1]) )
                if type(playlist[1]) is spotify.PlaylistFolder:
                    menu.append( (playlist[0]+'/',Menu_Folder,playlist[1]) )
            __builtin__.OutputDisplay.DisplayMenu(menu)
        # A playlist was selected, play it
        def Play_Playlist(playlist):
            __builtin__.OutputDisplay.Clear()
            __builtin__.OutputDisplay.PrintLine(0, 'Buffering...')
            __builtin__.PlaybackModule.RemoveAll()
            __builtin__.PlaybackModule.AddList([playlist])
            __builtin__.PlaybackModule.Play()
            __builtin__.OutputDisplay.DisplayTrack()
        if not type(__builtin__.PlaybackModule) is SpotifyPlayback:
            # Start Spotify
            __builtin__.OutputDisplay.Clear()
            __builtin__.OutputDisplay.PrintLine(0, 'Logging in...')
            module = SpotifyPlayback()
            # Login to Spotify
            xml_spotify = __builtin__.Config.findall('./playback_modules/spotify')[0]
            if module.session.remembered_user_name == xml_spotify.findall('username')[0].text:
                module.session.relogin()
            else:
                module.session.login(xml_spotify.findall('username')[0].text, xml_spotify.findall('password')[0].text, True)
            module.session.process_events() # wait for login
            while module.session.connection.state is spotify.ConnectionState.OFFLINE: # wait for online
                module.session.process_events()
            if not module.session.connection.state is spotify.ConnectionState.LOGGED_IN:
                module = None
                __builtin__.OutputDisplay.DisplayMenu( [('Login failed!',None)] ) # menu so it has to be dismissed
                return
            module.session.flush_caches() # write login blob to disk
            # Load playlist library
            __builtin__.OutputDisplay.Clear()
            __builtin__.OutputDisplay.PrintLine(0, 'Starting...')
            try:
                module.session.playlist_container.load(module.timeout_short)
            except spotify.error.Timeout:
                pass
            if module.session.playlist_container.is_loaded == False:
                module = None
                __builtin__.OutputDisplay.DisplayMenu( [('Start failed!',None)] ) # menu so it has to be dismissed
                return
            module.session.flush_caches() # write playlist cache to disk
            # Switch global playback module
            if __builtin__.PlaybackModule != None:
                __builtin__.PlaybackModule.Exit()
            __builtin__.PlaybackModule = module
        # Print Spotify playlist list
        if type(__builtin__.PlaybackModule) is SpotifyPlayback:
            Menu_Folder()
    Menu = [('Spotify',Menu_Login)]
|
emmercm/piradio
|
Player/PlaybackModules.py
|
Python
|
mit
| 31,031
|
#!/usr/bin/python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
#
# Generated Wed Jun 8 19:35:12 2011 by generateDS.py version 2.5a.
#
import sys
import getopt
import re as re_
etree_ = None
Verbose_import_ = False
( XMLParser_import_none, XMLParser_import_lxml,
XMLParser_import_elementtree
) = range(3)
XMLParser_import_library = None
# Locate a usable ElementTree implementation, preferring lxml, and record
# which library was found in XMLParser_import_library.
try:
    # lxml
    from lxml import etree as etree_
    XMLParser_import_library = XMLParser_import_lxml
    if Verbose_import_:
        print("running with lxml.etree")
except ImportError:
    try:
        # cElementTree from Python 2.5+
        import xml.etree.cElementTree as etree_
        XMLParser_import_library = XMLParser_import_elementtree
        if Verbose_import_:
            print("running with cElementTree on Python 2.5+")
    except ImportError:
        try:
            # ElementTree from Python 2.5+
            import xml.etree.ElementTree as etree_
            XMLParser_import_library = XMLParser_import_elementtree
            if Verbose_import_:
                print("running with ElementTree on Python 2.5+")
        except ImportError:
            try:
                # normal cElementTree install
                import cElementTree as etree_
                XMLParser_import_library = XMLParser_import_elementtree
                if Verbose_import_:
                    print("running with cElementTree")
            except ImportError:
                try:
                    # normal ElementTree install
                    import elementtree.ElementTree as etree_
                    XMLParser_import_library = XMLParser_import_elementtree
                    if Verbose_import_:
                        print("running with ElementTree")
                except ImportError:
                    raise ImportError("Failed to import ElementTree from any known place")
def parsexml_(*args, **kwargs):
    """Parse an XML document with the detected ElementTree implementation.

    Under lxml, substitute the ElementTree-compatible parser (unless the
    caller supplied one) so that, e.g., comments are ignored.
    """
    if (XMLParser_import_library == XMLParser_import_lxml and
        'parser' not in kwargs):
        kwargs['parser'] = etree_.ETCompatXMLParser()
    return etree_.parse(*args, **kwargs)
#
# User methods
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
    from generatedssuper import GeneratedsSuper
except ImportError:
    # Fallback used when the project does not supply its own generatedssuper
    # module: minimal gds_format_*/gds_validate_* hooks called by the
    # generated data classes during export and parse.
    # (Modernized: 'except E:' instead of the Python-2-only 'except E, exp:'
    # with unused exp -- valid in both Python 2 and 3.)
    class GeneratedsSuper(object):
        def gds_format_string(self, input_data, input_name=''):
            # Strings export as-is; escaping is done by quote_xml/quote_attrib.
            return input_data
        def gds_validate_string(self, input_data, node, input_name=''):
            return input_data
        def gds_format_integer(self, input_data, input_name=''):
            return '%d' % input_data
        def gds_validate_integer(self, input_data, node, input_name=''):
            return input_data
        def gds_format_integer_list(self, input_data, input_name=''):
            return '%s' % input_data
        def gds_validate_integer_list(self, input_data, node, input_name=''):
            # Validate a whitespace-separated list of integers.
            values = input_data.split()
            for value in values:
                try:
                    # Bug fix: use int() -- the original validated with
                    # float(), which wrongly accepted values like '1.5'.
                    int(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of integers')
            return input_data
        def gds_format_float(self, input_data, input_name=''):
            return '%f' % input_data
        def gds_validate_float(self, input_data, node, input_name=''):
            return input_data
        def gds_format_float_list(self, input_data, input_name=''):
            return '%s' % input_data
        def gds_validate_float_list(self, input_data, node, input_name=''):
            # Validate a whitespace-separated list of floats.
            values = input_data.split()
            for value in values:
                try:
                    float(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of floats')
            return input_data
        def gds_format_double(self, input_data, input_name=''):
            return '%e' % input_data
        def gds_validate_double(self, input_data, node, input_name=''):
            return input_data
        def gds_format_double_list(self, input_data, input_name=''):
            return '%s' % input_data
        def gds_validate_double_list(self, input_data, node, input_name=''):
            # Validate a whitespace-separated list of doubles.
            values = input_data.split()
            for value in values:
                try:
                    float(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of doubles')
            return input_data
        def gds_format_boolean(self, input_data, input_name=''):
            return '%s' % input_data
        def gds_validate_boolean(self, input_data, node, input_name=''):
            return input_data
        def gds_format_boolean_list(self, input_data, input_name=''):
            return '%s' % input_data
        def gds_validate_boolean_list(self, input_data, node, input_name=''):
            # Validate a whitespace-separated list of XML booleans.
            values = input_data.split()
            for value in values:
                if value not in ('true', '1', 'false', '0', ):
                    raise_parse_error(node, 'Requires sequence of booleans ("true", "1", "false", "0")')
            return input_data
        def gds_str_lower(self, instring):
            return instring.lower()
        def get_path_(self, node):
            # Build an XPath-like /a/b/c path from the root down to node.
            path_list = []
            self.get_path_list_(node, path_list)
            path_list.reverse()
            path = '/'.join(path_list)
            return path
        Tag_strip_pattern_ = re_.compile(r'\{.*\}')
        def get_path_list_(self, node, path_list):
            # Walk up the tree collecting namespace-stripped tag names.
            # NOTE: node.getparent() exists on lxml nodes only.
            if node is None:
                return
            tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
            if tag:
                path_list.append(tag)
            self.get_path_list_(node.getparent(), path_list)
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
ExternalEncoding = 'utf8'  # encoding applied to attribute values on export
Tag_pattern_ = re_.compile(r'({.*})?(.*)')  # splits an optional '{namespace}' prefix from a tag
STRING_CLEANUP_PAT = re_.compile(r"[\n\r\s]+")  # matches runs of whitespace for cleanup
#
# Support/utility functions.
#
def showIndent(outfile, level):
    """Write *level* indentation steps (four spaces each) to outfile."""
    outfile.write('    ' * level)
def quote_xml(inStr):
    """Escape &, <, > in inStr for use as XML character data.

    Falsy input yields ''.  Non-string input is converted with '%s'.
    (basestring is the Python 2 string base type.)
    Bug fix: the escape calls had been corrupted into no-ops
    (replace('&','&') etc.) -- restore real XML entity escaping,
    replacing '&' first so already-produced entities are not re-escaped.
    """
    if not inStr:
        return ''
    s1 = (isinstance(inStr, basestring) and inStr or
        '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    return s1
def quote_attrib(inStr):
    """Escape inStr for use as an XML attribute value and wrap it in quotes.

    Escapes &, <, >; chooses single or double quoting so embedded quotes
    stay valid, falling back to the &quot; entity when both kinds appear.
    (basestring is the Python 2 string base type.)
    Bug fix: the escape calls had been corrupted into no-ops and the
    &quot; literal was mangled into a syntax error -- restore them.
    """
    s1 = (isinstance(inStr, basestring) and inStr or
        '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    if '"' in s1:
        if "'" in s1:
            s1 = '"%s"' % s1.replace('"', "&quot;")
        else:
            s1 = "'%s'" % s1
    else:
        s1 = '"%s"' % s1
    return s1
def quote_python(inStr):
    """Return inStr wrapped as a Python string literal, picking a quote
    style that avoids escaping: single quotes when possible, double quotes
    when the text contains single quotes (escaping any double quotes),
    and the triple-quoted form for multi-line text."""
    text = inStr
    if text.find("'") == -1:
        # No single quotes present: single-quoted literal.
        return "'''%s'''" % text if text.find('\n') != -1 else "'%s'" % text
    # Single quotes present: fall back to a double-quoted literal.
    if text.find('"') != -1:
        text = text.replace('"', '\\"')
    return '"""%s"""' % text if text.find('\n') != -1 else '"%s"' % text
def get_all_text_(node):
    """Return node's direct text plus the tail text of each of its children."""
    pieces = [node.text if node.text is not None else '']
    pieces.extend(child.tail for child in node if child.tail is not None)
    return ''.join(pieces)
def find_attr_value_(attr_name, node):
    """Look up attr_name on node, trying the bare name first and then each
    namespace-qualified form ('{ns}name').

    NOTE: node.nsmap and dict.itervalues() are lxml / Python 2 specific.
    """
    attrs = node.attrib
    # First try with no namespace.
    value = attrs.get(attr_name)
    if value is None:
        # Now try the other possible namespaces.
        namespaces = node.nsmap.itervalues()
        for namespace in namespaces:
            value = attrs.get('{%s}%s' % (namespace, attr_name, ))
            if value is not None:
                break
    return value
class GDSParseError(Exception):
    """Raised (via raise_parse_error) when XML content fails parsing/validation."""
    pass
def raise_parse_error(node, msg):
    """Raise GDSParseError for *node*, annotating *msg* with the element
    tag (and the source line number when running under lxml)."""
    if XMLParser_import_library == XMLParser_import_lxml:
        detail = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
    else:
        detail = '%s (element %s)' % (msg, node.tag, )
    raise GDSParseError(detail)
class MixedContainer:
    """Holds one piece of mixed XML content (text, simple typed value, or a
    nested complex object) plus category/type tags that drive export."""
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    def getContenttype(self, content_type):
        # NOTE(review): the content_type parameter is ignored (generated-code quirk)
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    def export(self, outfile, level, name, namespace):
        # Serialize this piece of content as XML onto outfile
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else: # category == MixedContainer.CategoryComplex
            self.value.export(outfile, level, namespace,name)
    def exportSimple(self, outfile, level, name):
        # Write a simple typed value as <name>value</name>
        if self.content_type == MixedContainer.TypeString:
            outfile.write('<%s>%s</%s>' % (self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeInteger or \
                self.content_type == MixedContainer.TypeBoolean:
            outfile.write('<%s>%d</%s>' % (self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeFloat or \
                self.content_type == MixedContainer.TypeDecimal:
            outfile.write('<%s>%f</%s>' % (self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write('<%s>%g</%s>' % (self.name, self.value, self.name))
    def exportLiteral(self, outfile, level, name):
        # Write this piece of content as Python constructor-literal source
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write('model_.MixedContainer(%d, %d, "%s", "%s"),\n' % \
                (self.category, self.content_type, self.name, self.value))
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write('model_.MixedContainer(%d, %d, "%s", "%s"),\n' % \
                (self.category, self.content_type, self.name, self.value))
        else: # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write('model_.MixedContainer(%d, %d, "%s",\n' % \
                (self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
class MemberSpec_(object):
    """Describes one member of a generated class: its name, its data type
    (a single type or a chain of types), and whether it is a container."""
    def __init__(self, name='', data_type='', container=0):
        self.name = name
        self.data_type = data_type
        self.container = container
    def get_name(self): return self.name
    def set_name(self, name): self.name = name
    def get_data_type_chain(self): return self.data_type
    def set_data_type(self, data_type): self.data_type = data_type
    def get_data_type(self):
        """Return the effective type: the last entry of a type chain, or
        'xs:string' when the chain is empty."""
        chain = self.data_type
        if not isinstance(chain, list):
            return chain
        return chain[-1] if chain else 'xs:string'
    def get_container(self): return self.container
    def set_container(self, container): self.container = container
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class ServiceExceptionReport(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, version=None, ServiceException=None):
self.version = _cast(None, version)
if ServiceException is None:
self.ServiceException = []
else:
self.ServiceException = ServiceException
def factory(*args_, **kwargs_):
if ServiceExceptionReport.subclass:
return ServiceExceptionReport.subclass(*args_, **kwargs_)
else:
return ServiceExceptionReport(*args_, **kwargs_)
factory = staticmethod(factory)
def get_ServiceException(self): return self.ServiceException
def set_ServiceException(self, ServiceException): self.ServiceException = ServiceException
def add_ServiceException(self, value): self.ServiceException.append(value)
def insert_ServiceException(self, index, value): self.ServiceException[index] = value
def get_version(self): return self.version
def set_version(self, version): self.version = version
def export(self, outfile, level, namespace_='', name_='ServiceExceptionReport', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
self.exportAttributes(outfile, level, [], namespace_, name_='ServiceExceptionReport')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ServiceExceptionReport'):
if self.version is not None and 'version' not in already_processed:
already_processed.append('version')
outfile.write(' version=%s' % (self.gds_format_string(quote_attrib(self.version).encode(ExternalEncoding), input_name='version'), ))
def exportChildren(self, outfile, level, namespace_='', name_='ServiceExceptionReport', fromsubclass_=False):
for ServiceException_ in self.ServiceException:
ServiceException_.export(outfile, level, namespace_, name_='ServiceException')
def hasContent_(self):
if (
self.ServiceException
):
return True
else:
return False
    def exportLiteral(self, outfile, level, name_='ServiceExceptionReport'):
        # Write this object as a Python-literal constructor call (used by
        # the generated parseLiteral() driver).
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.version is not None and 'version' not in already_processed:
            already_processed.append('version')
            showIndent(outfile, level)
            outfile.write('version = "%s",\n' % (self.version,))
    def exportLiteralChildren(self, outfile, level, name_):
        # Children rendered as a literal list of model_.ServiceException(...) calls.
        showIndent(outfile, level)
        outfile.write('ServiceException=[\n')
        level += 1
        for ServiceException_ in self.ServiceException:
            showIndent(outfile, level)
            outfile.write('model_.ServiceException(\n')
            ServiceException_.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        # Populate this object from an ElementTree node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            # Strip any namespace prefix from the child tag name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('version', node)
        if value is not None and 'version' not in already_processed:
            already_processed.append('version')
            self.version = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Each <ServiceException> child becomes a ServiceException instance.
        if nodeName_ == 'ServiceException':
            obj_ = ServiceException.factory()
            obj_.build(child_)
            self.ServiceException.append(obj_)
# end class ServiceExceptionReport
class ServiceException(GeneratedsSuper):
    # Generated binding for the OGC <ServiceException> element: mixed text
    # content plus an optional "code" attribute (generateDS.py output).
    subclass = None
    superclass = None
    def __init__(self, code=None, valueOf_=None, mixedclass_=None, content_=None):
        self.code = _cast(None, code)
        self.valueOf_ = valueOf_
        # mixedclass_ wraps text/element fragments of the mixed content.
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when present (subclassing hook).
        if ServiceException.subclass:
            return ServiceException.subclass(*args_, **kwargs_)
        else:
            return ServiceException(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_code(self): return self.code
    def set_code(self, code): self.code = code
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def export(self, outfile, level, namespace_='', name_='ServiceException', namespacedef_=''):
        # Serialize as XML; always emits open/close tags (text content).
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        self.exportAttributes(outfile, level, [], namespace_, name_='ServiceException')
        outfile.write('>')
        self.exportChildren(outfile, level + 1, namespace_, name_)
        outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ServiceException'):
        if self.code is not None and 'code' not in already_processed:
            already_processed.append('code')
            outfile.write(' code=%s' % (self.gds_format_string(quote_attrib(self.code).encode(ExternalEncoding), input_name='code'), ))
    def exportChildren(self, outfile, level, namespace_='', name_='ServiceException', fromsubclass_=False):
        # Text content only; escaped for XML.
        outfile.write(self.gds_format_string(quote_xml(self.valueOf_).encode(ExternalEncoding)))
    def hasContent_(self):
        # True when the element carries any text content.
        if (
            self.valueOf_
        ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='ServiceException'):
        # Write this object as a Python-literal constructor call.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.code is not None and 'code' not in already_processed:
            already_processed.append('code')
            showIndent(outfile, level)
            outfile.write('code = "%s",\n' % (self.code,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        # Populate from an ElementTree node, collecting mixed content.
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('code', node)
        if value is not None and 'code' not in already_processed:
            already_processed.append('code')
            self.code = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Tail text after any child element is kept as mixed content.
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
        pass
# end class ServiceException
# Command-line help text printed by usage().
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""
def usage():
    """Print the command-line usage text and exit with a failure status."""
    sys.stdout.write(USAGE_TEXT + '\n')
    sys.exit(1)
def get_root_tag(node):
    """Return (tag, cls) for the document root.

    *tag* is the element name with any namespace prefix stripped; *cls* is
    the generated class of that name in this module, or None if unknown.
    """
    match_groups = Tag_pattern_.match(node.tag).groups()
    tag = match_groups[-1]
    return tag, globals().get(tag)
def parse(inFileName):
    # Parse an XML file, build the object tree, echo it to stdout and
    # return the root object.  Falls back to ServiceExceptionReport when
    # the root tag is not a class known to this module.
    doc = parsexml_(inFileName)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag = 'ServiceExceptionReport'
        rootClass = ServiceExceptionReport
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    sys.stdout.write('<?xml version="1.0" ?>\n')
    rootObj.export(sys.stdout, 0, name_=rootTag,
        namespacedef_='')
    return rootObj
def parseString(inString):
    # Parse an XML document held in a string, build the object tree, echo
    # it to stdout and return the root object.
    from StringIO import StringIO
    doc = parsexml_(StringIO(inString))
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag = 'ServiceExceptionReport'
        rootClass = ServiceExceptionReport
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    sys.stdout.write('<?xml version="1.0" ?>\n')
    # FIX: export under the detected root tag (was hard-coded to
    # "ServiceExceptionReport", inconsistent with parse() above and wrong
    # when the actual root is a different recognized element).
    rootObj.export(sys.stdout, 0, name_=rootTag,
        namespacedef_='')
    return rootObj
def parseLiteral(inFileName):
    # Parse an XML file and emit equivalent Python literal constructor
    # code to stdout (a "model_.<RootClass>(...)" expression).
    doc = parsexml_(inFileName)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag = 'ServiceExceptionReport'
        rootClass = ServiceExceptionReport
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    sys.stdout.write('#from exception import *\n\n')
    sys.stdout.write('import exception as model_\n\n')
    sys.stdout.write('rootObj = model_.rootTag(\n')
    rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
    sys.stdout.write(')\n')
    return rootObj
def main():
    """Command-line entry point: expects exactly one input XML file."""
    args = sys.argv[1:]
    if len(args) != 1:
        usage()
    else:
        parse(args[0])
if __name__ == '__main__':
    #import pdb; pdb.set_trace()
    main()
# Public API of this generated module.
__all__ = [
    "ServiceException",
    "ServiceExceptionReport"
]
|
tst-ahernandez/earthenterprise
|
earth_enterprise/src/server/wsgi/wms/ogc/xml/v111/exception.py
|
Python
|
apache-2.0
| 21,567
|
from django.conf.urls import url
from places import views
# Route the application root to the index view.
urlpatterns = [
    url(r'^$', views.index, name='index')
]
|
EntilZha/docker-workshop
|
backend/places/urls.py
|
Python
|
mit
| 119
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
from itertools import compress
# Filtering a sequence: a list comprehension keeps only the positives.
mylist = [1,4,-5, 10, 7, 2, 3, -1]
print([n for n in mylist if n > 0])
# Generator-expression form: lazy, constant memory.
pos = (n for n in mylist if n > 0)
for x in pos:
    print(x, end = ',')
print()
# Mixed data: not every entry parses as an integer.
values = ['1', '2', '-3', '-', '4', 'N/A', '5']
def is_int(val):
    """Return True if *val* can be parsed as an integer, else False."""
    try:
        # The converted value is not needed, only whether int() succeeds;
        # the original bound it to an unused local.
        int(val)
        return True
    except ValueError:
        return False
# Keep only the entries that parse as integers.
ivals = list(filter(is_int, values))
print(ivals)
# Conditional filtering: clamp negative values to zero instead of dropping them.
clip_neg = [n if n > 0 else 0 for n in mylist]
print(clip_neg)
# Eight addresses, one per entry in `counts` below.
addresses = [
    '5412 N CLARK',
    '5148 N CLARK',
    '5800 E 58TH',
    # FIX: the trailing comma was missing here, so implicit string
    # concatenation silently merged this entry with the next one,
    # leaving 7 addresses paired against 8 counts.
    '2122 N CLARK',
    '5645 N RAVENSWOOD',
    '1060 W ADDISON',
    '4801 N BROADWAY',
    '1039 W GRANVILLE',
]
# itertools.compress: select the addresses whose matching count is > 5.
counts = [ 0, 3, 10, 4, 1, 7, 6, 1]
more5 = [n > 5 for n in counts]
print(list(compress(addresses, more5)))
|
firery/live-long-and-prosper
|
Python/Python Cookbook(the 3rd edition)/ch01/p16_filter.py
|
Python
|
apache-2.0
| 820
|
'''
bake.py

Blender batch script: for each named ingredient, imports the matching
.obj from ./objs (if not already present), adds a procedural "bake"
texture to its material, and saves the .blend file.

NOTE(review): the original header described add.py ("duplicates and
rotates the ingredient"), which does not match the code in this file.

Thomas Storey
2016
'''
import sys
import argparse
import bpy
import numpy as np
import os
import bmesh
from math import *
from mathutils import *
import random
def getObjectsBySubstring(objname):
    """Return every Blender object whose name contains *objname*."""
    return [candidate for candidate in bpy.data.objects
            if objname in candidate.name]
def deleteObject(obj):
    # Unlink from the scene, clear remaining users, then remove the datablock.
    bpy.context.scene.objects.unlink(obj)
    obj.user_clear()
    bpy.data.objects.remove(obj)
def getObject(objdir, objname):
    """Return the object named *objname*, importing <objdir>/<objname>.obj
    first when it is not already present in the current .blend."""
    # FIX: identity comparison with None ("is None"), not "== None".
    if bpy.data.objects.get(objname) is None:
        objpath = os.path.join(objdir, objname+".obj")
        bpy.ops.import_scene.obj(filepath=objpath,
            axis_forward='Y',axis_up='Z')
    return bpy.data.objects[objname]
def setOriginToGeometry(scn, obj):
    """Set *obj*'s origin to its geometry center.

    The operator acts on the selected/active object, so the object is
    selected and made active first, then deselected again.
    """
    # Removed stray trailing semicolons (unidiomatic in Python).
    obj.select = True
    scn.objects.active = obj
    bpy.ops.object.origin_set(type="ORIGIN_GEOMETRY")
    obj.select = False
def joinObjects(scn, objs, name):
    # Join *objs* into a single mesh named *name*.  The first object in
    # the list becomes the active object, i.e. the join target that
    # survives the operation; it is renamed and returned.
    bpy.ops.object.select_all(action='DESELECT')
    for obj in objs:
        obj.select = True
    activeobj = objs[0]
    scn.objects.active = activeobj
    bpy.ops.object.join()
    activeobj.name = name
    activeobj.data.name = name
    return activeobj
def bakeObject(scn, obj):
    # Add a "bake" texture slot to *obj*'s first material: a new texture
    # backed by a freshly generated flat-color image, blended with
    # SOFT_LIGHT.  Despite the name, the actual render-bake step below is
    # commented out.
    bpy.ops.object.select_all(action='DESELECT')
    obj.select = True
    scn.objects.active = obj
    mat = obj.material_slots[0].material
    # New textures/images are created under Blender's default names
    # ("Texture" / "Untitled") and then renamed.
    bpy.ops.texture.new()
    tex = bpy.data.textures["Texture"]
    tex.name = "bake"
    tex_slot = mat.texture_slots.add()
    tex_slot.texture = tex
    bpy.ops.image.new()
    img = bpy.data.images["Untitled"]
    img.name = "bake_img"
    tex.image = img
    # Flat skin-like RGBA fill color for the generated image.
    img.generated_color = (1.0, 0.711, 0.540, 1.00)
    tex_slot.blend_type = 'SOFT_LIGHT'
    # The render-bake pass below is disabled; kept for reference.
    # bpy.ops.texture.new()
    # baked_tex = bpy.data.textures["Texture"]
    # baked_tex.name = "baked"
    # baked_tex_slot = mat.texture_slots.create(2)
    # baked_tex_slot.texture = baked_tex
    # bpy.ops.image.new()
    # baked_img = bpy.data.images["Untitled"]
    # baked_img.name = "baked_img"
    # mat.active_texture_index = 2
    # mat.active_texture = baked_tex
    #
    # bpy.ops.object.mode_set(mode="EDIT")
    # bpy.data.scenes["Scene"].render.bake_type = "TEXTURE"
    # for area in bpy.context.screen.areas :
    # if area.type == 'IMAGE_EDITOR' :
    # area.spaces.active.image = baked_img
    # bpy.ops.object.bake_image()
    # mat.texture_slots[0].texture.image = baked_img
def execute(inputs, output):
    """Import/fetch each ingredient .obj, apply the bake texture, save.

    inputs: list of object names (without the .obj extension) under ./objs
    output: target .blend path (already including the extension), or None
            to save the currently open file in place.
    """
    ctx = bpy.context
    scn = ctx.scene
    cwd = os.getcwd()
    objdir = os.path.join(cwd, 'objs')
    for objname in inputs:
        # import file, or get it if it's already here
        obj = getObject(objdir, objname)
        obj.location = Vector([0,0,0])
        bakeObject(scn, obj)
    # save out .blend
    # FIX: idiomatic None test ("is not None"), was "not output == None".
    if output is not None:
        bpy.ops.wm.save_as_mainfile(filepath=output,
            check_existing=False,relative_remap=True)
    else:
        bpy.ops.wm.save_mainfile(check_existing=False,relative_remap=True)
def main():
    """CLI entry point for use after Blender's "--" separator:

    blender -b file.blend --python bake.py -- -i name1,name2 [-o out]
    """
    argv = sys.argv
    if "--" not in argv:
        argv = []  # no script arguments were passed to Blender
    else:
        argv = argv[argv.index("--") + 1:]
    usage_text = \
        "Usage: blender -b [.blend file] --python " + __file__ + " -- [options]"
    parser = argparse.ArgumentParser(description=usage_text)
    parser.add_argument("-i", "--input", dest="input", type=str, required=True,
        help="Comma delimited list of .objs to import. Exclude the file extension.")
    parser.add_argument("-o", "--output", dest="output", type=str, required=False,
        help="Name of blend file to save to, if not the same as the one being opened.")
    # FIX: the "no arguments -> print help" check must run BEFORE
    # parse_args(); with a required -i option, parse_args([]) exits with
    # an error first, so the help-only path was unreachable.
    if not argv:
        parser.print_help()
        return
    args = parser.parse_args(argv)
    if not args.input:
        print("input argument not given. aborting.")
        parser.print_help()
        return
    if not args.output:
        output = None  # save the open .blend in place
    else:
        output = args.output+".blend"
    inputs = args.input.split(",")
    execute(inputs, output)
    print("baked " + ", ".join(inputs))
if __name__ == "__main__":
    main()
|
thomasrstorey/recipesfordisaster
|
actions/bake.py
|
Python
|
mit
| 4,214
|
import re
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request, FormRequest, HtmlResponse
from scrapy.utils.response import get_base_url
from scrapy.utils.url import urljoin_rfc
from scrapy.http import FormRequest
from productloader import load_product
import re
class CartridgeSave(BaseSpider):
    """Scrapy spider for cartridgesave.co.uk.

    Crawls manufacturer and printer listing pages and scrapes individual
    product pages into items via load_product().
    """
    name = 'cartridgesave.co.uk'
    allowed_domains = ['cartridgesave.co.uk', 'www.cartridgesave.co.uk']
    start_urls = ('http://www.cartridgesave.co.uk',)
    def __init__(self, *args, **kwargs):
        super(CartridgeSave, self).__init__(*args, **kwargs)
        self.URL_BASE = 'http://www.cartridgesave.co.uk'
        # Product name is taken from the last path component of the URL,
        # e.g. ".../some-product.html" -> "some-product".
        self.product_name_re = re.compile('.*/(.*?)\.html')
    def parse_product(self, response):
        # Extract one product from its detail page.
        if not isinstance(response, HtmlResponse):
            return
        hxs = HtmlXPathSelector(response)
        res = {}
        try:
            # name = hxs.select('//div[@id="specification"]/ul/li[position()=1]').re('.* \((.*)\)')[0]
            url = response.url
            name = self.product_name_re.search(url).groups()[0]
            # '\xa3' is the pound sign; price is the ex-VAT figure.
            price = hxs.select('.//span[@class="ex_vat_price"]/text()').re('\xa3(.*)')[0]
            res['url'] = url
            res['description'] = name
            res['price'] = price
            # NOTE(review): SKU is just the URL-derived name — confirm this
            # is the intended identifier.
            res['sku'] = res['description']
            yield load_product(res, response)
        except IndexError:
            # Price (or name) not found on the page; skip the product.
            return
    def parse(self, response):
        if not isinstance(response, HtmlResponse):
            return
        #categories
        hxs = HtmlXPathSelector(response)
        # printer brands
        printers_brands = hxs.select('//div[@id="manufacturers"]//li/a/@href').extract()
        for url in printers_brands:
            url = urljoin_rfc(self.URL_BASE, url)
            yield Request(url)
        # printer list
        printers_list = hxs.select('//ul[@class="printer_list"]//li/a/@href').extract()
        for url in printers_list:
            url = urljoin_rfc(self.URL_BASE, url)
            yield Request(url)
        # next page
        # next_page =
        # if next_page:
        # url = urljoin_rfc(URL_BASE, next_page[0])
        # yield Request(url)
        # products
        products = hxs.select('//div[@class="group_products"]//li/a[not(@class="lowest_price info")]/@href').extract()
        for product in products:
            product = urljoin_rfc(self.URL_BASE, product)
            yield Request(product, callback=self.parse_product)
|
0--key/lib
|
portfolio/Python/scrapy/inkshop/cartridgesavecouk.py
|
Python
|
apache-2.0
| 2,521
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from unittest import TestCase as PythonTestCase
import redis
class TestCase(PythonTestCase):
    # Base test case: connects to the local test Redis instance
    # (port 7575, db 0) and wipes all databases so every test starts
    # from a clean slate.
    def setUp(self):
        self.redis = redis.Redis(host='localhost', port=7575, db=0)
        self.redis.flushall()
|
heynemann/octopus
|
tests/__init__.py
|
Python
|
mit
| 262
|
""" Test suite for the fixer modules """
# Python imports
import os
import unittest
from itertools import chain
from operator import itemgetter
# Local imports
from lib2to3 import pygram, pytree, refactor, fixer_util
from lib2to3.tests import support
class FixerTestCase(support.TestCase):
    """Base class for 2to3 fixer tests.

    Subclasses set ``fixer`` to a fixer name; ``check``/``unchanged``/
    ``warns`` then refactor source strings and compare the results.
    """
    # Other test cases can subclass this class and replace "fixer_pkg" with
    # their own.
    def setUp(self, fix_list=None, fixer_pkg="lib2to3", options=None):
        if fix_list is None:
            fix_list = [self.fixer]
        self.refactor = support.get_refactorer(fixer_pkg, fix_list, options)
        self.fixer_log = []
        self.filename = u"<string>"
        # Route every fixer's warnings into our shared log.
        for fixer in chain(self.refactor.pre_order,
                           self.refactor.post_order):
            fixer.log = self.fixer_log
    def _check(self, before, after):
        # Refactor *before* and assert the result equals *after*.
        before = support.reformat(before)
        after = support.reformat(after)
        tree = self.refactor.refactor_string(before, self.filename)
        self.assertEqual(after, unicode(tree))
        return tree
    def check(self, before, after, ignore_warnings=False):
        # As _check(), but the tree must actually have been changed.
        tree = self._check(before, after)
        self.assertTrue(tree.was_changed)
        if not ignore_warnings:
            self.assertEqual(self.fixer_log, [])
    def warns(self, before, after, message, unchanged=False):
        # Expect *message* among the emitted fixer warnings.
        tree = self._check(before, after)
        self.assertIn(message, "".join(self.fixer_log))
        if not unchanged:
            self.assertTrue(tree.was_changed)
    def warns_unchanged(self, before, message):
        self.warns(before, before, message, unchanged=True)
    def unchanged(self, before, ignore_warnings=False):
        self._check(before, before)
        if not ignore_warnings:
            self.assertEqual(self.fixer_log, [])
    def assert_runs_after(self, *names):
        # Verify that self.fixer is ordered after every fixer in *names*.
        fixes = [self.fixer]
        fixes.extend(names)
        r = support.get_refactorer("lib2to3", fixes)
        (pre, post) = r.get_fixers()
        n = "fix_" + self.fixer
        if post and post[-1].__class__.__module__.endswith(n):
            # We're the last fixer to run
            return
        if pre and pre[-1].__class__.__module__.endswith(n) and not post:
            # We're the last in pre and post is empty
            return
        self.fail("Fixer run order (%s) is incorrect; %s should be last."\
            %(", ".join([x.__class__.__module__ for x in (pre+post)]), n))
class Test_ne(FixerTestCase):
    """Tests for the "ne" fixer: ``<>`` -> ``!=``."""
    fixer = "ne"
    def test_basic(self):
        b = """if x <> y:
pass"""
        a = """if x != y:
pass"""
        self.check(b, a)
    def test_no_spaces(self):
        b = """if x<>y:
pass"""
        a = """if x!=y:
pass"""
        self.check(b, a)
    def test_chained(self):
        b = """if x<>y<>z:
pass"""
        a = """if x!=y!=z:
pass"""
        self.check(b, a)
class Test_has_key(FixerTestCase):
    """Tests for the "has_key" fixer: ``d.has_key(k)`` -> ``k in d``."""
    fixer = "has_key"
    def test_1(self):
        b = """x = d.has_key("x") or d.has_key("y")"""
        a = """x = "x" in d or "y" in d"""
        self.check(b, a)
    def test_2(self):
        b = """x = a.b.c.d.has_key("x") ** 3"""
        a = """x = ("x" in a.b.c.d) ** 3"""
        self.check(b, a)
    def test_3(self):
        b = """x = a.b.has_key(1 + 2).__repr__()"""
        a = """x = (1 + 2 in a.b).__repr__()"""
        self.check(b, a)
    def test_4(self):
        b = """x = a.b.has_key(1 + 2).__repr__() ** -3 ** 4"""
        a = """x = (1 + 2 in a.b).__repr__() ** -3 ** 4"""
        self.check(b, a)
    def test_5(self):
        b = """x = a.has_key(f or g)"""
        a = """x = (f or g) in a"""
        self.check(b, a)
    def test_6(self):
        b = """x = a + b.has_key(c)"""
        a = """x = a + (c in b)"""
        self.check(b, a)
    def test_7(self):
        b = """x = a.has_key(lambda: 12)"""
        a = """x = (lambda: 12) in a"""
        self.check(b, a)
    def test_8(self):
        b = """x = a.has_key(a for a in b)"""
        a = """x = (a for a in b) in a"""
        self.check(b, a)
    def test_9(self):
        b = """if not a.has_key(b): pass"""
        a = """if b not in a: pass"""
        self.check(b, a)
    def test_10(self):
        b = """if not a.has_key(b).__repr__(): pass"""
        a = """if not (b in a).__repr__(): pass"""
        self.check(b, a)
    def test_11(self):
        b = """if not a.has_key(b) ** 2: pass"""
        a = """if not (b in a) ** 2: pass"""
        self.check(b, a)
class Test_apply(FixerTestCase):
    """Tests for the "apply" fixer: ``apply(f, args, kwds)`` -> ``f(*args, **kwds)``."""
    fixer = "apply"
    def test_1(self):
        b = """x = apply(f, g + h)"""
        a = """x = f(*g + h)"""
        self.check(b, a)
    def test_2(self):
        b = """y = apply(f, g, h)"""
        a = """y = f(*g, **h)"""
        self.check(b, a)
    def test_3(self):
        b = """z = apply(fs[0], g or h, h or g)"""
        a = """z = fs[0](*g or h, **h or g)"""
        self.check(b, a)
    def test_4(self):
        b = """apply(f, (x, y) + t)"""
        a = """f(*(x, y) + t)"""
        self.check(b, a)
    def test_5(self):
        b = """apply(f, args,)"""
        a = """f(*args)"""
        self.check(b, a)
    def test_6(self):
        b = """apply(f, args, kwds,)"""
        a = """f(*args, **kwds)"""
        self.check(b, a)
    # Test that complex functions are parenthesized
    def test_complex_1(self):
        b = """x = apply(f+g, args)"""
        a = """x = (f+g)(*args)"""
        self.check(b, a)
    def test_complex_2(self):
        b = """x = apply(f*g, args)"""
        a = """x = (f*g)(*args)"""
        self.check(b, a)
    def test_complex_3(self):
        b = """x = apply(f**g, args)"""
        a = """x = (f**g)(*args)"""
        self.check(b, a)
    # But dotted names etc. not
    def test_dotted_name(self):
        b = """x = apply(f.g, args)"""
        a = """x = f.g(*args)"""
        self.check(b, a)
    def test_subscript(self):
        b = """x = apply(f[x], args)"""
        a = """x = f[x](*args)"""
        self.check(b, a)
    def test_call(self):
        b = """x = apply(f(), args)"""
        a = """x = f()(*args)"""
        self.check(b, a)
    # Extreme case
    def test_extreme(self):
        b = """x = apply(a.b.c.d.e.f, args, kwds)"""
        a = """x = a.b.c.d.e.f(*args, **kwds)"""
        self.check(b, a)
    # XXX Comments in weird places still get lost
    def test_weird_comments(self):
        b = """apply( # foo
f, # bar
args)"""
        a = """f(*args)"""
        self.check(b, a)
    # These should *not* be touched
    def test_unchanged_1(self):
        s = """apply()"""
        self.unchanged(s)
    def test_unchanged_2(self):
        s = """apply(f)"""
        self.unchanged(s)
    def test_unchanged_3(self):
        s = """apply(f,)"""
        self.unchanged(s)
    def test_unchanged_4(self):
        s = """apply(f, args, kwds, extras)"""
        self.unchanged(s)
    def test_unchanged_5(self):
        s = """apply(f, *args, **kwds)"""
        self.unchanged(s)
    def test_unchanged_6(self):
        s = """apply(f, *args)"""
        self.unchanged(s)
    def test_unchanged_7(self):
        s = """apply(func=f, args=args, kwds=kwds)"""
        self.unchanged(s)
    def test_unchanged_8(self):
        s = """apply(f, args=args, kwds=kwds)"""
        self.unchanged(s)
    def test_unchanged_9(self):
        s = """apply(f, args, kwds=kwds)"""
        self.unchanged(s)
    def test_space_1(self):
        a = """apply( f, args, kwds)"""
        b = """f(*args, **kwds)"""
        self.check(a, b)
    def test_space_2(self):
        a = """apply( f ,args,kwds )"""
        b = """f(*args, **kwds)"""
        self.check(a, b)
class Test_intern(FixerTestCase):
    """Tests for the "intern" fixer: ``intern(s)`` -> ``sys.intern(s)``."""
    fixer = "intern"
    def test_prefix_preservation(self):
        b = """x = intern( a )"""
        a = """import sys\nx = sys.intern( a )"""
        self.check(b, a)
        b = """y = intern("b" # test
)"""
        a = """import sys\ny = sys.intern("b" # test
)"""
        self.check(b, a)
        b = """z = intern(a+b+c.d, )"""
        a = """import sys\nz = sys.intern(a+b+c.d, )"""
        self.check(b, a)
    def test(self):
        b = """x = intern(a)"""
        a = """import sys\nx = sys.intern(a)"""
        self.check(b, a)
        b = """z = intern(a+b+c.d,)"""
        a = """import sys\nz = sys.intern(a+b+c.d,)"""
        self.check(b, a)
        b = """intern("y%s" % 5).replace("y", "")"""
        a = """import sys\nsys.intern("y%s" % 5).replace("y", "")"""
        self.check(b, a)
    # These should not be refactored
    def test_unchanged(self):
        s = """intern(a=1)"""
        self.unchanged(s)
        s = """intern(f, g)"""
        self.unchanged(s)
        s = """intern(*h)"""
        self.unchanged(s)
        s = """intern(**i)"""
        self.unchanged(s)
        s = """intern()"""
        self.unchanged(s)
class Test_reduce(FixerTestCase):
    """Tests for the "reduce" fixer: add ``from functools import reduce``."""
    fixer = "reduce"
    def test_simple_call(self):
        b = "reduce(a, b, c)"
        a = "from functools import reduce\nreduce(a, b, c)"
        self.check(b, a)
    def test_bug_7253(self):
        # fix_tuple_params was being bad and orphaning nodes in the tree.
        b = "def x(arg): reduce(sum, [])"
        a = "from functools import reduce\ndef x(arg): reduce(sum, [])"
        self.check(b, a)
    def test_call_with_lambda(self):
        b = "reduce(lambda x, y: x + y, seq)"
        a = "from functools import reduce\nreduce(lambda x, y: x + y, seq)"
        self.check(b, a)
    def test_unchanged(self):
        s = "reduce(a)"
        self.unchanged(s)
        s = "reduce(a, b=42)"
        self.unchanged(s)
        s = "reduce(a, b, c, d)"
        self.unchanged(s)
        s = "reduce(**c)"
        self.unchanged(s)
        s = "reduce()"
        self.unchanged(s)
class Test_print(FixerTestCase):
    """Tests for the "print" fixer: print statement -> print() function."""
    fixer = "print"
    def test_prefix_preservation(self):
        b = """print 1, 1+1, 1+1+1"""
        a = """print(1, 1+1, 1+1+1)"""
        self.check(b, a)
    def test_idempotency(self):
        s = """print()"""
        self.unchanged(s)
        s = """print('')"""
        self.unchanged(s)
    def test_idempotency_print_as_function(self):
        # With the print-function grammar, print() calls must be left alone.
        self.refactor.driver.grammar = pygram.python_grammar_no_print_statement
        s = """print(1, 1+1, 1+1+1)"""
        self.unchanged(s)
        s = """print()"""
        self.unchanged(s)
        s = """print('')"""
        self.unchanged(s)
    def test_1(self):
        b = """print 1, 1+1, 1+1+1"""
        a = """print(1, 1+1, 1+1+1)"""
        self.check(b, a)
    def test_2(self):
        b = """print 1, 2"""
        a = """print(1, 2)"""
        self.check(b, a)
    def test_3(self):
        b = """print"""
        a = """print()"""
        self.check(b, a)
    def test_4(self):
        # from bug 3000
        b = """print whatever; print"""
        a = """print(whatever); print()"""
        self.check(b, a)
    def test_5(self):
        b = """print; print whatever;"""
        a = """print(); print(whatever);"""
        self.check(b, a)
    def test_tuple(self):
        b = """print (a, b, c)"""
        a = """print((a, b, c))"""
        self.check(b, a)
    # trailing commas
    def test_trailing_comma_1(self):
        b = """print 1, 2, 3,"""
        a = """print(1, 2, 3, end=' ')"""
        self.check(b, a)
    def test_trailing_comma_2(self):
        b = """print 1, 2,"""
        a = """print(1, 2, end=' ')"""
        self.check(b, a)
    def test_trailing_comma_3(self):
        b = """print 1,"""
        a = """print(1, end=' ')"""
        self.check(b, a)
    # >> stuff
    def test_vargs_without_trailing_comma(self):
        b = """print >>sys.stderr, 1, 2, 3"""
        a = """print(1, 2, 3, file=sys.stderr)"""
        self.check(b, a)
    def test_with_trailing_comma(self):
        b = """print >>sys.stderr, 1, 2,"""
        a = """print(1, 2, end=' ', file=sys.stderr)"""
        self.check(b, a)
    def test_no_trailing_comma(self):
        b = """print >>sys.stderr, 1+1"""
        a = """print(1+1, file=sys.stderr)"""
        self.check(b, a)
    def test_spaces_before_file(self):
        b = """print >> sys.stderr"""
        a = """print(file=sys.stderr)"""
        self.check(b, a)
    def test_with_future_print_function(self):
        # A __future__ print_function import disables the statement fixer.
        s = "from __future__ import print_function\n" \
            "print('Hai!', end=' ')"
        self.unchanged(s)
        b = "print 'Hello, world!'"
        a = "print('Hello, world!')"
        self.check(b, a)
class Test_exec(FixerTestCase):
    """Tests for the "exec" fixer: exec statement -> exec() function."""
    fixer = "exec"
    def test_prefix_preservation(self):
        b = """ exec code in ns1, ns2"""
        a = """ exec(code, ns1, ns2)"""
        self.check(b, a)
    def test_basic(self):
        b = """exec code"""
        a = """exec(code)"""
        self.check(b, a)
    def test_with_globals(self):
        b = """exec code in ns"""
        a = """exec(code, ns)"""
        self.check(b, a)
    def test_with_globals_locals(self):
        b = """exec code in ns1, ns2"""
        a = """exec(code, ns1, ns2)"""
        self.check(b, a)
    def test_complex_1(self):
        b = """exec (a.b()) in ns"""
        a = """exec((a.b()), ns)"""
        self.check(b, a)
    def test_complex_2(self):
        b = """exec a.b() + c in ns"""
        a = """exec(a.b() + c, ns)"""
        self.check(b, a)
    # These should not be touched
    def test_unchanged_1(self):
        s = """exec(code)"""
        self.unchanged(s)
    def test_unchanged_2(self):
        s = """exec (code)"""
        self.unchanged(s)
    def test_unchanged_3(self):
        s = """exec(code, ns)"""
        self.unchanged(s)
    def test_unchanged_4(self):
        s = """exec(code, ns1, ns2)"""
        self.unchanged(s)
class Test_repr(FixerTestCase):
    """Tests for the "repr" fixer: backtick expressions -> repr()."""
    fixer = "repr"
    def test_prefix_preservation(self):
        b = """x = `1 + 2`"""
        a = """x = repr(1 + 2)"""
        self.check(b, a)
    def test_simple_1(self):
        b = """x = `1 + 2`"""
        a = """x = repr(1 + 2)"""
        self.check(b, a)
    def test_simple_2(self):
        b = """y = `x`"""
        a = """y = repr(x)"""
        self.check(b, a)
    def test_complex(self):
        b = """z = `y`.__repr__()"""
        a = """z = repr(y).__repr__()"""
        self.check(b, a)
    def test_tuple(self):
        b = """x = `1, 2, 3`"""
        a = """x = repr((1, 2, 3))"""
        self.check(b, a)
    def test_nested(self):
        b = """x = `1 + `2``"""
        a = """x = repr(1 + repr(2))"""
        self.check(b, a)
    def test_nested_tuples(self):
        b = """x = `1, 2 + `3, 4``"""
        a = """x = repr((1, 2 + repr((3, 4))))"""
        self.check(b, a)
class Test_except(FixerTestCase):
    """Tests for the "except" fixer: ``except E, e:`` -> ``except E as e:``,
    with non-name targets rewritten via a temporary variable."""
    fixer = "except"
    def test_prefix_preservation(self):
        b = """
try:
pass
except (RuntimeError, ImportError), e:
pass"""
        a = """
try:
pass
except (RuntimeError, ImportError) as e:
pass"""
        self.check(b, a)
    def test_simple(self):
        b = """
try:
pass
except Foo, e:
pass"""
        a = """
try:
pass
except Foo as e:
pass"""
        self.check(b, a)
    def test_simple_no_space_before_target(self):
        b = """
try:
pass
except Foo,e:
pass"""
        a = """
try:
pass
except Foo as e:
pass"""
        self.check(b, a)
    def test_tuple_unpack(self):
        b = """
def foo():
try:
pass
except Exception, (f, e):
pass
except ImportError, e:
pass"""
        a = """
def foo():
try:
pass
except Exception as xxx_todo_changeme:
(f, e) = xxx_todo_changeme.args
pass
except ImportError as e:
pass"""
        self.check(b, a)
    def test_multi_class(self):
        b = """
try:
pass
except (RuntimeError, ImportError), e:
pass"""
        a = """
try:
pass
except (RuntimeError, ImportError) as e:
pass"""
        self.check(b, a)
    def test_list_unpack(self):
        b = """
try:
pass
except Exception, [a, b]:
pass"""
        a = """
try:
pass
except Exception as xxx_todo_changeme:
[a, b] = xxx_todo_changeme.args
pass"""
        self.check(b, a)
    def test_weird_target_1(self):
        b = """
try:
pass
except Exception, d[5]:
pass"""
        a = """
try:
pass
except Exception as xxx_todo_changeme:
d[5] = xxx_todo_changeme
pass"""
        self.check(b, a)
    def test_weird_target_2(self):
        b = """
try:
pass
except Exception, a.foo:
pass"""
        a = """
try:
pass
except Exception as xxx_todo_changeme:
a.foo = xxx_todo_changeme
pass"""
        self.check(b, a)
    def test_weird_target_3(self):
        b = """
try:
pass
except Exception, a().foo:
pass"""
        a = """
try:
pass
except Exception as xxx_todo_changeme:
a().foo = xxx_todo_changeme
pass"""
        self.check(b, a)
    def test_bare_except(self):
        b = """
try:
pass
except Exception, a:
pass
except:
pass"""
        a = """
try:
pass
except Exception as a:
pass
except:
pass"""
        self.check(b, a)
    def test_bare_except_and_else_finally(self):
        b = """
try:
pass
except Exception, a:
pass
except:
pass
else:
pass
finally:
pass"""
        a = """
try:
pass
except Exception as a:
pass
except:
pass
else:
pass
finally:
pass"""
        self.check(b, a)
    def test_multi_fixed_excepts_before_bare_except(self):
        b = """
try:
pass
except TypeError, b:
pass
except Exception, a:
pass
except:
pass"""
        a = """
try:
pass
except TypeError as b:
pass
except Exception as a:
pass
except:
pass"""
        self.check(b, a)
    def test_one_line_suites(self):
        b = """
try: raise TypeError
except TypeError, e:
pass
"""
        a = """
try: raise TypeError
except TypeError as e:
pass
"""
        self.check(b, a)
        b = """
try:
raise TypeError
except TypeError, e: pass
"""
        a = """
try:
raise TypeError
except TypeError as e: pass
"""
        self.check(b, a)
        b = """
try: raise TypeError
except TypeError, e: pass
"""
        a = """
try: raise TypeError
except TypeError as e: pass
"""
        self.check(b, a)
        b = """
try: raise TypeError
except TypeError, e: pass
else: function()
finally: done()
"""
        a = """
try: raise TypeError
except TypeError as e: pass
else: function()
finally: done()
"""
        self.check(b, a)
    # These should not be touched:
    def test_unchanged_1(self):
        s = """
try:
pass
except:
pass"""
        self.unchanged(s)
    def test_unchanged_2(self):
        s = """
try:
pass
except Exception:
pass"""
        self.unchanged(s)
    def test_unchanged_3(self):
        s = """
try:
pass
except (Exception, SystemExit):
pass"""
        self.unchanged(s)
class Test_raise(FixerTestCase):
    """Tests for the "raise" fixer: ``raise E, V, T`` ->
    ``raise E(V).with_traceback(T)`` and related forms."""
    fixer = "raise"
    def test_basic(self):
        b = """raise Exception, 5"""
        a = """raise Exception(5)"""
        self.check(b, a)
    def test_prefix_preservation(self):
        b = """raise Exception,5"""
        a = """raise Exception(5)"""
        self.check(b, a)
        b = """raise Exception, 5"""
        a = """raise Exception(5)"""
        self.check(b, a)
    def test_with_comments(self):
        b = """raise Exception, 5 # foo"""
        a = """raise Exception(5) # foo"""
        self.check(b, a)
        b = """raise E, (5, 6) % (a, b) # foo"""
        a = """raise E((5, 6) % (a, b)) # foo"""
        self.check(b, a)
        b = """def foo():
raise Exception, 5, 6 # foo"""
        a = """def foo():
raise Exception(5).with_traceback(6) # foo"""
        self.check(b, a)
    def test_None_value(self):
        b = """raise Exception(5), None, tb"""
        a = """raise Exception(5).with_traceback(tb)"""
        self.check(b, a)
    def test_tuple_value(self):
        b = """raise Exception, (5, 6, 7)"""
        a = """raise Exception(5, 6, 7)"""
        self.check(b, a)
    def test_tuple_detection(self):
        b = """raise E, (5, 6) % (a, b)"""
        a = """raise E((5, 6) % (a, b))"""
        self.check(b, a)
    def test_tuple_exc_1(self):
        b = """raise (((E1, E2), E3), E4), V"""
        a = """raise E1(V)"""
        self.check(b, a)
    def test_tuple_exc_2(self):
        b = """raise (E1, (E2, E3), E4), V"""
        a = """raise E1(V)"""
        self.check(b, a)
    # These should produce a warning
    def test_string_exc(self):
        s = """raise 'foo'"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")
    def test_string_exc_val(self):
        s = """raise "foo", 5"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")
    def test_string_exc_val_tb(self):
        s = """raise "foo", 5, 6"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")
    # These should result in traceback-assignment
    def test_tb_1(self):
        b = """def foo():
raise Exception, 5, 6"""
        a = """def foo():
raise Exception(5).with_traceback(6)"""
        self.check(b, a)
    def test_tb_2(self):
        b = """def foo():
a = 5
raise Exception, 5, 6
b = 6"""
        a = """def foo():
a = 5
raise Exception(5).with_traceback(6)
b = 6"""
        self.check(b, a)
    def test_tb_3(self):
        b = """def foo():
raise Exception,5,6"""
        a = """def foo():
raise Exception(5).with_traceback(6)"""
        self.check(b, a)
    def test_tb_4(self):
        b = """def foo():
a = 5
raise Exception,5,6
b = 6"""
        a = """def foo():
a = 5
raise Exception(5).with_traceback(6)
b = 6"""
        self.check(b, a)
    def test_tb_5(self):
        b = """def foo():
raise Exception, (5, 6, 7), 6"""
        a = """def foo():
raise Exception(5, 6, 7).with_traceback(6)"""
        self.check(b, a)
    def test_tb_6(self):
        b = """def foo():
a = 5
raise Exception, (5, 6, 7), 6
b = 6"""
        a = """def foo():
a = 5
raise Exception(5, 6, 7).with_traceback(6)
b = 6"""
        self.check(b, a)
class Test_throw(FixerTestCase):
    """Tests for the "throw" fixer: ``g.throw(E, V, T)`` ->
    ``g.throw(E(V).with_traceback(T))`` and related forms."""
    fixer = "throw"
    def test_1(self):
        b = """g.throw(Exception, 5)"""
        a = """g.throw(Exception(5))"""
        self.check(b, a)
    def test_2(self):
        b = """g.throw(Exception,5)"""
        a = """g.throw(Exception(5))"""
        self.check(b, a)
    def test_3(self):
        b = """g.throw(Exception, (5, 6, 7))"""
        a = """g.throw(Exception(5, 6, 7))"""
        self.check(b, a)
    def test_4(self):
        b = """5 + g.throw(Exception, 5)"""
        a = """5 + g.throw(Exception(5))"""
        self.check(b, a)
    # These should produce warnings
    def test_warn_1(self):
        s = """g.throw("foo")"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")
    def test_warn_2(self):
        s = """g.throw("foo", 5)"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")
    def test_warn_3(self):
        s = """g.throw("foo", 5, 6)"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")
    # These should not be touched
    def test_untouched_1(self):
        s = """g.throw(Exception)"""
        self.unchanged(s)
    def test_untouched_2(self):
        s = """g.throw(Exception(5, 6))"""
        self.unchanged(s)
    def test_untouched_3(self):
        s = """5 + g.throw(Exception(5, 6))"""
        self.unchanged(s)
    # These should result in traceback-assignment
    def test_tb_1(self):
        b = """def foo():
g.throw(Exception, 5, 6)"""
        a = """def foo():
g.throw(Exception(5).with_traceback(6))"""
        self.check(b, a)
    def test_tb_2(self):
        b = """def foo():
a = 5
g.throw(Exception, 5, 6)
b = 6"""
        a = """def foo():
a = 5
g.throw(Exception(5).with_traceback(6))
b = 6"""
        self.check(b, a)
    def test_tb_3(self):
        b = """def foo():
g.throw(Exception,5,6)"""
        a = """def foo():
g.throw(Exception(5).with_traceback(6))"""
        self.check(b, a)
    def test_tb_4(self):
        b = """def foo():
a = 5
g.throw(Exception,5,6)
b = 6"""
        a = """def foo():
a = 5
g.throw(Exception(5).with_traceback(6))
b = 6"""
        self.check(b, a)
    def test_tb_5(self):
        b = """def foo():
g.throw(Exception, (5, 6, 7), 6)"""
        a = """def foo():
g.throw(Exception(5, 6, 7).with_traceback(6))"""
        self.check(b, a)
    def test_tb_6(self):
        b = """def foo():
a = 5
g.throw(Exception, (5, 6, 7), 6)
b = 6"""
        a = """def foo():
a = 5
g.throw(Exception(5, 6, 7).with_traceback(6))
b = 6"""
        self.check(b, a)
    def test_tb_7(self):
        b = """def foo():
a + g.throw(Exception, 5, 6)"""
        a = """def foo():
a + g.throw(Exception(5).with_traceback(6))"""
        self.check(b, a)
    def test_tb_8(self):
        b = """def foo():
a = 5
a + g.throw(Exception, 5, 6)
b = 6"""
        a = """def foo():
a = 5
a + g.throw(Exception(5).with_traceback(6))
b = 6"""
        self.check(b, a)
class Test_long(FixerTestCase):
    """Tests for the "long" fixer: the builtin name ``long`` becomes ``int``."""

    fixer = "long"

    def test_1(self):
        b = """x = long(x)"""
        a = """x = int(x)"""
        self.check(b, a)

    def test_2(self):
        b = """y = isinstance(x, long)"""
        a = """y = isinstance(x, int)"""
        self.check(b, a)

    def test_3(self):
        b = """z = type(x) in (int, long)"""
        a = """z = type(x) in (int, int)"""
        self.check(b, a)

    def test_unchanged(self):
        # Bindings that merely reuse the name "long" must not be rewritten.
        s = """long = True"""
        self.unchanged(s)

        s = """s.long = True"""
        self.unchanged(s)

        s = """def long(): pass"""
        self.unchanged(s)

        s = """class long(): pass"""
        self.unchanged(s)

        s = """def f(long): pass"""
        self.unchanged(s)

        s = """def f(g, long): pass"""
        self.unchanged(s)

        s = """def f(x, long=True): pass"""
        self.unchanged(s)

    def test_prefix_preservation(self):
        b = """x = long( x )"""
        a = """x = int( x )"""
        self.check(b, a)
class Test_execfile(FixerTestCase):
    """Tests for the "execfile" fixer:
    execfile(fn, ...) -> exec(compile(open(fn).read(), fn, 'exec'), ...)."""

    fixer = "execfile"

    def test_conversion(self):
        b = """execfile("fn")"""
        a = """exec(compile(open("fn").read(), "fn", 'exec'))"""
        self.check(b, a)

        b = """execfile("fn", glob)"""
        a = """exec(compile(open("fn").read(), "fn", 'exec'), glob)"""
        self.check(b, a)

        b = """execfile("fn", glob, loc)"""
        a = """exec(compile(open("fn").read(), "fn", 'exec'), glob, loc)"""
        self.check(b, a)

        b = """execfile("fn", globals=glob)"""
        a = """exec(compile(open("fn").read(), "fn", 'exec'), globals=glob)"""
        self.check(b, a)

        b = """execfile("fn", locals=loc)"""
        a = """exec(compile(open("fn").read(), "fn", 'exec'), locals=loc)"""
        self.check(b, a)

        b = """execfile("fn", globals=glob, locals=loc)"""
        a = """exec(compile(open("fn").read(), "fn", 'exec'), globals=glob, locals=loc)"""
        self.check(b, a)

    def test_spacing(self):
        b = """execfile( "fn" )"""
        a = """exec(compile(open( "fn" ).read(), "fn", 'exec'))"""
        self.check(b, a)

        b = """execfile("fn", globals = glob)"""
        a = """exec(compile(open("fn").read(), "fn", 'exec'), globals = glob)"""
        self.check(b, a)
class Test_isinstance(FixerTestCase):
    """Tests for the "isinstance" fixer: de-duplicate entries in the type tuple
    (duplicates typically produced by other fixers, e.g. int/long -> int/int)."""

    fixer = "isinstance"

    def test_remove_multiple_items(self):
        b = """isinstance(x, (int, int, int))"""
        a = """isinstance(x, int)"""
        self.check(b, a)

        b = """isinstance(x, (int, float, int, int, float))"""
        a = """isinstance(x, (int, float))"""
        self.check(b, a)

        b = """isinstance(x, (int, float, int, int, float, str))"""
        a = """isinstance(x, (int, float, str))"""
        self.check(b, a)

        b = """isinstance(foo() + bar(), (x(), y(), x(), int, int))"""
        a = """isinstance(foo() + bar(), (x(), y(), x(), int))"""
        self.check(b, a)

    def test_prefix_preservation(self):
        b = """if isinstance( foo(), ( bar, bar, baz )) : pass"""
        a = """if isinstance( foo(), ( bar, baz )) : pass"""
        self.check(b, a)

    def test_unchanged(self):
        self.unchanged("isinstance(x, (str, int))")
class Test_dict(FixerTestCase):
    """Tests for the "dict" fixer: keys/items/values get wrapped in list(),
    iter* variants in iter(), and view* variants become the plain methods."""

    fixer = "dict"

    def test_prefix_preservation(self):
        b = "if d. keys ( ) : pass"
        a = "if list(d. keys ( )) : pass"
        self.check(b, a)

        b = "if d. items ( ) : pass"
        a = "if list(d. items ( )) : pass"
        self.check(b, a)

        b = "if d. iterkeys ( ) : pass"
        a = "if iter(d. keys ( )) : pass"
        self.check(b, a)

        b = "[i for i in d. iterkeys( ) ]"
        a = "[i for i in d. keys( ) ]"
        self.check(b, a)

        b = "if d. viewkeys ( ) : pass"
        a = "if d. keys ( ) : pass"
        self.check(b, a)

        b = "[i for i in d. viewkeys( ) ]"
        a = "[i for i in d. keys( ) ]"
        self.check(b, a)

    def test_trailing_comment(self):
        b = "d.keys() # foo"
        a = "list(d.keys()) # foo"
        self.check(b, a)

        b = "d.items() # foo"
        a = "list(d.items()) # foo"
        self.check(b, a)

        b = "d.iterkeys() # foo"
        a = "iter(d.keys()) # foo"
        self.check(b, a)

        b = """[i for i in d.iterkeys() # foo
]"""
        a = """[i for i in d.keys() # foo
]"""
        self.check(b, a)

        # NOTE(review): this pair repeats the previous one verbatim — possibly
        # an unintentional duplication; kept to preserve behavior.
        b = """[i for i in d.iterkeys() # foo
]"""
        a = """[i for i in d.keys() # foo
]"""
        self.check(b, a)

        b = "d.viewitems() # foo"
        a = "d.items() # foo"
        self.check(b, a)

    def test_unchanged(self):
        # Inside a consuming call (list, sorted, ...) no wrapping is needed.
        for wrapper in fixer_util.consuming_calls:
            s = "s = %s(d.keys())" % wrapper
            self.unchanged(s)

            s = "s = %s(d.values())" % wrapper
            self.unchanged(s)

            s = "s = %s(d.items())" % wrapper
            self.unchanged(s)

    def test_01(self):
        b = "d.keys()"
        a = "list(d.keys())"
        self.check(b, a)

        b = "a[0].foo().keys()"
        a = "list(a[0].foo().keys())"
        self.check(b, a)

    def test_02(self):
        b = "d.items()"
        a = "list(d.items())"
        self.check(b, a)

    def test_03(self):
        b = "d.values()"
        a = "list(d.values())"
        self.check(b, a)

    def test_04(self):
        b = "d.iterkeys()"
        a = "iter(d.keys())"
        self.check(b, a)

    def test_05(self):
        b = "d.iteritems()"
        a = "iter(d.items())"
        self.check(b, a)

    def test_06(self):
        b = "d.itervalues()"
        a = "iter(d.values())"
        self.check(b, a)

    def test_07(self):
        s = "list(d.keys())"
        self.unchanged(s)

    def test_08(self):
        s = "sorted(d.keys())"
        self.unchanged(s)

    def test_09(self):
        b = "iter(d.keys())"
        a = "iter(list(d.keys()))"
        self.check(b, a)

    def test_10(self):
        b = "foo(d.keys())"
        a = "foo(list(d.keys()))"
        self.check(b, a)

    def test_11(self):
        b = "for i in d.keys(): print i"
        a = "for i in list(d.keys()): print i"
        self.check(b, a)

    def test_12(self):
        b = "for i in d.iterkeys(): print i"
        a = "for i in d.keys(): print i"
        self.check(b, a)

    def test_13(self):
        b = "[i for i in d.keys()]"
        a = "[i for i in list(d.keys())]"
        self.check(b, a)

    def test_14(self):
        b = "[i for i in d.iterkeys()]"
        a = "[i for i in d.keys()]"
        self.check(b, a)

    def test_15(self):
        b = "(i for i in d.keys())"
        a = "(i for i in list(d.keys()))"
        self.check(b, a)

    def test_16(self):
        b = "(i for i in d.iterkeys())"
        a = "(i for i in d.keys())"
        self.check(b, a)

    def test_17(self):
        b = "iter(d.iterkeys())"
        a = "iter(d.keys())"
        self.check(b, a)

    def test_18(self):
        b = "list(d.iterkeys())"
        a = "list(d.keys())"
        self.check(b, a)

    def test_19(self):
        b = "sorted(d.iterkeys())"
        a = "sorted(d.keys())"
        self.check(b, a)

    def test_20(self):
        b = "foo(d.iterkeys())"
        a = "foo(iter(d.keys()))"
        self.check(b, a)

    def test_21(self):
        b = "print h.iterkeys().next()"
        a = "print iter(h.keys()).next()"
        self.check(b, a)

    def test_22(self):
        b = "print h.keys()[0]"
        a = "print list(h.keys())[0]"
        self.check(b, a)

    def test_23(self):
        b = "print list(h.iterkeys().next())"
        a = "print list(iter(h.keys()).next())"
        self.check(b, a)

    def test_24(self):
        b = "for x in h.keys()[0]: print x"
        a = "for x in list(h.keys())[0]: print x"
        self.check(b, a)

    def test_25(self):
        b = "d.viewkeys()"
        a = "d.keys()"
        self.check(b, a)

    def test_26(self):
        b = "d.viewitems()"
        a = "d.items()"
        self.check(b, a)

    def test_27(self):
        b = "d.viewvalues()"
        a = "d.values()"
        self.check(b, a)

    def test_28(self):
        b = "[i for i in d.viewkeys()]"
        a = "[i for i in d.keys()]"
        self.check(b, a)

    def test_29(self):
        b = "(i for i in d.viewkeys())"
        a = "(i for i in d.keys())"
        self.check(b, a)

    def test_30(self):
        b = "iter(d.viewkeys())"
        a = "iter(d.keys())"
        self.check(b, a)

    def test_31(self):
        b = "list(d.viewkeys())"
        a = "list(d.keys())"
        self.check(b, a)

    def test_32(self):
        b = "sorted(d.viewkeys())"
        a = "sorted(d.keys())"
        self.check(b, a)
class Test_xrange(FixerTestCase):
    """Tests for the "xrange" fixer: xrange -> range; a bare range() whose
    result is used as a list gets wrapped in list(range(...))."""

    fixer = "xrange"

    def test_prefix_preservation(self):
        b = """x = xrange( 10 )"""
        a = """x = range( 10 )"""
        self.check(b, a)

        b = """x = xrange( 1 , 10 )"""
        a = """x = range( 1 , 10 )"""
        self.check(b, a)

        b = """x = xrange( 0 , 10 , 2 )"""
        a = """x = range( 0 , 10 , 2 )"""
        self.check(b, a)

    def test_single_arg(self):
        b = """x = xrange(10)"""
        a = """x = range(10)"""
        self.check(b, a)

    def test_two_args(self):
        b = """x = xrange(1, 10)"""
        a = """x = range(1, 10)"""
        self.check(b, a)

    def test_three_args(self):
        b = """x = xrange(0, 10, 2)"""
        a = """x = range(0, 10, 2)"""
        self.check(b, a)

    def test_wrap_in_list(self):
        b = """x = range(10, 3, 9)"""
        a = """x = list(range(10, 3, 9))"""
        self.check(b, a)

        b = """x = foo(range(10, 3, 9))"""
        a = """x = foo(list(range(10, 3, 9)))"""
        self.check(b, a)

        b = """x = range(10, 3, 9) + [4]"""
        a = """x = list(range(10, 3, 9)) + [4]"""
        self.check(b, a)

        b = """x = range(10)[::-1]"""
        a = """x = list(range(10))[::-1]"""
        self.check(b, a)

        b = """x = range(10) [3]"""
        a = """x = list(range(10)) [3]"""
        self.check(b, a)

    def test_xrange_in_for(self):
        b = """for i in xrange(10):\n    j=i"""
        a = """for i in range(10):\n    j=i"""
        self.check(b, a)

        b = """[i for i in xrange(10)]"""
        a = """[i for i in range(10)]"""
        self.check(b, a)

    def test_range_in_for(self):
        self.unchanged("for i in range(10): pass")
        self.unchanged("[i for i in range(10)]")

    def test_in_contains_test(self):
        self.unchanged("x in range(10, 3, 9)")

    def test_in_consuming_context(self):
        for call in fixer_util.consuming_calls:
            self.unchanged("a = %s(range(10))" % call)
class Test_xrange_with_reduce(FixerTestCase):
    """Run the "xrange" and "reduce" fixers together on a single input."""

    def setUp(self):
        super(Test_xrange_with_reduce, self).setUp(["xrange", "reduce"])

    def test_double_transform(self):
        b = """reduce(x, xrange(5))"""
        a = """from functools import reduce
reduce(x, range(5))"""
        self.check(b, a)
class Test_raw_input(FixerTestCase):
    """Tests for the "raw_input" fixer: raw_input -> input."""

    fixer = "raw_input"

    def test_prefix_preservation(self):
        b = """x = raw_input(   )"""
        a = """x = input(   )"""
        self.check(b, a)

        b = """x = raw_input(   ''   )"""
        a = """x = input(   ''   )"""
        self.check(b, a)

    def test_1(self):
        b = """x = raw_input()"""
        a = """x = input()"""
        self.check(b, a)

    def test_2(self):
        b = """x = raw_input('')"""
        a = """x = input('')"""
        self.check(b, a)

    def test_3(self):
        b = """x = raw_input('prompt')"""
        a = """x = input('prompt')"""
        self.check(b, a)

    def test_4(self):
        b = """x = raw_input(foo(a) + 6)"""
        a = """x = input(foo(a) + 6)"""
        self.check(b, a)

    def test_5(self):
        b = """x = raw_input(invite).split()"""
        a = """x = input(invite).split()"""
        self.check(b, a)

    def test_6(self):
        b = """x = raw_input(invite) . split ()"""
        a = """x = input(invite) . split ()"""
        self.check(b, a)

    def test_8(self):
        b = "x = int(raw_input())"
        a = "x = int(input())"
        self.check(b, a)
class Test_funcattrs(FixerTestCase):
    """Tests for the "funcattrs" fixer: func_X attributes become __X__."""

    fixer = "funcattrs"

    attrs = ["closure", "doc", "name", "defaults", "code", "globals", "dict"]

    def test(self):
        for attr in self.attrs:
            b = "a.func_%s" % attr
            a = "a.__%s__" % attr
            self.check(b, a)

            b = "self.foo.func_%s.foo_bar" % attr
            a = "self.foo.__%s__.foo_bar" % attr
            self.check(b, a)

    def test_unchanged(self):
        for attr in self.attrs:
            # A bare name (not an attribute access) must not be rewritten,
            # and already-converted dunder attributes stay as they are.
            s = "foo(func_%s + 5)" % attr
            self.unchanged(s)

            s = "f(foo.__%s__)" % attr
            self.unchanged(s)

            s = "f(foo.__%s__.foo)" % attr
            self.unchanged(s)
class Test_xreadlines(FixerTestCase):
    """Tests for the "xreadlines" fixer: f.xreadlines() -> f; the bare
    attribute f.xreadlines -> f.__iter__."""

    fixer = "xreadlines"

    def test_call(self):
        b = "for x in f.xreadlines(): pass"
        a = "for x in f: pass"
        self.check(b, a)

        b = "for x in foo().xreadlines(): pass"
        a = "for x in foo(): pass"
        self.check(b, a)

        b = "for x in (5 + foo()).xreadlines(): pass"
        a = "for x in (5 + foo()): pass"
        self.check(b, a)

    def test_attr_ref(self):
        b = "foo(f.xreadlines + 5)"
        a = "foo(f.__iter__ + 5)"
        self.check(b, a)

        b = "foo(f().xreadlines + 5)"
        a = "foo(f().__iter__ + 5)"
        self.check(b, a)

        b = "foo((5 + f()).xreadlines + 5)"
        a = "foo((5 + f()).__iter__ + 5)"
        self.check(b, a)

    def test_unchanged(self):
        # Calls with arguments (or a bare name) are not the file-object API.
        s = "for x in f.xreadlines(5): pass"
        self.unchanged(s)

        s = "for x in f.xreadlines(k=5): pass"
        self.unchanged(s)

        s = "for x in f.xreadlines(*k, **v): pass"
        self.unchanged(s)

        s = "foo(xreadlines)"
        self.unchanged(s)
class ImportsFixerTests:
    """Mixin of shared tests for fixers that rename whole modules.

    Subclasses supply ``self.modules``, a mapping of old module name to new.
    """

    def test_import_module(self):
        for old, new in self.modules.items():
            b = "import %s" % old
            a = "import %s" % new
            self.check(b, a)

            b = "import foo, %s, bar" % old
            a = "import foo, %s, bar" % new
            self.check(b, a)

    def test_import_from(self):
        for old, new in self.modules.items():
            b = "from %s import foo" % old
            a = "from %s import foo" % new
            self.check(b, a)

            b = "from %s import foo, bar" % old
            a = "from %s import foo, bar" % new
            self.check(b, a)

            b = "from %s import (yes, no)" % old
            a = "from %s import (yes, no)" % new
            self.check(b, a)

    def test_import_module_as(self):
        for old, new in self.modules.items():
            b = "import %s as foo_bar" % old
            a = "import %s as foo_bar" % new
            self.check(b, a)

            # NOTE(review): this pair repeats the previous one verbatim —
            # possibly an unintentional duplication; kept to preserve behavior.
            b = "import %s as foo_bar" % old
            a = "import %s as foo_bar" % new
            self.check(b, a)

    def test_import_from_as(self):
        for old, new in self.modules.items():
            b = "from %s import foo as bar" % old
            a = "from %s import foo as bar" % new
            self.check(b, a)

    def test_star(self):
        for old, new in self.modules.items():
            b = "from %s import *" % old
            a = "from %s import *" % new
            self.check(b, a)

    def test_import_module_usage(self):
        for old, new in self.modules.items():
            b = """
                import %s
                foo(%s.bar)
                """ % (old, old)
            a = """
                import %s
                foo(%s.bar)
                """ % (new, new)
            self.check(b, a)

            # A from-import binds only the member, so a bare use of the old
            # module name afterwards must NOT be renamed.
            b = """
                from %s import x
                %s = 23
                """ % (old, old)
            a = """
                from %s import x
                %s = 23
                """ % (new, old)
            self.check(b, a)

            s = """
                def f():
                    %s.method()
                """ % (old,)
            self.unchanged(s)

            # test nested usage
            b = """
                import %s
                %s.bar(%s.foo)
                """ % (old, old, old)
            a = """
                import %s
                %s.bar(%s.foo)
                """ % (new, new, new)
            self.check(b, a)

            # An attribute named like the module (x.<old>) is not a module use.
            b = """
                import %s
                x.%s
                """ % (old, old)
            a = """
                import %s
                x.%s
                """ % (new, old)
            self.check(b, a)
class Test_imports(FixerTestCase, ImportsFixerTests):
    """Tests for the "imports" fixer, driven by its full rename MAPPING."""

    fixer = "imports"
    from ..fixes.fix_imports import MAPPING as modules

    def test_multiple_imports(self):
        b = """import urlparse, cStringIO"""
        a = """import urllib.parse, io"""
        self.check(b, a)

    def test_multiple_imports_as(self):
        b = """
            import copy_reg as bar, HTMLParser as foo, urlparse
            s = urlparse.spam(bar.foo())
            """
        a = """
            import copyreg as bar, html.parser as foo, urllib.parse
            s = urllib.parse.spam(bar.foo())
            """
        self.check(b, a)
class Test_imports2(FixerTestCase, ImportsFixerTests):
    """Tests for the "imports2" fixer; all cases come from the shared mixin."""

    fixer = "imports2"
    from ..fixes.fix_imports2 import MAPPING as modules
class Test_imports_fixer_order(FixerTestCase, ImportsFixerTests):
    """Exercise the "imports" and "imports2" fixers together to check their
    interaction and relative run order."""

    def setUp(self):
        super(Test_imports_fixer_order, self).setUp(['imports', 'imports2'])
        from ..fixes.fix_imports2 import MAPPING as mapping2
        self.modules = mapping2.copy()
        from ..fixes.fix_imports import MAPPING as mapping1
        # For the dbm family, the fix_imports mapping is the one expected.
        for key in ('dbhash', 'dumbdbm', 'dbm', 'gdbm'):
            self.modules[key] = mapping1[key]

    def test_after_local_imports_refactoring(self):
        for fix in ("imports", "imports2"):
            self.fixer = fix
            self.assert_runs_after("import")
class Test_urllib(FixerTestCase):
    """Tests for the "urllib" fixer, which maps each old urllib/urllib2 member
    onto one of several replacement modules (MAPPING: old -> [(new, members)])."""

    fixer = "urllib"
    from ..fixes.fix_urllib import MAPPING as modules

    def test_import_module(self):
        for old, changes in self.modules.items():
            b = "import %s" % old
            a = "import %s" % ", ".join(map(itemgetter(0), changes))
            self.check(b, a)

    def test_import_from(self):
        for old, changes in self.modules.items():
            all_members = []
            for new, members in changes:
                for member in members:
                    all_members.append(member)
                    b = "from %s import %s" % (old, member)
                    a = "from %s import %s" % (new, member)
                    self.check(b, a)

                    s = "from foo import %s" % member
                    self.unchanged(s)

                b = "from %s import %s" % (old, ", ".join(members))
                a = "from %s import %s" % (new, ", ".join(members))
                self.check(b, a)

                s = "from foo import %s" % ", ".join(members)
                self.unchanged(s)

            # test the breaking of a module into multiple replacements
            b = "from %s import %s" % (old, ", ".join(all_members))
            a = "\n".join(["from %s import %s" % (new, ", ".join(members))
                           for (new, members) in changes])
            self.check(b, a)

    def test_import_module_as(self):
        for old in self.modules:
            s = "import %s as foo" % old
            self.warns_unchanged(s, "This module is now multiple modules")

    def test_import_from_as(self):
        for old, changes in self.modules.items():
            for new, members in changes:
                for member in members:
                    b = "from %s import %s as foo_bar" % (old, member)
                    a = "from %s import %s as foo_bar" % (new, member)
                    self.check(b, a)

                    b = "from %s import %s as blah, %s" % (old, member, member)
                    a = "from %s import %s as blah, %s" % (new, member, member)
                    self.check(b, a)

    def test_star(self):
        for old in self.modules:
            s = "from %s import *" % old
            self.warns_unchanged(s, "Cannot handle star imports")

    def test_indented(self):
        b = """
def foo():
    from urllib import urlencode, urlopen
"""
        a = """
def foo():
    from urllib.parse import urlencode
    from urllib.request import urlopen
"""
        self.check(b, a)

        b = """
def foo():
    other()
    from urllib import urlencode, urlopen
"""
        a = """
def foo():
    other()
    from urllib.parse import urlencode
    from urllib.request import urlopen
"""
        self.check(b, a)

    def test_import_module_usage(self):
        for old, changes in self.modules.items():
            for new, members in changes:
                for member in members:
                    # "import urllib" becomes an import of every replacement.
                    new_import = ", ".join([n for (n, mems)
                                            in self.modules[old]])
                    b = """
                        import %s
                        foo(%s.%s)
                        """ % (old, old, member)
                    a = """
                        import %s
                        foo(%s.%s)
                        """ % (new_import, new, member)
                    self.check(b, a)

                    b = """
                        import %s
                        %s.%s(%s.%s)
                        """ % (old, old, member, old, member)
                    a = """
                        import %s
                        %s.%s(%s.%s)
                        """ % (new_import, new, member, new, member)
                    self.check(b, a)
class Test_input(FixerTestCase):
    """Tests for the "input" fixer: input(...) -> eval(input(...))."""

    fixer = "input"

    def test_prefix_preservation(self):
        b = """x =   input(   )"""
        a = """x =   eval(input(   ))"""
        self.check(b, a)

        b = """x = input(   ''   )"""
        a = """x = eval(input(   ''   ))"""
        self.check(b, a)

    def test_trailing_comment(self):
        b = """x = input()  #  foo"""
        a = """x = eval(input())  #  foo"""
        self.check(b, a)

    def test_idempotency(self):
        # Already-converted calls must not be wrapped a second time.
        s = """x = eval(input())"""
        self.unchanged(s)

        s = """x = eval(input(''))"""
        self.unchanged(s)

        s = """x = eval(input(foo(5) + 9))"""
        self.unchanged(s)

    def test_1(self):
        b = """x = input()"""
        a = """x = eval(input())"""
        self.check(b, a)

    def test_2(self):
        b = """x = input('')"""
        a = """x = eval(input(''))"""
        self.check(b, a)

    def test_3(self):
        b = """x = input('prompt')"""
        a = """x = eval(input('prompt'))"""
        self.check(b, a)

    def test_4(self):
        b = """x = input(foo(5) + 9)"""
        a = """x = eval(input(foo(5) + 9))"""
        self.check(b, a)
class Test_tuple_params(FixerTestCase):
    """Tests for the "tuple_params" fixer: tuple parameters become a single
    xxx_todo_changeme argument unpacked in the body; lambdas use indexing."""

    fixer = "tuple_params"

    def test_unchanged_1(self):
        s = """def foo(): pass"""
        self.unchanged(s)

    def test_unchanged_2(self):
        s = """def foo(a, b, c): pass"""
        self.unchanged(s)

    def test_unchanged_3(self):
        s = """def foo(a=3, b=4, c=5): pass"""
        self.unchanged(s)

    def test_1(self):
        b = """
            def foo(((a, b), c)):
                x = 5"""
        a = """
            def foo(xxx_todo_changeme):
                ((a, b), c) = xxx_todo_changeme
                x = 5"""
        self.check(b, a)

    def test_2(self):
        b = """
            def foo(((a, b), c), d):
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, d):
                ((a, b), c) = xxx_todo_changeme
                x = 5"""
        self.check(b, a)

    def test_3(self):
        b = """
            def foo(((a, b), c), d) -> e:
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, d) -> e:
                ((a, b), c) = xxx_todo_changeme
                x = 5"""
        self.check(b, a)

    def test_semicolon(self):
        b = """
            def foo(((a, b), c)): x = 5; y = 7"""
        a = """
            def foo(xxx_todo_changeme): ((a, b), c) = xxx_todo_changeme; x = 5; y = 7"""
        self.check(b, a)

    def test_keywords(self):
        b = """
            def foo(((a, b), c), d, e=5) -> z:
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, d, e=5) -> z:
                ((a, b), c) = xxx_todo_changeme
                x = 5"""
        self.check(b, a)

    def test_varargs(self):
        b = """
            def foo(((a, b), c), d, *vargs, **kwargs) -> z:
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, d, *vargs, **kwargs) -> z:
                ((a, b), c) = xxx_todo_changeme
                x = 5"""
        self.check(b, a)

    def test_multi_1(self):
        b = """
            def foo(((a, b), c), (d, e, f)) -> z:
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, xxx_todo_changeme1) -> z:
                ((a, b), c) = xxx_todo_changeme
                (d, e, f) = xxx_todo_changeme1
                x = 5"""
        self.check(b, a)

    def test_multi_2(self):
        b = """
            def foo(x, ((a, b), c), d, (e, f, g), y) -> z:
                x = 5"""
        a = """
            def foo(x, xxx_todo_changeme, d, xxx_todo_changeme1, y) -> z:
                ((a, b), c) = xxx_todo_changeme
                (e, f, g) = xxx_todo_changeme1
                x = 5"""
        self.check(b, a)

    def test_docstring(self):
        # The unpacking assignments go after the docstring, not before it.
        b = """
            def foo(((a, b), c), (d, e, f)) -> z:
                "foo foo foo foo"
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, xxx_todo_changeme1) -> z:
                "foo foo foo foo"
                ((a, b), c) = xxx_todo_changeme
                (d, e, f) = xxx_todo_changeme1
                x = 5"""
        self.check(b, a)

    def test_lambda_no_change(self):
        s = """lambda x: x + 5"""
        self.unchanged(s)

    def test_lambda_parens_single_arg(self):
        b = """lambda (x): x + 5"""
        a = """lambda x: x + 5"""
        self.check(b, a)

        b = """lambda(x): x + 5"""
        a = """lambda x: x + 5"""
        self.check(b, a)

        b = """lambda ((((x)))): x + 5"""
        a = """lambda x: x + 5"""
        self.check(b, a)

        b = """lambda((((x)))): x + 5"""
        a = """lambda x: x + 5"""
        self.check(b, a)

    def test_lambda_simple(self):
        b = """lambda (x, y): x + f(y)"""
        a = """lambda x_y: x_y[0] + f(x_y[1])"""
        self.check(b, a)

        b = """lambda(x, y): x + f(y)"""
        a = """lambda x_y: x_y[0] + f(x_y[1])"""
        self.check(b, a)

        b = """lambda (((x, y))): x + f(y)"""
        a = """lambda x_y: x_y[0] + f(x_y[1])"""
        self.check(b, a)

        b = """lambda(((x, y))): x + f(y)"""
        a = """lambda x_y: x_y[0] + f(x_y[1])"""
        self.check(b, a)

    def test_lambda_one_tuple(self):
        b = """lambda (x,): x + f(x)"""
        a = """lambda x1: x1[0] + f(x1[0])"""
        self.check(b, a)

        b = """lambda (((x,))): x + f(x)"""
        a = """lambda x1: x1[0] + f(x1[0])"""
        self.check(b, a)

    def test_lambda_simple_multi_use(self):
        b = """lambda (x, y): x + x + f(x) + x"""
        a = """lambda x_y: x_y[0] + x_y[0] + f(x_y[0]) + x_y[0]"""
        self.check(b, a)

    def test_lambda_simple_reverse(self):
        b = """lambda (x, y): y + x"""
        a = """lambda x_y: x_y[1] + x_y[0]"""
        self.check(b, a)

    def test_lambda_nested(self):
        b = """lambda (x, (y, z)): x + y + z"""
        a = """lambda x_y_z: x_y_z[0] + x_y_z[1][0] + x_y_z[1][1]"""
        self.check(b, a)

        b = """lambda (((x, (y, z)))): x + y + z"""
        a = """lambda x_y_z: x_y_z[0] + x_y_z[1][0] + x_y_z[1][1]"""
        self.check(b, a)

    def test_lambda_nested_multi_use(self):
        b = """lambda (x, (y, z)): x + y + f(y)"""
        a = """lambda x_y_z: x_y_z[0] + x_y_z[1][0] + f(x_y_z[1][0])"""
        self.check(b, a)
class Test_methodattrs(FixerTestCase):
    """Tests for the "methodattrs" fixer: im_func/im_self -> __func__/__self__,
    and im_class -> __self__.__class__."""

    fixer = "methodattrs"

    attrs = ["func", "self", "class"]

    def test(self):
        for attr in self.attrs:
            b = "a.im_%s" % attr
            if attr == "class":
                a = "a.__self__.__class__"
            else:
                a = "a.__%s__" % attr
            self.check(b, a)

            b = "self.foo.im_%s.foo_bar" % attr
            if attr == "class":
                a = "self.foo.__self__.__class__.foo_bar"
            else:
                a = "self.foo.__%s__.foo_bar" % attr
            self.check(b, a)

    def test_unchanged(self):
        for attr in self.attrs:
            s = "foo(im_%s + 5)" % attr
            self.unchanged(s)

            s = "f(foo.__%s__)" % attr
            self.unchanged(s)

            s = "f(foo.__%s__.foo)" % attr
            self.unchanged(s)
class Test_next(FixerTestCase):
    """Tests for the "next" fixer: x.next() -> next(x), the method
    ``def next(self)`` -> ``def __next__(self)``, and a warning whenever the
    name ``next`` may be shadowed (assignment, import, for-target, global)."""

    fixer = "next"

    def test_1(self):
        b = """it.next()"""
        a = """next(it)"""
        self.check(b, a)

    def test_2(self):
        b = """a.b.c.d.next()"""
        a = """next(a.b.c.d)"""
        self.check(b, a)

    def test_3(self):
        b = """(a + b).next()"""
        a = """next((a + b))"""
        self.check(b, a)

    def test_4(self):
        b = """a().next()"""
        a = """next(a())"""
        self.check(b, a)

    def test_5(self):
        b = """a().next() + b"""
        a = """next(a()) + b"""
        self.check(b, a)

    def test_6(self):
        b = """c(      a().next() + b)"""
        a = """c(      next(a()) + b)"""
        self.check(b, a)

    def test_prefix_preservation_1(self):
        b = """
            for a in b:
                foo(a)
                a.next()
            """
        a = """
            for a in b:
                foo(a)
                next(a)
            """
        self.check(b, a)

    def test_prefix_preservation_2(self):
        b = """
            for a in b:
                foo(a) # abc
                # def
                a.next()
            """
        a = """
            for a in b:
                foo(a) # abc
                # def
                next(a)
            """
        self.check(b, a)

    def test_prefix_preservation_3(self):
        # With "next" shadowed, fall back to the __next__ method call.
        b = """
            next = 5
            for a in b:
                foo(a)
                a.next()
            """
        a = """
            next = 5
            for a in b:
                foo(a)
                a.__next__()
            """
        self.check(b, a, ignore_warnings=True)

    def test_prefix_preservation_4(self):
        b = """
            next = 5
            for a in b:
                foo(a) # abc
                # def
                a.next()
            """
        a = """
            next = 5
            for a in b:
                foo(a) # abc
                # def
                a.__next__()
            """
        self.check(b, a, ignore_warnings=True)

    def test_prefix_preservation_5(self):
        b = """
            next = 5
            for a in b:
                foo(foo(a), # abc
                    a.next())
            """
        a = """
            next = 5
            for a in b:
                foo(foo(a), # abc
                    a.__next__())
            """
        self.check(b, a, ignore_warnings=True)

    def test_prefix_preservation_6(self):
        b = """
            for a in b:
                foo(foo(a), # abc
                    a.next())
            """
        a = """
            for a in b:
                foo(foo(a), # abc
                    next(a))
            """
        self.check(b, a)

    def test_method_1(self):
        b = """
            class A:
                def next(self):
                    pass
            """
        a = """
            class A:
                def __next__(self):
                    pass
            """
        self.check(b, a)

    def test_method_2(self):
        b = """
            class A(object):
                def next(self):
                    pass
            """
        a = """
            class A(object):
                def __next__(self):
                    pass
            """
        self.check(b, a)

    def test_method_3(self):
        b = """
            class A:
                def next(x):
                    pass
            """
        a = """
            class A:
                def __next__(x):
                    pass
            """
        self.check(b, a)

    def test_method_4(self):
        b = """
            class A:
                def __init__(self, foo):
                    self.foo = foo
                def next(self):
                    pass
                def __iter__(self):
                    return self
            """
        a = """
            class A:
                def __init__(self, foo):
                    self.foo = foo
                def __next__(self):
                    pass
                def __iter__(self):
                    return self
            """
        self.check(b, a)

    def test_method_unchanged(self):
        # A next() method with extra arguments is not the iterator protocol.
        s = """
            class A:
                def next(self, a, b):
                    pass
            """
        self.unchanged(s)

    def test_shadowing_assign_simple(self):
        s = """
            next = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_shadowing_assign_tuple_1(self):
        s = """
            (next, a) = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_shadowing_assign_tuple_2(self):
        s = """
            (a, (b, (next, c)), a) = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_shadowing_assign_list_1(self):
        s = """
            [next, a] = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_shadowing_assign_list_2(self):
        s = """
            [a, [b, [next, c]], a] = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_builtin_assign(self):
        s = """
            def foo():
                __builtin__.next = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_builtin_assign_in_tuple(self):
        s = """
            def foo():
                (a, __builtin__.next) = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_builtin_assign_in_list(self):
        s = """
            def foo():
                [a, __builtin__.next] = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_assign_to_next(self):
        # Assigning to an attribute named "next" does not shadow the builtin.
        s = """
            def foo():
                A.next = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.unchanged(s)

    def test_assign_to_next_in_tuple(self):
        s = """
            def foo():
                (a, A.next) = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.unchanged(s)

    def test_assign_to_next_in_list(self):
        s = """
            def foo():
                [a, A.next] = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.unchanged(s)

    def test_shadowing_import_1(self):
        s = """
            import foo.bar as next
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_shadowing_import_2(self):
        s = """
            import bar, bar.foo as next
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_shadowing_import_3(self):
        s = """
            import bar, bar.foo as next, baz
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_shadowing_import_from_1(self):
        s = """
            from x import next
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_shadowing_import_from_2(self):
        s = """
            from x.a import next
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_shadowing_import_from_3(self):
        s = """
            from x import a, next, b
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_shadowing_import_from_4(self):
        s = """
            from x.a import a, next, b
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_shadowing_funcdef_1(self):
        s = """
            def next(a):
                pass
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_shadowing_funcdef_2(self):
        # With next() shadowed, the method rename still happens, but the call
        # site uses the explicit __next__() form and a warning is emitted.
        b = """
            def next(a):
                pass
            class A:
                def next(self):
                    pass
            it.next()
            """
        a = """
            def next(a):
                pass
            class A:
                def __next__(self):
                    pass
            it.__next__()
            """
        self.warns(b, a, "Calls to builtin next() possibly shadowed")

    def test_shadowing_global_1(self):
        s = """
            def f():
                global next
                next = 5
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_shadowing_global_2(self):
        s = """
            def f():
                global a, next, b
                next = 5
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_shadowing_for_simple(self):
        s = """
            for next in it():
                pass
            b = 5
            c = 6
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_shadowing_for_tuple_1(self):
        s = """
            for next, b in it():
                pass
            b = 5
            c = 6
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_shadowing_for_tuple_2(self):
        s = """
            for a, (next, c), b in it():
                pass
            b = 5
            c = 6
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")

    def test_noncall_access_1(self):
        b = """gnext = g.next"""
        a = """gnext = g.__next__"""
        self.check(b, a)

    def test_noncall_access_2(self):
        b = """f(g.next + 5)"""
        a = """f(g.__next__ + 5)"""
        self.check(b, a)

    def test_noncall_access_3(self):
        b = """f(g().next + 5)"""
        a = """f(g().__next__ + 5)"""
        self.check(b, a)
class Test_nonzero(FixerTestCase):
    """Tests for the "nonzero" fixer: __nonzero__ -> __bool__."""

    fixer = "nonzero"

    def test_1(self):
        b = """
            class A:
                def __nonzero__(self):
                    pass
            """
        a = """
            class A:
                def __bool__(self):
                    pass
            """
        self.check(b, a)

    def test_2(self):
        b = """
            class A(object):
                def __nonzero__(self):
                    pass
            """
        a = """
            class A(object):
                def __bool__(self):
                    pass
            """
        self.check(b, a)

    def test_unchanged_1(self):
        s = """
            class A(object):
                def __bool__(self):
                    pass
            """
        self.unchanged(s)

    def test_unchanged_2(self):
        # __nonzero__ with extra arguments is not the protocol method.
        s = """
            class A(object):
                def __nonzero__(self, a):
                    pass
            """
        self.unchanged(s)

    def test_unchanged_func(self):
        # Only methods are renamed, not module-level functions.
        s = """
            def __nonzero__(self):
                pass
            """
        self.unchanged(s)
class Test_numliterals(FixerTestCase):
    """Tests for the "numliterals" fixer: 0NNN -> 0oNNN and the L/l suffix
    on long literals is dropped."""

    fixer = "numliterals"

    def test_octal_1(self):
        b = """0755"""
        a = """0o755"""
        self.check(b, a)

    def test_long_int_1(self):
        b = """a = 12L"""
        a = """a = 12"""
        self.check(b, a)

    def test_long_int_2(self):
        b = """a = 12l"""
        a = """a = 12"""
        self.check(b, a)

    def test_long_hex(self):
        b = """b = 0x12l"""
        a = """b = 0x12"""
        self.check(b, a)

    def test_comments_and_spacing(self):
        b = """b =   0x12L"""
        a = """b =   0x12"""
        self.check(b, a)

        b = """b = 0755 # spam"""
        a = """b = 0o755 # spam"""
        self.check(b, a)

    def test_unchanged_int(self):
        s = """5"""
        self.unchanged(s)

    def test_unchanged_float(self):
        s = """5.0"""
        self.unchanged(s)

    def test_unchanged_octal(self):
        s = """0o755"""
        self.unchanged(s)

    def test_unchanged_hex(self):
        s = """0xABC"""
        self.unchanged(s)

    def test_unchanged_exp(self):
        s = """5.0e10"""
        self.unchanged(s)

    def test_unchanged_complex_int(self):
        s = """5 + 4j"""
        self.unchanged(s)

    def test_unchanged_complex_float(self):
        s = """5.4 + 4.9j"""
        self.unchanged(s)

    def test_unchanged_complex_bare(self):
        s = """4j"""
        self.unchanged(s)

        s = """4.4j"""
        self.unchanged(s)
class Test_renames(FixerTestCase):
    """Tests for the 'renames' fixer (e.g. sys.maxint -> sys.maxsize)."""

    fixer = "renames"

    # Mapping of module name -> (old attribute, new attribute) under test.
    modules = {"sys": ("maxint", "maxsize"),
               }

    def test_import_from(self):
        for mod, (old, new) in self.modules.items():
            b = "from %s import %s" % (mod, old)
            a = "from %s import %s" % (mod, new)
            self.check(b, a)

            # The same attribute name from an unrelated module is untouched.
            s = "from foo import %s" % old
            self.unchanged(s)

    def test_import_from_as(self):
        for mod, (old, new) in self.modules.items():
            b = "from %s import %s as foo_bar" % (mod, old)
            a = "from %s import %s as foo_bar" % (mod, new)
            self.check(b, a)

    def test_import_module_usage(self):
        for mod, (old, new) in self.modules.items():
            b = """
                import %s
                foo(%s, %s.%s)
                """ % (mod, mod, mod, old)
            a = """
                import %s
                foo(%s, %s.%s)
                """ % (mod, mod, mod, new)
            self.check(b, a)

    def XXX_test_from_import_usage(self):
        # not implemented yet
        for mod, (old, new) in self.modules.items():
            b = """
                from %s import %s
                foo(%s, %s)
                """ % (mod, old, mod, old)
            a = """
                from %s import %s
                foo(%s, %s)
                """ % (mod, new, mod, new)
            self.check(b, a)
class Test_unicode(FixerTestCase):
    """Tests for the 'unicode' fixer: unicode->str, unichr->chr, u/ur prefix removal."""

    fixer = "unicode"

    def test_whitespace(self):
        # Whitespace inside and around the call must be preserved.
        b = """unicode( x)"""
        a = """str( x)"""
        self.check(b, a)

        b = """ unicode(x )"""
        a = """ str(x )"""
        self.check(b, a)

        b = """ u'h'"""
        a = """ 'h'"""
        self.check(b, a)

    def test_unicode_call(self):
        b = """unicode(x, y, z)"""
        a = """str(x, y, z)"""
        self.check(b, a)

    def test_unichr(self):
        b = """unichr(u'h')"""
        a = """chr('h')"""
        self.check(b, a)

    def test_unicode_literal_1(self):
        b = '''u"x"'''
        a = '''"x"'''
        self.check(b, a)

    def test_unicode_literal_2(self):
        b = """ur'x'"""
        a = """r'x'"""
        self.check(b, a)

    def test_unicode_literal_3(self):
        # Uppercase prefixes are handled too.
        b = """UR'''x''' """
        a = """R'''x''' """
        self.check(b, a)

    def test_native_literal_escape_u(self):
        # A native (unprefixed) Py2 str literal has no \\u escapes, so when it
        # becomes a Py3 str literal the backslashes must be doubled.
        b = """'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
        a = """'\\\\\\\\u20ac\\\\U0001d121\\\\u20ac'"""
        self.check(b, a)

        # Raw literals are left untouched.
        b = """r'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
        a = """r'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
        self.check(b, a)

    def test_bytes_literal_escape_u(self):
        # Bytes literals never interpret \\u, so nothing changes.
        b = """b'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
        a = """b'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
        self.check(b, a)

        b = """br'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
        a = """br'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
        self.check(b, a)

    def test_unicode_literal_escape_u(self):
        # u-prefixed literals already interpret \\u; only the prefix is dropped.
        b = """u'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
        a = """'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
        self.check(b, a)

        b = """ur'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
        a = """r'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
        self.check(b, a)

    def test_native_unicode_literal_escape_u(self):
        # With unicode_literals in effect, native literals already have unicode
        # semantics, so no re-escaping is needed.
        f = 'from __future__ import unicode_literals\n'
        b = f + """'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
        a = f + """'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
        self.check(b, a)

        b = f + """r'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
        a = f + """r'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
        self.check(b, a)
class Test_callable(FixerTestCase):
    """Tests for the 'callable' fixer: callable(x) -> isinstance(x, collections.Callable),
    inserting the needed 'import collections' at the top of the module."""

    fixer = "callable"

    def test_prefix_preservation(self):
        b = """callable(    x)"""
        a = """import collections\nisinstance(    x, collections.Callable)"""
        self.check(b, a)

        b = """if     callable(x): pass"""
        a = """import collections
if     isinstance(x, collections.Callable): pass"""
        self.check(b, a)

    def test_callable_call(self):
        b = """callable(x)"""
        a = """import collections\nisinstance(x, collections.Callable)"""
        self.check(b, a)

    def test_global_import(self):
        # The [1:] slices strip the leading newline so the code starts at col 0.
        b = """
def spam(foo):
    callable(foo)"""[1:]
        a = """
import collections
def spam(foo):
    isinstance(foo, collections.Callable)"""[1:]
        self.check(b, a)

        b = """
import collections
def spam(foo):
    callable(foo)"""[1:]
        # same output if it was already imported
        self.check(b, a)

        # A star-import of collections is not enough; the plain import is added.
        b = """
from collections import *
def spam(foo):
    callable(foo)"""[1:]
        a = """
from collections import *
import collections
def spam(foo):
    isinstance(foo, collections.Callable)"""[1:]
        self.check(b, a)

        b = """
do_stuff()
do_some_other_stuff()
assert callable(do_stuff)"""[1:]
        a = """
import collections
do_stuff()
do_some_other_stuff()
assert isinstance(do_stuff, collections.Callable)"""[1:]
        self.check(b, a)

        b = """
if isinstance(do_stuff, Callable):
    assert callable(do_stuff)
    do_stuff(do_stuff)
    if not callable(do_stuff):
        exit(1)
    else:
        assert callable(do_stuff)
else:
    assert not callable(do_stuff)"""[1:]
        a = """
import collections
if isinstance(do_stuff, Callable):
    assert isinstance(do_stuff, collections.Callable)
    do_stuff(do_stuff)
    if not isinstance(do_stuff, collections.Callable):
        exit(1)
    else:
        assert isinstance(do_stuff, collections.Callable)
else:
    assert not isinstance(do_stuff, collections.Callable)"""[1:]
        self.check(b, a)

    def test_callable_should_not_change(self):
        # Calls whose argument list can't match builtin callable() are untouched.
        a = """callable(*x)"""
        self.unchanged(a)

        a = """callable(x, y)"""
        self.unchanged(a)

        a = """callable(x, kw=y)"""
        self.unchanged(a)

        a = """callable()"""
        self.unchanged(a)
class Test_filter(FixerTestCase):
    """Tests for the 'filter' fixer: wraps filter() in list() where a list is
    needed, or rewrites it as a comprehension; leaves iterator contexts alone."""

    fixer = "filter"

    def test_prefix_preservation(self):
        b = """x = filter(    foo, 'abc'   )"""
        a = """x = list(filter(    foo, 'abc'   ))"""
        self.check(b, a)

        b = """x = filter(  None , 'abc'  )"""
        a = """x = [_f for _f in 'abc' if _f]"""
        self.check(b, a)

    def test_filter_basic(self):
        # filter(None, ...) becomes an identity-truth comprehension.
        b = """x = filter(None, 'abc')"""
        a = """x = [_f for _f in 'abc' if _f]"""
        self.check(b, a)

        b = """x = len(filter(f, 'abc'))"""
        a = """x = len(list(filter(f, 'abc')))"""
        self.check(b, a)

        b = """x = filter(lambda x: x%2 == 0, range(10))"""
        a = """x = [x for x in range(10) if x%2 == 0]"""
        self.check(b, a)

        # Note the parens around x
        b = """x = filter(lambda (x): x%2 == 0, range(10))"""
        a = """x = [x for x in range(10) if x%2 == 0]"""
        self.check(b, a)

        # XXX This (rare) case is not supported
##         b = """x = filter(f, 'abc')[0]"""
##         a = """x = list(filter(f, 'abc'))[0]"""
##         self.check(b, a)

    def test_filter_nochange(self):
        # Contexts that consume the iterator immediately need no list() wrap.
        a = """b.join(filter(f, 'abc'))"""
        self.unchanged(a)

        a = """(a + foo(5)).join(filter(f, 'abc'))"""
        self.unchanged(a)

        a = """iter(filter(f, 'abc'))"""
        self.unchanged(a)

        a = """list(filter(f, 'abc'))"""
        self.unchanged(a)

        a = """list(filter(f, 'abc'))[0]"""
        self.unchanged(a)

        a = """set(filter(f, 'abc'))"""
        self.unchanged(a)

        a = """set(filter(f, 'abc')).pop()"""
        self.unchanged(a)

        a = """tuple(filter(f, 'abc'))"""
        self.unchanged(a)

        a = """any(filter(f, 'abc'))"""
        self.unchanged(a)

        a = """all(filter(f, 'abc'))"""
        self.unchanged(a)

        a = """sum(filter(f, 'abc'))"""
        self.unchanged(a)

        a = """sorted(filter(f, 'abc'))"""
        self.unchanged(a)

        a = """sorted(filter(f, 'abc'), key=blah)"""
        self.unchanged(a)

        a = """sorted(filter(f, 'abc'), key=blah)[0]"""
        self.unchanged(a)

        a = """enumerate(filter(f, 'abc'))"""
        self.unchanged(a)

        a = """enumerate(filter(f, 'abc'), start=1)"""
        self.unchanged(a)

        a = """for i in filter(f, 'abc'): pass"""
        self.unchanged(a)

        a = """[x for x in filter(f, 'abc')]"""
        self.unchanged(a)

        a = """(x for x in filter(f, 'abc'))"""
        self.unchanged(a)

    def test_future_builtins(self):
        # filter imported from future_builtins already has Py3 semantics.
        a = "from future_builtins import spam, filter; filter(f, 'ham')"
        self.unchanged(a)

        b = """from future_builtins import spam; x = filter(f, 'abc')"""
        a = """from future_builtins import spam; x = list(filter(f, 'abc'))"""
        self.check(b, a)

        a = "from future_builtins import *; filter(f, 'ham')"
        self.unchanged(a)
class Test_map(FixerTestCase):
    """Tests for the 'map' fixer: wraps map() in list() where a list is needed,
    or rewrites it as a comprehension; leaves iterator contexts alone."""

    fixer = "map"

    def check(self, b, a):
        # When map comes from future_builtins it must be left alone.
        self.unchanged("from future_builtins import map; " + b, a)
        super(Test_map, self).check(b, a)

    def test_prefix_preservation(self):
        b = """x = map(   f,    'abc'   )"""
        a = """x = list(map(   f,    'abc'   ))"""
        self.check(b, a)

    def test_trailing_comment(self):
        b = """x = map(f, 'abc')   #   foo"""
        a = """x = list(map(f, 'abc'))   #   foo"""
        self.check(b, a)

    def test_None_with_multiple_arguments(self):
        s = """x = map(None, a, b, c)"""
        self.warns_unchanged(s, "cannot convert map(None, ...) with "
                             "multiple arguments")

    def test_map_basic(self):
        b = """x = map(f, 'abc')"""
        a = """x = list(map(f, 'abc'))"""
        self.check(b, a)

        b = """x = len(map(f, 'abc', 'def'))"""
        a = """x = len(list(map(f, 'abc', 'def')))"""
        self.check(b, a)

        # map(None, seq) with one sequence is just list(seq).
        b = """x = map(None, 'abc')"""
        a = """x = list('abc')"""
        self.check(b, a)

        b = """x = map(lambda x: x+1, range(4))"""
        a = """x = [x+1 for x in range(4)]"""
        self.check(b, a)

        # Note the parens around x
        b = """x = map(lambda (x): x+1, range(4))"""
        a = """x = [x+1 for x in range(4)]"""
        self.check(b, a)

        # A bare statement-level map() is wrapped, with a warning.
        b = """
            foo()
            # foo
            map(f, x)
            """
        a = """
            foo()
            # foo
            list(map(f, x))
            """
        self.warns(b, a, "You should use a for loop here")

        # XXX This (rare) case is not supported
##         b = """x = map(f, 'abc')[0]"""
##         a = """x = list(map(f, 'abc'))[0]"""
##         self.check(b, a)

    def test_map_nochange(self):
        # Contexts that consume the iterator immediately need no list() wrap.
        a = """b.join(map(f, 'abc'))"""
        self.unchanged(a)

        a = """(a + foo(5)).join(map(f, 'abc'))"""
        self.unchanged(a)

        a = """iter(map(f, 'abc'))"""
        self.unchanged(a)

        a = """list(map(f, 'abc'))"""
        self.unchanged(a)

        a = """list(map(f, 'abc'))[0]"""
        self.unchanged(a)

        a = """set(map(f, 'abc'))"""
        self.unchanged(a)

        a = """set(map(f, 'abc')).pop()"""
        self.unchanged(a)

        a = """tuple(map(f, 'abc'))"""
        self.unchanged(a)

        a = """any(map(f, 'abc'))"""
        self.unchanged(a)

        a = """all(map(f, 'abc'))"""
        self.unchanged(a)

        a = """sum(map(f, 'abc'))"""
        self.unchanged(a)

        a = """sorted(map(f, 'abc'))"""
        self.unchanged(a)

        a = """sorted(map(f, 'abc'), key=blah)"""
        self.unchanged(a)

        a = """sorted(map(f, 'abc'), key=blah)[0]"""
        self.unchanged(a)

        a = """enumerate(map(f, 'abc'))"""
        self.unchanged(a)

        a = """enumerate(map(f, 'abc'), start=1)"""
        self.unchanged(a)

        a = """for i in map(f, 'abc'): pass"""
        self.unchanged(a)

        a = """[x for x in map(f, 'abc')]"""
        self.unchanged(a)

        a = """(x for x in map(f, 'abc'))"""
        self.unchanged(a)

    def test_future_builtins(self):
        a = "from future_builtins import spam, map, eggs; map(f, 'ham')"
        self.unchanged(a)

        b = """from future_builtins import spam, eggs; x = map(f, 'abc')"""
        a = """from future_builtins import spam, eggs; x = list(map(f, 'abc'))"""
        self.check(b, a)

        a = "from future_builtins import *; map(f, 'ham')"
        self.unchanged(a)
class Test_zip(FixerTestCase):
    """Tests for the 'zip' fixer: wraps zip() in list() where a list is needed;
    leaves iterator contexts alone."""

    fixer = "zip"

    def check(self, b, a):
        # When zip comes from future_builtins it must be left alone.
        self.unchanged("from future_builtins import zip; " + b, a)
        super(Test_zip, self).check(b, a)

    def test_zip_basic(self):
        b = """x = zip(a, b, c)"""
        a = """x = list(zip(a, b, c))"""
        self.check(b, a)

        b = """x = len(zip(a, b))"""
        a = """x = len(list(zip(a, b)))"""
        self.check(b, a)

    def test_zip_nochange(self):
        # Contexts that consume the iterator immediately need no list() wrap.
        a = """b.join(zip(a, b))"""
        self.unchanged(a)

        a = """(a + foo(5)).join(zip(a, b))"""
        self.unchanged(a)

        a = """iter(zip(a, b))"""
        self.unchanged(a)

        a = """list(zip(a, b))"""
        self.unchanged(a)

        a = """list(zip(a, b))[0]"""
        self.unchanged(a)

        a = """set(zip(a, b))"""
        self.unchanged(a)

        a = """set(zip(a, b)).pop()"""
        self.unchanged(a)

        a = """tuple(zip(a, b))"""
        self.unchanged(a)

        a = """any(zip(a, b))"""
        self.unchanged(a)

        a = """all(zip(a, b))"""
        self.unchanged(a)

        a = """sum(zip(a, b))"""
        self.unchanged(a)

        a = """sorted(zip(a, b))"""
        self.unchanged(a)

        a = """sorted(zip(a, b), key=blah)"""
        self.unchanged(a)

        a = """sorted(zip(a, b), key=blah)[0]"""
        self.unchanged(a)

        a = """enumerate(zip(a, b))"""
        self.unchanged(a)

        a = """enumerate(zip(a, b), start=1)"""
        self.unchanged(a)

        a = """for i in zip(a, b): pass"""
        self.unchanged(a)

        a = """[x for x in zip(a, b)]"""
        self.unchanged(a)

        a = """(x for x in zip(a, b))"""
        self.unchanged(a)

    def test_future_builtins(self):
        a = "from future_builtins import spam, zip, eggs; zip(a, b)"
        self.unchanged(a)

        b = """from future_builtins import spam, eggs; x = zip(a, b)"""
        a = """from future_builtins import spam, eggs; x = list(zip(a, b))"""
        self.check(b, a)

        a = "from future_builtins import *; zip(a, b)"
        self.unchanged(a)
class Test_standarderror(FixerTestCase):
    """Tests for the 'standarderror' fixer: rewrites StandardError to Exception."""

    fixer = "standarderror"

    def test(self):
        """Each StandardError usage becomes the equivalent Exception usage."""
        cases = [
            ("""x = StandardError()""", """x = Exception()"""),
            ("""x = StandardError(a, b, c)""", """x = Exception(a, b, c)"""),
            ("""f(2 + StandardError(a, b, c))""", """f(2 + Exception(a, b, c))"""),
        ]
        for before, after in cases:
            self.check(before, after)
class Test_types(FixerTestCase):
    """Tests for the 'types' fixer: maps types.XxxType names to builtin equivalents."""

    fixer = "types"

    def test_basic_types_convert(self):
        """Each types.XxxType reference is replaced with the matching builtin."""
        conversions = [
            ("""types.StringType""", """bytes"""),
            ("""types.DictType""", """dict"""),
            ("""types . IntType""", """int"""),
            ("""types.ListType""", """list"""),
            ("""types.LongType""", """int"""),
            ("""types.NoneType""", """type(None)"""),
            ("types.StringTypes", "(str,)"),
        ]
        for before, after in conversions:
            self.check(before, after)
class Test_idioms(FixerTestCase):
    """Tests for the optional 'idioms' fixer: 'while 1' -> 'while True',
    type comparisons -> isinstance(), and the list()/sort() -> sorted() idiom."""

    fixer = "idioms"

    def test_while(self):
        b = """while 1: foo()"""
        a = """while True: foo()"""
        self.check(b, a)

        b = """while   1: foo()"""
        a = """while   True: foo()"""
        self.check(b, a)

        b = """
            while 1:
                foo()
            """
        a = """
            while True:
                foo()
            """
        self.check(b, a)

    def test_while_unchanged(self):
        # Only the literal 1 is rewritten; other conditions stay as-is.
        s = """while 11: foo()"""
        self.unchanged(s)

        s = """while 0: foo()"""
        self.unchanged(s)

        s = """while foo(): foo()"""
        self.unchanged(s)

        s = """while []: foo()"""
        self.unchanged(s)

    def test_eq_simple(self):
        b = """type(x) == T"""
        a = """isinstance(x, T)"""
        self.check(b, a)

        b = """if type(x) == T: pass"""
        a = """if isinstance(x, T): pass"""
        self.check(b, a)

    def test_eq_reverse(self):
        b = """T == type(x)"""
        a = """isinstance(x, T)"""
        self.check(b, a)

        b = """if T == type(x): pass"""
        a = """if isinstance(x, T): pass"""
        self.check(b, a)

    def test_eq_expression(self):
        b = """type(x+y) == d.get('T')"""
        a = """isinstance(x+y, d.get('T'))"""
        self.check(b, a)

        b = """type( x + y) == d.get('T')"""
        a = """isinstance(x + y, d.get('T'))"""
        self.check(b, a)

    def test_is_simple(self):
        b = """type(x) is T"""
        a = """isinstance(x, T)"""
        self.check(b, a)

        b = """if type(x) is T: pass"""
        a = """if isinstance(x, T): pass"""
        self.check(b, a)

    def test_is_reverse(self):
        b = """T is type(x)"""
        a = """isinstance(x, T)"""
        self.check(b, a)

        b = """if T is type(x): pass"""
        a = """if isinstance(x, T): pass"""
        self.check(b, a)

    def test_is_expression(self):
        b = """type(x+y) is d.get('T')"""
        a = """isinstance(x+y, d.get('T'))"""
        self.check(b, a)

        b = """type( x + y) is d.get('T')"""
        a = """isinstance(x + y, d.get('T'))"""
        self.check(b, a)

    def test_is_not_simple(self):
        b = """type(x) is not T"""
        a = """not isinstance(x, T)"""
        self.check(b, a)

        b = """if type(x) is not T: pass"""
        a = """if not isinstance(x, T): pass"""
        self.check(b, a)

    def test_is_not_reverse(self):
        b = """T is not type(x)"""
        a = """not isinstance(x, T)"""
        self.check(b, a)

        b = """if T is not type(x): pass"""
        a = """if not isinstance(x, T): pass"""
        self.check(b, a)

    def test_is_not_expression(self):
        b = """type(x+y) is not d.get('T')"""
        a = """not isinstance(x+y, d.get('T'))"""
        self.check(b, a)

        b = """type( x + y) is not d.get('T')"""
        a = """not isinstance(x + y, d.get('T'))"""
        self.check(b, a)

    def test_ne_simple(self):
        b = """type(x) != T"""
        a = """not isinstance(x, T)"""
        self.check(b, a)

        b = """if type(x) != T: pass"""
        a = """if not isinstance(x, T): pass"""
        self.check(b, a)

    def test_ne_reverse(self):
        b = """T != type(x)"""
        a = """not isinstance(x, T)"""
        self.check(b, a)

        b = """if T != type(x): pass"""
        a = """if not isinstance(x, T): pass"""
        self.check(b, a)

    def test_ne_expression(self):
        b = """type(x+y) != d.get('T')"""
        a = """not isinstance(x+y, d.get('T'))"""
        self.check(b, a)

        b = """type( x + y) != d.get('T')"""
        a = """not isinstance(x + y, d.get('T'))"""
        self.check(b, a)

    def test_type_unchanged(self):
        # type(x) used for something other than a comparison is untouched.
        a = """type(x).__name__"""
        self.unchanged(a)

    def test_sort_list_call(self):
        b = """
            v = list(t)
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(t)
            foo(v)
            """
        self.check(b, a)

        b = """
            v = list(foo(b) + d)
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(foo(b) + d)
            foo(v)
            """
        self.check(b, a)

        b = """
            while x:
                v = list(t)
                v.sort()
                foo(v)
            """
        a = """
            while x:
                v = sorted(t)
                foo(v)
            """
        self.check(b, a)

        # Comments between the two statements are preserved.
        b = """
            v = list(t)
            # foo
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(t)
            # foo
            foo(v)
            """
        self.check(b, a)

        b = r"""
            v = list(   t)
            v.sort()
            foo(v)
            """
        a = r"""
            v = sorted(   t)
            foo(v)
            """
        self.check(b, a)

        b = r"""
            try:
                m = list(s)
                m.sort()
            except: pass
            """
        a = r"""
            try:
                m = sorted(s)
            except: pass
            """
        self.check(b, a)

        b = r"""
            try:
                m = list(s)
                # foo
                m.sort()
            except: pass
            """
        a = r"""
            try:
                m = sorted(s)
                # foo
            except: pass
            """
        self.check(b, a)

        b = r"""
            m = list(s)
            # more comments
            m.sort()"""
        a = r"""
            m = sorted(s)
            # more comments"""
        self.check(b, a)

    def test_sort_simple_expr(self):
        b = """
            v = t
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(t)
            foo(v)
            """
        self.check(b, a)

        b = """
            v = foo(b)
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(foo(b))
            foo(v)
            """
        self.check(b, a)

        b = """
            v = b.keys()
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(b.keys())
            foo(v)
            """
        self.check(b, a)

        b = """
            v = foo(b) + d
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(foo(b) + d)
            foo(v)
            """
        self.check(b, a)

        b = """
            while x:
                v = t
                v.sort()
                foo(v)
            """
        a = """
            while x:
                v = sorted(t)
                foo(v)
            """
        self.check(b, a)

        b = """
            v = t
            # foo
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(t)
            # foo
            foo(v)
            """
        self.check(b, a)

        b = r"""
            v = t
            v.sort()
            foo(v)
            """
        a = r"""
            v = sorted(t)
            foo(v)
            """
        self.check(b, a)

    def test_sort_unchanged(self):
        # Different variable sorted, or sort() with arguments: not the idiom.
        s = """
            v = list(t)
            w.sort()
            foo(w)
            """
        self.unchanged(s)

        s = """
            v = list(t)
            v.sort(u)
            foo(v)
            """
        self.unchanged(s)
class Test_basestring(FixerTestCase):
    """Tests for the 'basestring' fixer: rewrites basestring to str."""

    fixer = "basestring"

    def test_basestring(self):
        """basestring in an isinstance check becomes str."""
        self.check("""isinstance(x, basestring)""", """isinstance(x, str)""")
class Test_buffer(FixerTestCase):
    """Tests for the 'buffer' fixer: rewrites buffer(...) to memoryview(...)."""

    fixer = "buffer"

    def test_buffer(self):
        """A plain buffer() call becomes memoryview()."""
        self.check("""x = buffer(y)""", """x = memoryview(y)""")

    def test_slicing(self):
        """The rewrite also applies when the result is sliced."""
        self.check("""buffer(y)[4:5]""", """memoryview(y)[4:5]""")
class Test_future(FixerTestCase):
    """Tests for the 'future' fixer, which removes __future__ imports."""

    fixer = "future"

    def test_future(self):
        b = """from __future__ import braces"""
        a = """"""
        self.check(b, a)

        # Surrounding comments survive the removal.
        b = """# comment\nfrom __future__ import braces"""
        a = """# comment\n"""
        self.check(b, a)

        b = """from __future__ import braces\n# comment"""
        a = """\n# comment"""
        self.check(b, a)

    def test_run_order(self):
        # Must run after the 'print' fixer (which may need the __future__ line).
        self.assert_runs_after('print')
class Test_itertools(FixerTestCase):
    """Tests for the 'itertools' fixer: imap/ifilter/izip -> builtins,
    ifilterfalse/izip_longest -> filterfalse/zip_longest."""

    fixer = "itertools"

    def checkall(self, before, after):
        # Because we need to check with and without the itertools prefix
        # and on each of the three functions, these loops make it all
        # much easier
        for i in ('itertools.', ''):
            for f in ('map', 'filter', 'zip'):
                b = before %(i+'i'+f)
                a = after %(f)
                self.check(b, a)

    def test_0(self):
        # A simple example -- test_1 covers exactly the same thing,
        # but it's not quite as clear.
        b = "itertools.izip(a, b)"
        a = "zip(a, b)"
        self.check(b, a)

    def test_1(self):
        b = """%s(f, a)"""
        a = """%s(f, a)"""
        self.checkall(b, a)

    def test_qualified(self):
        b = """itertools.ifilterfalse(a, b)"""
        a = """itertools.filterfalse(a, b)"""
        self.check(b, a)

        b = """itertools.izip_longest(a, b)"""
        a = """itertools.zip_longest(a, b)"""
        self.check(b, a)

    def test_2(self):
        b = """ifilterfalse(a, b)"""
        a = """filterfalse(a, b)"""
        self.check(b, a)

        b = """izip_longest(a, b)"""
        a = """zip_longest(a, b)"""
        self.check(b, a)

    def test_space_1(self):
        # Leading whitespace before the call is preserved.
        b = """    %s(f, a)"""
        a = """    %s(f, a)"""
        self.checkall(b, a)

    def test_space_2(self):
        b = """    itertools.ifilterfalse(a, b)"""
        a = """    itertools.filterfalse(a, b)"""
        self.check(b, a)

        b = """    itertools.izip_longest(a, b)"""
        a = """    itertools.zip_longest(a, b)"""
        self.check(b, a)

    def test_run_order(self):
        # Must run after the fixers that rewrite the bare builtins.
        self.assert_runs_after('map', 'zip', 'filter')
class Test_itertools_imports(FixerTestCase):
    """Tests for the 'itertools_imports' fixer, which prunes imap/izip/ifilter
    names from 'from itertools import ...' statements."""

    fixer = 'itertools_imports'

    def test_reduced(self):
        b = "from itertools import imap, izip, foo"
        a = "from itertools import foo"
        self.check(b, a)

        b = "from itertools import bar, imap, izip, foo"
        a = "from itertools import bar, foo"
        self.check(b, a)

        b = "from itertools import chain, imap, izip"
        a = "from itertools import chain"
        self.check(b, a)

    def test_comments(self):
        b = "#foo\nfrom itertools import imap, izip"
        a = "#foo\n"
        self.check(b, a)

    def test_none(self):
        # When every imported name is removed, the whole statement goes away.
        b = "from itertools import imap, izip"
        a = ""
        self.check(b, a)

        b = "from itertools import izip"
        a = ""
        self.check(b, a)

    def test_import_as(self):
        b = "from itertools import izip, bar as bang, imap"
        a = "from itertools import bar as bang"
        self.check(b, a)

        b = "from itertools import izip as _zip, imap, bar"
        a = "from itertools import bar"
        self.check(b, a)

        b = "from itertools import imap as _map"
        a = ""
        self.check(b, a)

        b = "from itertools import imap as _map, izip as _zip"
        a = ""
        self.check(b, a)

        s = "from itertools import bar as bang"
        self.unchanged(s)

    def test_ifilter_and_zip_longest(self):
        for name in "filterfalse", "zip_longest":
            b = "from itertools import i%s" % (name,)
            a = "from itertools import %s" % (name,)
            self.check(b, a)

            b = "from itertools import imap, i%s, foo" % (name,)
            a = "from itertools import %s, foo" % (name,)
            self.check(b, a)

            b = "from itertools import bar, i%s, foo" % (name,)
            a = "from itertools import bar, %s, foo" % (name,)
            self.check(b, a)

    def test_import_star(self):
        s = "from itertools import *"
        self.unchanged(s)

    def test_unchanged(self):
        s = "from itertools import foo"
        self.unchanged(s)
class Test_import(FixerTestCase):
    """Tests for the 'import' fixer, which turns implicit relative imports into
    explicit ones ('import bar' -> 'from . import bar') when the module exists
    next to the file being fixed. The filesystem probe is stubbed out in setUp."""

    fixer = "import"

    def setUp(self):
        super(Test_import, self).setUp()
        # Need to replace fix_import's exists method
        # so we can check that it's doing the right thing
        self.files_checked = []
        self.present_files = set()
        self.always_exists = True
        def fake_exists(name):
            # Record every probe; report existence per this test's settings.
            self.files_checked.append(name)
            return self.always_exists or (name in self.present_files)

        from lib2to3.fixes import fix_import
        fix_import.exists = fake_exists

    def tearDown(self):
        # Restore the real filesystem probe.
        from lib2to3.fixes import fix_import
        fix_import.exists = os.path.exists

    def check_both(self, b, a):
        # The fix applies when the sibling module exists, is skipped otherwise.
        self.always_exists = True
        super(Test_import, self).check(b, a)
        self.always_exists = False
        super(Test_import, self).unchanged(b)

    def test_files_checked(self):
        def p(path):
            # Takes a unix path and returns a path with correct separators
            # NOTE(review): os.path.pathsep (':'/';') looks like it should be
            # os.path.sep -- confirm upstream intent before changing.
            return os.path.pathsep.join(path.split("/"))

        self.always_exists = False
        self.present_files = set(['__init__.py'])
        expected_extensions = ('.py', os.path.sep, '.pyc', '.so', '.sl', '.pyd')
        names_to_test = (p("/spam/eggs.py"), "ni.py", p("../../shrubbery.py"))

        for name in names_to_test:
            self.files_checked = []
            self.filename = name
            self.unchanged("import jam")

            if os.path.dirname(name):
                name = os.path.dirname(name) + '/jam'
            else:
                name = 'jam'
            expected_checks = set(name + ext for ext in expected_extensions)
            expected_checks.add("__init__.py")

            self.assertEqual(set(self.files_checked), expected_checks)

    def test_not_in_package(self):
        # No __init__.py next to the file: not a package, so no rewrite.
        s = "import bar"
        self.always_exists = False
        self.present_files = set(["bar.py"])
        self.unchanged(s)

    def test_with_absolute_import_enabled(self):
        # absolute_import in effect means the import is already unambiguous.
        s = "from __future__ import absolute_import\nimport bar"
        self.always_exists = False
        self.present_files = set(["__init__.py", "bar.py"])
        self.unchanged(s)

    def test_in_package(self):
        b = "import bar"
        a = "from . import bar"
        self.always_exists = False
        self.present_files = set(["__init__.py", "bar.py"])
        self.check(b, a)

    def test_import_from_package(self):
        # The sibling may be a package directory rather than a module file.
        b = "import bar"
        a = "from . import bar"
        self.always_exists = False
        self.present_files = set(["__init__.py", "bar" + os.path.sep])
        self.check(b, a)

    def test_already_relative_import(self):
        s = "from . import bar"
        self.unchanged(s)

    def test_comments_and_indent(self):
        b = "import bar # Foo"
        a = "from . import bar # Foo"
        self.check(b, a)

    def test_from(self):
        b = "from foo import bar, baz"
        a = "from .foo import bar, baz"
        self.check_both(b, a)

        b = "from foo import bar"
        a = "from .foo import bar"
        self.check_both(b, a)

        b = "from foo import (bar, baz)"
        a = "from .foo import (bar, baz)"
        self.check_both(b, a)

    def test_dotted_from(self):
        b = "from green.eggs import ham"
        a = "from .green.eggs import ham"
        self.check_both(b, a)

    def test_from_as(self):
        b = "from green.eggs import ham as spam"
        a = "from .green.eggs import ham as spam"
        self.check_both(b, a)

    def test_import(self):
        b = "import foo"
        a = "from . import foo"
        self.check_both(b, a)

        b = "import foo, bar"
        a = "from . import foo, bar"
        self.check_both(b, a)

        b = "import foo, bar, x"
        a = "from . import foo, bar, x"
        self.check_both(b, a)

        b = "import x, y, z"
        a = "from . import x, y, z"
        self.check_both(b, a)

    def test_import_as(self):
        b = "import foo as x"
        a = "from . import foo as x"
        self.check_both(b, a)

        b = "import a as b, b as c, c as d"
        a = "from . import a as b, b as c, c as d"
        self.check_both(b, a)

    def test_local_and_absolute(self):
        # Mixing a local module and an absolute one in a single statement
        # can't be fixed automatically; a warning is expected.
        self.always_exists = False
        self.present_files = set(["foo.py", "__init__.py"])

        s = "import foo, bar"
        self.warns_unchanged(s, "absolute and local imports together")

    def test_dotted_import(self):
        b = "import foo.bar"
        a = "from . import foo.bar"
        self.check_both(b, a)

    def test_dotted_import_as(self):
        b = "import foo.bar as bang"
        a = "from . import foo.bar as bang"
        self.check_both(b, a)

    def test_prefix(self):
        b = """
        # prefix
        import foo.bar
        """
        a = """
        # prefix
        from . import foo.bar
        """
        self.check_both(b, a)
class Test_set_literal(FixerTestCase):
    """Tests for the optional 'set_literal' fixer:
    set([...]) / set((...)) -> {...} set literals."""

    fixer = "set_literal"

    def test_basic(self):
        b = """set([1, 2, 3])"""
        a = """{1, 2, 3}"""
        self.check(b, a)

        b = """set((1, 2, 3))"""
        a = """{1, 2, 3}"""
        self.check(b, a)

        b = """set((1,))"""
        a = """{1}"""
        self.check(b, a)

        # Single-element list gives the same result as the single-element tuple.
        b = """set([1])"""
        self.check(b, a)

        b = """set((a, b))"""
        a = """{a, b}"""
        self.check(b, a)

        b = """set([a, b])"""
        self.check(b, a)

        b = """set((a*234, f(args=23)))"""
        a = """{a*234, f(args=23)}"""
        self.check(b, a)

        b = """set([a*23, f(23)])"""
        a = """{a*23, f(23)}"""
        self.check(b, a)

        b = """set([a-234**23])"""
        a = """{a-234**23}"""
        self.check(b, a)

    def test_listcomps(self):
        # set() over a list comprehension becomes a set comprehension.
        b = """set([x for x in y])"""
        a = """{x for x in y}"""
        self.check(b, a)

        b = """set([x for x in y if x == m])"""
        a = """{x for x in y if x == m}"""
        self.check(b, a)

        b = """set([x for x in y for a in b])"""
        a = """{x for x in y for a in b}"""
        self.check(b, a)

        b = """set([f(x) - 23 for x in y])"""
        a = """{f(x) - 23 for x in y}"""
        self.check(b, a)

    def test_whitespace(self):
        b = """set( [1, 2])"""
        a = """{1, 2}"""
        self.check(b, a)

        b = """set([1 ,  2])"""
        a = """{1 ,  2}"""
        self.check(b, a)

        b = """set([ 1 ])"""
        a = """{ 1 }"""
        self.check(b, a)

        b = """set( [1] )"""
        a = """{1}"""
        self.check(b, a)

        b = """set([  1, 2  ])"""
        a = """{  1, 2  }"""
        self.check(b, a)

        b = """set([x for x in y ])"""
        a = """{x for x in y }"""
        self.check(b, a)

        # Newlines inside the call collapse to a one-line literal.
        b = """set(
            [1, 2]
        )
        """
        a = """{1, 2}\n"""
        self.check(b, a)

    def test_comments(self):
        b = """set((1, 2)) # Hi"""
        a = """{1, 2} # Hi"""
        self.check(b, a)

        # This isn't optimal behavior, but the fixer is optional.
        b = """
            # Foo
            set( # Bar
                (1, 2)
            )
            """
        a = """
            # Foo
            {1, 2}
            """
        self.check(b, a)

    def test_unchanged(self):
        s = """set()"""
        self.unchanged(s)

        s = """set(a)"""
        self.unchanged(s)

        s = """set(a, b, c)"""
        self.unchanged(s)

        # Don't transform generators because they might have to be lazy.
        s = """set(x for x in y)"""
        self.unchanged(s)

        s = """set(x for x in y if z)"""
        self.unchanged(s)

        s = """set(a*823-23**2 + f(23))"""
        self.unchanged(s)
class Test_sys_exc(FixerTestCase):
    """Tests for the 'sys_exc' fixer: sys.exc_type/exc_value/exc_traceback
    become the corresponding sys.exc_info() subscript."""

    fixer = "sys_exc"

    def test_0(self):
        """sys.exc_type maps to index 0 of exc_info()."""
        self.check("sys.exc_type", "sys.exc_info()[0]")

    def test_1(self):
        """sys.exc_value maps to index 1 of exc_info()."""
        self.check("sys.exc_value", "sys.exc_info()[1]")

    def test_2(self):
        """sys.exc_traceback maps to index 2 of exc_info()."""
        self.check("sys.exc_traceback", "sys.exc_info()[2]")

    def test_3(self):
        """A trailing comment is preserved."""
        self.check("sys.exc_type # Foo", "sys.exc_info()[0] # Foo")

    def test_4(self):
        """Whitespace after the dot is preserved."""
        self.check("sys. exc_type", "sys. exc_info()[0]")

    def test_5(self):
        """Whitespace before the dot is preserved."""
        self.check("sys .exc_type", "sys .exc_info()[0]")
class Test_paren(FixerTestCase):
    """Tests for the optional 'paren' fixer, which adds parentheses around
    bare tuples in list/generator comprehension iterables."""

    fixer = "paren"

    def test_0(self):
        b = """[i for i in 1, 2 ]"""
        a = """[i for i in (1, 2) ]"""
        self.check(b, a)

    def test_1(self):
        # Trailing comma inside the tuple is kept.
        b = """[i for i in 1, 2, ]"""
        a = """[i for i in (1, 2,) ]"""
        self.check(b, a)

    def test_2(self):
        b = """[i for i  in 1, 2 ]"""
        a = """[i for i  in (1, 2) ]"""
        self.check(b, a)

    def test_3(self):
        b = """[i for i in 1, 2 if i]"""
        a = """[i for i in (1, 2) if i]"""
        self.check(b, a)

    def test_4(self):
        b = """[i for i in 1,    2    ]"""
        a = """[i for i in (1,    2)    ]"""
        self.check(b, a)

    def test_5(self):
        # Generator expressions are handled the same way.
        b = """(i for i in 1, 2)"""
        a = """(i for i in (1, 2))"""
        self.check(b, a)

    def test_6(self):
        b = """(i for i in 1   ,2 if i)"""
        a = """(i for i in (1   ,2) if i)"""
        self.check(b, a)

    def test_unchanged_0(self):
        s = """[i for i in (1, 2)]"""
        self.unchanged(s)

    def test_unchanged_1(self):
        s = """[i for i in foo()]"""
        self.unchanged(s)

    def test_unchanged_2(self):
        s = """[i for i in (1, 2) if nothing]"""
        self.unchanged(s)

    def test_unchanged_3(self):
        s = """(i for i in (1, 2))"""
        self.unchanged(s)

    def test_unchanged_4(self):
        s = """[i for i in m]"""
        self.unchanged(s)
class Test_metaclass(FixerTestCase):
    """Tests for the 'metaclass' fixer:
    '__metaclass__ = M' in a class body -> 'class X(..., metaclass=M)'."""

    fixer = 'metaclass'

    def test_unchanged(self):
        self.unchanged("class X(): pass")
        self.unchanged("class X(object): pass")
        self.unchanged("class X(object1, object2): pass")
        self.unchanged("class X(object1, object2, object3): pass")
        self.unchanged("class X(metaclass=Meta): pass")
        # Misspelled keyword is not the metaclass keyword.
        self.unchanged("class X(b, arg=23, metclass=Meta): pass")
        self.unchanged("class X(b, arg=23, metaclass=Meta, other=42): pass")

        # A __metaclass__ *method* is not the assignment form.
        s = """
            class X:
                def __metaclass__(self): pass
            """
        self.unchanged(s)

        s = """
            class X:
                a[23] = 74
            """
        self.unchanged(s)

    def test_comments(self):
        b = """
            class X:
                # hi
                __metaclass__ = AppleMeta
            """
        a = """
            class X(metaclass=AppleMeta):
                # hi
                pass
            """
        self.check(b, a)

        b = """
            class X:
                __metaclass__ = Meta
                # Bedtime!
            """
        a = """
            class X(metaclass=Meta):
                pass
                # Bedtime!
            """
        self.check(b, a)

    def test_meta(self):
        # no-parent class, odd body
        b = """
            class X():
                __metaclass__ = Q
                pass
            """
        a = """
            class X(metaclass=Q):
                pass
            """
        self.check(b, a)

        # one parent class, no body
        b = """class X(object): __metaclass__ = Q"""
        a = """class X(object, metaclass=Q): pass"""
        self.check(b, a)

        # one parent, simple body
        b = """
            class X(object):
                __metaclass__ = Meta
                bar = 7
            """
        a = """
            class X(object, metaclass=Meta):
                bar = 7
            """
        self.check(b, a)

        b = """
            class X:
                __metaclass__ = Meta; x = 4; g = 23
            """
        a = """
            class X(metaclass=Meta):
                x = 4; g = 23
            """
        self.check(b, a)

        # one parent, simple body, __metaclass__ last
        b = """
            class X(object):
                bar = 7
                __metaclass__ = Meta
            """
        a = """
            class X(object, metaclass=Meta):
                bar = 7
            """
        self.check(b, a)

        # redefining __metaclass__
        b = """
            class X():
                __metaclass__ = A
                __metaclass__ = B
                bar = 7
            """
        a = """
            class X(metaclass=B):
                bar = 7
            """
        self.check(b, a)

        # multiple inheritance, simple body
        b = """
            class X(clsA, clsB):
                __metaclass__ = Meta
                bar = 7
            """
        a = """
            class X(clsA, clsB, metaclass=Meta):
                bar = 7
            """
        self.check(b, a)

        # keywords in the class statement
        b = """class m(a, arg=23): __metaclass__ = Meta"""
        a = """class m(a, arg=23, metaclass=Meta): pass"""
        self.check(b, a)

        b = """
            class X(expression(2 + 4)):
                __metaclass__ = Meta
            """
        a = """
            class X(expression(2 + 4), metaclass=Meta):
                pass
            """
        self.check(b, a)

        b = """
            class X(expression(2 + 4), x**4):
                __metaclass__ = Meta
            """
        a = """
            class X(expression(2 + 4), x**4, metaclass=Meta):
                pass
            """
        self.check(b, a)

        b = """
            class X:
                __metaclass__ = Meta
                save.py = 23
            """
        a = """
            class X(metaclass=Meta):
                save.py = 23
            """
        self.check(b, a)
class Test_getcwdu(FixerTestCase):
    """Tests for the 'getcwdu' fixer: os.getcwdu -> os.getcwd."""

    fixer = 'getcwdu'

    def test_basic(self):
        b = """os.getcwdu"""
        a = """os.getcwd"""
        self.check(b, a)

        b = """os.getcwdu()"""
        a = """os.getcwd()"""
        self.check(b, a)

        b = """meth = os.getcwdu"""
        a = """meth = os.getcwd"""
        self.check(b, a)

        b = """os.getcwdu(args)"""
        a = """os.getcwd(args)"""
        self.check(b, a)

    def test_comment(self):
        b = """os.getcwdu() # Foo"""
        a = """os.getcwd() # Foo"""
        self.check(b, a)

    def test_unchanged(self):
        s = """os.getcwd()"""
        self.unchanged(s)

        # Bare name (not an os attribute) is untouched.
        s = """getcwdu()"""
        self.unchanged(s)

        s = """os.getcwdb()"""
        self.unchanged(s)

    def test_indentation(self):
        b = """
            if 1:
                os.getcwdu()
            """
        a = """
            if 1:
                os.getcwd()
            """
        self.check(b, a)

    def test_multilation(self):
        # Odd whitespace around the dot and call parens is preserved.
        b = """os .getcwdu()"""
        a = """os .getcwd()"""
        self.check(b, a)

        b = """os.  getcwdu"""
        a = """os.  getcwd"""
        self.check(b, a)

        b = """os.getcwdu (  )"""
        a = """os.getcwd (  )"""
        self.check(b, a)
class Test_operator(FixerTestCase):
fixer = "operator"
    def test_operator_isCallable(self):
        # operator.isCallable(x) -> hasattr(x, '__call__')
        b = "operator.isCallable(x)"
        a = "hasattr(x, '__call__')"
        self.check(b, a)
def test_operator_sequenceIncludes(self):
b = "operator.sequenceIncludes(x, y)"
a = "operator.contains(x, y)"
self.check(b, a)
b = "operator .sequenceIncludes(x, y)"
a = "operator .contains(x, y)"
self.check(b, a)
b = "operator. sequenceIncludes(x, y)"
a = "operator. contains(x, y)"
self.check(b, a)
def test_operator_isSequenceType(self):
b = "operator.isSequenceType(x)"
a = "import collections\nisinstance(x, collections.Sequence)"
self.check(b, a)
def test_operator_isMappingType(self):
b = "operator.isMappingType(x)"
a = "import collections\nisinstance(x, collections.Mapping)"
self.check(b, a)
def test_operator_isNumberType(self):
b = "operator.isNumberType(x)"
a = "import numbers\nisinstance(x, numbers.Number)"
self.check(b, a)
def test_operator_repeat(self):
b = "operator.repeat(x, n)"
a = "operator.mul(x, n)"
self.check(b, a)
b = "operator .repeat(x, n)"
a = "operator .mul(x, n)"
self.check(b, a)
b = "operator. repeat(x, n)"
a = "operator. mul(x, n)"
self.check(b, a)
def test_operator_irepeat(self):
b = "operator.irepeat(x, n)"
a = "operator.imul(x, n)"
self.check(b, a)
b = "operator .irepeat(x, n)"
a = "operator .imul(x, n)"
self.check(b, a)
b = "operator. irepeat(x, n)"
a = "operator. imul(x, n)"
self.check(b, a)
def test_bare_isCallable(self):
s = "isCallable(x)"
t = "You should use 'hasattr(x, '__call__')' here."
self.warns_unchanged(s, t)
def test_bare_sequenceIncludes(self):
s = "sequenceIncludes(x, y)"
t = "You should use 'operator.contains(x, y)' here."
self.warns_unchanged(s, t)
def test_bare_operator_isSequenceType(self):
s = "isSequenceType(z)"
t = "You should use 'isinstance(z, collections.Sequence)' here."
self.warns_unchanged(s, t)
def test_bare_operator_isMappingType(self):
s = "isMappingType(x)"
t = "You should use 'isinstance(x, collections.Mapping)' here."
self.warns_unchanged(s, t)
def test_bare_operator_isNumberType(self):
s = "isNumberType(y)"
t = "You should use 'isinstance(y, numbers.Number)' here."
self.warns_unchanged(s, t)
def test_bare_operator_repeat(self):
s = "repeat(x, n)"
t = "You should use 'operator.mul(x, n)' here."
self.warns_unchanged(s, t)
def test_bare_operator_irepeat(self):
s = "irepeat(y, 187)"
t = "You should use 'operator.imul(y, 187)' here."
self.warns_unchanged(s, t)
class Test_exitfunc(FixerTestCase):
    """Tests the fixer that rewrites ``sys.exitfunc = f`` assignments into
    ``atexit.register(f)`` calls, adding an atexit import as needed."""

    fixer = "exitfunc"

    def test_simple(self):
        """A plain assignment gains an atexit import plus register() call."""
        b = """
            import sys
            sys.exitfunc = my_atexit
            """
        a = """
            import sys
            import atexit
            atexit.register(my_atexit)
            """
        self.check(b, a)

    def test_names_import(self):
        """atexit is appended to an existing multi-name import line."""
        b = """
            import sys, crumbs
            sys.exitfunc = my_func
            """
        a = """
            import sys, crumbs, atexit
            atexit.register(my_func)
            """
        self.check(b, a)

    def test_complex_expression(self):
        """The full right-hand-side expression is passed to register()."""
        b = """
            import sys
            sys.exitfunc = do(d)/a()+complex(f=23, g=23)*expression
            """
        a = """
            import sys
            import atexit
            atexit.register(do(d)/a()+complex(f=23, g=23)*expression)
            """
        self.check(b, a)

    def test_comments(self):
        """Trailing comments stay attached to the rewritten lines."""
        b = """
            import sys # Foo
            sys.exitfunc = f # Blah
            """
        a = """
            import sys
            import atexit # Foo
            atexit.register(f) # Blah
            """
        self.check(b, a)

        b = """
            import apples, sys, crumbs, larry # Pleasant comments
            sys.exitfunc = func
            """
        a = """
            import apples, sys, crumbs, larry, atexit # Pleasant comments
            atexit.register(func)
            """
        self.check(b, a)

    def test_in_a_function(self):
        """Assignments inside a function body are also rewritten."""
        b = """
            import sys
            def f():
                sys.exitfunc = func
            """
        a = """
            import sys
            import atexit
            def f():
                atexit.register(func)
            """
        self.check(b, a)

    def test_no_sys_import(self):
        """Without a sys import the fixer rewrites but warns the user."""
        b = """sys.exitfunc = f"""
        a = """atexit.register(f)"""
        msg = ("Can't find sys import; Please add an atexit import at the "
               "top of your file.")
        self.warns(b, a, msg)

    def test_unchanged(self):
        """Reads of sys.exitfunc (not assignments) are left alone."""
        s = """f(sys.exitfunc)"""
        self.unchanged(s)
class Test_asserts(FixerTestCase):
    """Checks the fixer that renames deprecated unittest assert aliases."""

    fixer = "asserts"

    def test_deprecated_names(self):
        renamings = (
            ('self.assert_(True)', 'self.assertTrue(True)'),
            ('self.assertEquals(2, 2)', 'self.assertEqual(2, 2)'),
            ('self.assertNotEquals(2, 3)', 'self.assertNotEqual(2, 3)'),
            ('self.assertAlmostEquals(2, 3)', 'self.assertAlmostEqual(2, 3)'),
            ('self.assertNotAlmostEquals(2, 8)', 'self.assertNotAlmostEqual(2, 8)'),
            ('self.failUnlessEqual(2, 2)', 'self.assertEqual(2, 2)'),
            ('self.failIfEqual(2, 3)', 'self.assertNotEqual(2, 3)'),
            ('self.failUnlessAlmostEqual(2, 3)', 'self.assertAlmostEqual(2, 3)'),
            ('self.failIfAlmostEqual(2, 8)', 'self.assertNotAlmostEqual(2, 8)'),
            ('self.failUnless(True)', 'self.assertTrue(True)'),
            ('self.failUnlessRaises(foo)', 'self.assertRaises(foo)'),
            ('self.failIf(False)', 'self.assertFalse(False)'),
        )
        for deprecated, modern in renamings:
            self.check(deprecated, modern)

    def test_variants(self):
        # bare references, extra args/comments/whitespace, and with-statements
        cases = (
            ('eq = self.assertEquals', 'eq = self.assertEqual'),
            ('self.assertEquals(2, 3, msg="fail")',
             'self.assertEqual(2, 3, msg="fail")'),
            ('self.assertEquals(2, 3, msg="fail") # foo',
             'self.assertEqual(2, 3, msg="fail") # foo'),
            ('self.assertEquals (2, 3)', 'self.assertEqual (2, 3)'),
            (' self.assertEquals (2, 3)', ' self.assertEqual (2, 3)'),
            ('with self.failUnlessRaises(Explosion): explode()',
             'with self.assertRaises(Explosion): explode()'),
            ('with self.failUnlessRaises(Explosion) as cm: explode()',
             'with self.assertRaises(Explosion) as cm: explode()'),
        )
        for deprecated, modern in cases:
            self.check(deprecated, modern)

    def test_unchanged(self):
        # names that merely start with a deprecated alias are not touched
        self.unchanged('self.assertEqualsOnSaturday')
        self.unchanged('self.assertEqualsOnSaturday(3, 5)')
|
linuxwhatelse/plugin.audio.linuxwhatelse.gmusic
|
resources/libs/lib-sys/lib2to3/lib2to3/tests/test_fixers.py
|
Python
|
gpl-3.0
| 122,292
|
# This file is part of Invenio.
# Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010, 2011 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
__revision__ = "$Id$"
import os
from invenio.bibdocfile import BibRecDocs, decompose_file, normalize_format
def Move_Files_Archive(parameters, curdir, form, user_info=None):
    """DEPRECATED: Use FFT instead.

    Synchronise the record's attached documents with the files present in
    the submission's Main/Additional upload directories: documents no
    longer on disk are deleted, the rest are (re)attached.  Python 2 code
    (iteritems/has_key).
    """
    # The two upload directories watched by this WebSubmit function.
    MainDir = "%s/files/MainFiles" % curdir
    IncludeDir = "%s/files/AdditionalFiles" % curdir
    watcheddirs = {'Main' : MainDir, 'Additional' : IncludeDir}
    for type, dir in watcheddirs.iteritems():
        if os.path.exists(dir):
            # Map basename -> list of normalized extensions found on disk.
            formats = {}
            files = os.listdir(dir)
            files.sort()
            for file in files:
                dummy, filename, extension = decompose_file(file)
                if not formats.has_key(filename):
                    formats[filename] = []
                formats[filename].append(normalize_format(extension))
            # first delete all missing files
            # NOTE(review): 'sysno' is not defined in this module; WebSubmit
            # presumably injects it into the function's globals -- confirm.
            bibarchive = BibRecDocs(sysno)
            existingBibdocs = bibarchive.list_bibdocs(type)
            for existingBibdoc in existingBibdocs:
                if not formats.has_key(bibarchive.get_docname(existingBibdoc.id)):
                    existingBibdoc.delete()
            # then create/update the new ones
            for key in formats.keys():
                # instanciate bibdoc object
                # NOTE(review): formats[key] is a *list*, so this '%s'
                # interpolation yields e.g. "name['.pdf']" rather than
                # "name.pdf" -- verify the intended path format.
                bibarchive.add_new_file('%s/%s%s' % (dir, key, formats[key]), doctype=type, never_fail=True)
    return ""
|
jmacmahon/invenio
|
modules/websubmit/lib/functions/Move_Files_Archive.py
|
Python
|
gpl-2.0
| 2,161
|
#!/usr/bin/env python3
from struct import pack, unpack
from datetime import date
from pathlib import Path
import os.path
import argparse
import sys
import re
# Name of the OpenMW configuration file searched for in the per-platform
# directories below.
configFilename = 'openmw.cfg'

# Default config-directory and data-directory locations keyed by
# sys.platform; win32 is handled separately via SHGetFolderPathW.
configPaths = { 'linux':   '~/.config/openmw',
                'freebsd': '~/.config/openmw',
                'darwin':  '~/Library/Preferences/openmw' }

modPaths = { 'linux':   '~/.local/share/openmw/data',
             'freebsd': '~/.local/share/openmw/data',
             'darwin':  '~/Library/Application Support/openmw/data' }
def packLong(i):
    """Serialize *i* as a little-endian signed 32-bit integer."""
    # '<l' == little-endian 4-byte long, matching the on-disk TES3 layout
    return pack('<l', i)
def packString(s):
    """ASCII-encode *s* (raises UnicodeEncodeError on non-ASCII input)."""
    return s.encode('ascii')
def packPaddedString(s, l):
    """ASCII-encode *s* into exactly *l* bytes.

    Shorter strings are zero-padded; longer ones are truncated while
    keeping room for a terminating NUL byte.
    """
    encoded = s.encode('ascii')
    if len(encoded) > l:
        # truncate, but still null-terminate
        return encoded[:l - 1] + b'\x00'
    return encoded.ljust(l, b'\x00')
def parseString(ba):
    """Decode a NUL-terminated ASCII string from the byte buffer *ba*.

    Fix: previously a buffer *without* a NUL terminator silently lost its
    last byte, because find() returning -1 was used directly as the slice
    end.  Now the whole buffer is decoded in that case.
    """
    i = ba.find(0)
    if i < 0:
        i = len(ba)
    return ba[:i].decode(encoding='ascii', errors='ignore')
def parseNum(ba):
    """Interpret *ba* as an unsigned little-endian integer of any width."""
    return int.from_bytes(bytes(ba), byteorder='little')
def parseFloat(ba):
    """Interpret the first 4 bytes of *ba* as a single-precision float."""
    (value,) = unpack('f', ba)
    return value
def parseLEV(rec):
    """Parse a raw LEVC/LEVI record dict into a levelled-list dict with
    'type', 'name', 'calcfrom', 'chancenone', 'file' and 'items' keys."""
    subs = rec['subrecords']
    parsed = {
        'type': rec['type'],
        'name': parseString(subs[0]['data']),
        'calcfrom': parseNum(subs[1]['data']),
        'chancenone': parseNum(subs[2]['data']),
        # remember which plugin the list came from
        'file': os.path.basename(rec['fullpath']),
    }
    # Some plugins (seen in Tamriel_Data.esm) ship LEV records that stop
    # before the INDX subrecord; treat those as empty lists.
    entries = []
    if len(subs) > 3:
        count = parseNum(subs[3]['data'])
        for j in range(count):
            entry_id = parseString(subs[4 + 2 * j]['data'])
            entry_lvl = parseNum(subs[5 + 2 * j]['data'])
            entries.append((entry_lvl, entry_id))
    parsed['items'] = entries
    return parsed
def parseTES3(rec):
    """Parse the TES3 header record: version, file type, author,
    description, record count and the (master, size) dependency list."""
    subs = rec['subrecords']
    # The HEDR payload is a fixed 300-byte layout.
    hedr = subs[0]['data']
    parsed = {
        'version': parseFloat(hedr[0:4]),
        'filetype': parseNum(hedr[4:8]),
        'author': parseString(hedr[8:40]),
        'desc': parseString(hedr[40:296]),
        'numrecords': parseNum(hedr[296:300]),
    }
    # Masters follow as alternating MAST (name) / DATA (size) subrecords.
    parsed['masters'] = [
        (parseString(subs[i]['data']), parseNum(subs[i + 1]['data']))
        for i in range(1, len(subs), 2)
    ]
    return parsed
def pullSubs(rec, subtype):
    """Return the subrecords of *rec* whose type equals *subtype*."""
    matches = []
    for sub in rec['subrecords']:
        if sub['type'] == subtype:
            matches.append(sub)
    return matches
def readHeader(ba):
    """Decode a record header: 4 ASCII type bytes + 4-byte LE length."""
    return {
        'type': ba[0:4].decode(),
        'length': int.from_bytes(ba[4:8], 'little'),
    }
def readSubRecord(ba):
    """Split one subrecord off the front of *ba*.

    Returns (subrecord_dict, remaining_bytes).
    """
    length = int.from_bytes(ba[4:8], 'little')
    end = 8 + length
    sub = {
        'type': ba[0:4].decode(),
        'length': length,
        'data': ba[8:end],
    }
    return (sub, ba[end:])
def readRecords(filename):
    """Yield every top-level record of the TES3 plugin file *filename*.

    Each record dict carries 'type', 'length', 'subrecords' and 'fullpath'.

    Fixes: the file handle is now closed deterministically via a with-block
    (it previously leaked), and the dead ``headerba is None`` check is gone
    -- file.read() returns b'' at EOF, never None.
    """
    with open(filename, 'rb') as fh:
        while True:
            # 16 bytes = 4 type + 4 length + 8 header flags
            headerba = fh.read(16)
            if len(headerba) < 16:
                return

            record = {}
            header = readHeader(headerba)
            record['type'] = header['type']
            record['length'] = header['length']
            record['subrecords'] = []
            # stash the filename here (a bit hacky, but useful downstream)
            record['fullpath'] = filename

            remains = fh.read(header['length'])
            while len(remains) > 0:
                (subrecord, restofbytes) = readSubRecord(remains)
                record['subrecords'].append(subrecord)
                remains = restofbytes

            yield record
def oldGetRecords(filename, rectype):
    """Lazily yield the records in *filename* whose type is *rectype*."""
    return (rec for rec in readRecords(filename) if rec['type'] == rectype)
def getRecords(filename, rectypes):
    """Collect the records of *filename*, bucketed by type.

    Returns a list parallel to *rectypes*: element i holds every record
    whose type equals rectypes[i].  Assumes the entries of *rectypes* are
    distinct.  Improvement: the per-record linear scan over *rectypes* is
    replaced by a single dict lookup.
    """
    bucket_of = {rectype: i for i, rectype in enumerate(rectypes)}
    retval = [[] for _ in rectypes]
    for r in readRecords(filename):
        i = bucket_of.get(r['type'])
        if i is not None:
            retval[i].append(r)
    return retval
def packStringSubRecord(lbl, strval):
    """Build a string subrecord: label + 4-byte length + NUL-terminated
    ASCII payload."""
    payload = packString(strval) + bytes(1)
    return packString(lbl) + packLong(len(payload)) + payload
def packIntSubRecord(lbl, num, numsize=4):
    """Build an integer subrecord: label + 4-byte size + little-endian value.

    *numsize* selects the width in bytes: 1, 2, 4 (default) or 8.  Any other
    width produces an empty value field (matching the historical behaviour);
    the declared size is written regardless.
    """
    value_bs = b''
    # Little-endian signed formats per width; kept as literal format
    # strings, one per branch.
    if numsize == 1:
        value_bs = pack('<b', num)
    elif numsize == 2:
        value_bs = pack('<h', num)
    elif numsize == 4:
        value_bs = pack('<l', num)
    elif numsize == 8:
        value_bs = pack('<q', num)

    return packString(lbl) + packLong(numsize) + value_bs
def packLEV(rec):
    """Serialize a merged levelled-list dict back into a binary LEVC/LEVI
    record (header + NAME/DATA/NNAM/INDX subrecords + entry pairs)."""
    start_bs = b''
    id_bs = b''

    # creature lists tag their entries CNAM, item lists use INAM
    if rec['type'] == 'LEVC':
        start_bs += b'LEVC'
        id_bs = 'CNAM'
    else:
        start_bs += b'LEVI'
        id_bs = 'INAM'

    # 8 zero bytes of record header flags
    headerflags_bs = bytes(8)
    name_bs = packStringSubRecord('NAME', rec['name'])
    calcfrom_bs = packIntSubRecord('DATA', rec['calcfrom'])
    chance_bs = packIntSubRecord('NNAM', rec['chancenone'], 1)

    # INDX holds the entry count; each entry is an id string plus a
    # 2-byte INTV level value
    subrec_bs = packIntSubRecord('INDX', len(rec['items']))
    for (lvl, lid) in rec['items']:
        subrec_bs += packStringSubRecord(id_bs, lid)
        subrec_bs += packIntSubRecord('INTV', lvl, 2)

    # record length excludes the 16-byte record header itself
    reclen = len(name_bs) + len(calcfrom_bs) + len(chance_bs) + len(subrec_bs)
    reclen_bs = packLong(reclen)

    return start_bs + reclen_bs + headerflags_bs + \
        name_bs + calcfrom_bs + chance_bs + subrec_bs
def packTES3(desc, numrecs, masters):
    """Build the binary TES3 header record for the output module.

    desc     -- module description string (truncated/padded to 256 bytes)
    numrecs  -- number of records that follow in the file
    masters  -- iterable of (master_filename, size) dependency pairs
    """
    start_bs = b'TES3'
    # 8 zero bytes of record header flags
    headerflags_bs = bytes(8)

    # HEDR payload is a fixed 300-byte layout
    hedr_bs = b'HEDR' + packLong(300)
    version_bs = pack('<f', 1.0)

    # .esp == 0, .esm == 1, .ess == 32
    # suprisingly, .omwaddon == 0, also -- figured it would have its own
    ftype_bs = bytes(4)

    author_bs = packPaddedString('omwllf, copyright 2017, jmelesky', 32)
    desc_bs = packPaddedString(desc, 256)
    numrecs_bs = packLong(numrecs)

    # one MAST (name) + DATA (size) subrecord pair per master file
    masters_bs = b''
    for (m, s) in masters:
        masters_bs += packStringSubRecord('MAST', m)
        masters_bs += packIntSubRecord('DATA', s, 8)

    # record length excludes the 16-byte record header itself
    reclen = len(hedr_bs) + len(version_bs) + len(ftype_bs) + len(author_bs) +\
        len(desc_bs) + len(numrecs_bs) + len(masters_bs)
    reclen_bs = packLong(reclen)

    return start_bs + reclen_bs + headerflags_bs + \
        hedr_bs + version_bs + ftype_bs + author_bs + \
        desc_bs + numrecs_bs + masters_bs
def ppSubRecord(sr):
    """Pretty-print one subrecord, decoding known string/int subtypes."""
    if sr['type'] in ['NAME', 'INAM', 'CNAM']:
        # string-valued subrecords
        print(" %s, length %d, value '%s'" % (sr['type'], sr['length'], parseString(sr['data'])))
    elif sr['type'] in ['DATA', 'NNAM', 'INDX', 'INTV']:
        # integer-valued subrecords
        print(" %s, length %d, value '%s'" % (sr['type'], sr['length'], parseNum(sr['data'])))
    else:
        # unknown subtype: show type and length only
        print(" %s, length %d" % (sr['type'], sr['length']))
def ppRecord(rec):
    """Pretty-print a record header followed by all of its subrecords."""
    print("%s, length %d" % (rec['type'], rec['length']))
    for sr in rec['subrecords']:
        ppSubRecord(sr)
def ppLEV(rec):
    """Pretty-print a parsed levelled list (as returned by parseLEV)."""
    if rec['type'] == 'LEVC':
        print("Creature list '%s' from '%s':" % (rec['name'], rec['file']))
    else:
        print("Item list '%s' from '%s':" % (rec['name'], rec['file']))

    print("flags: %d, chance of none: %d" % (rec['calcfrom'], rec['chancenone']))
    # one (level, id) entry per line
    for (lvl, lid) in rec['items']:
        print(" %2d - %s" % (lvl, lid))
def ppTES3(rec):
    """Pretty-print a parsed TES3 header (as returned by parseTES3)."""
    print("TES3 record, type %d, version %f" % (rec['filetype'], rec['version']))
    print("author: %s" % rec['author'])
    print("description: %s" % rec['desc'])
    # list each master dependency with its recorded size
    for (mfile, msize) in rec['masters']:
        print(" master %s, size %d" % (mfile, msize))
    print()
def mergeableLists(alllists):
    """Group levelled lists by name and keep only the names that occur in
    more than one plugin (those are the ones worth merging)."""
    by_name = {}
    for lev in alllists:
        by_name.setdefault(lev['name'], []).append(lev)
    return {name: group for name, group in by_name.items() if len(group) > 1}
def mergeLists(lls):
    """Merge several same-named levelled lists into one.

    The last list wins for list-level attributes (type, calcfrom,
    chancenone).  Entries are combined by taking, for each distinct
    (level, id) pair, the *maximum* count it has in any single source
    list -- summing would let the (heavily duplicated) core-game entries
    drown out plugin additions as more plugins are installed.
    """
    primary = lls[-1]
    merged = {
        'type': primary['type'],
        'name': primary['name'],
        'calcfrom': primary['calcfrom'],
        'chancenone': primary['chancenone'],
    }

    merged['files'] = [src['file'] for src in lls]
    merged['file'] = ', '.join(merged['files'])

    combined = []
    for src in lls:
        combined += src['items']

    entries = sorted(set(combined))
    items = []
    for entry in entries:
        items += [entry] * max(src['items'].count(entry) for src in lls)
    merged['items'] = items

    return merged
def mergeAllLists(alllists):
    """Merge every group of same-named lists that appears in 2+ plugins."""
    return [mergeLists(group) for group in mergeableLists(alllists).values()]
def readCfg(cfg):
    """Parse an openmw.cfg file and return the full paths of its mods.

    Collects the 'data' directories and 'content' mod names in file order,
    then resolves each mod name against the data directories (first match
    wins).  Mods that can't be found in any data directory are dropped.
    """
    data_dirs = []
    mods = []
    with open(cfg, 'r') as fh:
        for line in fh:
            # lines look like "name=value"; the value may be quoted
            match = re.search(r'^(.*)=(.*)$', line)
            if not match:
                continue
            key = match.group(1).strip()
            # strip whitespace plus any surrounding quotes
            val = match.group(2).strip().strip('\'"')
            if key == 'data':
                data_dirs.append(val)
            elif key == 'content':
                mods.append(val)

    # we only have basenames; search the data dirs for the real files
    fp_mods = []
    for modname in mods:
        for d in data_dirs:
            candidate = os.path.join(d, modname)
            if os.path.exists(candidate):
                fp_mods.append(candidate)
                break

    print("Config file parsed...")

    return fp_mods
def dumplists(cfg):
    """Debug helper: print the TES3 header and every levelled list found
    in the mods enabled by the config file *cfg*."""
    llists = []
    fp_mods = readCfg(cfg)

    # print each mod's TES3 header as we go
    for f in fp_mods:
        [ ppTES3(parseTES3(x)) for x in oldGetRecords(f, 'TES3') ]

    # gather item lists, then creature lists, from every mod
    for f in fp_mods:
        llists += [ parseLEV(x) for x in oldGetRecords(f, 'LEVI') ]

    for f in fp_mods:
        llists += [ parseLEV(x) for x in oldGetRecords(f, 'LEVC') ]

    for l in llists:
        ppLEV(l)
def main(cfg, outmoddir, outmod):
    """Merge the levelled lists of every mod enabled in *cfg* and write the
    result as a new module at *outmod* (creating *outmoddir* if needed)."""
    fp_mods = readCfg(cfg)

    # first, let's grab the "raw" records from the files
    (rtes3, rlevi, rlevc) = ([], [], [])
    for f in fp_mods:
        print("Parsing '%s' for relevant records" % f)
        (rtes3t, rlevit, rlevct) = getRecords(f, ('TES3', 'LEVI', 'LEVC'))
        rtes3 += rtes3t
        rlevi += rlevit
        rlevc += rlevct

    # next, parse the tes3 records so we can get a list
    # of master files required by all our mods
    tes3list = [ parseTES3(x) for x in rtes3 ]

    # later mods override earlier ones for a master's recorded size
    masters = {}
    for t in tes3list:
        for m in t['masters']:
            masters[m[0]] = m[1]

    master_list = [ (k,v) for (k,v) in masters.items() ]

    # now, let's parse the levi and levc records into
    # mergeable lists, then merge them

    # creature lists
    clist = [ parseLEV(x) for x in rlevc ]
    levc = mergeAllLists(clist)

    # item lists
    ilist = [ parseLEV(x) for x in rlevi ]
    levi = mergeAllLists(ilist)

    # now build the binary representation of
    # the merged lists.
    # along the way, build up the module
    # description for the new merged mod, out
    # of the names of mods that had lists
    llist_bc = b''
    pluginlist = []
    for x in levi + levc:
        # ppLEV(x)
        llist_bc += packLEV(x)
        pluginlist += x['files']
    plugins = set(pluginlist)
    moddesc = "Merged leveled lists from: %s" % ', '.join(plugins)

    # finally, build the binary form of the
    # TES3 record, and write the whole thing
    # out to disk
    if not os.path.exists(outmoddir):
        p = Path(outmoddir)
        p.mkdir(parents=True)

    with open(outmod, 'wb') as f:
        f.write(packTES3(moddesc, len(levi + levc), master_list))
        f.write(llist_bc)

    # And give some hopefully-useful instructions
    modShortName = os.path.basename(outmod)
    print("\n\n****************************************")
    print(" Great! I think that worked. When you next start the OpenMW Launcher, look for a module named %s. Make sure of the following things:" % modShortName)
    print(" 1. %s is at the bottom of the list. Drag it to the bottom if it's not. It needs to load last." % modShortName)
    print(" 2. %s is checked (enabled)" % modShortName)
    print(" 3. Any other OMWLLF mods are *un*checked. Loading them might not cause problems, but probably will")
    print("\n")
    print(" Then, go ahead and start the game! Your leveled lists should include adjustments from all relevant enabled mods")
    print("\n")
if __name__ == '__main__':
    # Command-line entry point: locate config, mod directory and module
    # name (with per-platform defaults), then merge or dump lists.
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--conffile', type = str, default = None,
                        action = 'store', required = False,
                        help = 'Conf file to use. Optional. By default, attempts to use the default conf file location.')
    parser.add_argument('-d', '--moddir', type = str, default = None,
                        action = 'store', required = False,
                        help = 'Directory to store the new module in. By default, attempts to use the default work directory for OpenMW-CS')
    parser.add_argument('-m', '--modname', type = str, default = None,
                        action = 'store', required = False,
                        help = 'Name of the new module to create. By default, this is "OMWLLF Mod - <today\'s date>.omwaddon.')
    parser.add_argument('--dumplists', default = False,
                        action = 'store_true', required = False,
                        help = 'Instead of generating merged lists, dump all leveled lists in the conf mods. Used for debugging')
    p = parser.parse_args()

    # determine the conf file to use
    confFile = ''
    if p.conffile:
        confFile = p.conffile
    else:
        pl = sys.platform
        if pl in configPaths:
            baseDir = os.path.expanduser(configPaths[pl])
            confFile = os.path.join(baseDir, configFilename)
        elif pl == 'win32':
            # this is ugly. first, imports that only work properly on windows
            from ctypes import *
            import ctypes.wintypes
            buf = create_unicode_buffer(ctypes.wintypes.MAX_PATH)
            # opaque arguments. they are, roughly, for our purposes:
            #   - an indicator of folder owner (0 == current user)
            #   - an id for the type of folder (5 == 'My Documents')
            #   - an indicator for user to call from (0 same as above)
            #   - a bunch of flags for different things
            #     (if you want, for example, to get the default path
            #      instead of the actual path, or whatnot)
            #     0 == current stuff
            #   - the variable to hold the return value
            windll.shell32.SHGetFolderPathW(0, 5, 0, 0, buf)
            # pull out the return value and construct the rest
            baseDir = os.path.join(buf.value, 'My Games', 'OpenMW')
            confFile = os.path.join(baseDir, configFilename)
        else:
            # BUGFIX: this used to interpolate the argparse Namespace
            # ('% p') instead of the platform string.
            print("Sorry, I don't recognize the platform '%s'. You can try specifying the conf file using the '-c' flag." % pl)
            sys.exit(1)

    baseModDir = ''
    if p.moddir:
        baseModDir = p.moddir
    else:
        pl = sys.platform
        if pl in configPaths:
            baseModDir = os.path.expanduser(modPaths[pl])
        elif pl == 'win32':
            # this is ugly in exactly the same ways as above.
            # see there for more information
            from ctypes import *
            import ctypes.wintypes
            buf = create_unicode_buffer(ctypes.wintypes.MAX_PATH)
            windll.shell32.SHGetFolderPathW(0, 5, 0, 0, buf)
            baseDir = os.path.join(buf.value, 'My Games', 'OpenMW')
            baseModDir = os.path.join(baseDir, 'data')
        else:
            # BUGFIX: same Namespace-vs-platform fix as above; the message
            # now also points at the relevant '-d' flag rather than '-c'.
            print("Sorry, I don't recognize the platform '%s'. You can try specifying the mod directory using the '-d' flag." % pl)
            sys.exit(1)

    if not os.path.exists(confFile):
        print("Sorry, the conf file '%s' doesn't seem to exist." % confFile)
        sys.exit(1)

    modName = ''
    if p.modname:
        modName = p.modname
    else:
        modName = 'OMWLLF Mod - %s.omwaddon' % date.today().strftime('%Y-%m-%d')

    modFullPath = os.path.join(baseModDir, modName)

    if p.dumplists:
        dumplists(confFile)
    else:
        main(confFile, baseModDir, modFullPath)
# regarding the windows path detection:
#
# "SHGetFolderPath" is deprecated in favor of "SHGetKnownFolderPath", but
# >>> windll.shell32.SHGetKnownFolderPath('{FDD39AD0-238F-46AF-ADB4-6C85480369C7}', 0, 0, buf2)
# -2147024894
|
jmelesky/omwllf
|
omwllf.py
|
Python
|
isc
| 17,912
|
"""
EGADS transforms algorithms. See EGADS Algorithm Documentation for more info.
"""
__author__ = "mfreer, ohenry"
__date__ = "2017-01-08 11:42"
__version__ = "1.2"
import logging

# Pull every transforms algorithm into the package namespace.  These are
# Python 2 implicit relative imports; any failure is logged rather than
# propagated so one broken algorithm doesn't take down the whole package.
try:
    from interpolation_linear import *
    from isotime_to_elements import *
    from isotime_to_seconds import *
    from seconds_to_isotime import *
    from time_to_decimal_year import *
    logging.info('egads [transforms] algorithms have been loaded')
except Exception:
    # fixed typo in the log message: 'occured' -> 'occurred'
    logging.error('an error occurred during the loading of a [transforms] algorithm')
|
eufarn7sp/egads-eufar
|
egads/algorithms/transforms/__init__.py
|
Python
|
bsd-3-clause
| 547
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_awsci
----------------------------------
Tests for `awsci` module.
"""
import unittest
from awsci import awsci
class TestAwsci(unittest.TestCase):
    """Placeholder test suite for the awsci module; no real assertions yet."""

    def setUp(self):
        # no fixtures needed yet
        pass

    def test_something(self):
        # TODO: add real tests for awsci
        pass

    def tearDown(self):
        # nothing to clean up
        pass
|
glemmaPaul/AWSci
|
tests/test_awsci.py
|
Python
|
isc
| 325
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import filecmp
import random
import textwrap
from airflow import DAG
from airflow.operators.dummy import DummyOperator
from airflow.operators.python import BranchPythonOperator, PythonOperator
from airflow.providers.qubole.operators.qubole import QuboleOperator
from airflow.providers.qubole.sensors.qubole import QuboleFileSensor, QubolePartitionSensor
from airflow.utils.dates import days_ago
# Task-level defaults applied to every task in the example DAGs below.
default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
    'email': ['airflow@example.com'],
    'email_on_failure': False,
    'email_on_retry': False,
}
# Example DAG exercising QuboleOperator with many command types, plus a
# random branch so only one of the later tasks runs per DAG run.
with DAG(
    dag_id='example_qubole_operator',
    default_args=default_args,
    schedule_interval=None,
    start_date=days_ago(2),
    tags=['example'],
) as dag:
    dag.doc_md = textwrap.dedent(
        """
        This is only an example DAG to highlight usage of QuboleOperator in various scenarios,
        some of these tasks may or may not work based on your Qubole account setup.
        Run a shell command from Qubole Analyze against your Airflow cluster with following to
        trigger it manually `airflow dags trigger example_qubole_operator`.
        *Note: Make sure that connection `qubole_default` is properly set before running this
        example. Also be aware that it might spin up clusters to run these examples.*
        """
    )

    def compare_result_fn(**kwargs):
        """
        Compares the results of two QuboleOperator tasks.

        :param kwargs: The context of the executed task.
        :type kwargs: dict
        :return: True if the files are the same, False otherwise.
        :rtype: bool
        """
        # XCom task instance gives access to each operator's fetched results
        ti = kwargs['ti']
        qubole_result_1 = hive_show_table.get_results(ti)
        qubole_result_2 = hive_s3_location.get_results(ti)
        return filecmp.cmp(qubole_result_1, qubole_result_2)

    hive_show_table = QuboleOperator(
        task_id='hive_show_table',
        command_type='hivecmd',
        query='show tables',
        cluster_label='{{ params.cluster_label }}',
        fetch_logs=True,
        # If `fetch_logs`=true, will fetch qubole command logs and concatenate
        # them into corresponding airflow task logs
        tags='airflow_example_run',
        # To attach tags to qubole command, auto attach 3 tags - dag_id, task_id, run_id
        qubole_conn_id='qubole_default',
        # Connection id to submit commands inside QDS, if not set "qubole_default" is used
        params={
            'cluster_label': 'default',
        },
    )

    hive_s3_location = QuboleOperator(
        task_id='hive_s3_location',
        command_type="hivecmd",
        script_location="s3n://public-qubole/qbol-library/scripts/show_table.hql",
        notify=True,
        tags=['tag1', 'tag2'],
        # If the script at s3 location has any qubole specific macros to be replaced
        # macros='[{"date": "{{ ds }}"}, {"name" : "abc"}]',
        trigger_rule="all_done",
    )

    # compare the two hive results once both upstream tasks have finished
    compare_result = PythonOperator(
        task_id='compare_result', python_callable=compare_result_fn, trigger_rule="all_done"
    )

    compare_result << [hive_show_table, hive_s3_location]

    # randomly pick exactly one downstream branch per run
    options = ['hadoop_jar_cmd', 'presto_cmd', 'db_query', 'spark_cmd']
    branching = BranchPythonOperator(task_id='branching', python_callable=lambda: random.choice(options))
    branching << compare_result

    # join rejoins the branches; one_success because only one branch runs
    join = DummyOperator(task_id='join', trigger_rule='one_success')

    hadoop_jar_cmd = QuboleOperator(
        task_id='hadoop_jar_cmd',
        command_type='hadoopcmd',
        sub_command='jar s3://paid-qubole/HadoopAPIExamples/'
        'jars/hadoop-0.20.1-dev-streaming.jar '
        '-mapper wc '
        '-numReduceTasks 0 -input s3://paid-qubole/HadoopAPITests/'
        'data/3.tsv -output '
        's3://paid-qubole/HadoopAPITests/data/3_wc',
        cluster_label='{{ params.cluster_label }}',
        fetch_logs=True,
        params={
            'cluster_label': 'default',
        },
    )

    pig_cmd = QuboleOperator(
        task_id='pig_cmd',
        command_type="pigcmd",
        script_location="s3://public-qubole/qbol-library/scripts/script1-hadoop-s3-small.pig",
        parameters="key1=value1 key2=value2",
        trigger_rule="all_done",
    )

    pig_cmd << hadoop_jar_cmd << branching
    pig_cmd >> join

    presto_cmd = QuboleOperator(task_id='presto_cmd', command_type='prestocmd', query='show tables')

    shell_cmd = QuboleOperator(
        task_id='shell_cmd',
        command_type="shellcmd",
        script_location="s3://public-qubole/qbol-library/scripts/shellx.sh",
        parameters="param1 param2",
        trigger_rule="all_done",
    )

    shell_cmd << presto_cmd << branching
    shell_cmd >> join

    db_query = QuboleOperator(
        task_id='db_query', command_type='dbtapquerycmd', query='show tables', db_tap_id=2064
    )

    db_export = QuboleOperator(
        task_id='db_export',
        command_type='dbexportcmd',
        mode=1,
        hive_table='default_qubole_airline_origin_destination',
        db_table='exported_airline_origin_destination',
        partition_spec='dt=20110104-02',
        dbtap_id=2064,
        trigger_rule="all_done",
    )

    db_export << db_query << branching
    db_export >> join

    db_import = QuboleOperator(
        task_id='db_import',
        command_type='dbimportcmd',
        mode=1,
        hive_table='default_qubole_airline_origin_destination',
        db_table='exported_airline_origin_destination',
        where_clause='id < 10',
        parallelism=2,
        dbtap_id=2064,
        trigger_rule="all_done",
    )

    # inline Scala program submitted to Spark via sparkcmd
    prog = '''
import scala.math.random
import org.apache.spark._
/** Computes an approximation to pi */
object SparkPi {
def main(args: Array[String]) {
val conf = new SparkConf().setAppName("Spark Pi")
val spark = new SparkContext(conf)
val slices = if (args.length > 0) args(0).toInt else 2
val n = math.min(100000L * slices, Int.MaxValue).toInt // avoid overflow
val count = spark.parallelize(1 until n, slices).map { i =>
val x = random * 2 - 1
val y = random * 2 - 1
if (x*x + y*y < 1) 1 else 0
}.reduce(_ + _)
println("Pi is roughly " + 4.0 * count / n)
spark.stop()
}
}
'''

    spark_cmd = QuboleOperator(
        task_id='spark_cmd',
        command_type="sparkcmd",
        program=prog,
        language='scala',
        arguments='--class SparkPi',
        tags='airflow_example_run',
    )

    spark_cmd << db_import << branching
    spark_cmd >> join
# Example DAG exercising the two Qubole sensors: file existence on S3 and
# Hive partition presence.
with DAG(
    dag_id='example_qubole_sensor',
    default_args=default_args,
    schedule_interval=None,
    start_date=days_ago(2),
    doc_md=__doc__,
    tags=['example'],
) as dag2:
    dag2.doc_md = textwrap.dedent(
        """
        This is only an example DAG to highlight usage of QuboleSensor in various scenarios,
        some of these tasks may or may not work based on your QDS account setup.
        Run a shell command from Qubole Analyze against your Airflow cluster with following to
        trigger it manually `airflow dags trigger example_qubole_sensor`.
        *Note: Make sure that connection `qubole_default` is properly set before running
        this example.*
        """
    )

    # poke S3 every 60 s (up to 600 s) until every listed file exists
    check_s3_file = QuboleFileSensor(
        task_id='check_s3_file',
        qubole_conn_id='qubole_default',
        poke_interval=60,
        timeout=600,
        data={
            "files": [
                "s3://paid-qubole/HadoopAPIExamples/jars/hadoop-0.20.1-dev-streaming.jar",
                "s3://paid-qubole/HadoopAPITests/data/{{ ds.split('-')[2] }}.tsv",
            ] # will check for availability of all the files in array
        },
    )

    # poke the Hive metastore for the listed month/day partitions
    check_hive_partition = QubolePartitionSensor(
        task_id='check_hive_partition',
        poke_interval=10,
        timeout=60,
        data={
            "schema": "default",
            "table": "my_partitioned_table",
            "columns": [
                {"column": "month", "values": ["{{ ds.split('-')[1] }}"]},
                {"column": "day", "values": ["{{ ds.split('-')[2] }}", "{{ yesterday_ds.split('-')[2] }}"]},
            ], # will check for partitions like [month=12/day=12,month=12/day=13]
        },
    )

    check_s3_file >> check_hive_partition
|
airbnb/airflow
|
airflow/providers/qubole/example_dags/example_qubole.py
|
Python
|
apache-2.0
| 9,117
|
import os
from flask import Flask, current_app
from .extensions import db, login_manager, csrf, mail
import config as Config
from .common import constants as COMMON_CONSTANTS
from .api import helloworld
from .user import user
from .user import User
# For import *
__all__ = ['create_app']
DEFAULT_BLUEPRINTS = [
user
]
def create_app(config=None, app_name=None, blueprints=None):
    """Create a Flask app.

    :param config: optional config object passed to ``app.config.from_object``
    :param app_name: Flask application name; defaults to the project name
        from the default config
    :param blueprints: blueprints to register; defaults to DEFAULT_BLUEPRINTS
    :return: the fully configured :class:`~flask.Flask` application
    """
    if app_name is None:
        app_name = Config.DefaultConfig.PROJECT
    if blueprints is None:
        blueprints = DEFAULT_BLUEPRINTS

    # instance_relative_config lets an instance/ folder hold
    # deployment-specific configuration outside the package
    app = Flask(app_name, instance_path=COMMON_CONSTANTS.INSTANCE_FOLDER_PATH, instance_relative_config=True)
    configure_app(app, config)
    configure_hook(app)
    configure_blueprints(app, blueprints)
    configure_extensions(app)
    configure_logging(app)
    configure_error_handlers(app)

    return app
def configure_app(app, config=None):
    """Apply configuration to *app*.

    Precedence: DefaultConfig is always loaded first; then either the
    explicitly supplied ``config`` object (if any) or the mode-specific
    config selected by the APPLICATION_MODE environment variable.

    Bug fix: the ``return`` was previously unconditional, which made the
    APPLICATION_MODE branch unreachable dead code; it now only short-circuits
    when an explicit ``config`` is provided.
    """
    # http://flask.pocoo.org/docs/api/#configuration
    app.config.from_object(Config.DefaultConfig)
    if config:
        app.config.from_object(config)
        return
    # get mode from os environment
    application_mode = os.getenv('APPLICATION_MODE', 'LOCAL')
    app.config.from_object(Config.get_config(application_mode))
def configure_extensions(app):
    """Bind the shared extension instances (db, login, csrf, mail) to *app*."""
    # flask-sqlalchemy
    db.init_app(app)
    # flask-login
    login_manager.login_view = 'user.login'
    @login_manager.user_loader
    def load_user(id):
        # id is the value stored in the session by flask-login.
        return User.query.get(id)
    # NOTE(review): setup_app() is the deprecated predecessor of init_app()
    # in flask-login -- confirm the pinned flask-login version still has it.
    login_manager.setup_app(app)
    # flask-wtf
    csrf.init_app(app)
    #Flask-Mail
    mail.init_app(app)
def configure_blueprints(app, blueprints):
    """Register each blueprint in *blueprints* on *app*, in order."""
    for bp in blueprints:
        app.register_blueprint(bp)
def configure_logging(app):
    """Placeholder hook: logging is not customized yet (intentional no-op)."""
def configure_hook(app):
    """Attach request hooks to *app*; the before-request hook is a no-op."""
    def before_request():
        pass
    # Equivalent to using @app.before_request as a decorator.
    app.before_request(before_request)
def configure_error_handlers(app):
    """Install HTTP error handlers (currently only a stub 500 page)."""
    # example
    def server_error_page(error):
        return "ERROR PAGE!"
    # Equivalent to decorating server_error_page with @app.errorhandler(500).
    app.errorhandler(500)(server_error_page)
|
MedQA/medqa
|
app/app.py
|
Python
|
mit
| 1,979
|
#!/usr/bin/python
import re
import subprocess
# Calls the R system specifying that commands come from file commands.R
# The commands.R provided with this assignment will read the file named
# data and will output a histogram of that data to the file pageshist.pdf
def runR( ):
    """Invoke R in batch mode, executing the commands in commands.R."""
    subprocess.call(['R', '-f', 'commands.R'])
# log2hist analyzes a log file to calculate the total number of pages
# printed by each user during the period represented by this log file,
# and uses R to produce a pdf file pageshist.pdf showing a histogram
# of these totals. logfilename is a string which is the name of the
# log file to analyze.
#
def log2hist(logfilename):
    """Tally pages printed per user from a print log and plot a histogram.

    Sums the ``pages:`` field per ``user:`` across the log, writes one
    per-user total per line to a file named ``data``, then calls
    :func:`runR` so commands.R renders pageshist.pdf from it.

    :param logfilename: name of the log file to analyze
    """
    # Compile once, outside the loop; raw strings avoid the invalid-escape
    # warnings that '\s' triggers in ordinary string literals.
    user_re = re.compile(r'(\suser:\s)(.+?)(\sprinter:\s)')
    pages_re = re.compile(r'(\spages:\s)(.+?)(\scode:\s)')
    users = {}
    # 'with' guarantees the file handles are closed (the originals leaked).
    with open(logfilename) as log_file:
        for print_job in log_file:
            user_match = user_re.search(print_job)
            if not user_match:
                continue
            user = user_match.group(2)
            pages = int(pages_re.search(print_job).group(2))
            users[user] = users.get(user, 0) + pages
    with open('data', 'w+') as data:
        for total in users.values():
            data.write("%d\n" % total)
    runR()
|
PLJNS/Rutgers-Prin-Prog-Fall-2013
|
Python/pages.py
|
Python
|
mit
| 1,179
|
"""
Given a sorted array consisting of only integers where every element appears twice except for one element which appears once. Find this single element that appears only once.
Example 1:
Input: [1,1,2,3,3,4,4,8,8]
Output: 2
Example 2:
Input: [3,3,7,7,10,11,11]
Output: 10
Note: Your solution should run in O(log n) time and O(1) space.
"""
def singleNonDuplicate(nums):
    """Return the element appearing exactly once in a sorted array where
    every other element appears exactly twice.

    Iterative binary search: truly O(log n) time and O(1) space (the
    previous recursive delegation used O(log n) call-stack space, violating
    the problem's stated space bound).

    :type nums: List[int]
    :rtype: int
    """
    lo, hi = 0, len(nums) - 1
    while lo < hi:
        mid = (lo + hi) // 2
        # mid ^ 1 is mid's pair partner: mid + 1 when mid is even,
        # mid - 1 when mid is odd. If the partner matches, all pairs up
        # to mid are intact, so the single element lies to the right.
        if nums[mid] == nums[mid ^ 1]:
            lo = mid + 1
        else:
            hi = mid
    return nums[lo]
def singleNonDuplicate_helper(num, start, end):
    """Binary search for the unique element within num[start..end].

    Relies on the pairing invariant: while all elements left of the answer
    are paired, num[mid] equals its XOR-partner num[mid ^ 1].
    """
    while start < end:
        mid = (start + end) // 2
        if num[mid] != num[mid ^ 1]:
            # Pairing already broken at mid: answer is at mid or earlier.
            end = mid
        else:
            # Pairs intact through mid: answer is strictly to the right.
            start = mid + 1
    return num[start]
def singleNonDuplicate_linear(nums):
    """Linear-time reference implementation.

    Scans (even, odd) index pairs; each intact pair cancels to zero. The
    first pair that fails to cancel begins with the single element; if all
    pairs cancel, the single element is the last entry.

    Bug fix: the running-sum check previously also fired right after even
    indices (where the sum is naturally nonzero), returning wrong answers
    for any array whose first element is nonzero (e.g. [3,3,7,7,10,11,11]
    returned 11). The check now runs only after a pair completes. Also
    renamed the accumulator so it no longer shadows the builtin ``sum``.

    :type nums: List[int]
    :rtype: int
    """
    balance = 0
    for i, n in enumerate(nums):
        if i % 2 == 0:
            balance += n
        else:
            balance -= n
            # A completed pair should cancel; a nonzero balance means the
            # pair mismatched, so the single element sits at i - 1.
            if balance != 0:
                return nums[i - 1]
    # Every pair cancelled: the lone trailing element is the answer.
    return nums[-1]
def test_singleNonDuplicate():
    """Exercise the O(log n) solution on the problem's examples and edges."""
    cases = [
        ([1, 1, 2], 2),
        ([1, 1, 2, 3, 3, 4, 4, 8, 8], 2),
        ([3, 3, 7, 7, 10, 11, 11], 10),
        ([0, 1, 1], 0),
    ]
    for nums, expected in cases:
        assert singleNonDuplicate(nums) == expected
# Run the self-checks when executed as a script.
if __name__ == '__main__':
    test_singleNonDuplicate()
|
yfpeng/pengyifan-leetcode
|
src/main/python/pyleetcode/Single_Element_in_a_Sorted_Array.py
|
Python
|
bsd-3-clause
| 1,481
|
from ..utils import *
##
# Minions
# Tentacle of N'Zoth
class OG_151:
    """Tentacle of N'Zoth."""
    # Deathrattle: deal 1 damage to all minions (card-DSL declaration).
    deathrattle = Hit(ALL_MINIONS, 1)
|
smallnamespace/fireplace
|
fireplace/cards/wog/neutral_legendary.py
|
Python
|
agpl-3.0
| 108
|
##########################################################################
#
# Copyright (c) 2008-2011, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import IECore
import IECoreGL
IECoreGL.init( False )
import os.path
import os
import math
class UserAtributesTest( unittest.TestCase ) :
    # NOTE(review): class name misspells "Attributes"; kept as-is so test
    # discovery / external references are unaffected.
    # NOTE: this file uses Python 2 syntax (``except Exception, e`` and
    # ``raise Exception, ...`` below).

    def testUserAttributesInDeferredMode( self ) :
        # user:* attributes must round-trip and respect
        # attributeBegin/attributeEnd scoping in deferred mode.
        r = IECoreGL.Renderer()
        r.setOption( "gl:mode", IECore.StringData( "deferred" ) )
        r.worldBegin()
        # Unset attributes read back as None rather than raising.
        self.assertEqual( r.getAttribute( "user:notSetYet" ), None )
        r.setAttribute( "user:test", IECore.FloatData( 1 ) )
        self.assertEqual( r.getAttribute( "user:test" ), IECore.FloatData( 1 ) )
        r.attributeBegin()
        # Outer attributes remain visible inside a nested attribute scope.
        self.assertEqual( r.getAttribute( "user:test" ), IECore.FloatData( 1 ) )
        r.setAttribute( "user:test2", IECore.IntData( 10 ) )
        self.assertEqual( r.getAttribute( "user:test2" ), IECore.IntData( 10 ) )
        r.attributeEnd()
        # Attributes set inside the nested scope must not leak out of it.
        self.assertEqual( r.getAttribute( "user:test" ), IECore.FloatData( 1 ) )
        self.assertEqual( r.getAttribute( "user:test2" ), None )
        r.worldEnd()

    def performProceduralTest( self, threaded ) :
        # Renders a procedural that recursively renders child procedurals
        # (directly and via a Group) and checks that a user attribute set in
        # the parent is visible to the children. Assertion failures inside
        # doRender are collected in `errors` because exceptions must not
        # escape when the renderer runs procedurals on worker threads.
        errors = list()
        class SimpleProcedural( IECore.ParameterisedProcedural ):
            def __init__( s, level = 0 ):
                IECore.ParameterisedProcedural.__init__( s )
                s.__level = level
            def doBound( s, args ) :
                # NOTE(review): Box3f / V3f are unqualified here (no IECore.
                # prefix) -- presumably imported into this namespace
                # elsewhere, or this path would raise NameError; verify.
                return Box3f( V3f( -1 ), V3f( 1 ) )
            def doRender( s, renderer, args ):
                try:
                    if s.__level == 0 :
                        with IECore.AttributeBlock( renderer ) :
                            renderer.setAttribute( "user:myTestAttribute", IECore.IntData(11) )
                            # rendering a child procedural
                            SimpleProcedural( 1 ).render( renderer )
                            self.assertEqual( renderer.getAttribute( "user:myTestAttribute" ), IECore.IntData(11) )
                            # rendering child procedural from inside a Group
                            g = IECore.Group()
                            g.addChild( SimpleProcedural( 2 ) )
                            g.render( renderer )
                    elif s.__level == 1 :
                        self.assertEqual( renderer.getAttribute( "user:myTestAttribute" ), IECore.IntData(11) )
                    elif s.__level == 2 :
                        self.assertEqual( renderer.getAttribute( "user:myTestAttribute" ), IECore.IntData(11) )
                except Exception, e :
                    errors.append( IECore.exceptionInfo()[1] )
        r = IECoreGL.Renderer()
        r.setOption( "gl:mode", IECore.StringData( "deferred" ) )
        with IECore.WorldBlock( r ) :
            # reentrant == True allows the renderer to run procedurals in
            # parallel; False forces single-threaded expansion.
            r.setAttribute( "gl:procedural:reentrant", IECore.BoolData( threaded ) )
            p = SimpleProcedural()
            p.render( r )
        if errors :
            raise Exception, "ERRORS:\n".join( errors )

    def testUserAttributesInSingleThreadedProcedural( self ) :
        self.performProceduralTest( False )

    def testUserAttributesInMultiThreadedProcedural( self ) :
        self.performProceduralTest( True )

    def testUserAttributesInImmediateMode( self ) :
        # Same scoping expectations as the deferred-mode test, but with the
        # renderer in immediate mode.
        r = IECoreGL.Renderer()
        r.setOption( "gl:mode", IECore.StringData( "immediate" ) )
        r.worldBegin()
        self.assertEqual( r.getAttribute( "user:notSetYet" ), None )
        r.setAttribute( "user:test", IECore.FloatData( 1 ) )
        self.assertEqual( r.getAttribute( "user:test" ), IECore.FloatData( 1 ) )
        r.attributeBegin()
        self.assertEqual( r.getAttribute( "user:test" ), IECore.FloatData( 1 ) )
        r.setAttribute( "user:test2", IECore.IntData( 10 ) )
        self.assertEqual( r.getAttribute( "user:test2" ), IECore.IntData( 10 ) )
        r.attributeEnd()
        self.assertEqual( r.getAttribute( "user:test" ), IECore.FloatData( 1 ) )
        self.assertEqual( r.getAttribute( "user:test2" ), None )
        r.worldEnd()
# Allow running this test file directly.
if __name__ == "__main__":
    unittest.main()
|
lento/cortex
|
test/IECoreGL/UserAttributesTest.py
|
Python
|
bsd-3-clause
| 5,167
|
from unittest import TestCase
from flake8.options.manager import OptionManager
from darglint.config import get_config
from darglint.docstring.style import DocstringStyle
from darglint.flake8_entry import DarglintChecker
from darglint.strictness import Strictness
class Flake8TestCase(TestCase):
    """Tests that flake8 config is parsed correctly."""

    def test_config_parsed(self):
        # With no CLI arguments the checker should mirror darglint's own
        # default configuration.
        default_config = get_config().get_default_instance()
        parser = OptionManager('', '')
        DarglintChecker.add_options(parser)
        options, args = parser.parse_args([])
        DarglintChecker.parse_options(options)
        self.assertEqual(default_config.style, DarglintChecker.config.style)
        # Explicit flags must override those defaults.
        argv = [
            '--docstring-style=numpy',
            '--strictness=short'
        ]
        options, args = parser.parse_args(argv)
        # Reset to defaults so the second parse starts from a known state.
        DarglintChecker.config = default_config
        DarglintChecker.parse_options(options)
        self.assertEqual(DarglintChecker.config.style, DocstringStyle.NUMPY)
        self.assertEqual(DarglintChecker.config.strictness, Strictness.SHORT_DESCRIPTION)
|
terrencepreilly/darglint
|
integration_tests/test_flake8.py
|
Python
|
mit
| 1,115
|
"""Decodes a sentence using the Viterbi Algorithm
Input: a sentence (str)
Initialization: set pi(0, <START>, <START>) = 1
"""
from nltk.tokenize import TreebankWordTokenizer
from preprocessing import import_wsj
from parameters_class import Parameters
class Decoder:
    """Viterbi decoder for POS tagging, parameterised from a Penn WSJ corpus."""

    def __init__(self, corpus_name):
        """Initialize parameters using a corpus
        The corpus must be from the Penn WSJ treebank
        corpus_name can be one of three names:
        "train" uses /Users/ischeinfeld/Documents/Code/WSJ/train.txt
        "develop" uses /Users/ischeinfeld/Documents/Code/WSJ/develop.txt"
        "test" uses /Users/ischeinfeld/Documents/Code/WSJ/test.txt"
        or corpus_name can be a complete path to the corpus text:
        ex. /dir/dir/dir/file(.txt)
        """
        self.corpus = import_wsj(corpus_name)
        self.params = Parameters(self.corpus)

    def decode(self, sentence):
        """Decode a sentence
        Input: a sentence (str)
        Output: a tuple with lists of tokens, tags, and probabilities

        NOTE(review): work in progress -- the backtracking step is commented
        out below, so this currently prints pi tables and returns None.
        """
        token_seq = self.prep_sentence(sentence)
        print(token_seq)
        ### Calculate pi values
        # pi[k][u][v]: probability of the best tag sequence for the first k
        # tokens that ends with the tag bigram (u, v).
        pi = []
        tags = self.params.tags
        pi.append({})
        pi[0]['<START>'] = {}
        pi[0]['<START>']['<START>'] = 1 # pi[k][u][v]
        for k in range(1, len(token_seq)):
            print("k: ", k)
            pi.append({})
            for w in pi[k-1]:
                for u in pi[k-1][w]:
                    # NOTE(review): pi[k][u] is re-created for each w, so
                    # values computed for earlier w are discarded instead of
                    # taking the max over w as Viterbi requires -- confirm.
                    pi[k][u] = {}
                    for v in tags:
                        pi[k][u][v] = pi[k-1][w][u] * self.params.q(v, w, u) * self.params.e(token_seq[k - 1],v) # k-1 at end because token_seq is indexed from 0
        '''
        tag_seq = ['<START>', '<START>']
        prob_seq = []
        return (token_seq, tag_seq, prob_seq)
        '''
        print(pi[1])
        print(pi[2])
        '''
        for k in pi:
            print(k)
            for u in k:
                for v in k[u]:
                    print("pi(", k, ", ", u, ", ", v, ")")
        '''

    def get_prob(self, u, v, s, x):
        # Transition probability q(s | u, v) times emission probability
        # e(x | s).
        q = self.params.q(s, u, v)
        e = self.params.e(x, s)
        return q * e

    def prep_sentence(self, sentence):
        """Tokenizes a sentence string"""
        # Append the sentinel '<STOP>' token expected by the model.
        sentence_list = TreebankWordTokenizer().tokenize(sentence)
        sentence_list.append('<STOP>')
        return sentence_list
|
ischeinfeld/py_nlp
|
src_deprecated/decoder_class_old.py
|
Python
|
mit
| 2,037
|
def test():
    """A trivially-passing example test."""
    assert True, "sanity check"
|
brianjimenez/python-course
|
material/examples/test_example_7.py
|
Python
|
mit
| 29
|
# Copyright 2014 Hewlett-Packard Development Company, L.P
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack_dashboard.test.integration_tests import helpers
from openstack_dashboard.test.integration_tests.tests import decorators
@decorators.skip_because(bugs=["1526791"])
class TestKeypair(helpers.TestCase):
    """Checks that the user is able to create/delete keypair."""

    # Random suffix avoids name collisions between concurrent test runs.
    KEYPAIR_NAME = helpers.gen_random_resource_name("keypair")

    def test_keypair(self):
        keypair_page = self.home_pg.go_to_accessandsecurity_keypairspage()
        keypair_page.create_keypair(self.KEYPAIR_NAME)
        # Reload the page to confirm the keypair persisted, then delete it
        # and confirm it is gone.
        keypair_page = self.home_pg.go_to_accessandsecurity_keypairspage()
        self.assertTrue(keypair_page.is_keypair_present(self.KEYPAIR_NAME))
        keypair_page.delete_keypair(self.KEYPAIR_NAME)
        self.assertFalse(keypair_page.is_keypair_present(self.KEYPAIR_NAME))
|
Tesora-Release/tesora-horizon
|
openstack_dashboard/test/integration_tests/tests/test_keypair.py
|
Python
|
apache-2.0
| 1,445
|
# -*- coding: utf-8 -*-
# Copyright 2014 Rob Ruana
# Licensed under the BSD License, see LICENSE file for details.
"""Tests for :mod:`sphinxcontrib.napoleon.docstring` module."""
import textwrap
from sphinxcontrib.napoleon import Config
from sphinxcontrib.napoleon.docstring import GoogleDocstring, NumpyDocstring
from unittest import TestCase
try:
# Python >=3.3
from unittest.mock import Mock
except ImportError:
from mock import Mock
class BaseDocstringTest(TestCase):
    # Shared base class for the Google/Numpy docstring test cases below.
    pass
class GoogleDocstringTest(BaseDocstringTest):
    # (input, expected) pairs: a Google-style docstring and the reST that
    # GoogleDocstring is expected to translate it into.
    # NOTE(review): the blank lines and relative indentation inside these
    # string literals are significant to the tests; confirm against the
    # upstream file, as they may have been lost in transit.
    docstrings = [(
        """Single line summary""",
        """Single line summary"""
    ), (
        """
Single line summary
Extended description
""",
        """
Single line summary
Extended description
"""
    ), (
        """
Single line summary
Args:
arg1(str):Extended
description of arg1
""",
        """
Single line summary
:Parameters: **arg1** (*str*) --
Extended
description of arg1"""
    ), (
        """
Single line summary
Args:
arg1(str):Extended
description of arg1
arg2 ( int ) : Extended
description of arg2
Keyword Args:
kwarg1(str):Extended
description of kwarg1
kwarg2 ( int ) : Extended
description of kwarg2""",
        """
Single line summary
:Parameters: * **arg1** (*str*) --
Extended
description of arg1
* **arg2** (*int*) --
Extended
description of arg2
:Keyword Arguments: * **kwarg1** (*str*) --
Extended
description of kwarg1
* **kwarg2** (*int*) --
Extended
description of kwarg2"""
    ), (
        """
Single line summary
Arguments:
arg1(str):Extended
description of arg1
arg2 ( int ) : Extended
description of arg2
Keyword Arguments:
kwarg1(str):Extended
description of kwarg1
kwarg2 ( int ) : Extended
description of kwarg2""",
        """
Single line summary
:Parameters: * **arg1** (*str*) --
Extended
description of arg1
* **arg2** (*int*) --
Extended
description of arg2
:Keyword Arguments: * **kwarg1** (*str*) --
Extended
description of kwarg1
* **kwarg2** (*int*) --
Extended
description of kwarg2"""
    ), (
        """
Single line summary
Return:
str:Extended
description of return value
""",
        """
Single line summary
:returns: *str* --
Extended
description of return value"""
    ), (
        """
Single line summary
Returns:
str:Extended
description of return value
""",
        """
Single line summary
:returns: *str* --
Extended
description of return value"""
    ), (
        """
Single line summary
Returns:
Extended
description of return value
""",
        """
Single line summary
:returns: Extended
description of return value"""
    )]

    def test_docstrings(self):
        # Translate every fixture and compare against the expected reST.
        config = Config(napoleon_use_param=False, napoleon_use_rtype=False)
        for docstring, expected in self.docstrings:
            actual = str(GoogleDocstring(textwrap.dedent(docstring), config))
            expected = textwrap.dedent(expected)
            self.assertEqual(expected, actual)

    def test_parameters_with_class_reference(self):
        # :class:`...` markup inside parameter types must survive translation.
        docstring = """\
Construct a new XBlock.
This class should only be used by runtimes.
Arguments:
runtime (:class:`Runtime`): Use it to access the environment.
It is available in XBlock code as ``self.runtime``.
field_data (:class:`FieldData`): Interface used by the XBlock
fields to access their data from wherever it is persisted.
scope_ids (:class:`ScopeIds`): Identifiers needed to resolve scopes.
"""
        actual = str(GoogleDocstring(docstring))
        expected = """\
Construct a new XBlock.
This class should only be used by runtimes.
:param runtime: Use it to access the environment.
It is available in XBlock code as ``self.runtime``.
:type runtime: :class:`Runtime`
:param field_data: Interface used by the XBlock
fields to access their data from wherever it is persisted.
:type field_data: :class:`FieldData`
:param scope_ids: Identifiers needed to resolve scopes.
:type scope_ids: :class:`ScopeIds`
"""
        self.assertEqual(expected, actual)
class NumpyDocstringTest(BaseDocstringTest):
    # (input, expected) pairs: a NumPy-style docstring and the reST that
    # NumpyDocstring is expected to translate it into.
    # NOTE(review): blank lines / relative indentation inside these string
    # literals are significant; confirm against the upstream file.
    docstrings = [(
        """Single line summary""",
        """Single line summary"""
    ), (
        """
Single line summary
Extended description
""",
        """
Single line summary
Extended description
"""
    ), (
        """
Single line summary
Parameters
----------
arg1:str
Extended
description of arg1
""",
        """
Single line summary
:Parameters: **arg1** (*str*) --
Extended
description of arg1"""
    ), (
        """
Single line summary
Parameters
----------
arg1:str
Extended
description of arg1
arg2 : int
Extended
description of arg2
Keyword Arguments
-----------------
kwarg1:str
Extended
description of kwarg1
kwarg2 : int
Extended
description of kwarg2
""",
        """
Single line summary
:Parameters: * **arg1** (*str*) --
Extended
description of arg1
* **arg2** (*int*) --
Extended
description of arg2
:Keyword Arguments: * **kwarg1** (*str*) --
Extended
description of kwarg1
* **kwarg2** (*int*) --
Extended
description of kwarg2"""
    ), (
        """
Single line summary
Return
------
str
Extended
description of return value
""",
        """
Single line summary
:returns: *str* --
Extended
description of return value"""
    ), (
        """
Single line summary
Returns
-------
str
Extended
description of return value
""",
        """
Single line summary
:returns: *str* --
Extended
description of return value"""
    )]

    def test_docstrings(self):
        # Translate every fixture and compare against the expected reST.
        config = Config(napoleon_use_param=False, napoleon_use_rtype=False)
        for docstring, expected in self.docstrings:
            actual = str(NumpyDocstring(textwrap.dedent(docstring), config))
            expected = textwrap.dedent(expected)
            self.assertEqual(expected, actual)

    def test_parameters_with_class_reference(self):
        # The same docstring is rendered both with napoleon_use_param off
        # (parameter list) and on (:type: fields).
        docstring = """\
Parameters
----------
param1 : :class:`MyClass <name.space.MyClass>` instance
"""
        config = Config(napoleon_use_param=False)
        actual = str(NumpyDocstring(docstring, config))
        expected = """\
:Parameters: **param1** (:class:`MyClass <name.space.MyClass>` instance)
"""
        self.assertEqual(expected, actual)
        config = Config(napoleon_use_param=True)
        actual = str(NumpyDocstring(docstring, config))
        expected = """\
:type param1: :class:`MyClass <name.space.MyClass>` instance
"""
        self.assertEqual(expected, actual)

    def test_parameters_without_class_reference(self):
        docstring = """\
Parameters
----------
param1 : MyClass instance
"""
        config = Config(napoleon_use_param=False)
        actual = str(NumpyDocstring(docstring, config))
        expected = """\
:Parameters: **param1** (*MyClass instance*)
"""
        self.assertEqual(expected, actual)
        config = Config(napoleon_use_param=True)
        actual = str(NumpyDocstring(textwrap.dedent(docstring), config))
        expected = """\
:type param1: MyClass instance
"""
        self.assertEqual(expected, actual)

    def test_see_also_refs(self):
        # Without an app, See Also entries become :obj: references; with an
        # app and obj_type "method" they become :meth: references.
        docstring = """\
numpy.multivariate_normal(mean, cov, shape=None, spam=None)
See Also
--------
some, other, funcs
otherfunc : relationship
"""
        actual = str(NumpyDocstring(docstring))
        expected = """\
numpy.multivariate_normal(mean, cov, shape=None, spam=None)
.. seealso::
:obj:`some`, :obj:`other`, :obj:`funcs`
\n\
:obj:`otherfunc`
relationship
"""
        self.assertEqual(expected, actual)
        docstring = """\
numpy.multivariate_normal(mean, cov, shape=None, spam=None)
See Also
--------
some, other, funcs
otherfunc : relationship
"""
        config = Config()
        app = Mock()
        actual = str(NumpyDocstring(docstring, config, app, "method"))
        expected = """\
numpy.multivariate_normal(mean, cov, shape=None, spam=None)
.. seealso::
:meth:`some`, :meth:`other`, :meth:`funcs`
\n\
:meth:`otherfunc`
relationship
"""
        self.assertEqual(expected, actual)
|
marcoantoniooliveira/labweb
|
oscar/lib/python2.7/site-packages/tests/napoleon/test_docstring.py
|
Python
|
bsd-3-clause
| 10,061
|
import re
from datetime import date
from bs4 import BeautifulSoup
from django.test import Client
from freezegun import freeze_time
import ws.utils.perms as perm_utils
from ws import enums, models
from ws.tests import TestCase, factories, strip_whitespace
class Helpers:
    """Mixin of fetch/assert utilities shared by the trip-view test cases."""

    # Provided by the TestCase the mixin is combined with.
    client: Client

    @staticmethod
    def _form_data(form):
        # Yield (name, value) pairs for every control in a BeautifulSoup form.
        for elem in form.find_all('textarea'):
            yield elem['name'], elem.text
        for elem in form.find_all('input'):
            yield elem['name'], elem.get('value', '')
        for select in form.find_all('select'):
            selection = select.find('option', selected=True)
            value = selection['value'] if selection else ''
            yield select['name'], value

    def _get(self, url: str):
        # Fetch a page, require a 200, and return (response, parsed soup).
        response = self.client.get(url)
        assert response.status_code == 200
        soup = BeautifulSoup(response.content, 'html.parser')
        return response, soup

    @staticmethod
    def _expect_title(soup, expected):
        title = strip_whitespace(soup.title.string)
        assert title == f'{expected} | MITOC Trips'

    @staticmethod
    def _expect_past_trips(response, expected_trip_pks):
        assert expected_trip_pks == [trip.pk for trip in response.context['past_trips']]

    @staticmethod
    def _expect_current_trips(response, expected_trip_pks):
        assert [
            trip.pk for trip in response.context['current_trips']
        ] == expected_trip_pks

    @staticmethod
    def _expect_upcoming_header(soup, expected_text):
        """Expect a text label on the header, plus the subscribe+digest buttons."""
        header = soup.body.find('h3')
        header_text = strip_whitespace(header.get_text())
        # There is an RSS button and a weekly email digest button included in the header
        assert header_text == f'{expected_text} RSS Weekly digest'

    @staticmethod
    def _expect_link_for_date(soup, datestring):
        # The "Previous trips" link should point at /trips/?after=<date>.
        link = soup.find('a', href=f'/trips/?after={datestring}')
        assert link.get_text(strip=True) == 'Previous trips'
@freeze_time("2019-02-15 12:25:00 EST")
class UpcomingTripsViewTest(TestCase, Helpers):
    """Behavior of /trips/ (upcoming trips, with optional `after` filter)."""

    def test_upcoming_trips_without_filter(self):
        """With no default filter, we only show upcoming trips."""
        response, soup = self._get('/trips/')
        # We don't bother rendering any past trips
        self.assertNotIn('past_trips', response.context)
        self._expect_title(soup, 'Upcoming trips')
        # We just say 'Upcoming trips' (no mention of date)
        self._expect_upcoming_header(soup, 'Upcoming trips')

    def test_invalid_filter(self):
        """When an invalid date is passed, we just ignore it."""
        # Make two trips that are in the future, but before the requested cutoff
        factories.TripFactory.create(trip_date='2019-02-28')
        factories.TripFactory.create(trip_date='2019-02-27')
        # Ask for upcoming trips after an invalid future date
        response, soup = self._get('/trips/?after=2019-02-31')
        # We warn the user that this date was invalid.
        warning = soup.find(class_='alert alert-danger')
        self.assertTrue(response.context['date_invalid'])
        self.assertIn('Invalid date', warning.get_text())
        # However, we still return results (behaving as if no date filter was given)
        # We don't include past trips, though, since the `after` cutoff was invalid
        # (We only show upcoming trips)
        self._expect_title(soup, 'Upcoming trips')
        self.assertNotIn('past_trips', response.context)
        # We use today's date for the 'previous trips' link
        self._expect_link_for_date(soup, '2018-02-15')

    def test_trips_with_filter(self):
        """We support filtering the responded list of trips."""
        # Make a very old trip that will not be in our filter
        factories.TripFactory.create(trip_date='2016-12-23')
        # Make an older trip, that takes place after our query
        expected_trip = factories.TripFactory.create(trip_date='2017-11-21')
        # Filter based on a date in the past
        response, soup = self._get('/trips/?after=2017-11-15')
        self.assertFalse(response.context['date_invalid'])
        # Observe that we have an 'Upcoming trips' section, plus a section for past trips
        self._expect_upcoming_header(soup, 'Upcoming trips')
        self._expect_title(soup, 'Trips after 2017-11-15')
        self._expect_past_trips(response, [expected_trip.pk])
        self._expect_link_for_date(soup, '2016-11-15')

    def test_upcoming_trips_can_be_filtered(self):
        """If supplying an 'after' date in the future, that still permits filtering!"""
        _next_week = factories.TripFactory.create(trip_date='2019-02-22')
        next_month = factories.TripFactory.create(trip_date='2019-03-22')
        response, soup = self._get('/trips/?after=2019-03-15')
        self._expect_link_for_date(soup, '2018-03-15')
        # We remove the RSS + email buttons
        header = soup.body.find('h3')
        self.assertEqual(strip_whitespace(header.text), 'Trips after Mar 15, 2019')
        # The trip next month is included, but not next week (since we're filtering ahead)
        self._expect_current_trips(response, [next_month.pk])
@freeze_time("2019-02-15 12:25:00 EST")
class AllTripsViewTest(TestCase, Helpers):
    """Behavior of /trips/all/ (past and upcoming trips together)."""

    def test_all_trips_with_no_past(self):
        """Even with no past trips, we still display 'All trips'"""
        response, soup = self._get('/trips/all/')
        self.assertFalse(response.context['past_trips'])
        self._expect_title(soup, 'All trips')

    def test_all_trips_with_past_trips(self):
        """Test the usual case - 'all trips' segmenting past & upcoming trips."""
        next_week = factories.TripFactory.create(trip_date='2019-02-22')
        last_month = factories.TripFactory.create(trip_date='2019-01-15')
        years_ago = factories.TripFactory.create(trip_date='2010-11-15')
        response, soup = self._get('/trips/all/')
        self._expect_title(soup, 'All trips')
        # Past trips are expected most-recent-first.
        self._expect_current_trips(response, [next_week.pk])
        self._expect_past_trips(response, [last_month.pk, years_ago.pk])

    def test_all_trips_with_filter(self):
        """We support filtering trips even on the 'all' page.
        The default interaction with filtering trips should instead just use
        the `/trips/` URL, but this test demonstrates that filtering works on
        the 'all' page too.
        """
        # Make a very old trip that will not be in our filter
        factories.TripFactory.create(trip_date='2016-12-23')
        # Make an older trip, that takes place after our query
        expected_trip = factories.TripFactory.create(trip_date='2017-11-21')
        response, soup = self._get('/trips/all/?after=2017-11-15')
        self._expect_title(soup, 'Trips after 2017-11-15')
        self._expect_past_trips(response, [expected_trip.pk])
        self._expect_link_for_date(soup, '2016-11-15')
class CreateTripViewTest(TestCase, Helpers):
    """Behavior of /trips/create/ (program options and form submission)."""

    @freeze_time("2019-12-15 12:25:00 EST")
    def test_superuser_can_create_any_program(self):
        """Even though it's not IAP, the superuser can make any trip type."""
        user = factories.UserFactory.create(is_superuser=True)
        factories.ParticipantFactory.create(user_id=user.pk)
        self.client.force_login(user)
        _resp, soup = self._get('/trips/create/')
        options = soup.find('select', attrs={'name': 'program'}).find_all('option')
        self.assertCountEqual(
            [opt['value'] for opt in options],
            [program.value for program in enums.Program],
        )

    @freeze_time("2022-01-24 12:25:00 EST")
    def test_winter_school_is_default_during_iap(self):
        """For simplicity, we assume that new trips in IAP are in the WS program."""
        leader = factories.ParticipantFactory.create()
        factories.LeaderRatingFactory.create(
            participant=leader, activity=models.LeaderRating.WINTER_SCHOOL
        )
        self.client.force_login(leader.user)
        _resp, soup = self._get('/trips/create/')
        select = soup.find('select', attrs={'name': 'program'})
        self.assertEqual(
            [opt.text for opt in select.find_all('option')],
            ['Winter School', 'Winter (outside IAP)', 'Circus', 'Service', 'None'],
        )
        # The WS option should be pre-selected during IAP.
        ws_option = select.find('option', value=enums.Program.WINTER_SCHOOL.value)
        self.assertIn('selected', ws_option.attrs)

    @freeze_time("2019-12-15 12:25:00 EST")
    def test_winter_school_not_available_outside_iap(self):
        """Normal trip leaders can only make normal winter trips outside IAP."""
        leader = factories.ParticipantFactory.create()
        factories.LeaderRatingFactory.create(
            participant=leader, activity=models.LeaderRating.WINTER_SCHOOL
        )
        self.client.force_login(leader.user)
        _resp, soup = self._get('/trips/create/')
        select = soup.find('select', attrs={'name': 'program'})
        winter_option = select.find('option', value=enums.Program.WINTER_NON_IAP.value)
        self.assertIn('selected', winter_option.attrs)
        programs = [opt['value'] for opt in select.find_all('option')]
        self.assertNotIn(enums.Program.WINTER_SCHOOL.value, programs)

    def test_creation(self):
        """End-to-end test of form submission on creating a new trip.
        This is something of an integration test. Dealing with forms
        in this way is a bit of a hassle, but this ensures that we're handling
        everything properly.
        More specific behavior testing should be done at the form level.
        """
        user = factories.UserFactory.create()
        self.client.force_login(user)
        trip_leader = factories.ParticipantFactory.create(user=user)
        factories.LeaderRatingFactory.create(
            participant=trip_leader, activity=models.LeaderRating.BIKING
        )
        _resp, soup = self._get('/trips/create/')
        form = soup.find('form')
        form_data = dict(self._form_data(form))
        # We have the selections pre-populated too.
        self.assertEqual(form_data['program'], enums.Program.BIKING.value)
        self.assertEqual(form_data['algorithm'], 'lottery')
        # Fill in the form with some blank, but required values (accept the other defaults)
        form_data.update(
            {
                'name': 'My Great Trip',
                'trip_type': enums.TripType.MOUNTAIN_BIKING.value,
                'difficulty_rating': 'Intermediate',
                'description': "Let's go hiking!",
            }
        )
        self.assertEqual(form['action'], '.')
        # Upon form submission, we're redirected to the new trip's page!
        resp = self.client.post('/trips/create/', form_data, follow=False)
        self.assertEqual(resp.status_code, 302)
        new_trip_url = re.compile(r'^/trips/(\d+)/$')
        self.assertRegex(resp.url, new_trip_url)
        match = new_trip_url.match(resp.url)
        assert match is not None
        trip_pk = int(match.group(1))
        trip = models.Trip.objects.get(pk=trip_pk)
        self.assertEqual(trip.creator, trip_leader)
        self.assertEqual(trip.name, 'My Great Trip')
class EditTripViewTest(TestCase, Helpers):
    def test_superusers_may_edit_trip_without_required_activity(self):
        # Superusers can edit trips in programs with no required activity
        # rating (the edit form must still render).
        admin = factories.UserFactory.create(is_superuser=True)
        self.client.force_login(admin)
        trip = factories.TripFactory.create(program=enums.Program.SERVICE.value)
        self.assertIsNone(trip.required_activity_enum())
        _edit_resp, soup = self._get(f'/trips/{trip.pk}/edit/')
        self.assertTrue(soup.find('form'))
    def test_leaders_cannot_edit_other_leaders_trip(self):
        # A rated leader who does not lead this particular trip is refused
        # the edit form.
        leader = factories.ParticipantFactory.create()
        factories.LeaderRatingFactory.create(
            participant=leader, activity=models.LeaderRating.CLIMBING
        )
        self.client.force_login(leader.user)
        trip = factories.TripFactory.create(
            name="Rad Trip", program=enums.Program.CLIMBING.value
        )
        _edit_resp, soup = self._get(f'/trips/{trip.pk}/edit/')
        self.assertTrue(soup.find('h2', text='Must be a leader to administrate trip'))
        self.assertFalse(soup.find('form'))
    @freeze_time("2022-01-15 12:25:00 EST")
    def test_editing_non_ws_trip_during_iap(self):
        """Existing trips, which are not WS, don't have the program enum changed.
        This test ensures that we don't accidentally change an existing trip's program,
        due to logic which is meant to default a *new* trip to being WS during IAP.
        """
        leader = factories.ParticipantFactory.create()
        self.client.force_login(leader.user)
        factories.LeaderRatingFactory.create(
            participant=leader, activity=models.LeaderRating.WINTER_SCHOOL
        )
        trip = factories.TripFactory.create(
            creator=leader, program=enums.Program.NONE.value
        )
        trip.leaders.add(leader)
        _edit_resp, soup = self._get(f'/trips/{trip.pk}/edit/')
        select = soup.find('select', attrs={'name': 'program'})
        # WS is given as an *option* for this trip, but not selected
        self.assertIn('Winter School', [opt.text for opt in select.find_all('option')])
        # The existing trip's program is selected
        ws_option = select.find('option', value=enums.Program.NONE.value)
        self.assertIn('selected', ws_option.attrs)
def test_editing(self):
user = factories.UserFactory.create(email='leader@example.com')
self.client.force_login(user)
trip_creator = factories.ParticipantFactory.create(user=user)
factories.LeaderRatingFactory.create(
participant=trip_creator, activity=models.LeaderRating.WINTER_SCHOOL
)
trip = factories.TripFactory.create(
creator=trip_creator, program=enums.Program.WINTER_SCHOOL.value
)
trip.leaders.add(trip_creator)
# Add an old leader to this trip, to demonstrate that editing & submitting is allowed
old_leader = factories.ParticipantFactory.create()
factories.LeaderRatingFactory.create(
participant=old_leader,
activity=models.LeaderRating.WINTER_SCHOOL,
active=False,
)
trip.leaders.add(old_leader)
_edit_resp, soup = self._get(f'/trips/{trip.pk}/edit/')
form = soup.find('form')
form_data = dict(self._form_data(form))
# We supply the two leaders via an Angular directive
# (Angular will be used to populate the `leaders` input, so manually populate here)
self.assertEqual(
soup.find('leader-select')['leader-ids'],
f'[{trip_creator.pk}, {old_leader.pk}]',
)
form_data['leaders'] = [trip_creator.pk, old_leader.pk]
# We have the selections pre-populated with existing data
self.assertEqual(form_data['program'], enums.Program.WINTER_SCHOOL.value)
self.assertEqual(form_data['algorithm'], 'lottery')
# Make some updates to the trip!
form_data.update({'name': 'An old WS trip'})
self.assertEqual(form['action'], '.')
# Upon form submission, we're redirected to the new trip's page!
resp = self.client.post(f'/trips/{trip.pk}/edit/', form_data, follow=False)
self.assertEqual(resp.status_code, 302)
trip = models.Trip.objects.get(pk=trip.pk)
self.assertEqual(trip.creator, trip_creator)
self.assertCountEqual(trip.leaders.all(), [old_leader, trip_creator])
self.assertEqual(trip.name, 'An old WS trip')
# To support any legacy behavior still around, we set activity.
self.assertEqual(trip.activity, 'winter_school')
@freeze_time("2019-02-15 12:25:00 EST")
def test_update_rescinds_approval(self):
leader = factories.ParticipantFactory.create()
self.client.force_login(leader.user)
factories.LeaderRatingFactory.create(
participant=leader, activity=enums.Activity.CLIMBING.value
)
trip = factories.TripFactory.create(
creator=leader,
program=enums.Program.CLIMBING.value,
trip_date=date(2019, 3, 2),
chair_approved=True,
)
edit_resp, soup = self._get(f'/trips/{trip.pk}/edit/')
self.assertTrue(edit_resp.context['update_rescinds_approval'])
form = soup.find('form')
form_data = dict(self._form_data(form))
self.assertEqual(
strip_whitespace(soup.find(class_='alert-warning').text),
'This trip has been approved by the activity chair. '
'Making any changes will rescind this approval.',
)
# Upon form submission, we're redirected to the new trip's page!
resp = self.client.post(f'/trips/{trip.pk}/edit/', form_data, follow=False)
self.assertEqual(resp.status_code, 302)
self.assertEqual(resp.url, f'/trips/{trip.pk}/')
# We can see that chair approval is now removed.
trip = models.Trip.objects.get(pk=trip.pk)
self.assertFalse(trip.chair_approved)
class ApproveTripsViewTest(TestCase):
    """Tests for the per-activity trip approval listing (/<activity>/trips/)."""

    def setUp(self):
        self.user = factories.UserFactory.create()
        self.client.force_login(self.user)

    @staticmethod
    def _make_climbing_trip(chair_approved=False, **kwargs):
        # Helper: a climbing-program trip, unapproved unless stated otherwise.
        return factories.TripFactory.create(
            program=enums.Program.CLIMBING.value,
            activity=enums.Activity.CLIMBING.value,
            chair_approved=chair_approved,
            **kwargs,
        )

    def test_unauthenticated(self):
        # Anonymous users are redirected to the login page.
        self.client.logout()
        response = self.client.get('/climbing/trips/')
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, '/accounts/login/?next=/climbing/trips/')

    def test_not_an_activity_chair(self):
        # Logged-in users who are not the activity's chair are forbidden.
        response = self.client.get('/climbing/trips/')
        self.assertEqual(response.status_code, 403)

    def test_bad_activity(self):
        # Unknown activity slugs 404.
        response = self.client.get('/snowmobiling/trips/')
        self.assertEqual(response.status_code, 404)

    def test_no_trips_found(self):
        perm_utils.make_chair(self.user, enums.Activity.CLIMBING)
        response = self.client.get('/climbing/trips/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['trips_needing_approval'], [])
        self.assertIsNone(response.context['first_unapproved_trip'])

    def test_all_trips_approved(self):
        # Already-approved trips don't appear in the approval queue.
        self._make_climbing_trip(chair_approved=True)
        self._make_climbing_trip(chair_approved=True)
        perm_utils.make_chair(self.user, enums.Activity.CLIMBING)
        response = self.client.get('/climbing/trips/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['trips_needing_approval'], [])
        self.assertIsNone(response.context['first_unapproved_trip'])

    def test_level_column(self):
        """The "level" column only appears for activity chairs."""
        self._make_climbing_trip(chair_approved=True)
        perm_utils.make_chair(self.user, enums.Activity.CLIMBING)
        soup = BeautifulSoup(self.client.get('/climbing/trips/').content, 'html.parser')
        self.assertFalse(soup.find('th', text='Level'))
        perm_utils.make_chair(self.user, enums.Activity.WINTER_SCHOOL)
        factories.TripFactory.create(program=enums.Program.WINTER_SCHOOL.value)
        ws_soup = BeautifulSoup(
            self.client.get('/winter_school/trips/').content, 'html.parser'
        )
        self.assertTrue(ws_soup.find('th', text='Level'))

    def test_chair(self):
        # Chairs see the first unapproved trip for their activity.
        self._make_climbing_trip(chair_approved=True)
        unapproved = factories.TripFactory.create(
            program=enums.Program.SCHOOL_OF_ROCK.value,
            activity=enums.Activity.CLIMBING.value,
            chair_approved=False,
        )
        perm_utils.make_chair(self.user, enums.Activity.CLIMBING)
        response = self.client.get('/climbing/trips/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['first_unapproved_trip'], unapproved)

    @freeze_time("2019-07-05 12:25:00 EST")
    def test_past_unapproved_trips_ignored(self):
        """We only prompt chairs to look at trips which are upcoming & unapproved."""
        # Unapproved, but it's in the past!
        self._make_climbing_trip(trip_date=date(2019, 7, 4))
        perm_utils.make_chair(self.user, enums.Activity.CLIMBING)
        response = self.client.get('/climbing/trips/')
        self.assertEqual(response.status_code, 200)
        self.assertIsNone(response.context['first_unapproved_trip'])
        # Make some future trips now - these trips will be ranked by date/itinerary!
        fri = self._make_climbing_trip(trip_date=date(2019, 7, 5))
        sun = self._make_climbing_trip(trip_date=date(2019, 7, 7))
        sat = self._make_climbing_trip(trip_date=date(2019, 7, 6))
        context = self.client.get('/climbing/trips/').context
        self.assertEqual(context['trips_needing_approval'], [fri, sat, sun])
        self.assertEqual(context['first_unapproved_trip'], fri)

    @freeze_time("2019-07-05 12:25:00 EST")
    def test_trips_with_itinerary_first(self):
        """Trips that have an itinerary are first in the approval flow."""
        perm_utils.make_chair(self.user, enums.Activity.CLIMBING)
        sat_with_info = self._make_climbing_trip(
            trip_date=date(2019, 7, 6),
            info=factories.TripInfoFactory.create(),
        )
        sat_without_info = self._make_climbing_trip(
            trip_date=date(2019, 7, 6), info=None
        )
        sun_with_info = self._make_climbing_trip(
            trip_date=date(2019, 7, 7),
            info=factories.TripInfoFactory.create(),
        )
        sun_without_info = self._make_climbing_trip(
            trip_date=date(2019, 7, 7), info=None
        )
        context = self.client.get('/climbing/trips/').context
        self.assertEqual(
            context['trips_needing_approval'],
            [sat_with_info, sat_without_info, sun_with_info, sun_without_info],
        )
        self.assertEqual(context['first_unapproved_trip'], sat_with_info)

    @freeze_time("2019-07-05 12:25:00 EST")
    def test_trips_needing_itinerary(self):
        # Chairs get a ready-made email list of leaders missing an itinerary.
        perm_utils.make_chair(self.user, enums.Activity.CLIMBING)
        sat_trip = self._make_climbing_trip(trip_date=date(2019, 7, 6))
        sun_trip = self._make_climbing_trip(trip_date=date(2019, 7, 7))
        sun_trip_info = self._make_climbing_trip(
            trip_date=date(2019, 7, 7), info=factories.TripInfoFactory.create()
        )
        dean = factories.ParticipantFactory.create(
            name="Dean Potter", email="dean@example.com"
        )
        sun_trip.leaders.add(dean)
        # Leaders with multiple trips aren't repeated
        lynn = factories.ParticipantFactory.create(
            name="Lynn Hill", email="lynn@example.com"
        )
        sat_trip.leaders.add(lynn)
        sun_trip.leaders.add(lynn)
        # This trip is a week away; itineraries aren't open yet
        next_sat_trip = self._make_climbing_trip(trip_date=date(2019, 7, 13))
        # Alex has no trips that need itinerary
        alex = factories.ParticipantFactory.create(
            name="Alex Puccio", email="alex@example.com"
        )
        sun_trip_info.leaders.add(alex)
        next_sat_trip.leaders.add(alex)
        context = self.client.get('/climbing/trips/').context
        # Leaders are sorted by name
        self.assertEqual(
            context['leader_emails_missing_itinerary'],
            '"Dean Potter" <dean@example.com>, "Lynn Hill" <lynn@example.com>',
        )
|
DavidCain/mitoc-trips
|
ws/tests/views/test_trips.py
|
Python
|
gpl-3.0
| 23,912
|
from Sire.IO import *
from Sire.Mol import *
from Sire.CAS import *
from Sire.System import *
from Sire.Move import *
from Sire.MM import *
from Sire.FF import *
from Sire.Units import *
from Sire.Vol import *
from Sire.Maths import *
from Sire.Base import *
from Sire.Qt import *
import os,re,sys
import shutil
# Compare Sire's energy for a solvated protein/ligand complex against
# reference AMBER11/sander energies for the same top/crd input.
combining_rules = "arithmetic"
temperature = 25 * celsius
pressure = 1 * atm
# Cutoffs far larger than the system: effectively "no cutoff", to match
# the non-periodic sander reference calculation below.
coulomb_cutoff = 1000 * angstrom
coulomb_feather = 999.5 * angstrom
lj_cutoff = 1000 * angstrom
lj_feather = 999.5 * angstrom
#############################################################
top_file = "test/io/SYSTEM.top"
crd_file = "test/io/SYSTEM.crd"
print("Solvated protein/ligand complex: Loading a top file...")
timer = QTime()
timer.start()
amber = Amber()
molecules, space = amber.readCrdTop(crd_file, top_file)
ms = timer.elapsed()
print("...took %d s" % (ms/1000.))
# Overload, we want to calc the energy in a non periodic box for comparison with Sander
space = Cartesian()
moleculeNumbers = molecules.molNums()
moleculeList = []
for moleculeNumber in moleculeNumbers:
    molecule = molecules.molecule(moleculeNumber).molecule()
    moleculeList.append(molecule)
system = System()
# NOTE(review): assumes molecule 0 is the solute, molecule 1 the protein,
# and everything else solvent — confirm against the input topology ordering.
solute = MoleculeGroup("solute", moleculeList[0])
protein = MoleculeGroup("protein", moleculeList[1])
solvent = MoleculeGroup("solvent")
for molecule in moleculeList[2:]:
    solvent.add(molecule)
all = MoleculeGroup("all")
all.add(solute)
all.add(solvent)
all.add(protein)
# Add these groups to the System
system.add(solute)
system.add(solvent)
system.add(protein)
system.add(all)
# Now create all of the forcefields
# - first solvent-solvent coulomb/LJ (CLJ) energy
solventff = InterCLJFF("solvent:solvent")
solventff.add(solvent)
# Now solute bond, angle, dihedral energy
solute_intraff = InternalFF("solute_intraff")
solute_intraff.add(solute)
# Now solute intramolecular CLJ energy
solute_intraclj = IntraCLJFF("solute_intraclj")
solute_intraclj.add(solute)
# The protein bond, angle, dihedral energy
protein_intraff = InternalFF("protein_intraff")
protein_intraff.add(protein)
# The protein intramolecular CLJ energy
protein_intraclj = IntraCLJFF("protein_intraclj")
protein_intraclj.add(protein)
# Now the solute-solvent CLJ energy
solute_solventff = InterGroupCLJFF("solute:solvent")
solute_solventff.add(solute, MGIdx(0))
solute_solventff.add(solvent, MGIdx(1))
# Now the solute-protein CLJ energy
solute_proteinff = InterGroupCLJFF("solute:protein")
solute_proteinff.add(solute, MGIdx(0))
solute_proteinff.add(protein, MGIdx(1))
# The protein-solvent energy
protein_solventff = InterGroupCLJFF("protein:solvent")
protein_solventff.add(protein, MGIdx(0))
protein_solventff.add(solvent, MGIdx(1))
# Here is the list of all forcefields
forcefields = [ solute_intraff, solute_intraclj,
                solventff, solute_solventff,
                protein_intraff, protein_intraclj,
                solute_proteinff, protein_solventff ]
# Add these forcefields to the system
for forcefield in forcefields:
    system.add(forcefield)
system.setProperty( "space", space )
system.setProperty( "switchingFunction",
                    HarmonicSwitchingFunction(coulomb_cutoff, coulomb_feather,
                                              lj_cutoff, lj_feather) )
system.setProperty( "combiningRules", VariantProperty(combining_rules) )
# Total energy = sum of every per-forcefield total component.
total_nrg = solute_intraclj.components().total() + solute_intraff.components().total() +\
    solventff.components().total() + solute_solventff.components().total() +\
    protein_intraclj.components().total() + protein_intraff.components().total() + \
    solute_proteinff.components().total() + protein_solventff.components().total()
e_total = system.totalComponent()
system.setComponent( e_total, total_nrg )
# Add a space wrapper that wraps all molecules into the box centered at (0,0,0)
#system.add( SpaceWrapper(Vector(0,0,0), all) )
print("\nTotal energy ")
print(system.energy())
print("Components energies ")
for component in list(system.energyComponents().keys()):
    print(component, system.energyComponents().value(component) * kcal_per_mol)
# Note that tip3p water are likely to have bonds between hydrogen atoms.
PDB().write(all, "out.pdb")
print("The AMBER11/sander energies for this system are ")
print("""
# NSTEP = 0 TIME(PS) = 0.000 TEMP(K) = 0.00 PRESS = 0.0
# Etot = -47010.2216 EKtot = 0.0000 EPtot = -47010.2216
# BOND = 898.1982 ANGLE = 5310.2620 DIHED = 2922.5644
# 1-4 NB = 790.8755 1-4 EEL = 7702.0145 VDWAALS = 7345.0484
# EELEC = -71979.1846 EHBOND = 0.0000 RESTRAINT = 0.0000
# EKCMT = 0.0000 VIRIAL = 0.0000 VOLUME = 1856243.3813
""")
print("The total energies differ by %12.8f kcal/mol" % ( system.energy().value() - -47010.2216 ))
print("""Some difference is expected in the coulombic energies due to the
use of different constants by Sander ( %12.8f ) and Sire ( %12.8f ).
The other terms should be in excellent agreement. """ % (18.2223*18.2223,
one_over_four_pi_eps0))
#############################################################
#top_file = "/home/julien/projects/cyps/sire/setup/leap/LI8/LIG_SOLV.top"
#crd_file = "/home/julien/projects/cyps/sire/setup/leap/LI8/LIG_SOLV.crd"
#top_file = "/home/julien/projects/cyps/sire/setup/leap/LI8/LIG_GAS.top"
#crd_file = "/home/julien/projects/cyps/sire/setup/leap/LI8/LIG_GAS.crd"
#top_file = "/home/julien/projects/cyps/sire/setup/leap/methanol/LIG_GAS.top"
#crd_file = "/home/julien/projects/cyps/sire/setup/leap/methanol/LIG_GAS.crd"
#top_file = "/home/julien/projects/cyps/sire/setup/leap/ethane/LIG_GAS.top"
#crd_file = "/home/julien/projects/cyps/sire/setup/leap/ethane/LIG_GAS.crd"
#top_file = "/home/julien/projects/cyps/sire/setup/leap/ethane-edit/LIG_GAS.top"
#crd_file = "/home/julien/projects/cyps/sire/setup/leap/ethane-edit/LIG_GAS.crd"
#top_file = "/home/julien/projects/cyps/sire/setup/leap/benzene/LIG_GAS.top"
#crd_file = "/home/julien/projects/cyps/sire/setup/leap/benzene/LIG_GAS.crd"
#top_file = "/home/julien/projects/cyps/sire/setup/leap/benzene-distorted/LIG_GAS.top"
#crd_file = "/home/julien/projects/cyps/sire/setup/leap/benzene-distorted/LIG_GAS.crd"
#top_file = "/home/julien/projects/cyps/sire/setup/leap/pentane/LIG_GAS.top"
#crd_file = "/home/julien/projects/cyps/sire/setup/leap/pentane/LIG_GAS.crd"
#top_file = "/home/julien/projects/cyps/sire/setup/leap/dimedone/LIG_GAS.top"
#crd_file = "/home/julien/projects/cyps/sire/setup/leap/dimedone/LIG_GAS.crd"
#top_file = "/home/julien/projects/cyps/sire/setup/leap/cyclopropane/LIG_GAS.top"
#crd_file = "/home/julien/projects/cyps/sire/setup/leap/cyclopropane/LIG_GAS.crd"
#top_file = "/home/julien/projects/cyps/sire/setup/leap/cyclohexane/LIG_GAS.top"
#crd_file = "/home/julien/projects/cyps/sire/setup/leap/cyclohexane/LIG_GAS.crd"
#top_file = "/home/julien/projects/cyps/sire/setup/leap/cyclohexane-edit/LIG_GAS.top"
#crd_file = "/home/julien/projects/cyps/sire/setup/leap/cyclohexane-edit/LIG_GAS.crd"
#top_file = "/home/julien/projects/cyps/sire/setup/leap/LI8/COMPLEX_GAS.top"
#crd_file = "/home/julien/projects/cyps/sire/setup/leap/LI8/COMPLEX_GAS.crd"
#
# SOLUTE solvated, non periodic
#
#Etot = -4270.5482 EKtot = 0.0000 EPtot = -4270.5482
# BOND = 246.3819 ANGLE = 39.9317 DIHED = 18.2875
#1-4 NB = 17.8213 1-4 EEL = -100.7708 VDWAALS = 1484.6639
# EELEC = -5976.8636 EHBOND = 0.0000 RESTRAINT = 0.0000
#KCMT = 0.0000 VIRIAL = 0.0000 VOLUME = 704972.9702
# SOLUTE in gas
# NSTEP = 0 TIME(PS) = 0.000 TEMP(K) = 0.00 PRESS = 0.0
# Etot = 1035.2593 EKtot = 0.0000 EPtot = 1035.2593
# BOND = 246.3483 ANGLE = 39.9317 DIHED = 18.2875
# 1-4 NB = 17.8213 1-4 EEL = -100.7804 VDWAALS = 800.6806
# EELEC = 12.9704 EHBOND = 0.0000 RESTRAINT = 0.0000
# EKCMT = 0.0000 VIRIAL = 0.0000 VOLUME = 291200.3229
# Density = 0.0012
# Methanol gas phase
# NSTEP = 0 TIME(PS) = 0.000 TEMP(K) = 0.00 PRESS = 0.0
# Etot = 4.5332 EKtot = 0.0000 EPtot = 4.5332
# BOND = 0.1647 ANGLE = 0.3909 DIHED = 0.1363
# 1-4 NB = 0.0000 1-4 EEL = 3.8414 VDWAALS = 0.0000
# EELEC = 0.0000 EHBOND = 0.0000 RESTRAINT = 0.0000
# EKCMT = 0.0000 VIRIAL = 0.0000 VOLUME = 239214.0118
# Density = 0.0002
# Ethane
# NSTEP = 0 TIME(PS) = 0.000 TEMP(K) = 0.00 PRESS = 0.0
# Etot = 1.8211 EKtot = 0.0000 EPtot = 1.8211
# BOND = 0.0352 ANGLE = 0.3727 DIHED = 0.3701
# 1-4 NB = 0.1184 1-4 EEL = 0.9247 VDWAALS = 0.0000
# EELEC = 0.0000 EHBOND = 0.0000 RESTRAINT = 0.0000
# EKCMT = 0.0000 VIRIAL = 0.0000 VOLUME = 248073.6160
# Density = 0.0002
# Benzene
# NSTEP = 0 TIME(PS) = 0.000 TEMP(K) = 0.00 PRESS = 0.0
# Etot = 6.9367 EKtot = 0.0000 EPtot = 6.9367
# BOND = 1.4488 ANGLE = 0.0003 DIHED = 0.0000
# 1-4 NB = 3.3090 1-4 EEL = -0.1645 VDWAALS = -0.1587
# EELEC = 2.5019 EHBOND = 0.0000 RESTRAINT = 0.0000
# EKCMT = 0.0000 VIRIAL = 0.0000 VOLUME = 251003.3702
# Density = 0.0005
# ------------------------------------------------------------------------------
# benzene-distorted
# NSTEP = 0 TIME(PS) = 0.000 TEMP(K) = 0.00 PRESS = 0.0
# Etot = 43.3166 EKtot = 0.0000 EPtot = 43.3166
# BOND = 12.1533 ANGLE = 5.1175 DIHED = 20.4993
# 1-4 NB = 3.3749 1-4 EEL = -0.1636 VDWAALS = -0.1592
# EELEC = 2.4944 EHBOND = 0.0000 RESTRAINT = 0.0000
#
# Pentane
# NSTEP = 0 TIME(PS) = 0.000 TEMP(K) = 0.00 PRESS = 0.0
# Etot = 6.2335 EKtot = 0.0000 EPtot = 6.2335
# BOND = 3.0295 ANGLE = 0.2986 DIHED = 0.8000
# 1-4 NB = 1.5475 1-4 EEL = -0.3847 VDWAALS = -0.1756
# EELEC = 1.1182 EHBOND = 0.0000 RESTRAINT = 0.0000
#
# DIMEDONE
# NSTEP = 0 TIME(PS) = 0.000 TEMP(K) = 0.00 PRESS = 0.0
# Etot = -57.1783 EKtot = 0.0000 EPtot = -57.1783
# BOND = 1.0758 ANGLE = 1.8962 DIHED = 10.5221
# 1-4 NB = 4.1964 1-4 EEL = -150.4665 VDWAALS = -1.4352
# EELEC = 77.0329 EHBOND = 0.0000 RESTRAINT = 0.0000
#
# Cyclopropane
#
# NSTEP = 0 TIME(PS) = 0.000 TEMP(K) = 0.00 PRESS = 0.0
# Etot = 15.1811 EKtot = 0.0000 EPtot = 15.1811
# BOND = 1.8459 ANGLE = 1.4451 DIHED = 6.2051
# 1-4 NB = 0.0893 1-4 EEL = 5.5957 VDWAALS = 0.0000
# EELEC = 0.0000 EHBOND = 0.0000 RESTRAINT = 0.0000
#
# Cyclohexane
# NSTEP = 0 TIME(PS) = 0.000 TEMP(K) = 0.00 PRESS = 0.0
# Etot = 8.2912 EKtot = 0.0000 EPtot = 8.2912
# BOND = 2.5535 ANGLE = 0.2117 DIHED = 2.7506
# 1-4 NB = 2.7541 1-4 EEL = -0.8982 VDWAALS = -0.5223
# EELEC = 1.4417 EHBOND = 0.0000 RESTRAINT = 0.0000
#
# LI8 + PROTEIN + 168 water HOH
# NSTEP = 0 TIME(PS) = 0.000 TEMP(K) = 0.00 PRESS = 0.0
# Etot = 6474.5265 EKtot = 0.0000 EPtot = 6474.5265
# BOND = 897.8876 ANGLE = 5310.2620 DIHED = 2922.5644
# 1-4 NB = 790.8755 1-4 EEL = 7702.0144 VDWAALS = 859.0611
# EELEC = -12008.1385 EHBOND = 0.0000 RESTRAINT = 0.0000
#
# Li8 + PROTEIN + WATERBOX
# NSTEP = 0 TIME(PS) = 0.000 TEMP(K) = 0.00 PRESS = 0.0
# Etot = -47010.2216 EKtot = 0.0000 EPtot = -47010.2216
# BOND = 898.1982 ANGLE = 5310.2620 DIHED = 2922.5644
# 1-4 NB = 790.8755 1-4 EEL = 7702.0145 VDWAALS = 7345.0484
# EELEC = -71979.1846 EHBOND = 0.0000 RESTRAINT = 0.0000
|
chryswoods/SireTests
|
unittests/SireIO/amber.py
|
Python
|
gpl-2.0
| 13,125
|
#-*- coding: utf-8 -*-
import os
from clang.cindex import Config, Index, TypeKind
class ClangExtractor(object):
    """Walk a C source tree and collect function prototypes via libclang.

    extract() returns {function_name: [return_type, arg_type, ..., ("..." if
    variadic)]}, with the type "Lisp_Object" mapped to "void *".
    """

    def __init__(self, libclang_path, srcdir):
        # set_library_file may only be configured once per process; skip the
        # call when the requested library is already active.
        if Config.library_file != libclang_path:
            Config.set_library_file(libclang_path)
        self.srcdir = srcdir

    def extract(self):
        """Parse every .c/.h file under srcdir and return the prototype map."""
        protos = dict()
        # One Index serves all translation units; no need to recreate it
        # for every file (as the previous version did).
        index = Index.create()
        for dirpath, _dirnames, filenames in os.walk(self.srcdir):
            for fname in filenames:
                # Only parse C sources and headers.
                if fname.endswith((".c", ".h")):
                    fpath = os.path.join(dirpath, fname)
                    tu = index.parse(fpath)
                    self.__clang_find_protos(tu.cursor, protos)
        return protos

    def __clang_find_protos(self, node, protos):
        """Recursively record prototypes for FUNCTIONPROTO nodes under *node*."""
        if node.type.kind == TypeKind.FUNCTIONPROTO:  # FUNCTIONNOPROTO deliberately excluded
            name = node.spelling
            if name not in protos:
                protos[name] = list()
            # Only fill in the signature the first time this name is seen.
            if len(protos[name]) == 0:
                # "Lisp_Object" is rewritten to a generic pointer type.
                if node.result_type.spelling == "Lisp_Object":
                    protos[name].append("void *")
                else:
                    protos[name].append(node.result_type.get_canonical().spelling)
                for arg in node.get_arguments():
                    if arg.type.spelling == "Lisp_Object":
                        protos[name].append("void *")
                    else:
                        protos[name].append(arg.type.get_canonical().spelling)
                if node.type.is_function_variadic():
                    protos[name].append("...")
        for child in node.get_children():
            self.__clang_find_protos(child, protos)
|
Frky/scat
|
src/shell/data/clangextractor.py
|
Python
|
mit
| 1,769
|
import sys
import pymol
import os
import time
# Positional args: native PDB path, other PDB path, comma-separated residue
# ids; an optional truthy 4th arg triggers PNG rendering per residue.
# NOTE(review): `cmd` is assumed to be injected by PyMOL when this script is
# run through the PyMOL interpreter (e.g. `pymol script.py`); confirm —
# a plain `python` run would need `from pymol import cmd`.
pdb1 = sys.argv[1]
pdb2 = sys.argv[2]
# load pdb files
cmd.load(pdb1,"native")
cmd.load(pdb2,"other")
# full path to first pdb file
full_path = os.path.abspath(pdb1)
dir_path = os.path.dirname(full_path)
# parse residue list
reslist = []
sitem = sys.argv[3].split(",")
reslist.extend(sitem[:])
pymol.finish_launching()
# hide initial pdb structures
cmd.do('hide all')
# fit the two files
cmd.do('super native,other')
# set res 1 as center and origin
cmd.do("center native and resi 1")
cmd.do("disable native")
cmd.do("disable other")
# write outputs (images) next to the first pdb file
cmd.do("cd %s" % dir_path)
for res in reslist:
    # create combined object from native and other
    cmd.do("create %s,native,0,1" % res)
    cmd.do("create %s,other,0,2" % res)
    # coloring
    cmd.do('util.cbap %s' % res)
    # enable all states
    # select 5AA area around specified residue and draw lines
    cmd.do("select %s & (br. all within 5 of (%s and resi %s))" % (res,res,res))
    cmd.do("intra_fit sele")
    cmd.do("show lines, sele")
    # draw sticks for residue
    cmd.do("select %s and resi %s" %(res,res))
    cmd.do("show sticks, sele")
    # center
    cmd.do('set all_states, 1')
    # make sure that all the residue pairs are centered
    cmd.do('pair_fit %s and resi %s and n. ca, native and resi 1 and n. ca' % (res,res))
    cmd.center("%s and resi %s" %(res,res))
    # Optionally render three orthogonal views of this residue as PNGs.
    if len(sys.argv) > 4 and sys.argv[4]:
        pymol.cmd.png("%s_%d" % (res,0),width=1200,dpi=1200,quiet=1,ray=1)
        pymol.cmd.rotate("x",90,"%s" % res,camera=0)
        pymol.cmd.png("%s_%d" % (res,1),width=1200,dpi=1200,quiet=1,ray=1)
        pymol.cmd.rotate("y",90,"%s" % res,camera=0)
        pymol.cmd.png("%s_%d" % (res,2),width=1200,dpi=1200,quiet=1,ray=1)
    cmd.disable('%s' %res)
    cmd.do("delete sele")
|
larsbratholm/pymol_residue_compare
|
pymol_residue_compare.py
|
Python
|
mit
| 1,807
|
from django.conf.urls import url
from eventex.subscriptions.views import new, detail, json_example
# URL routes for the subscriptions app.
urlpatterns = [
    url(r'^$', new, name='new'),                        # subscription form
    url(r'^json$', json_example, name='json_example'),  # sample JSON endpoint
    url(r'^(?P<pk>\d+)/$', detail, name='detail'),      # subscription detail by pk
]
|
renzon/wttd
|
eventex/subscriptions/urls.py
|
Python
|
agpl-3.0
| 258
|
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
CONF = cfg.CONF
# Test fixture module: importing it registers the string option 'baa'
# under the option group 'fbaar' as a side effect.
opt = cfg.StrOpt('baa')
CONF.register_opt(opt, group='fbaar')
|
varunarya10/oslo.config
|
tests/testmods/fbaar_baa_opt.py
|
Python
|
apache-2.0
| 715
|
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../tools'))
import files
import graphs
def main(argv):
    """Print, space-separated, the degree of every node (1..n) in the
    undirected graph read from the file named by argv[0]."""
    n, m, edges = files.read_graph(argv[0])
    degree = graphs.degree_table(edges, directed = False)
    # The original comprehension reused `n` as its loop variable, shadowing
    # the node count; iterate the range directly instead.
    print(' '.join(str(degree[node]) for node in range(1, n + 1)))

if __name__ == "__main__":
    main(sys.argv[1:])
|
cowboysmall/rosalind
|
src/heights/rosalind_deg.py
|
Python
|
mit
| 400
|
from yowsup_ext.layers.store import db
import peewee
import datetime
class Broadcast(db.get_base_model()):
    # wId: Can't remember so far what that was for, but it's in openwa so maybe important
    wId = peewee.CharField()
    # Pass the *callable*, not its result: `datetime.datetime.now()` would be
    # evaluated once at import time, stamping every row with the same moment.
    created = peewee.DateTimeField(default=datetime.datetime.now)
|
vaginessa/yowlayer-store
|
yowsup_ext/layers/store/models/broadcast.py
|
Python
|
gpl-3.0
| 284
|
def is_hiragana(ch):
    """Return True when *ch* lies in the Unicode hiragana block (U+3040-U+309F)."""
    return ord(ch) in range(0x3040, 0x30A0)
def is_katakana(ch):
    """Return True when *ch* lies in the Unicode katakana block (U+30A0-U+30FF)."""
    return ord(ch) in range(0x30A0, 0x3100)
def get_character_type(ch):
    """Classify one character: space, digit, lower/upper Latin, kana, or other.

    Checks run in the same order as the original if/elif chain, so a
    character matching several predicates keeps its original label.
    """
    checks = (
        (str.isspace, 'ZSPACE'),
        (str.isdigit, 'ZDIGIT'),
        (str.islower, 'ZLLET'),
        (str.isupper, 'ZULET'),
        (is_hiragana, 'HIRAG'),
        (is_katakana, 'KATAK'),
    )
    for predicate, label in checks:
        if predicate(ch):
            return label
    return 'OTHER'
def get_character_types(string):
    """Return the distinct character-type labels of *string*, sorted and '-'-joined."""
    labels = {get_character_type(ch) for ch in string}
    return '-'.join(sorted(labels))
def extract_pos_with_subtype(morph):
    """Join the POS fields of *morph* (elements 1 up to the first '*') with '-'."""
    first_star = morph.index('*')
    return '-'.join(morph[1:first_star])
def word2features(sent, i):
    """Build the CRF feature list for token *i* of *sent*.

    Each element of *sent* is a morph list: [surface, POS fields..., '*', ...,
    iob_tag]. Features combine the current token with a window of two tokens
    on each side; past tokens also contribute their IOB tag, future tokens
    do not (their tags are unknown at decoding time).
    """
    word = sent[i][0]
    chtype = get_character_types(sent[i][0])
    postag = extract_pos_with_subtype(sent[i])
    # Features of the current token itself.
    features = [
        'bias',
        'word=' + word,
        'type=' + chtype,
        'postag=' + postag,
    ]
    # Token two positions back (includes its gold/predicted IOB tag).
    if i >= 2:
        word2 = sent[i-2][0]
        chtype2 = get_character_types(sent[i-2][0])
        postag2 = extract_pos_with_subtype(sent[i-2])
        iobtag2 = sent[i-2][-1]
        features.extend([
            '-2:word=' + word2,
            '-2:type=' + chtype2,
            '-2:postag=' + postag2,
            '-2:iobtag=' + iobtag2,
        ])
    else:
        features.append('BOS')
    # Previous token (includes its IOB tag).
    if i >= 1:
        word1 = sent[i-1][0]
        chtype1 = get_character_types(sent[i-1][0])
        postag1 = extract_pos_with_subtype(sent[i-1])
        iobtag1 = sent[i-1][-1]
        features.extend([
            '-1:word=' + word1,
            '-1:type=' + chtype1,
            '-1:postag=' + postag1,
            '-1:iobtag=' + iobtag1,
        ])
    else:
        features.append('BOS')
    # Next token (no IOB tag available).
    if i < len(sent)-1:
        word1 = sent[i+1][0]
        chtype1 = get_character_types(sent[i+1][0])
        postag1 = extract_pos_with_subtype(sent[i+1])
        features.extend([
            '+1:word=' + word1,
            '+1:type=' + chtype1,
            '+1:postag=' + postag1,
        ])
    else:
        features.append('EOS')
    # Token two positions ahead (no IOB tag available).
    if i < len(sent)-2:
        word2 = sent[i+2][0]
        chtype2 = get_character_types(sent[i+2][0])
        postag2 = extract_pos_with_subtype(sent[i+2])
        features.extend([
            '+2:word=' + word2,
            '+2:type=' + chtype2,
            '+2:postag=' + postag2,
        ])
    else:
        features.append('EOS')
    return features
def sent2features(sent):
    """Compute the feature list for every token position of *sent*."""
    return [word2features(sent, position) for position in range(len(sent))]
def sent2labels(sent):
    """Extract the IOB label (last element) of every morph in *sent*."""
    labels = []
    for morph in sent:
        labels.append(morph[-1])
    return labels
def sent2tokens(sent):
    """Extract the surface form (first element) of every morph in *sent*."""
    tokens = []
    for morph in sent:
        tokens.append(morph[0])
    return tokens
def word2features_(word):
    # TODO: placeholder featurizer — not implemented; returns None for any word.
    pass
def sent2features_(sent):
    """Apply the per-word featurizer (currently a stub) to every word of *sent*."""
    return [word2features_(item) for item in sent]
def get_words(sents):
    """For each sentence, collect the surfaces of morphs whose POS is '名詞' (noun)."""
    collected = []
    for sentence in sents:
        nouns = [morph[0] for morph in sentence if morph[1] == '名詞']
        collected.append(nouns)
    return collected
from gensim import corpora, matutils
def create_dictionary(words):
    """Build and return a gensim Dictionary from an iterable of token lists."""
    return corpora.Dictionary(words)
def create_bow(dictionary, words):
    """Convert a token list into a bag-of-words vector via *dictionary*."""
    return dictionary.doc2bow(words)
def to_features(dictionary, words):
    """Return a dense feature vector (length len(dictionary)) for *words*."""
    bow = dictionary.doc2bow(words)
    dense_matrix = matutils.corpus2dense([bow], num_terms=len(dictionary))
    return list(dense_matrix.T[0])
|
Hironsan/HotPepperGourmetDialogue
|
modules/LanguageUnderstanding/utils/utils.py
|
Python
|
mit
| 3,481
|
from core.environment import logger
import datasource
# exception pass-through
from datasource import NoSuchTable, DuplicateEntry
from core.utils import *
import re
import copy
_db = None
def getDB():
    """Return the shared datasource, creating it lazily on first use."""
    global _db
    if _db:
        return _db
    _db = datasource.MySQLDataSource(logger=logger)
    return _db
def setDB(db):
    """Replace the module-wide datasource (e.g. with a mock for testing)."""
    global _db
    _db = db
def getNodes(nodefilter=None,nodelist=None,domainfilter=None,domainlist=None,
    contactfilter=None,contactlist=None,orderby=None,invert=False,invertNodelist=False,fields=None,countOnly=False):
    """Query the nodes_mv view with optional name/domain/contact filters.

    contactlist's default changed from a shared mutable [] to None; both are
    treated identically (falsy -> no clause), so callers are unaffected.
    When countOnly is true, returns just the row count (int); otherwise the
    row objects.
    """
    where = []
    if nodefilter:
        where.append( 'node_name like %s' % getDB().escapeFilter(nodefilter) )
    if nodelist is not None:
        clause = 'node_name in %s' % getDB().escapeList(nodelist)
        # invertNodelist negates only the node-list clause.
        if invertNodelist:
            clause = 'not ' + clause
        where.append(clause)
    if domainfilter:
        where.append( 'domain_name like %s' % getDB().escapeFilter(domainfilter) )
    if domainlist is not None:
        where.append( 'domain_name in %s' % getDB().escapeList(domainlist) )
    if contactfilter:
        where.append( 'contact like %s' % getDB().escapeFilter(contactfilter) )
    if contactlist:
        where.append( 'contact in %s' % getDB().escapeList(contactlist) )
    if countOnly:
        # Only the count is wanted: drop ordering, select count(1).
        fields = {
            'count':'count(1)'
        }
        return __getData('nodes_mv',where=where,orderby=None,invert=invert,fields=fields)[0].count
    return __getData('nodes_mv',where=where,orderby=orderby,invert=invert,fields=fields)
def getVolumes():
    # All rows of the volumes table, unfiltered.
    return __getData('volumes')
def getDomains(domainfilter=None,domainlist=None,orderby=None,countOnly=False):
    """Query the domains_mv view, optionally filtered by name pattern or list.

    domainlist's default changed from a shared mutable [] to None; both are
    falsy, so the behavior is unchanged. When countOnly is true, returns the
    row count (int) instead of the rows.
    """
    where = []
    if domainfilter:
        where.append( 'domain_name like %s' % getDB().escapeFilter(domainfilter) )
    if domainlist:
        where.append( 'domain_name in %s' % getDB().escapeList(domainlist) )
    if countOnly:
        # Only the count is wanted: drop ordering, select count(1).
        fields = {
            'count':'count(1)'
        }
        return __getData('domains_mv',where=where,orderby=None,fields=fields)[0].count
    return __getData('domains_mv',where=where,orderby=orderby)
def getSummary(end_start_time=None,start_time=None,start_end_time=None,
    end_time=None,entityfilter=None,entitylist=None,activity=None,schedulefilter=None,
    minDuration=0,orderby='start_time',limit=None,countOnly=False):
    """Query the summary table with time-window, entity, activity and
    schedule filters.

    When countOnly is true, rows are grouped by start/end day and per-group
    counts are returned (note: unlike getNodes/getDomains, the full result
    set is returned, not a single count).
    """
    where = []
    if end_start_time:
        where.append( 'end_time >= %s'% getDB().escape(str(end_start_time)) )
    if start_time:
        where.append( 'start_time >= %s'% getDB().escape(str(start_time)) )
    if start_end_time:
        where.append( 'start_time <= %s'% getDB().escape(str(start_end_time)) )
    if end_time:
        where.append( 'end_time <= %s'% getDB().escape(str(end_time)) )
    if entityfilter:
        where.append( 'entity like %s '% getDB().escapeFilter(entityfilter) )
    if entitylist != None:
        where.append( 'entity in %s '% getDB().escapeList(entitylist) )
    if activity:
        where.append( 'activity = %s '% getDB().escape(activity) )
    if schedulefilter:
        where.append( 'schedule_name like %s '% getDB().escapeFilter(schedulefilter) )
    if minDuration and not countOnly:
        where.append( 'unix_timestamp(end_time)-unix_timestamp(start_time) >= %d '% minDuration )
    # LIMIT is smuggled in by appending to the ORDER BY clause.
    if limit and not countOnly:
        orderby += ' limit %d' % limit
    fields = None
    groupby = None
    if countOnly:
        orderby = None
        groupby = """date_format(start_time, '%Y-%m-%d'), date_format(end_time, '%Y-%m-%d')"""
        fields = {
            'count_all':'count(1)',
            'count_duration':'sum(if(unix_timestamp(end_time)-unix_timestamp(start_time) >= %d,1,0))'% minDuration,
            'start_time':'min(start_time)',
            'end_time':'max(end_time)',
        }
    return __getData('summary',where=where,orderby=orderby,groupby=groupby,fields=fields)
def getSummaryStatistics(end_start_time=None,start_time=None,start_end_time=None,
    end_time=None,entityfilter=None,entitylist=None,activity=None,schedulefilter=None,
    orderby=None,groupby='entity'):
    """Aggregate success/transfer/duration statistics from the summary table,
    grouped per entity by default. Filters mirror getSummary()."""
    where = []
    if end_start_time:
        where.append( 'end_time >= %s'% getDB().escape(str(end_start_time)) )
    if start_time:
        where.append( 'start_time >= %s'% getDB().escape(str(start_time)) )
    if start_end_time:
        where.append( 'start_time <= %s'% getDB().escape(str(start_end_time)) )
    if end_time:
        where.append( 'end_time <= %s'% getDB().escape(str(end_time)) )
    if entityfilter:
        where.append( 'entity like %s '% getDB().escapeFilter(entityfilter) )
    if entitylist != None:
        where.append( 'entity in %s '% getDB().escapeList(entitylist) )
    if activity:
        where.append( 'activity = %s '% getDB().escape(activity) )
    # NULL schedule names are matched as '' so the filter also covers
    # unscheduled runs.
    if schedulefilter != None:
        where.append( 'ifnull(schedule_name,"") like %s '% getDB().escapeFilter(schedulefilter) )
    # Aggregate columns computed server-side by MySQL.
    fields = {
        'entity' : 'entity',
        'successful' : 'sum(if(successful = "YES",1,0))',
        'unsuccessful' : 'sum(if(successful = "NO",1,0))',
        'avg_transfer' : 'avg(bytes)',
        'max_transfer' : 'max(bytes)',
        'sum_transfer' : 'sum(bytes)',
        'avg_duration' : 'sec_to_time(avg(unix_timestamp(end_time)-unix_timestamp(start_time)))',
        'max_duration' : 'sec_to_time(max(unix_timestamp(end_time)-unix_timestamp(start_time)))',
        'sum_duration' : 'sec_to_time(sum(unix_timestamp(end_time)-unix_timestamp(start_time)))',
        'avg_examined' : 'avg(examined)',
        'max_examined' : 'max(examined)',
        'sum_examined' : 'sum(examined)',
        'avg_affected' : 'avg(affected)',
        'max_affected' : 'max(affected)',
        'sum_affected' : 'sum(affected)',
        'avg_failed' : 'avg(failed)',
        'max_failed' : 'max(failed)',
        'sum_failed' : 'sum(failed)',
        'avg_successp' : 'avg(affected/(failed+affected))*100',
        'max_successp' : '(1-min(failed/(failed+affected)))*100',
    }
    return __getData('summary',where=where,orderby=orderby,groupby=groupby,fields=fields)
def getSummaryActivities():
    '''Returns distinct list of possible activities'''
    # Backed by the summary_activities_mv materialized view.
    return __getData('summary_activities_mv')
# Return all snapshot rows, optionally ordered via a raw ORDER BY clause.
def getSnapshots(orderby=None): return __getData('snapshots',orderby=orderby)
# Return the local database statistics table.
def getLocalDB(): return __getData('local_db')
def getFailedFiles(start_date,countOnly=False):
    """Return failed-file rows newer than *start_date*.

    With countOnly=True only the number of matching rows is returned;
    count(1) always yields exactly one row, hence the [0].count below.
    NOTE(review): start_date is interpolated unescaped -- callers pass
    dates, not user input.
    """
    fields = None
    if countOnly:
        fields = {'count' : 'count(1)'}
        return __getData('actlog_failed_files_mv',fields=fields,where="date_time>'%s'"%start_date)[0].count
    return __getData('actlog_failed_files_mv',fields=fields,where="date_time>'%s'"%start_date,optimize_performance=True)
def getFilespaces(nodefilter=None,domainfilter=None,domainlist=[],nodelist=[],node_name=None,orderby=None):
    """Return rows from `filespaces_mv`, optionally filtered.

    node_name    -- exact node name; when given, all other filters are ignored.
    nodefilter   -- wildcard filter on node_name.
    nodelist     -- explicit list of node names.
    domainfilter -- wildcard filter on domain_name.
    domainlist   -- explicit list of domain names.
    orderby      -- raw ORDER BY clause passed through to the query.
    """
    if node_name:
        # NOTE(review): node_name is interpolated unescaped here; callers
        # must not pass untrusted input (compare the escaped getNode()).
        return __getData('filespaces_mv', where="node_name='%s'"%node_name,orderby=orderby)
    where = []
    if nodefilter:
        where.append( 'node_name like %s' % getDB().escapeFilter(nodefilter) )
    if nodelist:
        # Bug fix: this previously escaped `domainlist` instead of `nodelist`,
        # so filtering by an explicit node list silently used the domain list.
        where.append( 'node_name in %s' % getDB().escapeList(nodelist) )
    if domainfilter:
        where.append( 'domain_name like %s' % getDB().escapeFilter(domainfilter) )
    if domainlist:
        where.append( 'domain_name in %s' % getDB().escapeList(domainlist) )
    return __getData('filespaces_mv',where=where,orderby=orderby)
def getAlertStoragepools():
    # Storage pools currently in an alert state, via a dedicated view.
    return __getData('alert_stgpools_view')
def getStoragepools(stgpoolfilter='*',stgpoollist=[],orderby=None):
    """Return storage pools matching either the wildcard filter or the
    explicit name list (the two conditions are OR-ed together)."""
    where = '''(stgpool_name like %s or stgpool_name in %s)''' % (
        getDB().escapeFilter(stgpoolfilter),
        getDB().escapeList(stgpoollist))
    return __getData('stgpools_mv',where=where,orderby=orderby)
def getActlog(message=None,nodefilter=None,msgno=None,
        start=None,end=None,orderby=None,messageCutLength=None,countOnly=False):
    """Query the activity log.

    message/msgno/nodefilter narrow the rows (all values escaped through the
    DB layer); start/end bound date_time.  With countOnly=True only the
    number of matching rows is returned; otherwise at most 1000 rows come
    back, with message optionally truncated to messageCutLength characters.
    """
    where = '1=1 '
    if message: where+= " and message like %s" % getDB().escapeFilter( message )
    if msgno: where+= " and msgno = %s"% getDB().escapeFilter( msgno )
    if nodefilter:
        # Match either the node-name column or, for rows logged without a
        # node, a node name embedded in the message text itself.
        where+= " and ( nodename like %s"% getDB().escapeFilter( nodefilter )
        where+= " or (nodename is null and message like %s ) )" % getDB().escapeFilter( '* %s *' % nodefilter )
    if start: where+= " and date_time>=%s"% getDB().escape( str(start) )
    if end: where+= " and date_time<=%s"% getDB().escape( str(end) )
    fields = {}
    override = {}
    if not countOnly:
        # Cap the result set at 1000 rows.  Bug fix: the old code did
        # `orderby += " limit 1000"` unconditionally, which raised TypeError
        # whenever orderby was left at its default of None.
        if orderby:
            orderby += " limit 1000"
        else:
            orderby = "date_time desc limit 1000"
        if messageCutLength:
            # Truncate long messages; -4 leaves room for the ' ...' marker.
            override['message'] = '''if(length(message)>%d,concat(left(message,%d),
                ' ...'),message)''' % (messageCutLength,messageCutLength-4)
    else:
        orderby=None
        fields['count'] = 'count(1)'
    actlog = __getData('actlog',where=where,optimize_performance=True,
        orderby=orderby,fields=fields,overrideFields=override)
    if countOnly: return actlog[0]['count']
    return actlog
def getOccupancy(stgpool=None,orderby=None):
    """Return occupancy rows, optionally restricted to one storage pool."""
    where=None
    if stgpool:
        where='stgpool_name = %s' % getDB().escape(stgpool)
    return __getData('occupancy_mv',where=where,orderby=orderby)
def getBackupHistory(start=0):
    """Return backup-history rows newer than *start*, with the node name
    extracted from the message text into a `nodename` key.

    Known TSM message numbers (2579 failed, 2578 missed, 2507 completed)
    carry the node name at different positions, hence the per-msgno regexes.
    """
    dbResult = __getData('actlog_backup_history_mv',where="date_time > '%s'"%start,optimize_performance=True)
    finalResult = []
    for i in dbResult:
        # TODO: Refactor this code to collector
        expr = ''
        if i['msgno'] == 2579:
            ''' ANR2579E Schedule DAGLEGT_03_05 in domain CUSTOMERS for node EXCHANGE.HUGSANDIMENN.IS failed (return code 12). (SESSION: 68105) '''
            expr = r'ANR2579E Schedule .* in domain .* for node (.*) failed.*'
        elif i['msgno'] == 2578:
            '''ANR2578W Schedule DAGLEGT_22_07 in domain LSH for node VALUESAP.SIMI.IS has missed its scheduled start up window.'''
            expr = r'ANR2578. Schedule .* in domain .* for node (.*) has missed its .*'
        elif i['msgno'] == 2507:
            ''' ANR2507I Schedule DAGLEGT_03_05 for domain CUSTOMERS started at 04/03/2007 03:00:00 AM for node VIPER.ALP.IS completed successfully at 04/03/2007 04:05:16 AM. (SESSION: 68102) '''
            ''' ANR2507I Schedule SQL_LOG_1815 for domain SJOVA started at 04/24/07 18:15:00 for node SJOVA-SLP-DB1.SJOVA.LOCAL_SQL comp... '''
            expr = r'^ANR2507I Schedule .* for domain .* started at .* for node (.*) completed .*'
        # Bug fix: only substitute when we actually have a pattern for this
        # message number -- re.sub('', r'\1', ...) raises "invalid group
        # reference" for any other msgno instead of passing the row through.
        if expr:
            i['nodename'] = re.sub( expr, r'\1', i['message'])
        finalResult.append( i )
    return finalResult
# Latest-row getters: each returns the single most recent snapshot row of
# its table (the orderby string doubles as a raw "ORDER BY ... LIMIT").
def getStatus(): return __getData('status', orderby='snap_id desc limit 1')
def getDb(): return __getData('db', orderby='snap_id desc limit 1')
def getLog(): return __getData('log', orderby='snap_id desc limit 1')
def getClientSchedules(domainfilter=None,schedulefilter=None,orderby=None):
    """Return client schedules, filtered by wildcard schedule/domain names."""
    where = "1=1 "
    if schedulefilter != None:
        where += " and schedule_name like %s "% getDB().escapeFilter (schedulefilter)
    if domainfilter != None:
        where += " and domain_name like %s " % getDB().escapeFilter(domainfilter)
    return __getData('client_schedules',where=where,orderby=orderby)
def getClientSchedule(schedule_name):
    """Return the client-schedule row(s) named *schedule_name*."""
    # Security/consistency fix: escape the value through the DB layer like
    # the other single-row getters (getNode, getDomain) instead of
    # interpolating it raw into the SQL string.
    where = "schedule_name=%s" % getDB().escape(schedule_name)
    return __getData('client_schedules', where=where)
def getAssociations(schedule_name=None,node_name=None,domain_name=None,orderby=None):
    """Return schedule/node/domain associations matching all given filters."""
    where = '1=1 '
    if schedule_name:
        where += " and schedule_name = %s "% getDB().escape(schedule_name)
    if node_name:
        where += " and node_name = %s "% getDB().escape(node_name)
    if domain_name:
        where += " and domain_name = %s " % getDB().escape(domain_name)
    return __getData('associations_view',where=where,orderby=orderby)
def getClientopts(node_name=None):
    """Return client options, for one node or (node_name=None) for all."""
    if node_name:
        where = "node_name = %s"%getDB().escape(node_name)
    else:
        where = None
    return __getData('clientopts_view',where=where)
def getCopygroups(domain_name=None):
    """Return backup copy groups, for one domain or (None) for all."""
    if domain_name:
        where = "domain_name = %s"%getDB().escape(domain_name)
    else:
        where = None
    return __getData('bu_copygroups',where=where)
# Single-row getters: exact-match lookups with properly escaped values.
def getNode(node_name): return __getData('nodes_mv', where="node_name=%s"%getDB().escape(node_name))
def getDomain(domain_name): return __getData('domains_mv', where="domain_name=%s"%getDB().escape(domain_name))
def getStoragepool(stgpool_name): return __getData('stgpools_mv', where="stgpool_name=%s"%getDB().escape(stgpool_name))
def getLastSnapshot(): return __getData('last_snap_view')
def getActlogCount(orderby=None,messageCutLength=None):
    """Return per-message activity-log counts from actlog_count_mv.

    messageCutLength truncates newest_message to at most that many
    characters, appending ' ...' (the -4 keeps the total within the limit).
    """
    override = {}
    if messageCutLength:
        override['newest_message'] = '''if(length(newest_message)>%d,concat(left(newest_message,%d),
            ' ...'),newest_message)''' % (messageCutLength,messageCutLength-4)
    return __getData('actlog_count_mv',orderby=orderby,overrideFields=override)
def getSchedules(start):
    """Return (entity, start_time, successful) for every BACKUP row in
    `summary` that started after *start*, ordered by entity.

    NOTE(review): *start* is interpolated unescaped; callers pass
    datetimes, not user input.
    """
    fields = 'entity,start_time,successful'
    query = '''select %s from summary where activity = 'BACKUP' and start_time > '%s' order by entity'''
    # Fix: removed a leftover debug `print` of the raw query that polluted
    # stdout on every call.
    count,res = getDB().query(query%(fields,start))
    objects = []
    for row in res:
        do = DataObject('schedules', fields.split(','), row,optimize_performance=True)
        objects.append(do)
    return objects
# Here comes the private stuff
def __simpleSelect(tableName, where=None,
        orderby=None,groupby=None,fields=None,invert=False):
    """Build and run a SELECT over *tableName*.

    *fields* maps output column name -> SQL expression; when the two are
    equal the column is selected as-is, otherwise the expression is aliased
    to the name.  *where* may be a string or a list of conditions (AND-ed
    together); *invert* wraps the whole condition in NOT (...).
    Returns (fieldOrder, rows) -- the selected names in order plus the
    raw result rows.
    """
    fieldList = []
    fieldOrder = []
    for key,value in fields.items():
        if value:
            if key == value: fieldList.append(key)
            else: fieldList.append('%s %s' % (value, key))
            fieldOrder.append(key)
    query = 'select %s from %s'% (','.join(fieldList), tableName)
    if where:
        begin = ''
        end = ''
        if invert:
            begin = 'not ('
            end = ')'
        if type(where) == list:
            query += ' where ' + begin + ' and '.join( where ) + end
        else:
            query += ' where ' + begin + where + end
    if groupby != None:
        query += ' group by ' + groupby
    if orderby != None:
        query += ' order by ' + orderby
    count,res = getDB().query(query)
    return fieldOrder,res
def __getFields(tableName):
    """Return an identity map {column: column} for every column of the table.

    Only the first whitespace-separated token of *tableName* is DESCRIBEd,
    presumably so table expressions with trailing clauses still resolve.
    """
    count,results = getDB().query('describe %s'%tableName.split()[0])
    fields = {}
    for row in results:
        field,type,null,key,default,extra = row
        fields[field] = field
    return fields
# Memoizes __getFields() results per table name for the process lifetime.
_fieldCache = {}
def __getData(tableName,where=None,optimize_performance=False,
        orderby=None,groupby=None,fields=None,overrideFields={},invert=False):
    """Run a select over *tableName* and wrap each row in a DataObject.

    When *fields* is not given the table's columns are introspected once
    and memoized in _fieldCache.  *overrideFields* replaces individual
    column expressions; the deepcopy below keeps the cached map pristine.
    """
    global _fieldCache
    if not fields:
        if not _fieldCache.has_key(tableName):
            _fieldCache[tableName] = __getFields(tableName)
        fields = _fieldCache[tableName]
    if overrideFields:
        # Copy first so the cached field map is not polluted by overrides.
        fields = copy.deepcopy(fields)
        for key,value in overrideFields.items(): fields[key] = value
    (fieldOrder,result) = __simpleSelect(tableName,where=where,
        orderby=orderby,groupby=groupby,fields=fields,invert=invert)
    rows = []
    for values in result:
        rows.append( DataObject(tableName,fieldOrder,values, optimize_performance=optimize_performance) )
    return rows
# Class definitions
class DataObject(dict):
    '''A Generic data container.

    A dict whose keys are also readable and writable as attributes
    (row.node_name == row['node_name']).  Missing keys yield None instead
    of raising KeyError, because __getitem__ delegates to dict.get().
    '''
    def __init__(self, className,fields,values,optimize_performance=False):
        # NOTE: this assignment goes through __setattr__ and therefore
        # stores 'name' in the dict itself, alongside the column values.
        self.name = className
        assert( len(values) == len(fields) )
        fields = map(str,fields)
        # TODO: optimize this non-sense some how
        for i in range(len(fields)):
            self[fields[i]] = values[i]
    def __getattr__(self,key):
        # Prefer stored column values; 'has_key' itself must resolve as a
        # real attribute or this lookup would recurse.
        if key != 'has_key' and self.has_key(key): return self.get(key)
        return super(DataObject,self).__getattribute__(key)
    def __setattr__(self,key,value): self[key] = value
    def __getitem__(self, key): return self.get(key)  # missing key -> None
    def __str__(self):
        return self.__repr__()
    def __repr__(self):
        # Dump "name" followed by every key/value pair, sorted by key.
        result = self.name + '\n'
        keys = self.keys()
        keys.sort()
        for k in keys:
            result += ' %s: %s \n'%(k,self[k])
        return result
# Tri-state cache: None = not yet probed, True/False = probe result.
__hasStatmonACL = None
def hasStatmonACL():
    """Return True if the statmon user/ACL tables exist (memoized)."""
    global __hasStatmonACL
    if __hasStatmonACL == None:
        try:
            # Probe both tables; NoSuchTable from either disables ACLs.
            getDB().query('desc statmon_users')
            getDB().query('desc statmon_acl')
            __hasStatmonACL = True
        except NoSuchTable:
            __hasStatmonACL = False
    return __hasStatmonACL
def getStatmonUsers(orderby='user'):
    # All statmon users, ordered by user name by default.
    return __getData('statmon_users',orderby=orderby)
def getStatmonUserStats(orderby='user',extraCount=None):
    """Per-user node counts and archive/backup byte and file totals.

    *extraCount*, when given, is a list of node-name suffixes; nodes whose
    names match any suffix are tallied in a separate extra_count column and
    excluded from node_count.  Users with access levels 1, 9 and 41 are
    filtered out.
    """
    fields = {
        'user' : 'u.user',
        'node_count' : 'count( n.node_name )',
        'logical_bytes_arch' : 'sum( logical_bytes_arch )',
        'physical_bytes_arch' : 'sum( physical_bytes_arch )',
        'num_files_arch' : 'sum( num_files_arch )',
        'logical_bytes_bkup' : 'sum( logical_bytes_bkup )',
        'physical_bytes_bkup' : 'sum( physical_bytes_bkup )',
        'num_files_bkup' : 'sum( num_files_bkup )',
    }
    if extraCount:
        # Builds "n.node_name like '%a' or n.node_name like '%b' ...".
        condition = "n.node_name like '%" + "' or n.node_name like '%".join(extraCount) + "'"
        fields['node_count'] = 'sum( if(not (%s),1,0) )' % condition
        fields['extra_count'] = 'sum( if(%s,1,0) )' % condition
    # Users joined to their node grants, joined to the node statistics.
    table = """statmon_users u left join statmon_acl a on (u.user=a.user and a.type='node_name')
        left join nodes_mv n on (a.entity=n.node_name)"""
    return __getData(table,orderby=orderby,where="u.access != 9 and u.access != 1 and u.access != 41",fields=fields,groupby='u.user')
def updateStatmonUser(user,password=None,access=None):
    """Insert or update a statmon user (upsert via ON DUPLICATE KEY UPDATE).

    NOTE(review): despite the None defaults, *access* is passed through
    int() unconditionally below, so access=None raises TypeError -- callers
    apparently always supply both password and access; confirm before
    relying on the defaults.
    """
    values = (getDB().escape(user),getDB().escape(password),int(access))
    query = 'insert into statmon_users ( user, password, access, reg_date ) values ( %s, password(%s), %s, now() )' % values
    if password or access:
        # Only the supplied columns are updated on key collision.
        query += ' on duplicate key update'
        comma = ''
        if password:
            query += ' password=password(%s)' % getDB().escape(password)
            comma = ','
        if access:
            query += comma + ' access=%s' % int(access)
    try:
        getDB().query(query)
        getDB().query('commit')
    except DuplicateEntry: pass
def deleteStatmonUser(user):
    """Remove a statmon user together with all of their ACL entries."""
    query = 'delete from statmon_users where user=%s' % getDB().escape(user)
    getDB().query(query)
    query = 'delete from statmon_acl where user=%s' % getDB().escape(user)
    getDB().query(query)
    getDB().query('commit')
def getStatmonUser(user,password=None):
    """Return the statmon user row, or None if no (user, password) match.

    When *password* is given it is checked with MySQL's password() hash.
    """
    where = []
    where.append('user = %s' % getDB().escape(user))
    if password != None:
        where.append('password = password(%s)' % getDB().escape(password))
    users = __getData('statmon_users',where=where)
    if not users:
        return None
    # user is the primary key, so at most one row can match.
    assert(len(users)==1)
    return users[0]
def validateStatmonUser(user,password=None):
    """Record a login for *user*, optionally verifying the password.

    Returns True when a row was updated, i.e. the credentials matched.
    """
    query = [ 'update statmon_users',
        'set last_login = now(), login_count = login_count + 1',
        'where user = %s' % getDB().escape(user) ]
    if password != None:
        query.append('and password = password(%s)' % getDB().escape(password))
    count, rows = getDB().query('\n'.join(query))
    if count:
        getDB().query('commit')
        return True # login successful, commit last_login updates
    return False # login unsuccessful, nothing to commit
def getStatmonACL(user,entityType):
    """Return the list of entity names of type *entityType* granted to *user*."""
    where = 'user=%s and type=%s' % (getDB().escape(user),getDB().escape(entityType))
    return map(lambda x: x.entity, __getData('statmon_acl',where=where))
def updateStatmonACL(user,entityType,entityList,replace=True):
    """Grant *user* access to every entity in *entityList*.

    With replace=True existing grants of the same type are dropped first.
    Duplicate grants are ignored; any other failure rolls back and re-raises.
    """
    if replace:
        query = 'delete from statmon_acl where user=%s and type=%s' % (getDB().escape(user),getDB().escape(entityType))
        getDB().query(query)
    try:
        for entity in entityList:
            query = 'insert into statmon_acl ( user, type, entity ) values ( %s, %s, %s )'
            query = query % ( getDB().escape(user),getDB().escape(entityType), getDB().escape(entity) )
            try:
                getDB().query(query)
                getDB().query('commit')
            except DuplicateEntry: pass
    except:
        # Deliberately broad: undo the partial insert, then re-raise.
        getDB().query('rollback')
        raise
# TODO: Integrate with the rest of the model
# e.g. 'de-anzafy' cause this could be handy
def getAnzaContacts():
    """Returns a distinct list of all contacts Anza custom report"""
    #select contact from nodes where contact is not null group by contact;
    # The GROUP BY is smuggled in through the where string; each row carries
    # the contact and the number of nodes sharing it.
    where = 'contact is not null group by contact'
    fields = {}
    fields['count'] = 'count(1)'
    fields['contact'] = 'contact'
    return __getData('occupancy_anza_node_view',where=where,fields=fields)
|
palli/statmon
|
statmon/model.py
|
Python
|
gpl-3.0
| 19,026
|
# Copyright 2011-2013 Kwant authors.
#
# This file is part of Kwant. It is subject to the license terms in the file
# LICENSE.rst found in the top-level directory of this distribution and at
# http://kwant-project.org/license. A list of Kwant authors can be found in
# the file AUTHORS.rst at the top-level directory of this distribution and at
# http://kwant-project.org/authors.
__all__ = ['lll', 'cvp', 'voronoi']
import numpy as np
from itertools import product
def gs_coefficient(a, b):
    """Return the Gram-Schmidt coefficient <a, b> / <b, b>."""
    b_norm_sq = np.linalg.norm(b) ** 2
    return np.dot(a, b) / b_norm_sq
def gs(mat):
    """Return the Gram-Schmidt orthogonalization of the rows of *mat*."""
    out = np.copy(mat)
    for row in range(len(out)):
        # Subtract the projection onto every previously processed row.
        for prev in range(row):
            out[row] -= gs_coefficient(out[row], out[prev]) * out[prev]
    return out
def is_c_reduced(vecs, c):
    """Check whether the basis *vecs* satisfies the c-reduction criterion."""
    ortho = gs(vecs)
    norms_sq = np.apply_along_axis(lambda v: np.linalg.norm(v)**2, 1, ortho)
    # Each consecutive ratio of squared norms must stay below c.
    return np.all(norms_sq[:-1] / norms_sq[1:] < c)
def lll(basis, c=1.34):
    """
    Calculate a reduced lattice basis using LLL algorithm.
    Reduce a basis of a lattice to an almost orthonormal form. For details see
    e.g. http://en.wikipedia.org/wiki/LLL-algorithm.
    Parameters
    ----------
    basis : 2d array-like of floats
        The lattice basis vectors to be reduced.
    c : float
        Reduction parameter for the algorithm. Must be larger than 1 1/3,
        since otherwise a solution is not guaranteed to exist.
    Returns
    -------
    reduced_basis : numpy array
        The basis vectors of the LLL-reduced basis.
    transformation : numpy integer array
        Coefficient matrix for tranforming from the reduced basis to the
        original one.
    """
    vecs = np.copy(basis)
    if vecs.ndim != 2:
        raise ValueError('`vecs` must be a 2d array-like object.')
    if vecs.shape[0] > vecs.shape[1]:
        raise ValueError('The number of basis vectors exceeds the '
                         'space dimensionality.')
    vecs_orig = np.copy(vecs)
    # vecsstar holds the Gram-Schmidt orthogonalized vectors; u holds the
    # Gram-Schmidt coefficients.
    vecsstar = np.copy(vecs)
    m = vecs.shape[0]
    u = np.identity(m)
    def ll_reduce(i):
        # Size-reduce vector i against all earlier vectors using integer
        # multiples only, keeping the lattice unchanged.
        for j in reversed(range(i)):
            vecs[i] -= np.round(u[i, j]) * vecs[j]
            u[i] -= np.round(u[i, j]) * u[j]
    # Initialize values.
    for i in range(m):
        for j in range(i):
            u[i, j] = gs_coefficient(vecs[i], vecsstar[j])
            vecsstar[i] -= u[i, j] * vecsstar[j]
        ll_reduce(i)
    # Main part of LLL algorithm.
    i = 0
    while i < m-1:
        if (np.linalg.norm(vecsstar[i]) ** 2 <
           c * np.linalg.norm(vecsstar[i+1]) ** 2):
            # Lovász condition holds: move on.
            i += 1
        else:
            # Condition violated: swap vectors i and i+1 and patch the
            # Gram-Schmidt data in place, then step back.
            vecsstar[i+1] += u[i+1, i] * vecsstar[i]
            u[i, i] = gs_coefficient(vecs[i], vecsstar[i+1])
            u[i, i+1] = u[i+1, i] = 1
            u[i+1, i+1] = 0
            vecsstar[i] -= u[i, i] * vecsstar[i+1]
            vecs[[i, i+1]] = vecs[[i+1, i]]
            vecsstar[[i, i+1]] = vecsstar[[i+1, i]]
            u[[i, i+1]] = u[[i+1, i]]
            for j in range(i+2, m):
                u[j, i] = gs_coefficient(vecs[j], vecsstar[i])
                u[j, i+1] = gs_coefficient(vecs[j], vecsstar[i+1])
            if abs(u[i+1, i]) > 0.5:
                ll_reduce(i+1)
            i = max(i-1, 0)
    # Express the reduced vectors in the original basis; the coefficients
    # must come out (numerically) integer or something went wrong.
    coefs = np.linalg.lstsq(vecs_orig.T, vecs.T)[0]
    if not np.allclose(np.round(coefs), coefs, atol=1e-6):
        raise RuntimeError('LLL algorithm instability.')
    if not is_c_reduced(vecs, c):
        raise RuntimeError('LLL algorithm instability.')
    return vecs, np.array(np.round(coefs), int)
def cvp(vec, basis, n=1):
    """
    Solve the n-closest vector problem for a vector, given a basis.
    This algorithm performs poorly in general, so it should be supplied
    with LLL-reduced bases.
    Parameters
    ----------
    vec : 1d array-like of floats
        The lattice vectors closest to this vector are to be found.
    basis : 2d array-like of floats
        Sequence of basis vectors
    n : int
        Number of lattice vectors closest to the point that need to be found.
    Returns
    -------
    coords : numpy array
        An array with the coefficients of the lattice vectors closest to the
        requested point.
    Notes
    -----
    This function can also be used to solve the `n` shortest lattice vector
    problem if the `vec` is zero, and `n+1` points are requested
    (and the first output is ignored).
    """
    # Calculate coordinates of the starting point in this basis.
    basis = np.asarray(basis)
    if basis.ndim != 2:
        raise ValueError('`basis` must be a 2d array-like object.')
    vec = np.asarray(vec)
    center_coords = np.array(np.round(np.linalg.lstsq(basis.T, vec)[0]), int)
    # Cutoff radius for n-th nearest neighbor.
    rad = 1
    nth_dist = np.inf
    while True:
        # Scale the search box by the basis condition number so a skewed
        # basis still encloses all candidates within `rad` lattice steps.
        r = np.round(rad * np.linalg.cond(basis)) + 1
        points = np.mgrid[tuple(slice(i - r, i + r) for i in center_coords)]
        points = points.reshape(basis.shape[0], -1).T
        if len(points) < n:
            rad += 1
            continue
        # Cartesian positions of the candidates, relative to vec.
        point_coords = np.dot(points, basis)
        point_coords -= vec.T
        distances = np.sqrt(np.sum(point_coords**2, 1))
        order = np.argsort(distances)
        distances = distances[order]
        # Keep enlarging the box until the n-th smallest distance stops
        # improving; only then are the n closest points confirmed.
        if distances[n - 1] < nth_dist:
            nth_dist = distances[n - 1]
            rad += 1
        else:
            return np.array(points[order][:n], int)
def voronoi(basis):
    """
    Return an array of lattice vectors forming its voronoi cell.
    Parameters
    ----------
    basis : 2d array-like of floats
        Basis vectors for which the Voronoi neighbors have to be found.
    Returns
    -------
    voronoi_neighbors : numpy array of ints
        All the lattice vectors that may potentially neighbor the origin.
    Notes
    -----
    This algorithm does not calculate the minimal Voronoi cell of the lattice
    and can be optimized. Its main aim is flood-fill, however, and better
    safe than sorry.
    """
    basis = np.asarray(basis)
    if basis.ndim != 2:
        raise ValueError('`basis` must be a 2d array-like object.')
    # All half-integer displacements of the unit cell except the origin.
    displacements = list(product(*(len(basis) * [[0, .5]])))[1:]
    # Closest lattice point to each displaced position.
    vertices = np.array([cvp(np.dot(vec, basis), basis)[0] for vec in
                         displacements])
    # Doubling maps the half-integer offsets back onto integer vectors.
    vertices = np.array(np.round((vertices - displacements) * 2), int)
    for i in range(len(vertices)):
        # A zero vector would be the origin itself; push it out to the
        # corresponding neighbor instead.
        if not np.any(vertices[i]):
            vertices[i] += 2 * np.array(displacements[i])
    # Include the mirror images to cover both sides of the origin.
    vertices = np.concatenate([vertices, -vertices])
    return vertices
|
adriaanvuik/solid_state_physics
|
lll.py
|
Python
|
bsd-2-clause
| 6,649
|
# -*- coding: utf-8 -*-
"""
Unit tests for reverse URL lookups.
"""
from __future__ import unicode_literals
import sys
import unittest
from admin_scripts.tests import AdminScriptTestCase
from django.conf import settings
from django.conf.urls import include, url
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.http import (
HttpRequest, HttpResponsePermanentRedirect, HttpResponseRedirect,
)
from django.shortcuts import redirect
from django.test import (
SimpleTestCase, TestCase, ignore_warnings, override_settings,
)
from django.test.utils import override_script_prefix
from django.urls import (
NoReverseMatch, RegexURLPattern, RegexURLResolver, Resolver404,
ResolverMatch, get_callable, get_resolver, resolve, reverse, reverse_lazy,
)
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from . import middleware, urlconf_outer, views
from .utils import URLObject
from .views import empty_view
# Table-driven fixtures for resolve(): each row is fed to the resolver and
# every component of the resulting ResolverMatch is checked.
resolve_test_data = (
    # These entries are in the format: (path, url_name, app_name, namespace, view_name, func, args, kwargs)
    # Simple case
    ('/normal/42/37/', 'normal-view', '', '', 'normal-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    (
        '/view_class/42/37/', 'view-class', '', '', 'view-class', views.view_class_instance, tuple(),
        {'arg1': '42', 'arg2': '37'}
    ),
    (
        '/included/normal/42/37/', 'inc-normal-view', '', '', 'inc-normal-view', views.empty_view, tuple(),
        {'arg1': '42', 'arg2': '37'}
    ),
    (
        '/included/view_class/42/37/', 'inc-view-class', '', '', 'inc-view-class', views.view_class_instance, tuple(),
        {'arg1': '42', 'arg2': '37'}
    ),
    # Unnamed args are dropped if you have *any* kwargs in a pattern
    ('/mixed_args/42/37/', 'mixed-args', '', '', 'mixed-args', views.empty_view, tuple(), {'arg2': '37'}),
    (
        '/included/mixed_args/42/37/', 'inc-mixed-args', '', '', 'inc-mixed-args', views.empty_view, tuple(),
        {'arg2': '37'}
    ),
    (
        '/included/12/mixed_args/42/37/', 'inc-mixed-args', '', '', 'inc-mixed-args', views.empty_view, tuple(),
        {'arg2': '37'}
    ),
    # Unnamed views should have None as the url_name. Regression data for #21157.
    (
        '/unnamed/normal/42/37/', None, '', '', 'urlpatterns_reverse.views.empty_view', views.empty_view, tuple(),
        {'arg1': '42', 'arg2': '37'}
    ),
    (
        '/unnamed/view_class/42/37/', None, '', '', 'urlpatterns_reverse.views.ViewClass', views.view_class_instance,
        tuple(), {'arg1': '42', 'arg2': '37'}
    ),
    # If you have no kwargs, you get an args list.
    ('/no_kwargs/42/37/', 'no-kwargs', '', '', 'no-kwargs', views.empty_view, ('42', '37'), {}),
    ('/included/no_kwargs/42/37/', 'inc-no-kwargs', '', '', 'inc-no-kwargs', views.empty_view, ('42', '37'), {}),
    (
        '/included/12/no_kwargs/42/37/', 'inc-no-kwargs', '', '', 'inc-no-kwargs', views.empty_view,
        ('12', '42', '37'), {}
    ),
    # Namespaces
    (
        '/test1/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns1', 'test-ns1:urlobject-view',
        views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}
    ),
    (
        '/included/test3/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns3', 'test-ns3:urlobject-view',
        views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}
    ),
    (
        '/ns-included1/normal/42/37/', 'inc-normal-view', '', 'inc-ns1', 'inc-ns1:inc-normal-view', views.empty_view,
        tuple(), {'arg1': '42', 'arg2': '37'}
    ),
    (
        '/included/test3/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns3', 'test-ns3:urlobject-view',
        views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}
    ),
    (
        '/default/inner/42/37/', 'urlobject-view', 'testapp', 'testapp', 'testapp:urlobject-view', views.empty_view,
        tuple(), {'arg1': '42', 'arg2': '37'}
    ),
    (
        '/other2/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns2', 'other-ns2:urlobject-view',
        views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}
    ),
    (
        '/other1/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns1', 'other-ns1:urlobject-view',
        views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}
    ),
    # Nested namespaces
    (
        '/ns-included1/test3/inner/42/37/', 'urlobject-view', 'testapp', 'inc-ns1:test-ns3',
        'inc-ns1:test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}
    ),
    (
        '/ns-included1/ns-included4/ns-included2/test3/inner/42/37/', 'urlobject-view', 'testapp',
        'inc-ns1:inc-ns4:inc-ns2:test-ns3', 'inc-ns1:inc-ns4:inc-ns2:test-ns3:urlobject-view', views.empty_view,
        tuple(), {'arg1': '42', 'arg2': '37'}
    ),
    (
        '/app-included/test3/inner/42/37/', 'urlobject-view', 'inc-app:testapp', 'inc-app:test-ns3',
        'inc-app:test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}
    ),
    (
        '/app-included/ns-included4/ns-included2/test3/inner/42/37/', 'urlobject-view', 'inc-app:testapp',
        'inc-app:inc-ns4:inc-ns2:test-ns3', 'inc-app:inc-ns4:inc-ns2:test-ns3:urlobject-view', views.empty_view,
        tuple(), {'arg1': '42', 'arg2': '37'}
    ),
    # Namespaces capturing variables
    ('/inc70/', 'inner-nothing', '', 'inc-ns5', 'inc-ns5:inner-nothing', views.empty_view, tuple(), {'outer': '70'}),
    (
        '/inc78/extra/foobar/', 'inner-extra', '', 'inc-ns5', 'inc-ns5:inner-extra', views.empty_view, tuple(),
        {'outer': '78', 'extra': 'foobar'}
    ),
)
# Fixtures for reverse(): (url_name, expected_path_or_NoReverseMatch, args,
# kwargs).  A NoReverseMatch class as "expected" means reversal must fail.
test_data = (
    ('places', '/places/3/', [3], {}),
    ('places', '/places/3/', ['3'], {}),
    ('places', NoReverseMatch, ['a'], {}),
    ('places', NoReverseMatch, [], {}),
    ('places?', '/place/', [], {}),
    ('places+', '/places/', [], {}),
    ('places*', '/place/', [], {}),
    ('places2?', '/', [], {}),
    ('places2+', '/places/', [], {}),
    ('places2*', '/', [], {}),
    ('places3', '/places/4/', [4], {}),
    ('places3', '/places/harlem/', ['harlem'], {}),
    ('places3', NoReverseMatch, ['harlem64'], {}),
    ('places4', '/places/3/', [], {'id': 3}),
    ('people', NoReverseMatch, [], {}),
    ('people', '/people/adrian/', ['adrian'], {}),
    ('people', '/people/adrian/', [], {'name': 'adrian'}),
    ('people', NoReverseMatch, ['name with spaces'], {}),
    ('people', NoReverseMatch, [], {'name': 'name with spaces'}),
    ('people2', '/people/name/', [], {}),
    ('people2a', '/people/name/fred/', ['fred'], {}),
    ('people_backref', '/people/nate-nate/', ['nate'], {}),
    ('people_backref', '/people/nate-nate/', [], {'name': 'nate'}),
    ('optional', '/optional/fred/', [], {'name': 'fred'}),
    ('optional', '/optional/fred/', ['fred'], {}),
    ('named_optional', '/optional/1/', [1], {}),
    ('named_optional', '/optional/1/', [], {'arg1': 1}),
    ('named_optional', '/optional/1/2/', [1, 2], {}),
    ('named_optional', '/optional/1/2/', [], {'arg1': 1, 'arg2': 2}),
    ('named_optional_terminated', '/optional/1/2/', [1, 2], {}),
    ('named_optional_terminated', '/optional/1/2/', [], {'arg1': 1, 'arg2': 2}),
    ('hardcoded', '/hardcoded/', [], {}),
    ('hardcoded2', '/hardcoded/doc.pdf', [], {}),
    ('people3', '/people/il/adrian/', [], {'state': 'il', 'name': 'adrian'}),
    ('people3', NoReverseMatch, [], {'state': 'il'}),
    ('people3', NoReverseMatch, [], {'name': 'adrian'}),
    ('people4', NoReverseMatch, [], {'state': 'il', 'name': 'adrian'}),
    ('people6', '/people/il/test/adrian/', ['il/test', 'adrian'], {}),
    ('people6', '/people//adrian/', ['adrian'], {}),
    ('range', '/character_set/a/', [], {}),
    ('range2', '/character_set/x/', [], {}),
    ('price', '/price/$10/', ['10'], {}),
    ('price2', '/price/$10/', ['10'], {}),
    ('price3', '/price/$10/', ['10'], {}),
    ('product', '/product/chocolate+($2.00)/', [], {'price': '2.00', 'product': 'chocolate'}),
    ('headlines', '/headlines/2007.5.21/', [], dict(year=2007, month=5, day=21)),
    (
        'windows', r'/windows_path/C:%5CDocuments%20and%20Settings%5Cspam/', [],
        dict(drive_name='C', path=r'Documents and Settings\spam')
    ),
    ('special', r'/special_chars/~@+%5C$*%7C/', [r'~@+\$*|'], {}),
    ('special', r'/special_chars/some%20resource/', [r'some resource'], {}),
    ('special', r'/special_chars/10%25%20complete/', [r'10% complete'], {}),
    ('special', r'/special_chars/some%20resource/', [], {'chars': r'some resource'}),
    ('special', r'/special_chars/10%25%20complete/', [], {'chars': r'10% complete'}),
    ('special', NoReverseMatch, [''], {}),
    ('mixed', '/john/0/', [], {'name': 'john'}),
    ('repeats', '/repeats/a/', [], {}),
    ('repeats2', '/repeats/aa/', [], {}),
    ('repeats3', '/repeats/aa/', [], {}),
    ('insensitive', '/CaseInsensitive/fred', ['fred'], {}),
    ('test', '/test/1', [], {}),
    ('test2', '/test/2', [], {}),
    ('inner-nothing', '/outer/42/', [], {'outer': '42'}),
    ('inner-nothing', '/outer/42/', ['42'], {}),
    ('inner-nothing', NoReverseMatch, ['foo'], {}),
    ('inner-extra', '/outer/42/extra/inner/', [], {'extra': 'inner', 'outer': '42'}),
    ('inner-extra', '/outer/42/extra/inner/', ['42', 'inner'], {}),
    ('inner-extra', NoReverseMatch, ['fred', 'inner'], {}),
    ('inner-no-kwargs', '/outer-no-kwargs/42/inner-no-kwargs/1/', ['42', '1'], {}),
    ('disjunction', NoReverseMatch, ['foo'], {}),
    ('inner-disjunction', NoReverseMatch, ['10', '11'], {}),
    ('extra-places', '/e-places/10/', ['10'], {}),
    ('extra-people', '/e-people/fred/', ['fred'], {}),
    ('extra-people', '/e-people/fred/', [], {'name': 'fred'}),
    ('part', '/part/one/', [], {'value': 'one'}),
    ('part', '/prefix/xx/part/one/', [], {'value': 'one', 'prefix': 'xx'}),
    ('part2', '/part2/one/', [], {'value': 'one'}),
    ('part2', '/part2/', [], {}),
    ('part2', '/prefix/xx/part2/one/', [], {'value': 'one', 'prefix': 'xx'}),
    ('part2', '/prefix/xx/part2/', [], {'prefix': 'xx'}),
    # Tests for nested groups. Nested capturing groups will only work if you
    # *only* supply the correct outer group.
    ('nested-noncapture', '/nested/noncapture/opt', [], {'p': 'opt'}),
    ('nested-capture', '/nested/capture/opt/', ['opt/'], {}),
    ('nested-capture', NoReverseMatch, [], {'p': 'opt'}),
    ('nested-mixedcapture', '/nested/capture/mixed/opt', ['opt'], {}),
    ('nested-mixedcapture', NoReverseMatch, [], {'p': 'opt'}),
    ('nested-namedcapture', '/nested/capture/named/opt/', [], {'outer': 'opt/'}),
    ('nested-namedcapture', NoReverseMatch, [], {'outer': 'opt/', 'inner': 'opt'}),
    ('nested-namedcapture', NoReverseMatch, [], {'inner': 'opt'}),
    ('non_path_include', '/includes/non_path_include/', [], {}),
    # Tests for #13154
    ('defaults', '/defaults_view1/3/', [], {'arg1': 3, 'arg2': 1}),
    ('defaults', '/defaults_view2/3/', [], {'arg1': 3, 'arg2': 2}),
    ('defaults', NoReverseMatch, [], {'arg1': 3, 'arg2': 3}),
    ('defaults', NoReverseMatch, [], {'arg2': 1}),
    # Security tests
    ('security', '/%2Fexample.com/security/', ['/example.com'], {}),
)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.no_urls')
class NoURLPatternsTests(SimpleTestCase):
    def test_no_urls_exception(self):
        """
        RegexURLResolver should raise an exception when no urlpatterns exist.
        """
        resolver = RegexURLResolver(r'^$', settings.ROOT_URLCONF)
        with self.assertRaisesMessage(
            ImproperlyConfigured,
            "The included URLconf 'urlpatterns_reverse.no_urls' does not "
            "appear to have any patterns in it. If you see valid patterns in "
            "the file then the issue is probably caused by a circular import."
        ):
            # url_patterns is lazy; merely accessing it triggers the load.
            getattr(resolver, 'url_patterns')
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class URLPatternReverse(SimpleTestCase):
    # Drives reverse() through every (name, expected, args, kwargs) row of
    # the module-level `test_data` table.
    def test_urlpattern_reverse(self):
        for name, expected, args, kwargs in test_data:
            try:
                got = reverse(name, args=args, kwargs=kwargs)
            except NoReverseMatch:
                # Rows whose `expected` is the NoReverseMatch class assert
                # that the reversal must fail.
                self.assertEqual(expected, NoReverseMatch)
            else:
                self.assertEqual(got, expected)
    def test_reverse_none(self):
        # Reversing None should raise an error, not return the last un-named view.
        with self.assertRaises(NoReverseMatch):
            reverse(None)
    @override_script_prefix('/{{invalid}}/')
    def test_prefix_braces(self):
        # Braces in the script prefix must come back percent-encoded.
        self.assertEqual(
            '/%7B%7Binvalid%7D%7D/includes/non_path_include/',
            reverse('non_path_include')
        )
    def test_prefix_parenthesis(self):
        # Parentheses are allowed and should not cause errors or be escaped
        with override_script_prefix('/bogus)/'):
            self.assertEqual(
                '/bogus)/includes/non_path_include/',
                reverse('non_path_include')
            )
        with override_script_prefix('/(bogus)/'):
            self.assertEqual(
                '/(bogus)/includes/non_path_include/',
                reverse('non_path_include')
            )
    @override_script_prefix('/bump%20map/')
    def test_prefix_format_char(self):
        # A literal '%' in the prefix must itself be escaped to %25.
        self.assertEqual(
            '/bump%2520map/includes/non_path_include/',
            reverse('non_path_include')
        )
    @override_script_prefix('/%7Eme/')
    def test_non_urlsafe_prefix_with_args(self):
        # Regression for #20022, adjusted for #24013 because ~ is an unreserved
        # character. Tests whether % is escaped.
        self.assertEqual('/%257Eme/places/1/', reverse('places', args=[1]))
    def test_patterns_reported(self):
        # Regression for #17076
        try:
            # this url exists, but requires an argument
            reverse("people", args=[])
        except NoReverseMatch as e:
            pattern_description = r"1 pattern(s) tried: ['people/(?P<name>\\w+)/$']"
            self.assertIn(pattern_description, str(e))
        else:
            # we can't use .assertRaises, since we want to inspect the
            # exception
            self.fail("Expected a NoReverseMatch, but none occurred.")
    @override_script_prefix('/script:name/')
    def test_script_name_escaping(self):
        # ':' is path-safe and must survive reversing unescaped.
        self.assertEqual(
            reverse('optional', args=['foo:bar']),
            '/script:name/optional/foo:bar/'
        )
    def test_reverse_returns_unicode(self):
        # reverse() must return text (six.text_type) on both py2 and py3.
        name, expected, args, kwargs = test_data[0]
        self.assertIsInstance(
            reverse(name, args=args, kwargs=kwargs),
            six.text_type
        )
class ResolverTests(unittest.TestCase):
    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_resolver_repr(self):
        """
        Test repr of RegexURLResolver, especially when urlconf_name is a list
        (#17892).
        """
        # Pick a resolver from a namespaced URLconf
        resolver = get_resolver('urlpatterns_reverse.namespace_urls')
        # namespace_dict maps namespace -> (prefix, resolver); take [1].
        sub_resolver = resolver.namespace_dict['test-ns1'][1]
        self.assertIn('<RegexURLPattern list>', repr(sub_resolver))
def test_reverse_lazy_object_coercion_by_resolve(self):
"""
Verifies lazy object returned by reverse_lazy is coerced to
text by resolve(). Previous to #21043, this would raise a TypeError.
"""
urls = 'urlpatterns_reverse.named_urls'
proxy_url = reverse_lazy('named-url1', urlconf=urls)
resolver = get_resolver(urls)
try:
resolver.resolve(proxy_url)
except TypeError:
self.fail('Failed to coerce lazy object to text')
def test_non_regex(self):
"""
Verifies that we raise a Resolver404 if what we are resolving doesn't
meet the basic requirements of a path to match - i.e., at the very
least, it matches the root pattern '^/'. We must never return None
from resolve, or we will get a TypeError further down the line.
Regression for #10834.
"""
with self.assertRaises(Resolver404):
resolve('')
with self.assertRaises(Resolver404):
resolve('a')
with self.assertRaises(Resolver404):
resolve('\\')
with self.assertRaises(Resolver404):
resolve('.')
def test_404_tried_urls_have_names(self):
"""
Verifies that the list of URLs that come back from a Resolver404
exception contains a list in the right format for printing out in
the DEBUG 404 page with both the patterns and URL names, if available.
"""
urls = 'urlpatterns_reverse.named_urls'
# this list matches the expected URL types and names returned when
# you try to resolve a non-existent URL in the first level of included
# URLs in named_urls.py (e.g., '/included/non-existent-url')
url_types_names = [
[{'type': RegexURLPattern, 'name': 'named-url1'}],
[{'type': RegexURLPattern, 'name': 'named-url2'}],
[{'type': RegexURLPattern, 'name': None}],
[{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': 'named-url3'}],
[{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': 'named-url4'}],
[{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': None}],
[{'type': RegexURLResolver}, {'type': RegexURLResolver}],
]
try:
resolve('/included/non-existent-url', urlconf=urls)
self.fail('resolve did not raise a 404')
except Resolver404 as e:
# make sure we at least matched the root ('/') url resolver:
self.assertIn('tried', e.args[0])
tried = e.args[0]['tried']
self.assertEqual(
len(e.args[0]['tried']),
len(url_types_names),
'Wrong number of tried URLs returned. Expected %s, got %s.' % (
len(url_types_names), len(e.args[0]['tried'])
)
)
for tried, expected in zip(e.args[0]['tried'], url_types_names):
for t, e in zip(tried, expected):
self.assertIsInstance(t, e['type']), str('%s is not an instance of %s') % (t, e['type'])
if 'name' in e:
if not e['name']:
self.assertIsNone(t.name, 'Expected no URL name but found %s.' % t.name)
else:
self.assertEqual(
t.name,
e['name'],
'Wrong URL name. Expected "%s", got "%s".' % (e['name'], t.name)
)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.reverse_lazy_urls')
class ReverseLazyTest(TestCase):
    # reverse_lazy() returns a lazy object resolved on first use; these tests
    # exercise it through redirects, the login_required machinery, and string
    # interpolation.

    def test_redirect_with_lazy_reverse(self):
        # A view configured with a reverse_lazy() target must still redirect.
        response = self.client.get('/redirect/')
        self.assertRedirects(response, "/redirected_to/", status_code=302)

    def test_user_permission_with_lazy_reverse(self):
        # login_required redirects anonymous users via the lazily reversed
        # login URL, and lets an authenticated user through.
        alfred = User.objects.create_user('alfred', 'alfred@example.com', password='testpw')
        response = self.client.get('/login_required_view/')
        self.assertRedirects(response, "/login/?next=/login_required_view/", status_code=302)
        self.client.force_login(alfred)
        response = self.client.get('/login_required_view/')
        self.assertEqual(response.status_code, 200)

    def test_inserting_reverse_lazy_into_string(self):
        # %-interpolation must coerce the lazy object to its text value.
        self.assertEqual(
            'Some URL: %s' % reverse_lazy('some-login-page'),
            'Some URL: /login/'
        )
        if six.PY2:
            # On Python 2, bytestring interpolation must also coerce it.
            self.assertEqual(
                b'Some URL: %s' % reverse_lazy('some-login-page'),
                'Some URL: /login/'
            )
class ReverseLazySettingsTest(AdminScriptTestCase):
    """
    Test that reverse_lazy can be used in settings without causing a circular
    import error.
    """
    def setUp(self):
        # Write a throwaway settings module whose LOGIN_URL is lazily
        # reversed; eagerly reversing here would import the URLconf too early.
        self.write_settings('settings.py', extra="""
from django.urls import reverse_lazy
LOGIN_URL = reverse_lazy('login')""")

    def tearDown(self):
        self.remove_settings('settings.py')

    def test_lazy_in_settings(self):
        # `manage.py check` must run cleanly against the lazy setting.
        out, err = self.run_manage(['check'])
        self.assertNoOutput(err)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class ReverseShortcutTests(SimpleTestCase):
    # Tests for the redirect() shortcut, which accepts objects exposing
    # get_absolute_url(), view names, view callables, or raw URLs.

    def test_redirect_to_object(self):
        # We don't really need a model; just something with a get_absolute_url
        class FakeObj(object):
            def get_absolute_url(self):
                return "/hi-there/"

        res = redirect(FakeObj())
        self.assertIsInstance(res, HttpResponseRedirect)
        self.assertEqual(res.url, '/hi-there/')
        res = redirect(FakeObj(), permanent=True)
        self.assertIsInstance(res, HttpResponsePermanentRedirect)
        self.assertEqual(res.url, '/hi-there/')

    def test_redirect_to_view_name(self):
        # redirect() accepts a view name plus positional/keyword args, and
        # raises NoReverseMatch for names that don't resolve.
        res = redirect('hardcoded2')
        self.assertEqual(res.url, '/hardcoded/doc.pdf')
        res = redirect('places', 1)
        self.assertEqual(res.url, '/places/1/')
        res = redirect('headlines', year='2008', month='02', day='17')
        self.assertEqual(res.url, '/headlines/2008.02.17/')
        with self.assertRaises(NoReverseMatch):
            redirect('not-a-view')

    def test_redirect_to_url(self):
        res = redirect('/foo/')
        self.assertEqual(res.url, '/foo/')
        res = redirect('http://example.com/')
        self.assertEqual(res.url, 'http://example.com/')
        # Assert that we can redirect using UTF-8 strings
        res = redirect('/æøå/abc/')
        self.assertEqual(res.url, '/%C3%A6%C3%B8%C3%A5/abc/')
        # Assert that no imports are attempted when dealing with a relative path
        # (previously, the below would resolve in a UnicodeEncodeError from __import__ )
        res = redirect('/æøå.abc/')
        self.assertEqual(res.url, '/%C3%A6%C3%B8%C3%A5.abc/')
        res = redirect('os.path')
        self.assertEqual(res.url, 'os.path')

    def test_no_illegal_imports(self):
        # modules that are not listed in urlpatterns should not be importable
        redirect("urlpatterns_reverse.nonimported_module.view")
        self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules)

    def test_reverse_by_path_nested(self):
        # Views added to urlpatterns using include() should be reversible.
        from .views import nested_view
        self.assertEqual(reverse(nested_view), '/includes/nested_path/')

    def test_redirect_view_object(self):
        # redirect() also accepts the view callable itself.
        from .views import absolute_kwargs_view
        res = redirect(absolute_kwargs_view)
        self.assertEqual(res.url, '/absolute_arg_view/')
        with self.assertRaises(NoReverseMatch):
            redirect(absolute_kwargs_view, wrong_argument=None)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls')
@ignore_warnings(category=RemovedInDjango20Warning)
class NamespaceTests(SimpleTestCase):
    """
    Reversal of namespaced URL names: instance/application namespaces,
    nested namespaces, default app namespaces, and current_app resolution.
    """

    def test_ambiguous_object(self):
        "Names deployed via dynamic URL objects that require namespaces can't be resolved"
        with self.assertRaises(NoReverseMatch):
            reverse('urlobject-view')
        with self.assertRaises(NoReverseMatch):
            reverse('urlobject-view', args=[37, 42])
        with self.assertRaises(NoReverseMatch):
            reverse('urlobject-view', kwargs={'arg1': 42, 'arg2': 37})

    def test_ambiguous_urlpattern(self):
        "Names deployed via dynamic URL objects that require namespaces can't be resolved"
        with self.assertRaises(NoReverseMatch):
            reverse('inner-nothing')
        with self.assertRaises(NoReverseMatch):
            reverse('inner-nothing', args=[37, 42])
        with self.assertRaises(NoReverseMatch):
            reverse('inner-nothing', kwargs={'arg1': 42, 'arg2': 37})

    def test_non_existent_namespace(self):
        "Non-existent namespaces raise errors"
        with self.assertRaises(NoReverseMatch):
            reverse('blahblah:urlobject-view')
        with self.assertRaises(NoReverseMatch):
            reverse('test-ns1:blahblah:urlobject-view')

    def test_normal_name(self):
        "Normal lookups work as expected"
        self.assertEqual('/normal/', reverse('normal-view'))
        self.assertEqual('/normal/37/42/', reverse('normal-view', args=[37, 42]))
        self.assertEqual('/normal/42/37/', reverse('normal-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/+%5C$*/', reverse('special-view'))

    def test_simple_included_name(self):
        "Normal lookups work on names included from other patterns"
        self.assertEqual('/included/normal/', reverse('inc-normal-view'))
        self.assertEqual('/included/normal/37/42/', reverse('inc-normal-view', args=[37, 42]))
        self.assertEqual('/included/normal/42/37/', reverse('inc-normal-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/included/+%5C$*/', reverse('inc-special-view'))

    def test_namespace_object(self):
        "Dynamic URL objects can be found using a namespace"
        self.assertEqual('/test1/inner/', reverse('test-ns1:urlobject-view'))
        self.assertEqual('/test1/inner/37/42/', reverse('test-ns1:urlobject-view', args=[37, 42]))
        self.assertEqual('/test1/inner/42/37/', reverse('test-ns1:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/test1/inner/+%5C$*/', reverse('test-ns1:urlobject-special-view'))

    def test_app_object(self):
        "Dynamic URL objects can return a (pattern, app_name) 2-tuple, and include() can set the namespace"
        self.assertEqual('/newapp1/inner/', reverse('new-ns1:urlobject-view'))
        self.assertEqual('/newapp1/inner/37/42/', reverse('new-ns1:urlobject-view', args=[37, 42]))
        self.assertEqual('/newapp1/inner/42/37/', reverse('new-ns1:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/newapp1/inner/+%5C$*/', reverse('new-ns1:urlobject-special-view'))

    def test_app_object_default_namespace(self):
        "Namespace defaults to app_name when including a (pattern, app_name) 2-tuple"
        self.assertEqual('/new-default/inner/', reverse('newapp:urlobject-view'))
        self.assertEqual('/new-default/inner/37/42/', reverse('newapp:urlobject-view', args=[37, 42]))
        self.assertEqual(
            '/new-default/inner/42/37/', reverse('newapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37})
        )
        self.assertEqual('/new-default/inner/+%5C$*/', reverse('newapp:urlobject-special-view'))

    def test_embedded_namespace_object(self):
        "Namespaces can be installed anywhere in the URL pattern tree"
        self.assertEqual('/included/test3/inner/', reverse('test-ns3:urlobject-view'))
        self.assertEqual('/included/test3/inner/37/42/', reverse('test-ns3:urlobject-view', args=[37, 42]))
        self.assertEqual(
            '/included/test3/inner/42/37/', reverse('test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37})
        )
        self.assertEqual('/included/test3/inner/+%5C$*/', reverse('test-ns3:urlobject-special-view'))

    def test_namespace_pattern(self):
        "Namespaces can be applied to include()'d urlpatterns"
        self.assertEqual('/ns-included1/normal/', reverse('inc-ns1:inc-normal-view'))
        self.assertEqual('/ns-included1/normal/37/42/', reverse('inc-ns1:inc-normal-view', args=[37, 42]))
        self.assertEqual(
            '/ns-included1/normal/42/37/', reverse('inc-ns1:inc-normal-view', kwargs={'arg1': 42, 'arg2': 37})
        )
        self.assertEqual('/ns-included1/+%5C$*/', reverse('inc-ns1:inc-special-view'))

    def test_app_name_pattern(self):
        "Namespaces can be applied to include()'d urlpatterns that set an app_name attribute"
        self.assertEqual('/app-included1/normal/', reverse('app-ns1:inc-normal-view'))
        self.assertEqual('/app-included1/normal/37/42/', reverse('app-ns1:inc-normal-view', args=[37, 42]))
        self.assertEqual(
            '/app-included1/normal/42/37/', reverse('app-ns1:inc-normal-view', kwargs={'arg1': 42, 'arg2': 37})
        )
        self.assertEqual('/app-included1/+%5C$*/', reverse('app-ns1:inc-special-view'))

    def test_namespace_pattern_with_variable_prefix(self):
        "When using an include with namespaces when there is a regex variable in front of it"
        self.assertEqual('/ns-outer/42/normal/', reverse('inc-outer:inc-normal-view', kwargs={'outer': 42}))
        self.assertEqual('/ns-outer/42/normal/', reverse('inc-outer:inc-normal-view', args=[42]))
        self.assertEqual(
            '/ns-outer/42/normal/37/4/',
            reverse('inc-outer:inc-normal-view', kwargs={'outer': 42, 'arg1': 37, 'arg2': 4})
        )
        self.assertEqual('/ns-outer/42/normal/37/4/', reverse('inc-outer:inc-normal-view', args=[42, 37, 4]))
        self.assertEqual('/ns-outer/42/+%5C$*/', reverse('inc-outer:inc-special-view', kwargs={'outer': 42}))
        self.assertEqual('/ns-outer/42/+%5C$*/', reverse('inc-outer:inc-special-view', args=[42]))

    def test_multiple_namespace_pattern(self):
        "Namespaces can be embedded"
        self.assertEqual('/ns-included1/test3/inner/', reverse('inc-ns1:test-ns3:urlobject-view'))
        self.assertEqual('/ns-included1/test3/inner/37/42/', reverse('inc-ns1:test-ns3:urlobject-view', args=[37, 42]))
        self.assertEqual(
            '/ns-included1/test3/inner/42/37/',
            reverse('inc-ns1:test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37})
        )
        self.assertEqual('/ns-included1/test3/inner/+%5C$*/', reverse('inc-ns1:test-ns3:urlobject-special-view'))

    def test_nested_namespace_pattern(self):
        "Namespaces can be nested"
        self.assertEqual(
            '/ns-included1/ns-included4/ns-included1/test3/inner/',
            reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view')
        )
        self.assertEqual(
            '/ns-included1/ns-included4/ns-included1/test3/inner/37/42/',
            reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', args=[37, 42])
        )
        self.assertEqual(
            '/ns-included1/ns-included4/ns-included1/test3/inner/42/37/',
            reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37})
        )
        self.assertEqual(
            '/ns-included1/ns-included4/ns-included1/test3/inner/+%5C$*/',
            reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-special-view')
        )

    def test_app_lookup_object(self):
        "A default application namespace can be used for lookup"
        self.assertEqual('/default/inner/', reverse('testapp:urlobject-view'))
        self.assertEqual('/default/inner/37/42/', reverse('testapp:urlobject-view', args=[37, 42]))
        self.assertEqual('/default/inner/42/37/', reverse('testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/default/inner/+%5C$*/', reverse('testapp:urlobject-special-view'))

    def test_app_lookup_object_with_default(self):
        "A default application namespace is sensitive to the 'current' app can be used for lookup"
        self.assertEqual('/included/test3/inner/', reverse('testapp:urlobject-view', current_app='test-ns3'))
        self.assertEqual(
            '/included/test3/inner/37/42/',
            reverse('testapp:urlobject-view', args=[37, 42], current_app='test-ns3')
        )
        self.assertEqual(
            '/included/test3/inner/42/37/',
            reverse('testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}, current_app='test-ns3')
        )
        self.assertEqual(
            '/included/test3/inner/+%5C$*/', reverse('testapp:urlobject-special-view', current_app='test-ns3')
        )

    def test_app_lookup_object_without_default(self):
        "An application namespace without a default is sensitive to the 'current' app can be used for lookup"
        self.assertEqual('/other2/inner/', reverse('nodefault:urlobject-view'))
        self.assertEqual('/other2/inner/37/42/', reverse('nodefault:urlobject-view', args=[37, 42]))
        self.assertEqual('/other2/inner/42/37/', reverse('nodefault:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/other2/inner/+%5C$*/', reverse('nodefault:urlobject-special-view'))
        self.assertEqual('/other1/inner/', reverse('nodefault:urlobject-view', current_app='other-ns1'))
        self.assertEqual(
            '/other1/inner/37/42/', reverse('nodefault:urlobject-view', args=[37, 42], current_app='other-ns1')
        )
        self.assertEqual(
            '/other1/inner/42/37/',
            reverse('nodefault:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}, current_app='other-ns1')
        )
        self.assertEqual('/other1/inner/+%5C$*/', reverse('nodefault:urlobject-special-view', current_app='other-ns1'))

    def test_special_chars_namespace(self):
        # Namespaces containing regex special characters must still reverse.
        self.assertEqual('/+%5C$*/included/normal/', reverse('special:inc-normal-view'))
        self.assertEqual('/+%5C$*/included/normal/37/42/', reverse('special:inc-normal-view', args=[37, 42]))
        self.assertEqual(
            '/+%5C$*/included/normal/42/37/',
            reverse('special:inc-normal-view', kwargs={'arg1': 42, 'arg2': 37})
        )
        self.assertEqual('/+%5C$*/included/+%5C$*/', reverse('special:inc-special-view'))

    def test_namespaces_with_variables(self):
        "Namespace prefixes can capture variables: see #15900"
        self.assertEqual('/inc70/', reverse('inc-ns5:inner-nothing', kwargs={'outer': '70'}))
        self.assertEqual(
            '/inc78/extra/foobar/', reverse('inc-ns5:inner-extra', kwargs={'outer': '78', 'extra': 'foobar'})
        )
        self.assertEqual('/inc70/', reverse('inc-ns5:inner-nothing', args=['70']))
        self.assertEqual('/inc78/extra/foobar/', reverse('inc-ns5:inner-extra', args=['78', 'foobar']))

    def test_nested_app_lookup(self):
        "A nested current_app should be split in individual namespaces (#24904)"
        self.assertEqual('/ns-included1/test4/inner/', reverse('inc-ns1:testapp:urlobject-view'))
        self.assertEqual('/ns-included1/test4/inner/37/42/', reverse('inc-ns1:testapp:urlobject-view', args=[37, 42]))
        self.assertEqual(
            '/ns-included1/test4/inner/42/37/',
            reverse('inc-ns1:testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37})
        )
        self.assertEqual('/ns-included1/test4/inner/+%5C$*/', reverse('inc-ns1:testapp:urlobject-special-view'))
        self.assertEqual(
            '/ns-included1/test3/inner/',
            reverse('inc-ns1:testapp:urlobject-view', current_app='inc-ns1:test-ns3')
        )
        self.assertEqual(
            '/ns-included1/test3/inner/37/42/',
            reverse('inc-ns1:testapp:urlobject-view', args=[37, 42], current_app='inc-ns1:test-ns3')
        )
        self.assertEqual(
            '/ns-included1/test3/inner/42/37/',
            reverse('inc-ns1:testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}, current_app='inc-ns1:test-ns3')
        )
        self.assertEqual(
            '/ns-included1/test3/inner/+%5C$*/',
            reverse('inc-ns1:testapp:urlobject-special-view', current_app='inc-ns1:test-ns3')
        )

    def test_current_app_no_partial_match(self):
        "current_app should either match the whole path or shouldn't be used"
        self.assertEqual(
            '/ns-included1/test4/inner/',
            reverse('inc-ns1:testapp:urlobject-view', current_app='non-existent:test-ns3')
        )
        self.assertEqual(
            '/ns-included1/test4/inner/37/42/',
            reverse('inc-ns1:testapp:urlobject-view', args=[37, 42], current_app='non-existent:test-ns3')
        )
        self.assertEqual(
            '/ns-included1/test4/inner/42/37/',
            reverse('inc-ns1:testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37},
                    current_app='non-existent:test-ns3')
        )
        self.assertEqual(
            '/ns-included1/test4/inner/+%5C$*/',
            reverse('inc-ns1:testapp:urlobject-special-view', current_app='non-existent:test-ns3')
        )
@override_settings(ROOT_URLCONF=urlconf_outer.__name__)
class RequestURLconfTests(SimpleTestCase):
    # Middleware may replace request.urlconf per-request; these tests check
    # that resolution and reversal honor the overridden URLconf, including
    # from response middleware and while streaming.

    def test_urlconf(self):
        # Without middleware, the outer URLconf from settings is in effect.
        response = self.client.get('/test/me/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'outer:/test/me/,inner:/inner_urlconf/second_test/')
        response = self.client.get('/inner_urlconf/second_test/')
        self.assertEqual(response.status_code, 200)
        response = self.client.get('/second_test/')
        self.assertEqual(response.status_code, 404)

    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.ChangeURLconfMiddleware' % middleware.__name__,
        ]
    )
    def test_urlconf_overridden(self):
        # The middleware swaps in the inner URLconf, inverting which paths 404.
        response = self.client.get('/test/me/')
        self.assertEqual(response.status_code, 404)
        response = self.client.get('/inner_urlconf/second_test/')
        self.assertEqual(response.status_code, 404)
        response = self.client.get('/second_test/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'outer:,inner:/second_test/')

    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.NullChangeURLconfMiddleware' % middleware.__name__,
        ]
    )
    def test_urlconf_overridden_with_null(self):
        """
        Overriding request.urlconf with None will fall back to the default
        URLconf.
        """
        response = self.client.get('/test/me/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'outer:/test/me/,inner:/inner_urlconf/second_test/')
        response = self.client.get('/inner_urlconf/second_test/')
        self.assertEqual(response.status_code, 200)
        response = self.client.get('/second_test/')
        self.assertEqual(response.status_code, 404)

    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.ChangeURLconfMiddleware' % middleware.__name__,
            '%s.ReverseInnerInResponseMiddleware' % middleware.__name__,
        ]
    )
    def test_reverse_inner_in_response_middleware(self):
        """
        Test reversing an URL from the *overridden* URLconf from inside
        a response middleware.
        """
        response = self.client.get('/second_test/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'/second_test/')

    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.ChangeURLconfMiddleware' % middleware.__name__,
            '%s.ReverseOuterInResponseMiddleware' % middleware.__name__,
        ]
    )
    def test_reverse_outer_in_response_middleware(self):
        """
        Test reversing an URL from the *default* URLconf from inside
        a response middleware.
        """
        # The outer name is unknown to the overridden URLconf, so it fails.
        message = "Reverse for 'outer' with arguments '()' and keyword arguments '{}' not found."
        with self.assertRaisesMessage(NoReverseMatch, message):
            self.client.get('/second_test/')

    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.ChangeURLconfMiddleware' % middleware.__name__,
            '%s.ReverseInnerInStreaming' % middleware.__name__,
        ]
    )
    def test_reverse_inner_in_streaming(self):
        """
        Test reversing an URL from the *overridden* URLconf from inside
        a streaming response.
        """
        response = self.client.get('/second_test/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(b''.join(response), b'/second_test/')

    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.ChangeURLconfMiddleware' % middleware.__name__,
            '%s.ReverseOuterInStreaming' % middleware.__name__,
        ]
    )
    def test_reverse_outer_in_streaming(self):
        """
        Test reversing an URL from the *default* URLconf from inside
        a streaming response.
        """
        message = "Reverse for 'outer' with arguments '()' and keyword arguments '{}' not found."
        with self.assertRaisesMessage(NoReverseMatch, message):
            self.client.get('/second_test/')
            # Streaming bodies are lazy: consuming the content triggers the
            # reverse() call that raises.
            b''.join(self.client.get('/second_test/'))
class ErrorHandlerResolutionTests(SimpleTestCase):
    """Tests for handler400, handler404 and handler500"""

    def setUp(self):
        urlconf = 'urlpatterns_reverse.urls_error_handlers'
        urlconf_callables = 'urlpatterns_reverse.urls_error_handlers_callables'
        self.resolver = RegexURLResolver(r'^$', urlconf)
        self.callable_resolver = RegexURLResolver(r'^$', urlconf_callables)

    def test_named_handlers(self):
        # Handlers configured as dotted-path strings resolve to (view, kwargs).
        expected = (empty_view, {})
        for status_code in (400, 404, 500):
            self.assertEqual(self.resolver.resolve_error_handler(status_code), expected)

    def test_callable_handlers(self):
        # Handlers configured as callables resolve to the same (view, kwargs).
        expected = (empty_view, {})
        for status_code in (400, 404, 500):
            self.assertEqual(self.callable_resolver.resolve_error_handler(status_code), expected)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls_without_full_import')
class DefaultErrorHandlerTests(SimpleTestCase):

    def test_default_handler(self):
        "If the urls.py doesn't specify handlers, the defaults are used"
        # A missing handler404 would surface as an AttributeError, not a 404.
        try:
            response = self.client.get('/test/')
            self.assertEqual(response.status_code, 404)
        except AttributeError:
            self.fail("Shouldn't get an AttributeError due to undefined 404 handler")
        # Likewise, the view's ValueError should propagate (default 500
        # handling), not an AttributeError from a missing handler500.
        try:
            with self.assertRaises(ValueError):
                self.client.get('/bad_view/')
        except AttributeError:
            self.fail("Shouldn't get an AttributeError due to undefined 500 handler")
@override_settings(ROOT_URLCONF=None)
class NoRootUrlConfTests(SimpleTestCase):
    """Tests for handler404 and handler500 if ROOT_URLCONF is None"""

    def test_no_handler_exception(self):
        # With no root URLconf at all, any request is a configuration error.
        with self.assertRaises(ImproperlyConfigured):
            self.client.get('/test/me/')
@override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls')
class ResolverMatchTests(SimpleTestCase):
    # Exercises both the legacy 3-tuple unpacking of resolve() results and
    # the richer attribute API on ResolverMatch.

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_urlpattern_resolve(self):
        for path, url_name, app_name, namespace, view_name, func, args, kwargs in resolve_test_data:
            # Test legacy support for extracting "function, args, kwargs"
            match_func, match_args, match_kwargs = resolve(path)
            self.assertEqual(match_func, func)
            self.assertEqual(match_args, args)
            self.assertEqual(match_kwargs, kwargs)
            # Test ResolverMatch capabilities.
            match = resolve(path)
            self.assertEqual(match.__class__, ResolverMatch)
            self.assertEqual(match.url_name, url_name)
            self.assertEqual(match.app_name, app_name)
            self.assertEqual(match.namespace, namespace)
            self.assertEqual(match.view_name, view_name)
            self.assertEqual(match.func, func)
            self.assertEqual(match.args, args)
            self.assertEqual(match.kwargs, kwargs)
            # ... and for legacy purposes:
            self.assertEqual(match[0], func)
            self.assertEqual(match[1], args)
            self.assertEqual(match[2], kwargs)

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_resolver_match_on_request(self):
        # After the request/response cycle, the match is attached to the response.
        response = self.client.get('/resolver_match/')
        resolver_match = response.resolver_match
        self.assertEqual(resolver_match.url_name, 'test-resolver-match')

    def test_resolver_match_on_request_before_resolution(self):
        # resolver_match is only populated once URL resolution has run.
        request = HttpRequest()
        self.assertIsNone(request.resolver_match)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.erroneous_urls')
class ErroneousViewTests(SimpleTestCase):
    # Error reporting for misconfigured urlpatterns.

    def test_noncallable_view(self):
        # View is not a callable (explicit import; arbitrary Python object)
        with self.assertRaisesMessage(TypeError, 'view must be a callable'):
            url(r'uncallable-object/$', views.uncallable)

    def test_invalid_regex(self):
        # Regex contains an error (refs #6170)
        # NOTE: the leading '(' below is part of the quoted pattern in the
        # actual error message text, not a typo in this test.
        msg = '(regex_error/$" is not a valid regular expression'
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            reverse(views.empty_view)
class ViewLoadingTests(SimpleTestCase):
    # get_callable() turns dotted paths into view callables.

    def test_view_loading(self):
        self.assertEqual(get_callable('urlpatterns_reverse.views.empty_view'), empty_view)
        # passing a callable should return the callable
        self.assertEqual(get_callable(empty_view), empty_view)

    def test_exceptions(self):
        # A missing view (identified by an AttributeError) should raise
        # ViewDoesNotExist, ...
        with six.assertRaisesRegex(self, ViewDoesNotExist, ".*View does not exist in.*"):
            get_callable('urlpatterns_reverse.views.i_should_not_exist')
        # ... but if the AttributeError is caused by something else don't
        # swallow it.
        with self.assertRaises(AttributeError):
            get_callable('urlpatterns_reverse.views_broken.i_am_broken')
class IncludeTests(SimpleTestCase):
    # include() returns (urlconf_module, app_namespace, instance_namespace)
    # 3-tuples; these tests cover every accepted input shape, including the
    # deprecated ones.

    url_patterns = [
        url(r'^inner/$', views.empty_view, name='urlobject-view'),
        url(r'^inner/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$', views.empty_view, name='urlobject-view'),
        url(r'^inner/\+\\\$\*/$', views.empty_view, name='urlobject-special-view'),
    ]
    app_urls = URLObject('inc-app')

    def test_include_app_name_but_no_namespace(self):
        msg = "Must specify a namespace if specifying app_name."
        with self.assertRaisesMessage(ValueError, msg):
            include(self.url_patterns, app_name='bar')

    def test_include_urls(self):
        self.assertEqual(include(self.url_patterns), (self.url_patterns, None, None))

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_include_namespace(self):
        # no app_name -> deprecated
        self.assertEqual(include(self.url_patterns, 'namespace'), (self.url_patterns, None, 'namespace'))

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_include_namespace_app_name(self):
        # app_name argument to include -> deprecated
        self.assertEqual(
            include(self.url_patterns, 'namespace', 'app_name'),
            (self.url_patterns, 'app_name', 'namespace')
        )

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_include_3_tuple(self):
        # 3-tuple -> deprecated
        self.assertEqual(
            include((self.url_patterns, 'app_name', 'namespace')),
            (self.url_patterns, 'app_name', 'namespace')
        )

    def test_include_2_tuple(self):
        # (patterns, app_name): instance namespace defaults to app_name.
        self.assertEqual(
            include((self.url_patterns, 'app_name')),
            (self.url_patterns, 'app_name', 'app_name')
        )

    def test_include_2_tuple_namespace(self):
        self.assertEqual(
            include((self.url_patterns, 'app_name'), namespace='namespace'),
            (self.url_patterns, 'app_name', 'namespace')
        )

    def test_include_app_name(self):
        # URLconf modules with app_name: namespace defaults to app_name.
        self.assertEqual(
            include(self.app_urls),
            (self.app_urls, 'inc-app', 'inc-app')
        )

    def test_include_app_name_namespace(self):
        self.assertEqual(
            include(self.app_urls, 'namespace'),
            (self.app_urls, 'inc-app', 'namespace')
        )
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class LookaheadTests(SimpleTestCase):
    """Resolution and reversal of patterns containing lookahead/lookbehind groups."""

    def test_valid_resolve(self):
        # Paths satisfying the lookaround assertions must resolve and capture.
        for path in (
            '/lookahead-/a-city/',
            '/lookbehind-/a-city/',
            '/lookahead+/a-city/',
            '/lookbehind+/a-city/',
        ):
            self.assertEqual(resolve(path).kwargs, {'city': 'a-city'})

    def test_invalid_resolve(self):
        # Paths rejected by the lookaround assertions must 404.
        for path in (
            '/lookahead-/not-a-city/',
            '/lookbehind-/not-a-city/',
            '/lookahead+/other-city/',
            '/lookbehind+/other-city/',
        ):
            with self.assertRaises(Resolver404):
                resolve(path)

    def test_valid_reverse(self):
        # The lookaround groups themselves are stripped from the reversed URL.
        for name, expected in (
            ('lookahead-positive', '/lookahead+/a-city/'),
            ('lookahead-negative', '/lookahead-/a-city/'),
            ('lookbehind-positive', '/lookbehind+/a-city/'),
            ('lookbehind-negative', '/lookbehind-/a-city/'),
        ):
            self.assertEqual(reverse(name, kwargs={'city': 'a-city'}), expected)

    def test_invalid_reverse(self):
        # Arguments violating the lookaround assertions cannot be reversed.
        for name, city in (
            ('lookahead-positive', 'other-city'),
            ('lookahead-negative', 'not-a-city'),
            ('lookbehind-positive', 'other-city'),
            ('lookbehind-negative', 'not-a-city'),
        ):
            with self.assertRaises(NoReverseMatch):
                reverse(name, kwargs={'city': city})
|
yephper/django
|
tests/urlpatterns_reverse/tests.py
|
Python
|
bsd-3-clause
| 50,926
|
# coding=utf-8
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import hashlib
import os
from pex.interpreter import PythonInterpreter
from pants.backend.python.interpreter_cache import PythonInterpreterCache
from pants.backend.python.subsystems.python_repos import PythonRepos
from pants.backend.python.subsystems.python_setup import PythonSetup
from pants.backend.python.targets.python_requirement_library import PythonRequirementLibrary
from pants.backend.python.targets.python_target import PythonTarget
from pants.base.fingerprint_strategy import DefaultFingerprintHashingMixin, FingerprintStrategy
from pants.invalidation.cache_manager import VersionedTargetSet
from pants.task.task import Task
from pants.util.dirutil import safe_mkdir_for
class PythonInterpreterFingerprintStrategy(DefaultFingerprintHashingMixin, FingerprintStrategy):
  """Fingerprints python targets by their interpreter compatibility constraints only."""

  def compute_fingerprint(self, python_target):
    """Return a hex digest of the target's sorted compatibility constraints.

    Only the compatibility requirements are considered in the fingerprint, as
    only those can affect the selected interpreter. Returns None for targets
    with no constraints so they never invalidate on unrelated changes.
    """
    hash_elements_for_target = []
    if python_target.compatibility:
      hash_elements_for_target.extend(sorted(python_target.compatibility))
    if not hash_elements_for_target:
      return None
    hasher = hashlib.sha1()
    for element in hash_elements_for_target:
      # Encode explicitly: hashlib requires bytes on Python 3, and with
      # unicode_literals in effect these constraints are text strings.
      # (UTF-8 of ASCII text is byte-identical to Python 2's implicit
      # encoding, so existing fingerprints are unchanged.)
      hasher.update(element.encode('utf-8'))
    return hasher.hexdigest()
class SelectInterpreter(Task):
  """Select an Python interpreter that matches the constraints of all targets in the working set."""

  @classmethod
  def implementation_version(cls):
    # TODO(John Sirois): Fixup this task to use VTS results_dirs. Right now version bumps aren't
    # effective in dealing with workdir data format changes.
    return super(SelectInterpreter, cls).implementation_version() + [('SelectInterpreter', 2)]

  @classmethod
  def subsystem_dependencies(cls):
    return super(SelectInterpreter, cls).subsystem_dependencies() + (PythonSetup, PythonRepos)

  @classmethod
  def product_types(cls):
    return [PythonInterpreter]

  def execute(self):
    """Pick an interpreter satisfying all python targets and publish it as a product."""
    python_tgts_and_reqs = self.context.targets(
      lambda tgt: isinstance(tgt, (PythonTarget, PythonRequirementLibrary))
    )
    if not python_tgts_and_reqs:
      return
    python_tgts = [tgt for tgt in python_tgts_and_reqs if isinstance(tgt, PythonTarget)]
    fs = PythonInterpreterFingerprintStrategy()
    with self.invalidated(python_tgts, fingerprint_strategy=fs) as invalidation_check:
      # PEX_PYTHON_PATH wins over the search-path option; warn so users know
      # their option is being ignored rather than silently dropping it.
      if (PythonSetup.global_instance().interpreter_search_paths
          and PythonInterpreterCache.pex_python_paths()):
        self.context.log.warn("Detected both PEX_PYTHON_PATH and "
                              "--python-setup-interpreter-search-paths. Ignoring "
                              "--python-setup-interpreter-search-paths.")
      # If there are no relevant targets, we still go through the motions of selecting
      # an interpreter, to prevent downstream tasks from having to check for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
          invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'
      interpreter_path_file = self._interpreter_path_file(target_set_id)
      if not os.path.exists(interpreter_path_file):
        self._create_interpreter_path_file(interpreter_path_file, python_tgts)
      interpreter = self._get_interpreter(interpreter_path_file)
      self.context.products.register_data(PythonInterpreter, interpreter)

  def _create_interpreter_path_file(self, interpreter_path_file, targets):
    """Select an interpreter for ``targets`` and record it (plus its extras) on disk."""
    interpreter_cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                               PythonRepos.global_instance(),
                                               logger=self.context.log.debug)
    interpreter = interpreter_cache.select_interpreter_for_targets(targets)
    safe_mkdir_for(interpreter_path_file)
    with open(interpreter_path_file, 'w') as outfile:
      # The file is opened in text mode, so write text. The original wrote
      # ``b'{}\n'.format(...)``, which raises TypeError on Python 3 (bytes
      # has no .format) and mixed bytes into a text stream.
      outfile.write('{}\n'.format(interpreter.binary))
      for dist, location in interpreter.extras.items():
        dist_name, dist_version = dist
        outfile.write('{}\t{}\t{}\n'.format(dist_name, dist_version, location))

  def _interpreter_path_file(self, target_set_id):
    # One info file per unique target-set fingerprint, under the task workdir.
    return os.path.join(self.workdir, target_set_id, 'interpreter.info')

  @staticmethod
  def _get_interpreter(interpreter_path_file):
    """Reconstitute a PythonInterpreter from a previously written info file.

    Line 1 is the interpreter binary path; each following line is a
    tab-separated (dist_name, dist_version, location) extra.
    """
    with open(interpreter_path_file, 'r') as infile:
      lines = infile.readlines()
      binary = lines[0].strip()
      interpreter = PythonInterpreter.from_binary(binary, include_site_extras=False)
      for line in lines[1:]:
        dist_name, dist_version, location = line.strip().split('\t')
        interpreter = interpreter.with_extra(dist_name, dist_version, location)
      return interpreter
|
foursquare/pants
|
src/python/pants/backend/python/tasks/select_interpreter.py
|
Python
|
apache-2.0
| 4,991
|
# Copyright 2014 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utility class for VHD related operations.
Based on the "root/virtualization/v2" namespace available starting with
Hyper-V Server / Windows Server 2012.
"""
import os
if os.name == 'nt':
import wmi
from cinder.openstack.common import log as logging
from cinder.volume.drivers.windows import constants
from cinder.volume.drivers.windows import vhdutils
from cinder.volume.drivers.windows import windows_utils
LOG = logging.getLogger(__name__)
class VHDUtilsV2(vhdutils.VHDUtils):
    """VHD/VHDX operations via the WMI ``root/virtualization/v2`` namespace.

    That namespace is available starting with Hyper-V Server / Windows
    Server 2012.
    """

    # File extension -> Msvm_VirtualHardDiskSettingData ``Format`` constant
    # (2 = VHD, 3 = VHDX).
    _vhd_format_map = {
        'vhd': 2,
        'vhdx': 3,
    }

    def __init__(self):
        self.utils = windows_utils.WindowsUtils()
        self._conn = wmi.WMI(moniker='//./root/virtualization/v2')

    def _get_resize_method(self):
        # In the v2 namespace, resizing is exposed on the image management service.
        image_man_svc = self._conn.Msvm_ImageManagementService()[0]
        return image_man_svc.ResizeVirtualHardDisk

    def convert_vhd(self, src, dest, vhd_type=constants.VHD_TYPE_DYNAMIC):
        """Convert the virtual disk at ``src`` into ``dest``.

        The target format (VHD vs VHDX) is inferred from ``dest``'s file
        extension; ``vhd_type`` selects the layout (dynamic by default).
        """
        vhd_info = self._conn.Msvm_VirtualHardDiskSettingData.new()
        ext = os.path.splitext(dest)[1][1:]
        # NOTE: local name shadows the ``format`` builtin; kept as-is.
        format = self._vhd_format_map.get(ext)
        vhd_info.Type = vhd_type
        vhd_info.Path = dest
        vhd_info.Format = format
        # Zero lets Hyper-V choose default block / sector sizes.
        vhd_info.BlockSize = 0
        vhd_info.LogicalSectorSize = 0
        vhd_info.ParentPath = None
        image_man_svc = self._conn.Msvm_ImageManagementService()[0]
        (job_path, ret_val) = image_man_svc.ConvertVirtualHardDisk(
            SourcePath=src, VirtualDiskSettingData=vhd_info.GetText_(1))
        # Delegate return-code / async job handling to WindowsUtils.
        self.utils.check_ret_val(ret_val, job_path)
|
theanalyst/cinder
|
cinder/volume/drivers/windows/vhdutilsv2.py
|
Python
|
apache-2.0
| 2,192
|
import time,datetime,re, hashlib
from dateutil import tz
import os
from .constants import YowConstants
import codecs, sys
import logging
import tempfile
import base64
import hashlib
import os.path, mimetypes
from .optionalmodules import PILOptionalModule, FFVideoOptionalModule
logger = logging.getLogger(__name__)
class Jid:
    """Helpers for WhatsApp JID (Jabber ID) strings."""

    @staticmethod
    def normalize(number):
        """Return *number* as a fully-qualified JID.

        Strings already containing ``@`` are returned untouched; group ids
        (which contain a dash) get the group server appended, everything
        else gets the regular user server.
        """
        if "@" in number:
            # Already a complete JID.
            return number
        server = (YowConstants.WHATSAPP_GROUP_SERVER
                  if "-" in number
                  else YowConstants.WHATSAPP_SERVER)
        return "%s@%s" % (number, server)
class HexTools:
    """Hex-string decoding that behaves the same on Python 2 and Python 3."""

    # Codec-based decoder: returns (decoded_bytes, length_consumed).
    decode_hex = codecs.getdecoder("hex_codec")

    @staticmethod
    def decodeHex(hexString):
        """Decode *hexString* into a one-character-per-byte string.

        On Python 3 the raw bytes are mapped through latin-1 so every byte
        value 0-255 becomes a single str character, mirroring the Python 2
        (bytes == str) behaviour.
        """
        decoded, _consumed = HexTools.decode_hex(hexString)
        if sys.version_info < (3, 0):
            return decoded
        return decoded.decode('latin-1')
class WATools:
    """Miscellaneous WhatsApp helpers: client identity and upload hashing."""

    @staticmethod
    def generateIdentity():
        """Return 20 cryptographically random bytes used as a client identity."""
        return os.urandom(20)

    @staticmethod
    def getFileHashForUpload(filePath):
        """Return the base64-encoded SHA-256 digest of the file at *filePath*.

        The result is always a ``str`` (the base64 bytes are decoded on
        Python 3), which is what the upload protocol expects.
        """
        # Previously this hasher was misleadingly named "sha1" although it
        # computed SHA-256; also read in chunks so large media files do not
        # need to fit in memory, and use a context manager for the handle.
        hasher = hashlib.sha256()
        with open(filePath, 'rb') as f:
            for chunk in iter(lambda: f.read(1 << 20), b''):
                hasher.update(chunk)
        b64Hash = base64.b64encode(hasher.digest())
        return b64Hash if type(b64Hash) is str else b64Hash.decode()
class StorageTools:
    """Per-phone persistence of identity and nonce files under YowConstants.PATH_STORAGE."""

    @staticmethod
    def constructPath(*path):
        """Join *path* under the storage root, creating parent directories on demand."""
        path = os.path.join(*path)
        fullPath = os.path.expanduser(os.path.join(YowConstants.PATH_STORAGE, path))
        if not os.path.exists(os.path.dirname(fullPath)):
            os.makedirs(os.path.dirname(fullPath))
        return fullPath

    @staticmethod
    def getStorageForPhone(phone):
        # The trailing slash makes constructPath's dirname() equal the phone
        # directory itself, so it gets created as a side effect.
        return StorageTools.constructPath(phone + '/')

    @staticmethod
    def writeIdentity(phone, identity):
        """Persist raw identity bytes for *phone* in its "id" file."""
        path = StorageTools.getStorageForPhone(phone)
        with open(os.path.join(path, "id"), 'wb') as idFile:
            idFile.write(identity)

    @staticmethod
    def getIdentity(phone):
        """Return the stored identity bytes for *phone*, or None when absent."""
        path = StorageTools.getStorageForPhone(phone)
        out = None
        idPath = os.path.join(path, "id")
        if os.path.isfile(idPath):
            with open(idPath, 'rb') as idFile:
                out = idFile.read()
        return out

    @staticmethod
    def writeNonce(phone, nonce):
        """Persist the registration nonce; encoded to latin-1 bytes on Python 3."""
        path = StorageTools.getStorageForPhone(phone)
        with open(os.path.join(path, "nonce"), 'wb') as idFile:
            idFile.write(nonce.encode("latin-1") if sys.version_info >= (3,0) else nonce)

    @staticmethod
    def getNonce(phone):
        """Return the stored nonce bytes for *phone*, or None when absent."""
        path = StorageTools.getStorageForPhone(phone)
        out = None
        noncePath = os.path.join(path, "nonce")
        if os.path.isfile(noncePath):
            with open(noncePath, 'rb') as idFile:
                out = idFile.read()
        return out
class TimeTools:
    """Date/time parsing and conversion helpers."""

    @staticmethod
    def parseIso(iso):
        """Parse an ISO-8601-like timestamp into a naive datetime.

        Splits on every non-digit character, drops the last fragment
        (fractional seconds or a trailing designator such as "Z") and feeds
        the remaining integer fields to the datetime constructor.
        """
        # Raw string for the regex: the old '[^\d]' relied on \d not being a
        # recognized str escape, which is a SyntaxWarning on modern Python.
        d = datetime.datetime(*map(int, re.split(r'[^\d]', iso)[:-1]))
        return d

    @staticmethod
    def utcToLocal(dt):
        """Convert a naive UTC datetime to the local timezone (aware result)."""
        utc = tz.gettz('UTC')
        local = tz.tzlocal()
        dtUtc = dt.replace(tzinfo=utc)
        return dtUtc.astimezone(local)

    @staticmethod
    def utcTimestamp():
        """Return the current UTC time as a Unix-style timestamp."""
        utcNow = datetime.datetime.utcnow()
        return TimeTools.datetimeToTimestamp(utcNow)

    @staticmethod
    def datetimeToTimestamp(dt):
        # NOTE: mktime interprets dt as local time -- kept for compatibility.
        return time.mktime(dt.timetuple())
class ImageTools:
    """Image scaling and preview helpers built on the optional PIL dependency."""

    @staticmethod
    def scaleImage(infile, outfile, imageFormat, width, height):
        """Thumbnail *infile* into *outfile* as *imageFormat*.

        Returns True on success; the trailing ``return False`` is presumably
        reached when PILOptionalModule suppresses the body because PIL is not
        installed -- TODO confirm against optionalmodules.
        """
        with PILOptionalModule() as imp:
            Image = imp("Image")
            im = Image.open(infile)
            #Convert P mode images
            if im.mode != "RGB":
                im = im.convert("RGB")
            # thumbnail() fits the image within (width, height), keeping aspect.
            im.thumbnail((width, height))
            im.save(outfile, imageFormat)
            return True
        return False

    @staticmethod
    def getImageDimensions(imageFile):
        """Return (width, height) of *imageFile*; None-like fallthrough if PIL is missing."""
        with PILOptionalModule() as imp:
            Image = imp("Image")
            im = Image.open(imageFile)
            return im.size

    @staticmethod
    def generatePreviewFromImage(image):
        """Return JPEG preview bytes for *image*, or None if scaling failed."""
        fd, path = tempfile.mkstemp()
        preview = None
        if ImageTools.scaleImage(image, path, "JPEG", YowConstants.PREVIEW_WIDTH, YowConstants.PREVIEW_HEIGHT):
            # Reuse the mkstemp descriptor to read the scaled file back.
            fileObj = os.fdopen(fd, "rb+")
            fileObj.seek(0)
            preview = fileObj.read()
            fileObj.close()
        os.remove(path)
        return preview
class MimeTools:
    """MIME-type lookups backed by the system table plus WhatsApp extras."""

    # Extra mappings shipped alongside this module.
    MIME_FILE = os.path.join(os.path.dirname(__file__), 'mime.types')
    mimetypes.init()             # Load default mime.types
    mimetypes.init([MIME_FILE])  # Append whatsapp mime.types

    decode_hex = codecs.getdecoder("hex_codec")

    @staticmethod
    def getMIME(filepath):
        """Return the MIME type guessed from *filepath*'s extension."""
        mimeType = mimetypes.guess_type(filepath)[0]
        if mimeType is not None:
            return mimeType
        raise Exception("Unsupported/unrecognized file type for: "+filepath);

    @staticmethod
    def getExtension(mimetype):
        """Return a file extension (including the dot) for *mimetype*."""
        ext = mimetypes.guess_extension(mimetype)
        if ext is not None:
            return ext
        raise Exception("Unsupported/unrecognized mimetype: "+mimetype);
class VideoTools:
    """Video metadata and preview helpers built on the optional ffvideo dependency."""

    @staticmethod
    def getVideoProperties(videoFile):
        """Return (width, height, bitrate, duration) of *videoFile*, or None with a warning."""
        # NOTE(review): ``ModuleTools`` is not imported in this module (only
        # PILOptionalModule / FFVideoOptionalModule are) -- as written this
        # looks like it raises NameError; confirm where ModuleTools comes from.
        if ModuleTools.INSTALLED_FFVIDEO():
            from ffvideo import VideoStream
            s = VideoStream(videoFile)
            return s.width, s.height, s.bitrate, s.duration #, s.codec_name
        else:
            logger.warn("Python ffvideo library not installed")

    @staticmethod
    def generatePreviewFromVideo(videoFile):
        """Return JPEG preview bytes for the first frame of *videoFile*, or None with a warning."""
        if ModuleTools.INSTALLED_FFVIDEO():
            from ffvideo import VideoStream
            # Extract frame 0 to a temp .jpg, then reuse the image preview path.
            fd, path = tempfile.mkstemp('.jpg')
            stream = VideoStream(videoFile)
            stream.get_frame_at_sec(0).image().save(path)
            preview = ImageTools.generatePreviewFromImage(path)
            os.remove(path)
            return preview
        else:
            logger.warn("Python ffvideo library not installed")
|
dazzzl/yowsup
|
yowsup/common/tools.py
|
Python
|
gpl-3.0
| 5,861
|
"""
a helper class for *HTTP* URL's
@NOTE: surely someone else has written this before, but, I couldn't find one, so I wrote this one.
@author: SodaPhish <sodaphish@protonmail.ch>
TODO:
* support other protocols other than HTTP
* add a timeout to gethostbyname lookups
"""
import sys,socket
from urlparse import urlparse
from socket import gethostbyname
try:
import ipaddress
except:
print "you need to `pip install ipaddress` before proceeding"
sys.exit(2)
class InvalidURL(Exception):
    """
    Raised when a string cannot be parsed as a valid, resolvable HTTP URL.
    """
    pass
class URL():
    """HTTP URL wrapper (Python 2): parses, validates the host, re-serializes."""

    def __init__(self, urlstring):
        """Parse *urlstring*; raises InvalidURL if the host is neither an IP
        address nor a DNS-resolvable name.

        NOTE(review): when scheme or netloc is missing, no attributes are set
        and no error is raised -- later attribute access (e.g. __repr__) would
        fail with AttributeError. Confirm whether that is intended.
        """
        obj = urlparse(urlstring)
        #ParseResult(scheme='http', netloc='google.com', path='', params='', query='', fragment='')
        #ParseResult(scheme='http', netloc='google.com', path='/rss', params='', query='g=fugly', fragment='')
        #ParseResult(scheme='http', netloc='google.com:8080', path='/rss', params='', query='g=fugly', fragment='')
        #NOTE: FTP IS NOT SUPPORTED YET
        #ParseResult(scheme='ftp', netloc='username:password@google.com', path='/pub/mirror/x', params='', query='', fragment='')
        if obj.scheme and obj.netloc:
            if ':' in obj.netloc:
                # Explicit port in the netloc, e.g. "host:8080".
                (host,port)=obj.netloc.split(':')
                self.netloc = host
                #TODO: validate this!!!!
                self.port = int(port)
            else:
                # Default HTTP port when none is given.
                self.netloc = obj.netloc
                self.port = 80
            if not self.is_ip(self.netloc) and not self.is_hostname(self.netloc):
                raise InvalidURL
            self.scheme = obj.scheme
            self.path = obj.path
            self.params = obj.params
            self.query = obj.query
            self.fragment = obj.fragment

    def is_ip(self,target):
        """
        returns false if target is not a valid IP address
        """
        # NOTE: unicode() is Python-2-only; AddressValueError is a subclass of
        # ValueError, so the first except clause is technically redundant.
        try:
            ipaddress.ip_address(unicode(target))
        except ipaddress.AddressValueError:
            return False
        except ValueError:
            return False
        return True

    def is_hostname(self,target):
        """
        returns false if target is not resolvable via DNS
        """
        try:
            addr = gethostbyname( target )
        except socket.gaierror:
            return False
        return True

    def __repr__(self):
        # Re-serialize scheme://host[:port]path[?query]; the no-port branch is
        # defensive -- __init__ always sets self.port when parsing succeeded.
        if self.port:
            if not self.query:
                return "%s://%s:%d%s" % (self.scheme,self.netloc,self.port,self.path)
            return "%s://%s:%d%s?%s" % (self.scheme,self.netloc,self.port,self.path,self.query)
        else:
            if not self.query:
                return "%s://%s%s" % (self.scheme,self.netloc,self.path)
            return "%s://%s%s?%s" % (self.scheme,self.netloc,self.path,self.query)
if __name__ == '__main__':
    """
    testing of URL class
    """
    # Ad-hoc smoke tests (Python 2 print statements); each constructor call
    # performs a live DNS lookup via is_hostname().
    url = URL("http://google.com")
    url2 = URL("http://google.com/rss?g=fugly")
    url3 = URL("http://google.com:8080/rss?g=fugly&dingos=fluffy")
    #url4 = URL("ftp://username:password@google.com/pub/mirror/x")
    print url
    print url2
    print url3
'''___EOF___'''
|
sodaphish/break
|
sp/net/URL.py
|
Python
|
mit
| 2,738
|
import serial
import time
import random
import atexit
import GmailHandler as gm
def main():
    """Poll the Arduino over serial and push Gmail inbox summaries to it.

    Protocol: the board sends byte 1 when it is ready for a new message; we
    answer with one or two newline-terminated display lines.  On shutdown a
    "-1" line tells the board the script has quit.
    """
    s = None  # defined up front so the finally block is safe if Serial() fails
    try:
        s = serial.Serial('COM3', 9600) #port is 11 (for COM12), and baud rate is 9600
        time.sleep(2) #wait for the Serial to initialize
        counter = 0
        noNewMessagesAnimationCounter = 0
        while True:
            if ord(s.read()) == 1:
                print("Permission granted")
                messages = gm.getInboxMessages()
                time.sleep(5)
                if (len(messages) <= 0):
                    # Idle animation: a growing dashed line under the text.
                    if (noNewMessagesAnimationCounter > 16):
                        noNewMessagesAnimationCounter = 0
                    bottomText = noNewMessagesAnimationCounter * "-"
                    msg = "No new messages!" + '\n' + bottomText + '\n'
                    noNewMessagesAnimationCounter += 1
                else:
                    # The inbox can shrink between polls; clamp the cursor so
                    # messages[counter] cannot raise IndexError.
                    if counter >= len(messages):
                        counter = 0
                    # Cap the displayed count at "9+" and truncate long subjects.
                    amountOfMessages = str(len(messages)) if len(messages) < 10 else "9+"
                    currentMessage = messages[counter] if len(messages[counter]) < 16 else messages[counter][0:13] + "..."
                    msg = "New messages: " + amountOfMessages + '\n' + currentMessage + '\n'
                    # Cycle through the messages, one per poll.
                    if (counter == len(messages) - 1):
                        counter = 0
                    else:
                        counter += 1
                print(msg)
                s.write(msg.encode())
            else:
                print("Waiting for permission")
    finally:
        print("Quitting script")
        # Only signal the board if the port was actually opened; previously a
        # failed Serial() call made this block raise NameError on "s".
        if s is not None:
            quitSignal = "-1" + '\n'
            s.write(quitSignal.encode())
# Run the notifier loop when executed as a script.
if __name__ == '__main__':
    main()
|
bramvbilsen/Arduino-Mail-Notifier
|
ArduinoMailNotifier.py
|
Python
|
mit
| 1,626
|
# coding: utf-8
from __future__ import unicode_literals
from __future__ import print_function
__author__ = 'dtrillo'
import requests
class Downloading:
    """Retrieves the HTML of URLs through a shared requests session.

    On construction it probes connectivity and falls back to the configured
    proxy, or marks the instance offline when neither path works.
    """

    def necesita_proxy(self):
        """Probe connectivity; enable the proxy if direct access fails and mark
        the session offline if the proxy fails too."""
        url = "http://www.bing.es"
        html = self._get_url(url)
        if len(html) == 0 and len(self.proxy) > 0:
            self.sesion.proxies = {"http": self.proxy, "https": self.proxy}
            html = self._get_url(url)
            if len(html) == 0:
                self.sesion.proxies = {}
                self.wInternet = False

    def _get_url(self, url):
        """Return the response body of *url*, or '' on any error / when offline."""
        if self.wInternet == False: return ''
        try:
            r = self.sesion.get(url, timeout=self.timeout)
            return r.text
        except:
            return ''

    def __init__(self, proxy = '', t_timeout = 15, timesleep = 2, debug = False):
        self.sesion = requests.Session()
        self.proxy = proxy
        self.wInternet = True
        self.timeout = t_timeout
        # Connectivity probe runs once at construction time.
        self.necesita_proxy()
        self.debug = debug
        if debug: print("Downloading ... creado!")
        self.__timesleep = timesleep # Pending removal

    def gethtml(self, url, fichHTML='', onlydownload=False):
        """Fetch *url* and optionally save the body to *fichHTML*.

        Returns (text, offline_flag) where offline_flag is True when the
        session has been marked as having no internet access.
        """
        if onlydownload: fichHTML = ''
        texto = self._get_url(url)
        tmp = " Recibido: %s" % url
        #try:
        if texto and fichHTML:
            # NOTE(review): ``save_file`` and ``nl`` are not defined or
            # imported in this module's visible code -- this branch would
            # raise NameError unless they are provided elsewhere. TODO confirm.
            save_file(fichHTML, texto) # Grabo fichero
            tmp += nl + " Guardando en ... %s" % fichHTML
        #except Exception as ex:
        #    _, _, ex_traceback = sys.exc_info()
        #    log_traceback(ex, ex_traceback)
        if self.debug:
            print (tmp)
        return texto, not self.wInternet

    def gethtml2(self, url, fichHTML='', onlydownload=False):
        """Like gethtml() but returns only the HTML text ('' on error)."""
        texto, lee = self.gethtml(url, fichHTML, onlydownload)
        return texto
|
Paco1994/sportbot
|
sportbot/sportbot/downloads.py
|
Python
|
gpl-3.0
| 2,039
|
import re
from uuid import uuid1
from datetime import datetime, date
import dateutil.relativedelta
from icalendar import Calendar
from icalendar import Event
def display(cal):
    """Render *cal* as printable text: decode, normalize CRLF, trim edges."""
    ical_bytes = cal.to_ical()
    text = ical_bytes.decode('utf-8')
    return text.replace('\r\n', '\n').strip()
def get_ics(schedule):
    """Build an icalendar Calendar from parsed timetable rows.

    Each row looks like:
    ['course name', '周三第7,8节', '第10-10周|双周', 'room', 'teacher'].
    """
    cal = Calendar()
    cal['version'] = '2.0'
    cal['prodid'] = '-//CQUT//Syllabus//CN' # *mandatory elements* where the prodid can be changed, see RFC 5445
    start_monday = date(2017, 2, 20) # Monday of the first teaching week
    dict_week = {'一': 0, '二': 1, '三': 2, '四': 3, '五': 4, '六': 5, '日': 6}
    # Start-of-class wall-clock offset for each period of the day.
    dict_day = {1: dateutil.relativedelta.relativedelta(hours=8, minutes=0),
                2: dateutil.relativedelta.relativedelta(hours=8, minutes=50),
                3: dateutil.relativedelta.relativedelta(hours=9, minutes=50),
                4: dateutil.relativedelta.relativedelta(hours=10, minutes=40),
                5: dateutil.relativedelta.relativedelta(hours=11, minutes=30),
                6: dateutil.relativedelta.relativedelta(hours=11, minutes=30),
                7: dateutil.relativedelta.relativedelta(hours=14, minutes=55),
                8: dateutil.relativedelta.relativedelta(hours=15, minutes=45),
                9: dateutil.relativedelta.relativedelta(hours=16, minutes=25),
                10: dateutil.relativedelta.relativedelta(hours=17, minutes=25),
                11: dateutil.relativedelta.relativedelta(hours=18, minutes=30),
                12: dateutil.relativedelta.relativedelta(hours=19, minutes=20),
                13: dateutil.relativedelta.relativedelta(hours=20, minutes=10)}
    for line in schedule:
        event = Event()
        print(line)
        # line should be like this: ['汇编语言程序设计', '周三第7,8节', '第10-10周|双周', '第1实验楼B403-A', '刘小洋(刘小洋)']
        info_day = re.findall(r'周(.*?)第(\d+),(\d+)?,?(\d+)节', line[1], re.S | re.M)
        info_day = info_day[0]
        print(info_day)
        # info_day is e.g. ('三', '7', '', '8') -- (weekday, first period,
        # optional middle period, last period); index 2 is non-empty only for
        # three-period classes.
        info_week = re.findall(r'第(\d+)-(\d+)周', line[2], re.S | re.M)
        info_week = info_week[0]
        print(info_week)
        # info_week should be like this: ('10', '10')
        # First occurrence: Monday of the starting week plus the weekday offset.
        dtstart_date = start_monday + dateutil.relativedelta.relativedelta(
            weeks=(int(info_week[0]) - 1)) + dateutil.relativedelta.relativedelta(
            days=int(dict_week[info_day[0]]))
        dtstart_datetime = datetime.combine(dtstart_date, datetime.min.time())
        ss = dict_day[int(info_day[1])]
        print(ss)
        dtstart = dtstart_datetime + dict_day[int(info_day[1])]
        print('dtstart' + str(dtstart))
        # A class spans either two or three periods.
        # Each period lasts 45 minutes with a 5-minute break in between.
        dtend = dtstart + dateutil.relativedelta.relativedelta(hours=1, minutes=35) # 45+45+5=95
        if info_day[2] != '': # three-period class
            dtend += dateutil.relativedelta.relativedelta(hours=0, minutes=50) # 95+45+5
        event.add('uid', str(uuid1()) + '@CQUT')
        event.add('summary', line[0])
        event.add('dtstamp', datetime.now())
        event.add('dtstart', dtstart)
        event.add('dtend', dtend)
        if line[2].find('|') == -1:
            # Weekly class: one occurrence per week across the range.
            interval = 1
            count = int(info_week[1]) - int(info_week[0]) + 1
        else:
            # Alternate-week (odd/even week) class: recurs every other week.
            # NOTE(review): precedence -- this computes b - (a/2) + 1, not
            # (b - a)/2 + 1; likely a bug for biweekly ranges. TODO confirm.
            interval = 2
            count = int(info_week[1]) - int(info_week[0]) / 2 + 1
        event.add('rrule',
                  {'freq': 'weekly', 'interval': interval,
                   'count': count})
        # Set the recurrence count above; location below.
        event.add('location', line[3])
        cal.add_component(event)
    return cal
def generate_ics(schedule):
    """Convert *schedule* rows to iCalendar data and write outputs\\output.ics.

    Prints progress to stdout; any I/O error now propagates from
    open()/write() instead of being (never) reported.
    """
    print("获取成功!")
    print("\n课表是...")
    for line in schedule:
        print(line)
    print("\n正在生成 ics 文件...")
    ics = get_ics(schedule)
    print(display(ics))
    print("生成成功!")
    file_name = r'outputs\output.ics'
    print("\n正在保存到..." + file_name)
    # The old code checked "if f:" after the with-block, but a (closed) file
    # object is always truthy, so the failure branch was unreachable.
    with open(file_name, 'wb') as f:
        f.write(ics.to_ical())
    print('保存成功!')
# Smoke test: a single course entry spanning weeks 1-10.
if __name__ == '__main__':
    data = [['大学物理学【Ⅱ(2)】', '周日第1,2,3节', '第1-10周', '1教0516', '韦建卫']]
    generate_ics(schedule=data)
|
zhanganyuan/ReCalendar
|
schedule_to_ics.py
|
Python
|
mit
| 4,510
|
# function to call the main analysis/synthesis functions in software/models/sineModel.py
import numpy as np
import matplotlib.pyplot as plt
from scipy.signal import get_window
import os, sys
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../models/'))
import utilFunctions as UF
import sineModel as SM
def main(inputFile='../../sounds/bendir.wav', window='hamming', M=2001, N=2048, t=-80, minSineDur=0.02,
	maxnSines=150, freqDevOffset=10, freqDevSlope=0.001):
	"""
	Perform analysis/synthesis using the sinusoidal model
	inputFile: input sound file (monophonic with sampling rate of 44100)
	window: analysis window type (rectangular, hanning, hamming, blackman, blackmanharris)
	M: analysis window size; N: fft size (power of two, bigger or equal than M)
	t: magnitude threshold of spectral peaks; minSineDur: minimum duration of sinusoidal tracks
	maxnSines: maximum number of parallel sinusoids
	freqDevOffset: frequency deviation allowed in the sinusoids from frame to frame at frequency 0
	freqDevSlope: slope of the frequency deviation, higher frequencies have bigger deviation
	Side effects: writes the resynthesized sound to output_sounds/ and opens a
	3-panel matplotlib figure (input, sinusoidal tracks, output). Returns None.
	"""

	# size of fft used in synthesis
	Ns = 512

	# hop size (has to be 1/4 of Ns)
	H = 128

	# read input sound
	fs, x = UF.wavread(inputFile)

	# compute analysis window
	w = get_window(window, M)

	# analyze the sound with the sinusoidal model
	tfreq, tmag, tphase = SM.sineModelAnal(x, fs, w, N, H, t, maxnSines, minSineDur, freqDevOffset, freqDevSlope)

	# synthesize the output sound from the sinusoidal representation
	y = SM.sineModelSynth(tfreq, tmag, tphase, Ns, H, fs)

	# output sound file name
	outputFile = 'output_sounds/' + os.path.basename(inputFile)[:-4] + '_sineModel.wav'

	# write the synthesized sound obtained from the sinusoidal synthesis
	UF.wavwrite(y, fs, outputFile)

	# create figure to show plots
	plt.figure(figsize=(12, 9))

	# frequency range to plot
	maxplotfreq = 5000.0

	# plot the input sound
	plt.subplot(3,1,1)
	plt.plot(np.arange(x.size)/float(fs), x)
	plt.axis([0, x.size/float(fs), min(x), max(x)])
	plt.ylabel('amplitude')
	plt.xlabel('time (sec)')
	plt.title('input sound: x')

	# plot the sinusoidal frequencies
	plt.subplot(3,1,2)
	if (tfreq.shape[1] > 0):
		numFrames = tfreq.shape[0]
		frmTime = H*np.arange(numFrames)/float(fs)
		# zero/negative track entries mark inactive sinusoids; hide them as NaN
		tfreq[tfreq<=0] = np.nan
		plt.plot(frmTime, tfreq)
		plt.axis([0, x.size/float(fs), 0, maxplotfreq])
		plt.title('frequencies of sinusoidal tracks')

	# plot the output sound
	plt.subplot(3,1,3)
	plt.plot(np.arange(y.size)/float(fs), y)
	plt.axis([0, y.size/float(fs), min(y), max(y)])
	plt.ylabel('amplitude')
	plt.xlabel('time (sec)')
	plt.title('output sound: y')

	plt.tight_layout()
	plt.ion()
	plt.show()
if __name__ == "__main__":
main()
|
zangsir/sms-tools
|
software/models_interface/sineModel_function.py
|
Python
|
agpl-3.0
| 2,756
|
# -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Julien Veyssier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from urlparse import urlsplit, parse_qsl, urlparse
from datetime import datetime, timedelta
from weboob.deprecated.browser import Browser, BrowserIncorrectPassword
from weboob.capabilities.bank import Transfer, TransferError
from .pages import LoginPage, LoginErrorPage, AccountsPage, UserSpacePage, EmptyPage, \
OperationsPage, CardPage, ComingPage, NoOperationsPage, InfoPage, \
TransfertPage, ChangePasswordPage, VerifCodePage
__all__ = ['CICBrowser']
# Browser
class CICBrowser(Browser):
    """Scraping browser for www.cic.fr retail banking (weboob deprecated Browser API)."""

    PROTOCOL = 'https'
    DOMAIN = 'www.cic.fr'
    # Expected server certificate hash, checked by the framework.
    CERTHASH = '9f41522275058310a6fb348504daeadd16ae852a686a91383b10ad045da76d29'
    ENCODING = 'iso-8859-1'
    USER_AGENT = Browser.USER_AGENTS['wget']
    # URL-regex -> page-handler mapping consumed by Browser.location()/is_on_page().
    PAGES = {'https://www.cic.fr/.*/fr/banques/particuliers/index.html': LoginPage,
             'https://www.cic.fr/.*/fr/identification/default.cgi': LoginErrorPage,
             'https://www.cic.fr/.*/fr/banque/situation_financiere.cgi': AccountsPage,
             'https://www.cic.fr/.*/fr/banque/situation_financiere.html': AccountsPage,
             'https://www.cic.fr/.*/fr/banque/espace_personnel.aspx': UserSpacePage,
             'https://www.cic.fr/.*/fr/banque/mouvements.cgi.*': OperationsPage,
             'https://www.cic.fr/.*/fr/banque/mouvements.html.*': OperationsPage,
             'https://www.cic.fr/.*/fr/banque/mvts_instance.cgi.*': ComingPage,
             'https://www.cic.fr/.*/fr/banque/nr/nr_devbooster.aspx.*': OperationsPage,
             'https://www.cic.fr/.*/fr/banque/operations_carte\.cgi.*': CardPage,
             'https://www.cic.fr/.*/fr/banque/CR/arrivee\.asp.*': NoOperationsPage,
             'https://www.cic.fr/.*/fr/banque/BAD.*': InfoPage,
             'https://www.cic.fr/.*/fr/banque/.*Vir.*': TransfertPage,
             'https://www.cic.fr/.*/fr/validation/change_password.cgi': ChangePasswordPage,
             'https://www.cic.fr/.*/fr/validation/verif_code.cgi.*': VerifCodePage,
             'https://www.cic.fr/.*/fr/': EmptyPage,
             'https://www.cic.fr/.*/fr/banques/index.html': EmptyPage,
             'https://www.cic.fr/.*/fr/banque/paci_beware_of_phishing.html.*': EmptyPage,
             'https://www.cic.fr/.*/fr/validation/(?!change_password|verif_code).*': EmptyPage,
            }

    # First path segment of the user's URLs (e.g. 'sb'); filled in after login.
    currentSubBank = None

    def is_logged(self):
        # Any page other than login / login-error implies an active session.
        return not self.is_on_page(LoginPage) and not self.is_on_page(LoginErrorPage)

    def home(self):
        return self.location('https://www.cic.fr/sb/fr/banques/particuliers/index.html')

    def login(self):
        """Authenticate and detect the user's sub-bank.

        Raises BrowserIncorrectPassword when the site rejects the credentials.
        """
        assert isinstance(self.username, basestring)
        assert isinstance(self.password, basestring)

        if not self.is_on_page(LoginPage):
            self.location('https://www.cic.fr/', no_login=True)

        self.page.login(self.username, self.password)

        if not self.is_logged() or self.is_on_page(LoginErrorPage):
            raise BrowserIncorrectPassword()

        self.getCurrentSubBank()

    def get_accounts_list(self):
        """Return the list of accounts parsed from the financial-situation page."""
        if not self.is_on_page(AccountsPage):
            self.location('https://www.cic.fr/%s/fr/banque/situation_financiere.cgi' % self.currentSubBank)
        return self.page.get_list()

    def get_account(self, id):
        """Return the account whose id matches, or None."""
        assert isinstance(id, basestring)

        l = self.get_accounts_list()
        for a in l:
            if a.id == id:
                return a

        return None

    def getCurrentSubBank(self):
        # the account list and history urls depend on the sub bank of the user
        url = urlparse(self.geturl())
        self.currentSubBank = url.path.lstrip('/').split('/')[0]

    def list_operations(self, page_url):
        """Yield operations from ``page_url``, following pagination links."""
        if page_url.startswith('/') or page_url.startswith('https'):
            self.location(page_url)
        else:
            self.location('https://%s/%s/fr/banque/%s' % (self.DOMAIN, self.currentSubBank, page_url))

        go_next = True
        while go_next:
            if not self.is_on_page(OperationsPage):
                return
            for op in self.page.get_history():
                yield op
            go_next = self.page.go_next()

    def get_history(self, account):
        """Return account operations (regular, coming, and card), newest first."""
        transactions = []
        last_debit = None
        for tr in self.list_operations(account._link_id):
            # to prevent redundancy with card transactions, we do not
            # store 'RELEVE CARTE' transaction.
            if tr.raw != 'RELEVE CARTE':
                transactions.append(tr)
            elif last_debit is None:
                # NOTE(review): presumably the month in which the last card
                # statement was debited (shifted back 10 days) -- confirm.
                last_debit = (tr.date - timedelta(days=10)).month

        coming_link = self.page.get_coming_link() if self.is_on_page(OperationsPage) else None
        if coming_link is not None:
            for tr in self.list_operations(coming_link):
                transactions.append(tr)

        month = 0
        for card_link in account._card_links:
            v = urlsplit(card_link)
            args = dict(parse_qsl(v.query))

            # useful with 12 -> 1
            # Handles the year wrap: when the 'mois' param drops (e.g. 12 -> 1)
            # keep the month counter monotonically increasing.
            if int(args['mois']) < month:
                month = month + 1
            else:
                month = int(args['mois'])

            for tr in self.list_operations(card_link):
                if month > last_debit:
                    # Card operations past the last debited statement are
                    # flagged as "coming".
                    tr._is_coming = True
                transactions.append(tr)

        transactions.sort(key=lambda tr: tr.rdate, reverse=True)
        return transactions

    def transfer(self, account, to, amount, reason=None):
        """Perform an internal transfer and return a populated Transfer object.

        Raises TransferError on any recognized site-side failure message.
        """
        # access the transfer page
        transfert_url = 'WI_VPLV_VirUniSaiCpt.asp?RAZ=ALL&Cat=6&PERM=N&CHX=A'
        self.location('https://%s/%s/fr/banque/%s' % (self.DOMAIN, self.currentSubBank, transfert_url))

        # fill the form
        self.select_form(name='FormVirUniSaiCpt')
        # NOTE(review): accounts are selected by the last character of their
        # identifiers -- confirm this matches the form's option values.
        self['IDB'] = [account[-1]]
        self['ICR'] = [to[-1]]
        # French decimal notation: dot replaced with comma.
        self['MTTVIR'] = '%s' % str(amount).replace('.', ',')
        if reason is not None:
            self['LIBDBT'] = reason
            self['LIBCRT'] = reason
        self.submit()

        # look for known errors
        content = unicode(self.response().get_data(), self.ENCODING)
        insufficient_amount_message = u'Montant insuffisant.'
        maximum_allowed_balance_message = u'Solde maximum autorisé dépassé.'

        if content.find(insufficient_amount_message) != -1:
            raise TransferError('The amount you tried to transfer is too low.')

        if content.find(maximum_allowed_balance_message) != -1:
            raise TransferError('The maximum allowed balance for the target account has been / would be reached.')

        # look for the known "all right" message
        ready_for_transfer_message = u'Confirmez un virement entre vos comptes'
        if not content.find(ready_for_transfer_message):
            # NOTE(review): str.find returns -1 (truthy) when absent and can
            # return 0 (falsy) on a match at index 0 -- this check is inverted
            # relative to its apparent intent; confirm.
            raise TransferError('The expected message "%s" was not found.' % ready_for_transfer_message)

        # submit the confirmation form
        self.select_form(name='FormVirUniCnf')
        submit_date = datetime.now()
        self.submit()

        # look for the known "everything went well" message
        content = unicode(self.response().get_data(), self.ENCODING)
        transfer_ok_message = u'Votre virement a été exécuté ce jour'
        if not content.find(transfer_ok_message):
            raise TransferError('The expected message "%s" was not found.' % transfer_ok_message)

        # We now have to return a Transfer object
        transfer = Transfer(submit_date.strftime('%Y%m%d%H%M%S'))
        transfer.amount = amount
        transfer.origin = account
        transfer.recipient = to
        transfer.date = submit_date
        return transfer
|
laurent-george/weboob
|
modules/cic/browser.py
|
Python
|
agpl-3.0
| 8,378
|
#!/usr/bin/env python
# coding:utf-8
__author__ = 'coderzh'
import os
import re
import subprocess
from datetime import datetime
# replace to vim or others if your like
# EDITOR = ['MarkdownPad2.exe']
EDITOR = ['atom']
if __name__ == '__main__':
    # Ask for the post title and derive hugo's content path:
    # post/<year>/<YYYY-MM-DD>-<title>.md  (Python 2: raw_input)
    post_name = raw_input("Post'title: ")
    post_path = 'post/{year}/{date_format}-{post_name}.md'.format(
        year=datetime.now().year,
        date_format=datetime.now().strftime('%Y-%m-%d'),
        post_name=post_name
    )
    subprocess.call(['hugo', 'new', post_path])

    # replace template value
    post_rel_path = os.path.join('content', post_path)
    with open(post_rel_path, 'r') as f:
        content = f.read()

    # Canonical URL for the post: /YYYY/MM/DD/<title>
    url = '/{date_format}/{post_name}'.format(
        date_format=datetime.now().strftime('%Y/%m/%d'),
        post_name=post_name
    )
    # Patch the front matter emitted by "hugo new": quote the title, set the
    # url, and ensure a blank line before the closing "---".
    replace_patterns =[
        (re.compile(r'title:(.*)'), 'title: "%s"' % post_name),
        (re.compile(r'url:(.*)'), 'url: "%s/"' % url),
        (re.compile(r'\n---'), r'\n\n---'),
    ]
    for regex, replace_with in replace_patterns:
        content = regex.sub(replace_with, content)
    with open(post_rel_path, 'w') as f:
        f.write(content)

    # Open the fresh post in the configured editor without blocking.
    subprocess.Popen(EDITOR + [post_rel_path])
|
longlizl/lilong-hugo-blog
|
create-post.py
|
Python
|
mit
| 1,234
|
from django.shortcuts import render, get_object_or_404
from django.contrib.auth.decorators import login_required
from keycheck.forms import addKey
from keycheck.models import GpgKey, Mail
from utils import updateUserMails, makeAdmin
def index(request):
    """Landing page: overview of keybiz's capabilities plus a login form.

    Rendered identically whether or not the user is authenticated.
    """
    context = {}
    return render(request, 'keycheck/index.html', context)
@login_required
def manage(request):
    '''
    the manage view allows users to review their email addresses and uploaded
    keys. Users can upload a key which will be used to sign all the addresses
    that also are present to the system.
    '''
    form = None
    errmsg = None
    keymails = []
    # Refresh the user's addresses from the external source first
    # (return value is currently unused).
    newmails = updateUserMails(request)
    mails = Mail.objects.filter(user=request.user)
    if request.method == "POST":
        form = addKey(request.POST)
        if form.is_valid():
            key = GpgKey(keydata=form.cleaned_data['keydata'])
            # getMails() contract (inferred from the branches below):
            # [] -> parsed but no uids; False -> unparseable key data.
            keymails = key.getMails()
            if keymails == []:
                errmsg = "Could not extract mails from key, have you added your addresses to that key?"
            elif keymails is False:
                errmsg = "Could not read GPG data, is that key valid?"
            else:
                # save mails and add key to mails
                keycount = 0
                for m in mails:
                    if str(m) in keymails and key not in m.gpgkey.all():
                        if keycount == 0:
                            key.save() #save key iff we found an address.
                        m.gpgkey.add(key)
                        keycount += 1
                if keycount > 0:
                    # Persist and sign every registered address with the new key.
                    for m in mails:
                        m.save()
                        m.sign(key)
                else:
                    errmsg = "None of the key's uids matched your registered email addresses."
    else:
        form = addKey()
    return render(request, 'keycheck/manage.html', {'form': form, 'mail': mails, 'keymail': keymails, 'errmsg': errmsg})
@login_required
def sign(request, keyId, sign=None):
    """Show key *keyId* and, when the ``sign`` query parameter is present,
    re-sign all of the requesting user's addresses with it."""
    gpgkey = GpgKey.objects.get(pk=keyId)
    userkey = [] #has the user an uid with this key attached?
    for m in Mail.objects.filter(user=request.user):
        if gpgkey in m.gpgkey.all():
            userkey.append(m)
    errmsg = None
    if userkey == []:
        errmsg = "Key not found."
    # Query-string values are strings: any non-empty ?sign= value is truthy.
    signKey = request.GET.get('sign', False)
    if signKey:
        for m in Mail.objects.filter(user=request.user):
            m.save()
            m.sign(gpgkey)
    elif not signKey:
        pass
    else:
        # NOTE(review): unreachable -- "signKey" and "not signKey" cover every
        # case, so this error branch can never execute.
        errmsg = "Error processing your request, please try again."
    return render(request, 'keycheck/sign.html', {'gpgkey': gpgkey, 'errmsg': errmsg, 'signKey': signKey})
|
nomnomtom/keybiz
|
keycheck/views.py
|
Python
|
agpl-3.0
| 2,422
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2018: Alignak team, see AUTHORS.txt file for contributors
#
# This file is part of Alignak.
#
# Alignak is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Alignak is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Alignak. If not, see <http://www.gnu.org/licenses/>.
#
#
# This file incorporates work covered by the following copyright and
# permission notice:
#
# Copyright (C) 2009-2014:
# Jean Gabes, naparuba@gmail.com
# Sebastien Coavoux, s.coavoux@free.fr
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#
from .alignak_test import AlignakTest
class TestBusinessCorrelatorRecursive(AlignakTest):
    """Verify that a recursively-defined business rule is loaded and
    evaluated without crashing the scheduler (no infinite recursion)."""

    def setUp(self):
        super(TestBusinessCorrelatorRecursive, self).setUp()
        # Load a configuration that contains a recursive business correlator
        # definition; dispatching=True so the scheduler is fully wired up.
        self.setup_with_file('cfg/cfg_business_correlator_recursive.cfg',
                             dispatching=True)
        assert self.conf_is_correct
        # Shortcut to the scheduler used by every test method.
        self._sched = self._scheduler

    def test_recursive(self):
        """ BR - recursive do not break python

        ht34-peret-2-dif0, son of ht34-peret-2
        ht34-peret-2-dif1, son of ht34-peret-2

        ht34-peret-2 host state is 2,1,1 of (dif0 | dif1)
        """
        # The two concrete son hosts; drop act_depend_of so critical check
        # results are not masked by host-check dependencies.
        son_0 = self._sched.hosts.find_by_name("ht34-peret-2-dif0")
        son_0.act_depend_of = []  # no host checks on critical check results
        son_1 = self._sched.hosts.find_by_name("ht34-peret-2-dif1")
        son_1.act_depend_of = []  # no host checks on critical check results

        # The parent is not a real host: its state comes from a business rule.
        parent = self._sched.hosts.find_by_name("ht34-peret-2")
        parent.act_depend_of = []  # no host checks on critical check results
        parent.__class__.enable_problem_impacts_states_change = False

        # Confirm the parent really carries a business rule.
        assert parent.got_business_rule
        assert parent.business_rule is not None
        rule = parent.business_rule
        print("Host BR: %s" % rule)
        # Expected rule shape (ids vary per run):
        #   Op:None Val:('1','1','1') Sons:[
        #     Op:of: Val:('2','1','1') Sons:[
        #       Op:host Val:('0','0','0') Sons:[<uuid dif0>] IsNot:False,
        #       Op:host Val:('0','0','0') Sons:[<uuid dif1>] IsNot:False
        #     ] IsNot:False
        #   ] IsNot:False

        # Drive both sons to a HARD DOWN state.
        self.scheduler_loop(3, [
            [son_0, 2, 'DOWN | value1=1 value2=2'],
            [son_1, 2, 'DOWN | rtt=10']
        ])
        print("Host dif-0 state: %s / %s" % (son_0.state_type, son_0.state))
        print("Host dif-1 state: %s / %s" % (son_1.state_type, son_1.state))
        assert son_0.state == 'DOWN'
        assert son_0.state_type == 'HARD'
        assert son_1.state == 'DOWN'
        assert son_1.state_type == 'HARD'

        # The recursive rule cannot be resolved: its state is 4 (undetermined),
        # and — the point of the test — evaluating it does not blow the stack.
        state = rule.get_state(self._sched.hosts, self._sched.services)
        assert state == 4
|
Alignak-monitoring/alignak
|
tests/test_business_correlator_recursive_bp_rules.py
|
Python
|
agpl-3.0
| 4,287
|
import account_bank_statement
import account_invoice
import account_journal
|
3dfxsoftware/cbss-addons
|
account_bank_statement_vauxoo/model/__init__.py
|
Python
|
gpl-2.0
| 76
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.