code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
|---|---|---|---|---|---|
""" Functions to solve for antenna/station gain
"""
from processing_components.calibration.calibration import solve_gaintable
|
SKA-ScienceDataProcessor/algorithm-reference-library
|
wrappers/serial/calibration/calibration.py
|
Python
|
apache-2.0
| 127
|
# -*- coding: utf-8 -*-
import KBEngine
import KBExtend
from KBEDebug import *
from interfaces.GameObject import GameObject
class NPC(GameObject):
    """Client-side NPC entity script for the KBEngine demo.

    Registered with KBEngine by its class name; currently adds no behavior
    beyond the shared GameObject base.
    """

    def __init__(self):
        # Explicit base-class call (KBEngine entity classes are initialized
        # by the engine; keep the original call form rather than super()).
        GameObject.__init__(self)
|
harmy/kbengine
|
demo/res/scripts/client/NPC.py
|
Python
|
lgpl-3.0
| 206
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from tempest.api.image import base
from tempest.common import image as common_image
from tempest.common.utils import data_utils
from tempest.common import waiters
from tempest import config
from tempest.lib import exceptions
from tempest import test
CONF = config.CONF
def get_container_and_disk_format():
    """Return the first configured (container_format, disk_format) pair.

    Amazon-style container formats (ami/ari/aki) require an identical disk
    format; raise InvalidConfiguration when the configuration violates that.
    """
    container_format = CONF.image.container_formats[0]
    disk_format = CONF.image.disk_formats[0]
    amazon_formats = ('ami', 'ari', 'aki')
    if container_format in amazon_formats and container_format != disk_format:
        raise exceptions.InvalidConfiguration(
            message=("The container format and the disk format don't match. "
                     "Container format: %(container)s, Disk format: %(disk)s."
                     % {'container': container_format,
                        'disk': disk_format}))
    return container_format, disk_format
class CreateRegisterImagesTest(base.BaseV1ImageTest):
    """Here we test the registration and creation of images."""

    @test.idempotent_id('3027f8e6-3492-4a11-8575-c3293017af4d')
    def test_register_then_upload(self):
        # Register, then upload an image
        properties = {'prop1': 'val1'}
        container_format, disk_format = get_container_and_disk_format()
        image = self.create_image(name='New Name',
                                  container_format=container_format,
                                  disk_format=disk_format,
                                  is_public=False,
                                  properties=properties)
        self.assertIn('id', image)
        self.assertEqual('New Name', image.get('name'))
        self.assertFalse(image.get('is_public'))
        # A registered image with no data yet should sit in 'queued'.
        self.assertEqual('queued', image.get('status'))
        for key, val in properties.items():
            self.assertEqual(val, image.get('properties')[key])
        # Now try uploading an image file
        # NOTE(review): assumes data_utils.random_bytes() defaults to 1024
        # bytes, matching the size assertion below — confirm against tempest.
        image_file = six.BytesIO(data_utils.random_bytes())
        body = self.client.update_image(image['id'], data=image_file)['image']
        self.assertIn('size', body)
        self.assertEqual(1024, body.get('size'))

    @test.idempotent_id('69da74d9-68a9-404b-9664-ff7164ccb0f5')
    def test_register_remote_image(self):
        # Register a new remote image
        container_format, disk_format = get_container_and_disk_format()
        body = self.create_image(name='New Remote Image',
                                 container_format=container_format,
                                 disk_format=disk_format, is_public=False,
                                 location=CONF.image.http_image,
                                 properties={'key1': 'value1',
                                             'key2': 'value2'})
        self.assertIn('id', body)
        self.assertEqual('New Remote Image', body.get('name'))
        self.assertFalse(body.get('is_public'))
        # Remote-location images go 'active' immediately (no upload step).
        self.assertEqual('active', body.get('status'))
        properties = body.get('properties')
        self.assertEqual(properties['key1'], 'value1')
        self.assertEqual(properties['key2'], 'value2')

    @test.idempotent_id('6d0e13a7-515b-460c-b91f-9f4793f09816')
    def test_register_http_image(self):
        # Register an image whose data glance copies from an HTTP source.
        container_format, disk_format = get_container_and_disk_format()
        image = self.create_image(name='New Http Image',
                                  container_format=container_format,
                                  disk_format=disk_format, is_public=False,
                                  copy_from=CONF.image.http_image)
        self.assertIn('id', image)
        self.assertEqual('New Http Image', image.get('name'))
        self.assertFalse(image.get('is_public'))
        # copy_from is asynchronous: wait until the copy completes.
        waiters.wait_for_image_status(self.client, image['id'], 'active')
        self.client.show_image(image['id'])

    @test.idempotent_id('05b19d55-140c-40d0-b36b-fafd774d421b')
    def test_register_image_with_min_ram(self):
        # Register an image with min ram
        container_format, disk_format = get_container_and_disk_format()
        properties = {'prop1': 'val1'}
        body = self.create_image(name='New_image_with_min_ram',
                                 container_format=container_format,
                                 disk_format=disk_format,
                                 is_public=False,
                                 min_ram=40,
                                 properties=properties)
        self.assertIn('id', body)
        self.assertEqual('New_image_with_min_ram', body.get('name'))
        self.assertFalse(body.get('is_public'))
        self.assertEqual('queued', body.get('status'))
        self.assertEqual(40, body.get('min_ram'))
        for key, val in properties.items():
            self.assertEqual(val, body.get('properties')[key])
        self.client.delete_image(body['id'])
class ListImagesTest(base.BaseV1ImageTest):
    """Here we test the listing of image information"""

    @classmethod
    def skip_checks(cls):
        # These tests need at least two container formats and two disk
        # formats configured, to exercise the filter parameters.
        super(ListImagesTest, cls).skip_checks()
        if (len(CONF.image.container_formats) < 2
                or len(CONF.image.disk_formats) < 2):
            skip_msg = ("%s skipped as multiple container formats "
                        "or disk formats are not available." % cls.__name__)
            raise cls.skipException(skip_msg)

    @classmethod
    def resource_setup(cls):
        super(ListImagesTest, cls).resource_setup()
        # We add a few images here to test the listing functionality of
        # the images API
        a_formats = ['ami', 'ari', 'aki']
        (cls.container_format,
         cls.container_format_alt) = CONF.image.container_formats[:2]
        cls.disk_format, cls.disk_format_alt = CONF.image.disk_formats[:2]
        # Amazon-style container formats must pair with an identical
        # disk format, so force the match when one is configured.
        if cls.container_format in a_formats:
            cls.disk_format = cls.container_format
        if cls.container_format_alt in a_formats:
            cls.disk_format_alt = cls.container_format_alt
        img1 = cls._create_remote_image('one', cls.container_format,
                                        cls.disk_format)
        img2 = cls._create_remote_image('two', cls.container_format_alt,
                                        cls.disk_format_alt)
        img3 = cls._create_remote_image('dup', cls.container_format,
                                        cls.disk_format)
        img4 = cls._create_remote_image('dup', cls.container_format,
                                        cls.disk_format)
        img5 = cls._create_standard_image('1', cls.container_format_alt,
                                          cls.disk_format_alt, 42)
        img6 = cls._create_standard_image('2', cls.container_format_alt,
                                          cls.disk_format_alt, 142)
        img7 = cls._create_standard_image('33', cls.container_format,
                                          cls.disk_format, 142)
        img8 = cls._create_standard_image('33', cls.container_format,
                                          cls.disk_format, 142)
        cls.created_set = set(cls.created_images)
        # same container format
        cls.same_container_format_set = set((img1, img3, img4, img7, img8))
        # same disk format
        cls.same_disk_format_set = set((img2, img5, img6))
        # 1x with size 42
        cls.size42_set = set((img5,))
        # 3x with size 142
        cls.size142_set = set((img6, img7, img8,))
        # dup named
        cls.dup_set = set((img3, img4))

    @classmethod
    def _create_remote_image(cls, name, container_format, disk_format):
        """Create a new remote image and return newly-registered image-id"""
        name = 'New Remote Image %s' % name
        location = CONF.image.http_image
        image = cls.create_image(name=name,
                                 container_format=container_format,
                                 disk_format=disk_format,
                                 is_public=False,
                                 location=location)
        return image['id']

    @classmethod
    def _create_standard_image(cls, name, container_format,
                               disk_format, size):
        """Create a new standard image and return newly-registered image-id

        The image data is exactly *size* random bytes, so the registered
        image's size equals the *size* argument.
        """
        image_file = six.BytesIO(data_utils.random_bytes(size))
        name = 'New Standard Image %s' % name
        image = cls.create_image(name=name,
                                 container_format=container_format,
                                 disk_format=disk_format,
                                 is_public=False, data=image_file)
        return image['id']

    @test.idempotent_id('246178ab-3b33-4212-9a4b-a7fe8261794d')
    def test_index_no_params(self):
        # Simple test to see all fixture images returned
        images_list = self.client.list_images()['images']
        image_list = [image['id'] for image in images_list]
        for image_id in self.created_images:
            self.assertIn(image_id, image_list)

    @test.idempotent_id('f1755589-63d6-4468-b098-589820eb4031')
    def test_index_disk_format(self):
        # Filtering by disk_format returns all (and only) matching fixtures.
        images_list = self.client.list_images(
            disk_format=self.disk_format_alt)['images']
        for image in images_list:
            self.assertEqual(image['disk_format'], self.disk_format_alt)
        result_set = set(map(lambda x: x['id'], images_list))
        self.assertTrue(self.same_disk_format_set <= result_set)
        self.assertFalse(self.created_set - self.same_disk_format_set
                         <= result_set)

    @test.idempotent_id('2143655d-96d9-4bec-9188-8674206b4b3b')
    def test_index_container_format(self):
        # Filtering by container_format behaves like the disk_format filter.
        images_list = self.client.list_images(
            container_format=self.container_format)['images']
        for image in images_list:
            self.assertEqual(image['container_format'], self.container_format)
        result_set = set(map(lambda x: x['id'], images_list))
        self.assertTrue(self.same_container_format_set <= result_set)
        self.assertFalse(self.created_set - self.same_container_format_set
                         <= result_set)

    @test.idempotent_id('feb32ac6-22bb-4a16-afd8-9454bb714b14')
    def test_index_max_size(self):
        # size_max is an inclusive upper bound.
        images_list = self.client.list_images(size_max=42)['images']
        for image in images_list:
            self.assertTrue(image['size'] <= 42)
        result_set = set(map(lambda x: x['id'], images_list))
        self.assertTrue(self.size42_set <= result_set)
        self.assertFalse(self.created_set - self.size42_set <= result_set)

    @test.idempotent_id('6ffc16d0-4cbf-4401-95c8-4ac63eac34d8')
    def test_index_min_size(self):
        # size_min is an inclusive lower bound.
        images_list = self.client.list_images(size_min=142)['images']
        for image in images_list:
            self.assertTrue(image['size'] >= 142)
        result_set = set(map(lambda x: x['id'], images_list))
        self.assertTrue(self.size142_set <= result_set)
        self.assertFalse(self.size42_set <= result_set)

    @test.idempotent_id('e5dc26d9-9aa2-48dd-bda5-748e1445da98')
    def test_index_status_active_detail(self):
        # Detailed listing sorted by size descending: verify the ordering
        # and that every returned image is 'active'.
        images_list = self.client.list_images(detail=True,
                                              status='active',
                                              sort_key='size',
                                              sort_dir='desc')['images']
        top_size = images_list[0]['size']  # We have non-zero sized images
        for image in images_list:
            size = image['size']
            self.assertTrue(size <= top_size)
            top_size = size
            self.assertEqual(image['status'], 'active')

    @test.idempotent_id('097af10a-bae8-4342-bff4-edf89969ed2a')
    def test_index_name(self):
        # Filtering by exact name returns the duplicate-named fixtures only.
        images_list = self.client.list_images(
            detail=True,
            name='New Remote Image dup')['images']
        result_set = set(map(lambda x: x['id'], images_list))
        for image in images_list:
            self.assertEqual(image['name'], 'New Remote Image dup')
        self.assertTrue(self.dup_set <= result_set)
        self.assertFalse(self.created_set - self.dup_set <= result_set)
class UpdateImageMetaTest(base.BaseV1ImageTest):
    """Tests for reading and updating v1 image metadata (HTTP headers)."""

    @classmethod
    def resource_setup(cls):
        # One shared 42-byte fixture image with a single initial property.
        super(UpdateImageMetaTest, cls).resource_setup()
        container_format, disk_format = get_container_and_disk_format()
        cls.image_id = cls._create_standard_image('1', container_format,
                                                  disk_format, 42)

    @classmethod
    def _create_standard_image(cls, name, container_format,
                               disk_format, size):
        """Create a new standard image and return newly-registered image-id"""
        image_file = six.BytesIO(data_utils.random_bytes(size))
        name = 'New Standard Image %s' % name
        image = cls.create_image(name=name,
                                 container_format=container_format,
                                 disk_format=disk_format,
                                 is_public=False, data=image_file,
                                 properties={'key1': 'value1'})
        return image['id']

    @test.idempotent_id('01752c1c-0275-4de3-9e5b-876e44541928')
    def test_list_image_metadata(self):
        # All metadata key/value pairs for an image should be returned
        # (v1 conveys metadata via response headers, hence check_image).
        resp = self.client.check_image(self.image_id)
        resp_metadata = common_image.get_image_meta_from_headers(resp)
        expected = {'key1': 'value1'}
        self.assertEqual(expected, resp_metadata['properties'])

    @test.idempotent_id('d6d7649c-08ce-440d-9ea7-e3dda552f33c')
    def test_update_image_metadata(self):
        # The metadata for the image should match the updated values
        req_metadata = {'key1': 'alt1', 'key2': 'value2'}
        resp = self.client.check_image(self.image_id)
        metadata = common_image.get_image_meta_from_headers(resp)
        self.assertEqual(metadata['properties'], {'key1': 'value1'})
        # Merge the requested changes into the current properties and
        # send them back as image-meta headers.
        metadata['properties'].update(req_metadata)
        headers = common_image.image_meta_to_headers(
            properties=metadata['properties'])
        self.client.update_image(self.image_id, headers=headers)
        resp = self.client.check_image(self.image_id)
        resp_metadata = common_image.get_image_meta_from_headers(resp)
        self.assertEqual(req_metadata, resp_metadata['properties'])
|
sebrandon1/tempest
|
tempest/api/image/v1/test_images.py
|
Python
|
apache-2.0
| 15,031
|
"""
Configuration of unit tests here.
"""
from __future__ import print_function
def setup_module():
    """Module-level setup hook, run once by the test runner before tests."""
    print('{0} {1}'.format(__name__, ': setup_conf_test() ***'))
def teardown_module():
    """Module-level teardown hook, run once by the test runner after tests."""
    print('{0} {1}'.format(__name__, ': teardown_conf_test() ***'))
import os
import sys

# Make the package checkout (one directory up) and its app/ directory
# importable, so the tests can import application modules without an install.
root = os.path.join(os.path.dirname(__file__))
package = os.path.join(root, '..')
app_root = os.path.join(root, '..', 'app')
# Prepend so these local copies shadow any installed versions.
sys.path.insert(0, os.path.abspath(package))
sys.path.insert(0, os.path.abspath(app_root))
|
BartGo/bottle-stack
|
tests/conf_test.py
|
Python
|
mit
| 510
|
import fechbase


class Records(fechbase.RecordsBase):
    """Field layout for FEC form-type SH3 records."""

    def __init__(self):
        fechbase.RecordsBase.__init__(self)
        # (name, number) pairs, in the order the fields appear in the record.
        specs = [
            ('FORM TYPE', '1'),
            ('FILER FEC CMTE ID', '2'),
            ('BACK-REF TRAN ID', '3'),
            ('ACCOUNT NAME', '4'),
            ('EVENT NAME', '5'),
            ('EVENT TYPE', '6'),
            ('Of Receipt', '7-'),
            ('AMOUNT TRANSFERRED', '8'),
            ('TOTAL AMOUNT TRANSFERRED', '9'),
            ('AMENDED CD', '10'),
            ('TRAN ID', '11'),
        ]
        self.fields = [{'name': name, 'number': number}
                       for name, number in specs]
        self.fields_names = self.hash_names(self.fields)
|
h4ck3rm1k3/FEC-Field-Documentation
|
fec/version/v5_1/SH3.py
|
Python
|
unlicense
| 805
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Apache Beam SDK for Python setup file."""
from __future__ import print_function
import os
import platform
import warnings
from distutils.version import StrictVersion
# Pylint and isort disagree here.
# pylint: disable=ungrouped-imports
import setuptools
from pkg_resources import DistributionNotFound
from pkg_resources import get_distribution
from setuptools.command.build_py import build_py
from setuptools.command.develop import develop
from setuptools.command.egg_info import egg_info
from setuptools.command.sdist import sdist
from setuptools.command.test import test
def get_version():
    """Return the SDK version string defined in apache_beam/version.py.

    The version module is exec'd rather than imported so setup.py can read
    the version before the package itself is importable.
    """
    global_names = {}
    version_path = os.path.normpath('./apache_beam/version.py')
    # Use a context manager so the file handle is closed deterministically
    # (the original left it open until garbage collection).
    with open(version_path) as version_file:
        exec(version_file.read(), global_names)  # pylint: disable=exec-used
    return global_names['__version__']
# Static package metadata used by the setuptools.setup() call below.
PACKAGE_NAME = 'apache-beam'
PACKAGE_VERSION = get_version()
PACKAGE_DESCRIPTION = 'Apache Beam SDK for Python'
PACKAGE_URL = 'https://beam.apache.org'
PACKAGE_DOWNLOAD_URL = 'https://pypi.python.org/pypi/apache-beam'
PACKAGE_AUTHOR = 'Apache Software Foundation'
PACKAGE_EMAIL = 'dev@beam.apache.org'
PACKAGE_KEYWORDS = 'apache beam'
PACKAGE_LONG_DESCRIPTION = '''
Apache Beam is a unified programming model for both batch and streaming
data processing, enabling efficient execution across diverse distributed
execution engines and providing extensibility points for connecting to
different technologies and user communities.
'''

# Warn (don't fail) when the installed pip is older than recommended.
REQUIRED_PIP_VERSION = '7.0.0'
_PIP_VERSION = get_distribution('pip').version
if StrictVersion(_PIP_VERSION) < StrictVersion(REQUIRED_PIP_VERSION):
    warnings.warn(
        "You are using version {0} of pip. " \
        "However, version {1} is recommended.".format(
            _PIP_VERSION, REQUIRED_PIP_VERSION
        )
    )

# Same soft check for Cython; it is optional (compiled modules are a
# performance optimization only).
REQUIRED_CYTHON_VERSION = '0.28.1'
try:
    _CYTHON_VERSION = get_distribution('cython').version
    if StrictVersion(_CYTHON_VERSION) < StrictVersion(REQUIRED_CYTHON_VERSION):
        warnings.warn(
            "You are using version {0} of cython. " \
            "However, version {1} is recommended.".format(
                _CYTHON_VERSION, REQUIRED_CYTHON_VERSION
            )
        )
except DistributionNotFound:
    # do nothing if Cython is not installed
    pass

# Currently all compiled modules are optional (for performance only).
if platform.system() == 'Windows':
    # Windows doesn't always provide int64_t.
    cythonize = lambda *args, **kwargs: []
else:
    try:
        # pylint: disable=wrong-import-position
        from Cython.Build import cythonize
    except ImportError:
        # Fall back to a no-op so setup() still works without Cython.
        cythonize = lambda *args, **kwargs: []

# Runtime dependencies installed with the package.
REQUIRED_PACKAGES = [
    'avro>=1.8.1,<2.0.0',
    'crcmod>=1.7,<2.0',
    'dill==0.2.6',
    'grpcio>=1.8,<2',
    'hdfs>=2.1.0,<3.0.0',
    'httplib2>=0.8,<0.10',
    'mock>=1.0.1,<3.0.0',
    'oauth2client>=2.0.1,<5',
    # grpcio 1.8.1 and above requires protobuf 3.5.0.post1.
    'protobuf>=3.5.0.post1,<4',
    'pytz>=2018.3',
    'pyyaml>=3.12,<4.0.0',
    'pyvcf>=0.6.8,<0.7.0',
    'six>=1.9,<1.12',
    'typing>=3.6.0,<3.7.0',
    'futures>=3.1.1,<4.0.0',
    'future>=0.16.0,<1.0.0',
]

# Extra dependencies for running the test suite.
REQUIRED_TEST_PACKAGES = [
    'nose>=1.3.7',
    'pyhamcrest>=1.9,<2.0',
]

# Optional dependencies for the 'gcp' extra.
GCP_REQUIREMENTS = [
    # oauth2client >=4 only works with google-apitools>=0.5.18.
    'google-apitools>=0.5.18,<=0.5.20',
    'proto-google-cloud-datastore-v1>=0.90.0,<=0.90.4',
    'googledatastore==7.0.1',
    'google-cloud-pubsub==0.26.0',
    'proto-google-cloud-pubsub-v1==0.15.4',
    # GCP packages required by tests
    'google-cloud-bigquery==0.25.0',
]
# We must generate protos after setup_requires are installed.
def generate_protos_first(original_cmd):
    """Wrap a setuptools command class so protos are generated before it runs.

    Returns *original_cmd* unchanged (with a warning) when the gen_protos
    helper is unavailable.
    """
    try:
        # See https://issues.apache.org/jira/browse/BEAM-2366
        # pylint: disable=wrong-import-position
        import gen_protos
    except ImportError:
        warnings.warn("Could not import gen_protos, skipping proto generation.")
        return original_cmd

    class cmd(original_cmd, object):
        def run(self):
            # Generate protos, then delegate to the wrapped command.
            gen_protos.generate_proto_files()
            super(cmd, self).run()

    return cmd
# Restrict to Python 2 unless the experimental Python 3 flag is set in
# the environment.
python_requires = '>=2.7'
if os.environ.get('BEAM_EXPERIMENTAL_PY3') is None:
    python_requires += ',<3.0'

setuptools.setup(
    name=PACKAGE_NAME,
    version=PACKAGE_VERSION,
    description=PACKAGE_DESCRIPTION,
    long_description=PACKAGE_LONG_DESCRIPTION,
    url=PACKAGE_URL,
    download_url=PACKAGE_DOWNLOAD_URL,
    author=PACKAGE_AUTHOR,
    author_email=PACKAGE_EMAIL,
    packages=setuptools.find_packages(),
    # Ship Cython sources/headers and test fixtures with the package.
    package_data={'apache_beam': [
        '*/*.pyx', '*/*/*.pyx', '*/*.pxd', '*/*/*.pxd', 'testing/data/*.yaml']},
    # Modules compiled for speed when Cython is available; cythonize is a
    # no-op stub otherwise (see above).
    ext_modules=cythonize([
        'apache_beam/**/*.pyx',
        'apache_beam/coders/coder_impl.py',
        'apache_beam/metrics/execution.py',
        'apache_beam/runners/common.py',
        'apache_beam/runners/worker/logger.py',
        'apache_beam/runners/worker/opcounters.py',
        'apache_beam/runners/worker/operations.py',
        'apache_beam/transforms/cy_combiners.py',
        'apache_beam/utils/counters.py',
        'apache_beam/utils/windowed_value.py',
    ]),
    install_requires=REQUIRED_PACKAGES,
    python_requires=python_requires,
    test_suite='nose.collector',
    tests_require=REQUIRED_TEST_PACKAGES,
    extras_require={
        'docs': ['Sphinx>=1.5.2,<2.0'],
        'test': REQUIRED_TEST_PACKAGES,
        'gcp': GCP_REQUIREMENTS
    },
    zip_safe=False,
    # PyPI package information.
    classifiers=[
        'Intended Audience :: End Users/Desktop',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python :: 2.7',
        'Topic :: Software Development :: Libraries',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    license='Apache License, Version 2.0',
    keywords=PACKAGE_KEYWORDS,
    entry_points={
        'nose.plugins.0.10': [
            'beam_test_plugin = test_config:BeamTestPlugin',
        ]},
    # Every build-related command first regenerates the protos.
    cmdclass={
        'build_py': generate_protos_first(build_py),
        'develop': generate_protos_first(develop),
        'egg_info': generate_protos_first(egg_info),
        'sdist': generate_protos_first(sdist),
        'test': generate_protos_first(test),
    },
)
|
tgroh/incubator-beam
|
sdks/python/setup.py
|
Python
|
apache-2.0
| 6,961
|
# -*- coding: utf-8 -*-
from antidox import perspective
import unittest
import sys
if sys.version_info >= (3, 3):
from unittest import mock
else:
import mock
class TestPerspective(unittest.TestCase):
    """Unit tests for the DLP and Perspective helpers in antidox.perspective.

    NOTE(review): the original class defined ``test_contains_toxicity_false``
    twice (the second definition silently shadowed the first, so it never
    ran), and ``contains_threat`` was missing both the ``test_`` prefix and
    the ``self`` parameter, so it was never executed. Those methods are
    renamed/fixed below; their bodies are unchanged.
    """

    def test_contains_pii_true(self):
        """A DLP response with findings is reported as containing PII."""
        dlp_response = \
            {
                "result": {
                    "findings": [
                        {
                            "quote": "footfungusinbellybutton@gmail.com",
                            "infoType": {
                                "name": "EMAIL_ADDRESS"
                            },
                            "likelihood": "LIKELY",
                            "location": {
                                "byteRange": {
                                    "start": "13",
                                    "end": "46"
                                },
                                "codepointRange": {
                                    "start": "13",
                                    "end": "46"
                                }
                            },
                            "createTime": "2019-05-31T21:23:12.402Z"
                        },
                        {
                            "quote": "(206) 555-0123",
                            "infoType": {
                                "name": "PHONE_NUMBER"
                            },
                            "likelihood": "LIKELY",
                            "location": {
                                "byteRange": {
                                    "start": "67",
                                    "end": "81"
                                },
                                "codepointRange": {
                                    "start": "67",
                                    "end": "81"
                                }
                            },
                            "createTime": "2019-05-31T21:23:12.402Z"
                        }
                    ]
                }
            }
        has_pii = perspective.contains_pii(dlp_response)
        self.assertTrue(has_pii)

    def test_contains_pii_false(self):
        """A DLP response with no findings yields (False, None)."""
        dlp_response = \
            {
                "result": {}
            }
        has_pii = perspective.contains_pii(dlp_response)
        self.assertEqual(has_pii, (False, None))

    def test_contains_toxicity_true(self):
        """A response with a high TOXICITY summary score is flagged toxic."""
        perspective_response = \
            {
                "attributeScores": {
                    "INSULT": {
                        "spanScores": [
                            {
                                "begin": 0,
                                "end": 14,
                                "score": {
                                    "value": 0.8521307,
                                    "type": "PROBABILITY"
                                }
                            }
                        ],
                        "summaryScore": {
                            "value": 0.8521307,
                            "type": "PROBABILITY"
                        }
                    },
                    "TOXICITY": {
                        "spanScores": [
                            {
                                "begin": 0,
                                "end": 14,
                                "score": {
                                    "value": 0.96624386,
                                    "type": "PROBABILITY"
                                }
                            }
                        ],
                        "summaryScore": {
                            "value": 0.96624386,
                            "type": "PROBABILITY"
                        }
                    },
                    "THREAT": {
                        "spanScores": [
                            {
                                "begin": 0,
                                "end": 14,
                                "score": {
                                    "value": 0.39998722,
                                    "type": "PROBABILITY"
                                }
                            }
                        ],
                        "summaryScore": {
                            "value": 0.39998722,
                            "type": "PROBABILITY"
                        }
                    }
                },
                "languages": [
                    "en"
                ],
                "detectedLanguages": [
                    "en"
                ]
            }
        is_toxic = perspective.contains_toxicity(perspective_response)
        self.assertTrue(is_toxic)

    def test_contains_insult_false(self):
        """A low INSULT summary score is not flagged as an insult.

        Renamed from a duplicated ``test_contains_toxicity_false`` to match
        what it actually exercises: ``contains_insult``.
        """
        perspective_response = \
            {
                "attributeScores": {
                    "THREAT": {
                        "spanScores": [
                            {
                                "begin": 0,
                                "end": 35,
                                "score": {
                                    "value": 0.09605787,
                                    "type": "PROBABILITY"
                                }
                            }
                        ],
                        "summaryScore": {
                            "value": 0.09605787,
                            "type": "PROBABILITY"
                        }
                    },
                    "INSULT": {
                        "spanScores": [
                            {
                                "begin": 0,
                                "end": 35,
                                "score": {
                                    "value": 0.07253261,
                                    "type": "PROBABILITY"
                                }
                            }
                        ],
                        "summaryScore": {
                            "value": 0.07253261,
                            "type": "PROBABILITY"
                        }
                    },
                    "TOXICITY": {
                        "spanScores": [
                            {
                                "begin": 0,
                                "end": 35,
                                "score": {
                                    "value": 0.072236896,
                                    "type": "PROBABILITY"
                                }
                            }
                        ],
                        "summaryScore": {
                            "value": 0.072236896,
                            "type": "PROBABILITY"
                        }
                    }
                },
                "languages": [
                    "en"
                ],
                "detectedLanguages": [
                    "en"
                ]
            }
        is_toxic = perspective.contains_insult(perspective_response)
        self.assertFalse(is_toxic)

    def test_contains_toxicity_true_compact(self):
        """High toxicity in a compact (single-line) response is flagged.

        Renamed from the second, duplicate ``test_contains_toxicity_false``:
        despite that name it asserts ``contains_toxicity`` is truthy.
        """
        perspective_response = \
            {'attributeScores': {'TOXICITY': {'spanScores': [{'begin': 0, 'end': 25, 'score': {'value': 0.9312127, 'type': 'PROBABILITY'}}], 'summaryScore': {'value': 0.9312127, 'type': 'PROBABILITY'}}, 'THREAT': {'spanScores': [{'begin': 0, 'end': 25, 'score': {'value': 0.15875438, 'type': 'PROBABILITY'}}], 'summaryScore': {'value': 0.15875438, 'type': 'PROBABILITY'}}, 'INSULT': {'spanScores': [{'begin': 0, 'end': 25, 'score': {'value': 0.93682694, 'type': 'PROBABILITY'}}], 'summaryScore': {'value': 0.93682694, 'type': 'PROBABILITY'}}}, 'languages': ['en'], 'detectedLanguages': ['en']}
        is_insult = perspective.contains_toxicity(perspective_response)
        self.assertTrue(is_insult)

    def test_contains_threat(self):
        """A high-threat response is flagged.

        Fixed from ``def contains_threat(perspective_response)``: the original
        had no ``self`` parameter and no ``test_`` prefix, so it never ran and
        would have raised NameError if called. NOTE(review): the body calls
        ``contains_toxicity``; presumably a threat-specific helper was
        intended — TODO confirm against antidox.perspective's API.
        """
        perspective_response = \
            {'attributeScores': {'INSULT': {'spanScores': [{'begin': 0, 'end': 21, 'score': {'value': 0.55873775, 'type': 'PROBABILITY'}}], 'summaryScore': {'value': 0.55873775, 'type': 'PROBABILITY'}}, 'TOXICITY': {'spanScores': [{'begin': 0, 'end': 21, 'score': {'value': 0.9759337, 'type': 'PROBABILITY'}}], 'summaryScore': {'value': 0.9759337, 'type': 'PROBABILITY'}}, 'THREAT': {'spanScores': [{'begin': 0, 'end': 21, 'score': {'value': 0.9980843, 'type': 'PROBABILITY'}}], 'summaryScore': {'value': 0.9980843, 'type': 'PROBABILITY'}}}, 'languages': ['en'], 'detectedLanguages': ['en']}
        is_threat = perspective.contains_toxicity(perspective_response)
        self.assertTrue(is_threat)

    def test_get_wikipage(self):
        """wiki_clean strips wiki templates and markup to plain discussion text."""
        wiki_response = \
            u"""{{talkheader|wp=yes|WT:NYC|WT:WPNYC}}
{{WPBS|1=
{{WikiProject Cities|class=project|importance=na}}
{{WikiProject New York City|class=project|importance=na}}
{{WikiProject New York|class=project|importance=na}}
{{WikiProject United States|class=project|importance=na}}
}}
{{Wikipedia:Wikipedia Signpost/WikiProject used|link=Wikipedia:Wikipedia Signpost/2012-12-31/WikiProject report|writer= [[User:Mabeenot|Mabeenot]]| ||day =31|month=December|year=2012}}
{{auto archiving notice|bot=MiszaBot II|botlink=User:MiszaBot II|age=60}}{{User:MiszaBot/config
|archiveheader = {{talkarchivenav}}
|maxarchivesize = 100K
|counter = 7
|minthreadsleft = 5
|minthreadstoarchive = 1
|algo = old(60d)
|archive = Wikipedia talk:WikiProject New York City/Archive %(counter)d
}}{{User:HBC Archive Indexerbot/OptIn|target=Wikipedia talk:WikiProject New York City/Archive index|mask=Wikipedia talk:WikiProject New York City/Archive <#>|leading_zeros=0|indexhere=no}}
{{TOC right}}
== Help with a park article? ==
Hi! I didn't know if anyone was willing to work on a park article or not - a student of mine created the article on [[St. James Park (Bronx)]]. The class ends this week and I'm not entirely sure if they will be back on to edit it, but at the present it lacks information and sourcing to establish how it's notable. I'm going to try to do as much as I can for it, but I'm admittedly kind of swamped with other classes so I wanted to see if anyone would be interested in this. 15:36, 7 May 2019 (UTC)
== Wikipedia:Naming conventions (US stations)/NYC Subway RfC ==
Just so everyone who would come here knows, there is an ongoing RfC at [[Wikipedia:Naming conventions (US stations)/NYC Subway RfC]] that WP:NYC might be interested in. {{sbb}} --<span style="border:1px solid #ffa500;background:#f3dddd;"> [[User:I dream of horses|I dream of horses]] </span><span style="border:1px solid #ffa500">{{small| If you reply here, please [[WP:ECHO|ping me]] by adding <nowiki>{{U|I dream of horses}}</nowiki> to your message }}</span> {{small|([[User talk:I dream of horses|talk to me]]) ([[Special:Contributions/I dream of horses|My edits]])}} @ 05:11, 12 June 2019 (UTC)"""
        clean_text = \
            u"""Help with a park article?
Hi! I didn't know if anyone was willing to work on a park article or not - a student of mine created the article on St. James Park (Bronx). The class ends this week and I'm not entirely sure if they will be back on to edit it, but at the present it lacks information and sourcing to establish how it's notable. I'm going to try to do as much as I can for it, but I'm admittedly kind of swamped with other classes so I wanted to see if anyone would be interested in this.
Wikipedia:Naming conventions (US stations)/NYC Subway RfC
Just so everyone who would come here knows, there is an ongoing RfC at Wikipedia:Naming conventions (US stations)/NYC Subway RfC that WP:NYC might be interested in. @ """
        text = perspective.wiki_clean(wiki_response)
        self.assertEqual(text.strip(), clean_text.strip())
class Test_BigQuery(unittest.TestCase):
    """Tests for the BigQuery helper in antidox.perspective."""

    def test_use_query(self):
        """use_query returns one string per row yielded by the query job."""
        fake_rows = [
            {'cleaned_content': 'comment1'},
            {'cleaned_content': 'comment2'},
        ]
        fake_job = mock.Mock()
        fake_job.result = mock.Mock(return_value=fake_rows)
        fake_client = mock.Mock()
        fake_client.query = mock.Mock(return_value=fake_job)
        rows = perspective.use_query(
            'cleaned_content',
            """SELECT 'cleaned_content' FROM 'fakeproject.fakedatbase.fakedataset' """,
            fake_client)
        self.assertEqual(type(rows[0]), str)
        self.assertEqual(len(rows), len(fake_rows))
# Run the full test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
|
conversationai/wikidetox
|
antidox/perspective_test.py
|
Python
|
apache-2.0
| 9,456
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#########################################################################
# Author: jonyqin
# Created Time: Thu 11 Sep 2014 03:55:41 PM CST
# File Name: demo.py
# Description: demo file showing how to use WXBizMsgCrypt
#########################################################################
from WXBizMsgCrypt_v3 import WXBizMsgCrypt
if __name__ == "__main__":
    # Demo flow (English translation of the original Chinese notes):
    # 1. The third party replies with an encrypted message to the official
    #    account platform.
    # 2. The third party receives a message from the platform, verifies its
    #    signature, and decrypts it.
    """
    1.第三方回复加密消息给公众平台;
    2.第三方收到公众平台发送的消息,验证消息的安全性,并对消息进行解密。
    """
    # NOTE(review): demo credentials/fixtures only — do not reuse in
    # production.
    encodingAESKey = "abcdefghijklmnopqrstuvwxyz0123456789ABCDEFG"
    to_xml = """ <xml><ToUserName><![CDATA[oia2TjjewbmiOUlr6X-1crbLOvLw]]></ToUserName><FromUserName><![CDATA[gh_7f083739789a]]></FromUserName><CreateTime>1407743423</CreateTime><MsgType> <![CDATA[video]]></MsgType><Video><MediaId><![CDATA[eYJ1MbwPRJtOvIEabaxHs7TX2D-HV71s79GUxqdUkjm6Gs2Ed1KF3ulAOA9H1xG0]]></MediaId><Title><![CDATA[testCallBackReplyVideo]]></Title><Descript ion><![CDATA[testCallBackReplyVideo]]></Description></Video></xml>"""
    token = "spamtest"
    nonce = "1320562132"
    appid = "wx2c2769f8efd9abc2"
    # Exercise the encryption interface
    encryp_test = WXBizMsgCrypt(token,encodingAESKey,appid)
    ret,encrypt_xml = encryp_test.EncryptMsg(to_xml,nonce)
    assert ret==0
    print(encrypt_xml)
    # Exercise the decryption interface
    timestamp = "1409735669"
    msg_sign = "5d197aaffba7e9b25a30732f161a50dee96bd5fa"
    from_xml = """<xml><ToUserName><![CDATA[gh_10f6c3c3ac5a]]></ToUserName><FromUserName><![CDATA[oyORnuP8q7ou2gfYjqLzSIWZf0rs]]></FromUserName><CreateTime>1409735668</CreateTime><MsgType><![CDATA[text]]></MsgType><Content><![CDATA[abcdteT]]></Content><MsgId>6054768590064713728</MsgId><Encrypt><![CDATA[hyzAe4OzmOMbd6TvGdIOO6uBmdJoD0Fk53REIHvxYtJlE2B655HuD0m8KUePWB3+LrPXo87wzQ1QLvbeUgmBM4x6F8PGHQHFVAFmOD2LdJF9FrXpbUAh0B5GIItb52sn896wVsMSHGuPE328HnRGBcrS7C41IzDWyWNlZkyyXwon8T332jisa+h6tEDYsVticbSnyU8dKOIbgU6ux5VTjg3yt+WGzjlpKn6NPhRjpA912xMezR4kw6KWwMrCVKSVCZciVGCgavjIQ6X8tCOp3yZbGpy0VxpAe+77TszTfRd5RJSVO/HTnifJpXgCSUdUue1v6h0EIBYYI1BD1DlD+C0CR8e6OewpusjZ4uBl9FyJvnhvQl+q5rv1ixrcpCumEPo5MJSgM9ehVsNPfUM669WuMyVWQLCzpu9GhglF2PE=]]></Encrypt></xml>"""
    decrypt_test = WXBizMsgCrypt(token,encodingAESKey,appid)
    ret ,decryp_xml = decrypt_test.DecryptMsg(from_xml, msg_sign, timestamp, nonce)
    print(decryp_xml)
    assert ret==0
|
sharkspeed/dororis
|
platforms/weixin/miniapp/miniapp_0_selfhost_server/wxserver-backup/Sample_v3.py
|
Python
|
bsd-2-clause
| 2,417
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
# 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Invenio WebAccess Administrator Interface."""
__revision__ = "$Id$"
__lastupdated__ = """$Date$"""
# fill config variables:
import re
from cgi import escape
from invenio.base.i18n import gettext_set_language
from invenio.config import CFG_ACCESS_CONTROL_LEVEL_ACCOUNTS, \
CFG_ACCESS_CONTROL_LEVEL_GUESTS, CFG_ACCESS_CONTROL_LEVEL_SITE, \
CFG_ACCESS_CONTROL_LIMIT_REGISTRATION_TO_DOMAIN, \
CFG_ACCESS_CONTROL_NOTIFY_ADMIN_ABOUT_NEW_ACCOUNTS, \
CFG_ACCESS_CONTROL_NOTIFY_USER_ABOUT_ACTIVATION, \
CFG_ACCESS_CONTROL_NOTIFY_USER_ABOUT_DELETION, \
CFG_ACCESS_CONTROL_NOTIFY_USER_ABOUT_NEW_ACCOUNT, CFG_SITE_ADMIN_EMAIL, \
CFG_SITE_LANG, CFG_SITE_NAME, CFG_SITE_SECURE_URL, \
CFG_SITE_SUPPORT_EMAIL
from invenio.ext.email import send_email
from invenio.legacy.bibrank.adminlib import addadminbox, addcheckboxes, \
createhiddenform, tupletotable, tupletotable_onlyselected
from invenio.legacy.dbquery import rlike, run_sql, wash_table_column_name
from invenio.legacy.webpage import page
from invenio.legacy.webuser import email_valid_p, getUid, \
get_user_preferences, isGuestUser, page_not_authorized, \
set_user_preferences, update_Uid
from invenio.modules.access import control as acca
from invenio.modules.access import engine as acce
from invenio.modules.access.errors import InvenioWebAccessFireroleError
from invenio.modules.access.firerole import compile_role_definition, serialize
from invenio.modules.access.local_config import \
CFG_ACC_EMPTY_ROLE_DEFINITION_SRC, CFG_EXTERNAL_AUTHENTICATION, \
CFG_EXTERNAL_AUTH_DEFAULT, DELEGATEADDUSERROLE, MAXPAGEUSERS, \
MAXSELECTUSERS, SUPERADMINROLE, WEBACCESSACTION
from invenio.utils.url import redirect_to_url
from sqlalchemy.exc import OperationalError
def index(req, title='', body='', subtitle='', adminarea=2, authorized=0,
          ln=CFG_SITE_LANG):
    """main function to show pages for webaccessadmin.
    1. if user not logged in and administrator, show the mustlogin page
    2. if used without body argument, show the startpage
    3. show admin page with title, body, subtitle and navtrail.
    authorized - if 1, don't check if the user is allowed to be webadmin

    adminarea selects the breadcrumb appended to the navtrail:
    1 = delegate rights; 2-8 also add the WebAccess Admin crumb plus the
    matching subarea (3 roles, 4 actions, 5 users, 6 reset, 7 manage
    accounts, 8 list groups); 9+ gets only the base Admin Area crumb.
    """
    # Base breadcrumb always links back to the general admin area.
    navtrail_previous_links = \
        '<a class="navtrail" href="%s/help/admin">Admin Area' \
        '</a>' % (CFG_SITE_SECURE_URL,)
    if body:
        if adminarea == 1:
            navtrail_previous_links += '> <a class=navtrail ' \
                ' href=%s/admin/webaccess/webaccessadmin.py/delegate_startarea>' \
                'Delegate Rights</a> ' % (CFG_SITE_SECURE_URL, )
        if adminarea >= 2 and adminarea < 9:
            navtrail_previous_links += '> ' \
                '<a class="navtrail" href=%s/admin/webaccess/webaccessadmin.py>' \
                'WebAccess Admin</a> ' % (CFG_SITE_SECURE_URL, )
        if adminarea == 3:
            navtrail_previous_links += '> <a class=navtrail ' \
                'href=%s/admin/webaccess/webaccessadmin.py/rolearea>' \
                'Role Administration</a> ' % (CFG_SITE_SECURE_URL, )
        elif adminarea == 4:
            navtrail_previous_links += '> ' \
                '<a class="navtrail" href=%s/admin/webaccess/webaccessadmin.py' \
                '/actionarea>Action Administration</a> ' % (
                    CFG_SITE_SECURE_URL, )
        elif adminarea == 5:
            navtrail_previous_links += '> ' \
                '<a class="navtrail" href=%s/admin/webaccess/webaccessadmin.py' \
                '/userarea>User Administration</a> ' % (CFG_SITE_SECURE_URL, )
        elif adminarea == 6:
            navtrail_previous_links += '> ' \
                '<a class="navtrail" href=%s/admin/webaccess/webaccessadmin.py' \
                '/resetarea>Reset Authorizations</a> ' % (
                    CFG_SITE_SECURE_URL, )
        elif adminarea == 7:
            navtrail_previous_links += '> ' \
                '<a class="navtrail" href=%s/admin/webaccess/webaccessadmin.py' \
                '/manageaccounts>Manage Accounts</a> ' % (
                    CFG_SITE_SECURE_URL, )
        elif adminarea == 8:
            navtrail_previous_links += '> ' \
                '<a class="navtrail" href=%s/admin/webaccess/webaccessadmin.py' \
                '/listgroups>List Groups</a> ' % (CFG_SITE_SECURE_URL, )
    id_user = getUid(req)
    (auth_code, auth_message) = is_adminuser(req)
    if not authorized and auth_code != 0:
        return mustloginpage(req, auth_message)
    elif not body:
        # No body supplied: show the start page instead.
        # NOTE(review): startpage() is defined elsewhere in this module.
        title = 'WebAccess Admin'
        body = startpage()
    elif not isinstance(body, str):
        # A list of HTML fragments: wrap them in a standard admin box.
        body = addadminbox(subtitle, datalist=body)
    return page(title=title,
                uid=id_user,
                req=req,
                body=body,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def mustloginpage(req, message):
    """Render a "not authorized, please log in" page showing *message*."""
    # Breadcrumb trail leading back to the admin areas.
    trail = ('<a class="navtrail" href="%s/admin/">'
             'Admin Area</a> > <a class="navtrail" href="%s/admin/webaccess/">'
             'WebAccess Admin</a> ') % (CFG_SITE_SECURE_URL,
                                        CFG_SITE_SECURE_URL)
    return page_not_authorized(req=req, text=message, navtrail=trail)
def is_adminuser(req):
    """Check whether the request user may use the WebAccess admin.

    Returns the (auth_code, auth_message) pair produced by the access
    engine; auth_code == 0 means the user is authorized.
    """
    result = acce.acc_authorize_action(req, WEBACCESSACTION)
    return result
def perform_listgroups(req):
    """List all the existing groups.

    Renders a one-column table with the name of every group registered
    in the system.
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    header = ['name']
    # "group" is a reserved SQL keyword, so the table name must be quoted
    # -- the unquoted form is a syntax error; the rest of this module
    # quotes "user" for the same reason.
    groups = run_sql('SELECT name FROM "group"')
    output = tupletotable(header, groups, highlight_rows_p=True,
                          alternate_row_colors_p=True)
    # NOTE(review): this box was copy-pasted from the role pages; a
    # "Create new role" link is of dubious value on the group listing --
    # confirm whether a group-creation page exists to link instead.
    extra = """
    <dl>
    <dt><a href="addrole">Create new role</a></dt>
    <dd>go here to add a new role.</dd>
    </dl>
    """
    return index(req=req,
                 title='Group list',
                 subtitle='All the groups registered in the system',
                 body=[output, extra],
                 adminarea=2)
def perform_rolearea(req, grep=""):
    """create the role area menu page.

    grep -- optional regular expression; when non-empty, only roles whose
    name, description, firewall-like definition, or any authorization
    detail matches it are listed.  An invalid pattern silently falls
    back to the unfiltered listing.
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    header = ['id', 'name', 'description', 'firewall like role definition',
              'users', 'authorizations / actions', 'role', '']
    roles = acca.acc_get_all_roles()
    roles2 = []
    if grep:
        try:
            re_grep = re.compile(grep)
        except Exception:
            # Invalid regular expression: disable filtering entirely.
            re_grep = None
            grep = ''
    else:
        re_grep = None
    for (id, name, desc, dummy, firerole_def_src) in roles:
        if not firerole_def_src:
            firerole_def_src = ''  # Workaround for None.
        if re_grep and not re_grep.search(name) and not re_grep.search(desc) \
                and not re_grep.search(firerole_def_src):
            # We're grepping for some word.
            # Let's dig into the authorization then.
            all_actions = acca.acc_find_possible_actions_all(id)
            # FIXME: the acc_find_possible_actions_all is really an ugly
            # function, but is the closest to what it's needed in order
            # to retrieve all the authorization of a role.
            # Rows alternate header/content; a `break` anywhere below
            # means "this role matches"; falling off the loop without
            # breaking triggers the for-else and skips the role.
            for idx, row in enumerate(all_actions):
                grepped = False
                if idx % 2 == 0:
                    # even lines contains headers like in:
                    # ['role', 'action', '#', 'collection']
                    # the only useful text to grep is from index 3 onwards
                    for keyword in row[3:]:
                        if re_grep.search(keyword):
                            grepped = True
                            break
                    if grepped:
                        break
                else:
                    # odd lines contains content like in:
                    # [1, 18L, 1, 'Theses']
                    # the useful text to grep is indirectly index 1
                    # which is indeed the id_action (needed to retrieve the
                    # action name) and from column 3 onwards.
                    if re_grep.search(acca.acc_get_action_name(row[1])):
                        break
                    for value in row[3:]:
                        if re_grep.search(value):
                            grepped = True
                            break
                    if grepped:
                        break
            else:
                # We haven't grepped anything!
                # Let's skip to the next role then...
                continue
        # Truncate long texts so the table stays readable.
        if len(desc) > 30:
            desc = desc[:30] + '...'
        if firerole_def_src and len(firerole_def_src) > 30:
            firerole_def_src = firerole_def_src[:30] + '...'
        roles2.append([id, name, desc, firerole_def_src])
        # Append the per-role action links (users / authorizations /
        # role modification / details) as extra table columns.
        for col in [(('add', 'adduserrole'),
                     ('delete', 'deleteuserrole'),),
                    (('add', 'addauthorization'),
                     ('modify', 'modifyauthorizations'),
                     ('remove', 'deleteroleaction')),
                    (('modify', 'modifyrole'),
                     ('delete', 'deleterole')),
                    (('show details', 'showroledetails'), )]:
            roles2[-1].append('<a href="%s?id_role=%s">%s</a>' %
                              (col[0][1], id, col[0][0]))
            # NOTE: `str` here shadows the builtin of the same name.
            for (str, function) in col[1:]:
                roles2[-1][-1] += ' / <a href="%s?id_role=%s">%s</a>' % \
                    (function, id, str)
    output = """
    <dl>
    <dt>Users:</dt>
    <dd>add or remove users from the access to a role and its priviliges.</dd>
    <dt>Authorizations/Actions:</dt>
    <dd>these terms means almost the same, but an authorization is a <br />
    connection between a role and an action (possibly) containing arguments.
    </dd>
    <dt>Roles:</dt>
    <dd>see all the information attached to a role and decide if you want
    to<br />delete it.</dd>
    </dl>
    <!--make a search box-->
    <table class="admin_wvar" cellspacing="0">
    <tr><td>
    <form>
    Show only roles having any detail matching the regular expression:
    <input type="text" name="grep" value="%s" />
    <input type="submit" class="adminbutton" value="Search">
    </form>
    </td></tr></table>
    """ % escape(grep)
    output += tupletotable(header=header, tuple=roles2, highlight_rows_p=True,
                           alternate_row_colors_p=True)
    extra = """
    <dl>
    <dt><a href="addrole">Create new role</a></dt>
    <dd>go here to add a new role.</dd>
    </dl>
    """
    return index(req=req,
                 title='Role Administration',
                 subtitle='administration with roles as access point',
                 body=[output, extra],
                 adminarea=2)
def perform_actionarea(req, grep=''):
    """create the action area menu page.

    grep -- optional regular expression; when non-empty, only actions
    whose name, description, or any connected role/authorization detail
    matches it are listed.  An invalid pattern silently falls back to
    the unfiltered listing.
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    if grep:
        try:
            re_grep = re.compile(grep)
        except Exception:
            # Invalid regular expression: disable filtering entirely.
            re_grep = None
            grep = ''
    else:
        re_grep = None
    header = ['name', 'authorizations/roles', '']
    actions = acca.acc_get_all_actions()
    actions2 = []
    for (id, name, description) in actions:
        if re_grep and not re_grep.search(name) and \
                not re_grep.search(description):
            # Name/description did not match: search the roles connected
            # to this action (their names, FireRole defs and arguments).
            grepped = False
            roles = acca.acc_get_action_roles(id)
            for id_role, role_name, role_description in roles:
                if re_grep.search(role_name) or \
                        re_grep.search(role_description):
                    grepped = True
                    break
                elif re_grep.search(acca.acc_get_role_details(id_role)[3] or
                                    ''):
                    # Found in FireRole
                    grepped = True
                    break
                else:
                    details = acca.acc_find_possible_actions(id_role, id)
                    if details:
                        # details[0] is the header row, the remaining
                        # rows hold the authorization argument values.
                        for argument in details[0][1:]:
                            if re_grep.search(argument):
                                grepped = True
                                break
                        for values in details[1:]:
                            for value in values[1:]:
                                if re_grep.search(value):
                                    grepped = True
                                    break
                            if grepped:
                                break
                    if grepped:
                        break
            if not grepped:
                continue
        actions2.append([name, description])
        # Append the per-action links as extra table columns.
        for col in [(('add', 'addauthorization'),
                     ('modify', 'modifyauthorizations'),
                     ('remove', 'deleteroleaction')),
                    (('show details', 'showactiondetails'), )]:
            actions2[-1].append('<a href="%s?id_action=%s&reverse=1">%s'
                                '</a>' % (col[0][1], id, col[0][0]))
            # NOTE: `str` here shadows the builtin of the same name.
            for (str, function) in col[1:]:
                actions2[-1][-1] += ' / <a href="%s?id_action=%s&' \
                    'reverse=1">%s</a>' % (function, id, str)
    output = """
    <dl>
    <dt>Authorizations/Roles:</dt>
    <dd>these terms means almost the same, but an authorization is a <br />
    connection between a role and an action (possibly) containing
    arguments.</dd>
    <dt>Actions:</dt>
    <dd>see all the information attached to an action.</dd>
    </dl>
    <!--make a search box-->
    <table class="admin_wvar" cellspacing="0">
    <tr><td>
    <form>
    Show only actions having any detail matching the regular expression:
    <input type="text" name="grep" value="%s" />
    <input type="submit" class="adminbutton" value="Search">
    </form>
    </td></tr></table>
    """ % escape(grep)
    output += tupletotable(header=header, tuple=actions2,
                           highlight_rows_p=True, alternate_row_colors_p=True)
    extra = """
    <dl>
    <dt><a href="addrole">Create new role</a>
    <dd>go here to add a new role.
    </dl>
    """
    return index(req=req,
                 title='Action Administration',
                 subtitle='administration with actions as access point',
                 body=[output, extra],
                 adminarea=2)
def perform_userarea(req, email_user_pattern=''):
    """create area to show info about users.

    email_user_pattern -- regexp fragment matched (via the DB's RLIKE
    operator) against registered users' e-mail addresses; at most
    MAXPAGEUSERS matches are displayed.
    """
    rlike_op = rlike()
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    subtitle = 'step 1 - search for users'
    output = """
    <p>
    search for users to display.
    </p> """
    # remove letters not allowed in an email
    # NOTE(review): cleanstring_email is not among this module's visible
    # imports -- verify where it is defined.
    email_user_pattern = cleanstring_email(email_user_pattern)
    text = ' <span class="adminlabel">1. search for user</span>\n'
    text += ' <input class="admin_wvar" type="text" name="email_user_pattern"'\
        ' value="%s" />\n' % (email_user_pattern, )
    output += createhiddenform(action="userarea",
                               text=text,
                               button="search for users")
    if email_user_pattern:
        try:
            # Fetch one row more than the page size so we can tell the
            # admin when the result set was truncated.
            users1 = run_sql(
                """SELECT id, email
                FROM "user"
                WHERE email<>'' AND email """ + rlike_op + """ %s
                ORDER BY email LIMIT %s""",
                (email_user_pattern, MAXPAGEUSERS + 1))
        except OperationalError:
            users1 = ()
        if not users1:
            output += '<p>no matching users</p>'
        else:
            subtitle = 'step 2 - select what to do with user'
            users = []
            for (id, email) in users1[:MAXPAGEUSERS]:
                users.append([id, email])
                # Append the per-user links as extra table columns.
                for col in [(('add', 'addroleuser'),
                             ('remove', 'deleteuserrole')),
                            (('show details', 'showuserdetails'), )]:
                    users[-1].append('<a href="%s?'
                                     'id_user=%s">%s</a>' %
                                     (col[0][1], id, col[0][0]))
                    # NOTE: `str` here shadows the builtin of the same name.
                    for (str, function) in col[1:]:
                        users[-1][-1] += ' / <a href="%s?' \
                            'id_user=%s&reverse=1">%s</a>' % \
                            (function, id, str)
            output += '<p>found <strong>%s</strong> matching users:</p>' % \
                (len(users1), )
            output += tupletotable(header=['id', 'email', 'roles', ''],
                                   tuple=users, highlight_rows_p=True,
                                   alternate_row_colors_p=True)
            if len(users1) > MAXPAGEUSERS:
                output += '<p><strong>only showing the first %s users, ' \
                    'narrow your search...</strong></p>' % (MAXPAGEUSERS, )
    return index(req=req,
                 title='User Administration',
                 subtitle=subtitle,
                 body=[output],
                 adminarea=2)
def perform_resetarea(req):
    """Render the menu page of the "Reset Authorizations" area."""
    auth_code, auth_message = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    # Two entry points: destructive reset vs. additive restore of the
    # default authorization settings.
    menu = """
    <dl>
    <dt><a href="resetdefaultsettings">Reset to Default Authorizations</a>
    <dd>remove all changes that has been done to the roles and <br />
    add only the default authorization settings.
    <dt><a href="adddefaultsettings">Add Default Authorizations</a>
    <dd>keep all changes and add the default authorization settings.
    </dl>
    """
    return index(req=req,
                 title='Reset Authorizations',
                 subtitle='reseting to or adding default authorizations',
                 body=[menu],
                 adminarea=2)
def perform_resetdefaultsettings(req, superusers=None, confirm=0):
    """Reset default settings.

    delete all roles, actions and authorizations presently in the database
    and add only the default roles.
    only selected users will be added to superadmin, rest is blank

    superusers -- e-mail address(es) to connect to SUPERADMINROLE; a
                  single string or a list of strings (default: none).
    confirm -- when 1 (or "1"), actually perform the reset.
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    # cleaning input.  The default is None (not []): the original code
    # mutated the shared mutable default argument.
    if superusers is None:
        superusers = []
    elif isinstance(superusers, str):
        superusers = [superusers]
    # Keep only valid e-mail addresses.  A new list is built instead of
    # calling .remove() while iterating, which skipped elements.
    # NOTE(review): check_email is not among this module's visible
    # imports -- verify where it is defined.
    superusers = [email for email in superusers if check_email(email)]
    # instructions
    output = """
    <p>
    before you reset the settings, we need some users<br />
    to connect to <strong>%s</strong>.<br />
    enter as many e-mail addresses you want and press <strong>reset</strong>.
    <br />
    <strong>confirm reset settings</strong> when you have added enough e-mails.
    <br />
    <strong>%s</strong> is added as default.
    </p>""" % (SUPERADMINROLE, CFG_SITE_ADMIN_EMAIL)
    # add more superusers
    output += """
    <p>enter user e-mail addresses: </p>
    <form action="resetdefaultsettings" method="POST">"""
    for email in superusers:
        output += ' <input type="hidden" name="superusers" value="%s" />' % (
            email, )
    output += """
    <span class="adminlabel">e-mail</span>
    <input class="admin_wvar" type="text" name="superusers" />
    <input class="adminbutton" type="submit" value="add e-mail" />
    </form>"""
    if superusers:
        # remove emails
        output += """
        <form action="resetdefaultsettings" method="POST">
        have you entered wrong data?
        <input class="adminbutton" type="submit" value="remove all e-mails" />
        </form>
        """
        # superusers confirm table
        start = '<form action="resetdefaultsettings" method="POST">'
        extra = ' <input type="hidden" name="confirm" value="1" />'
        for email in superusers:
            extra += '<input type="hidden" name="superusers" value="%s" />' % (
                email, )
        extra += ' <input class="adminbutton" type="submit" ' + \
            'value="confirm to reset settings" />'
        end = '</form>'
        output += '<p><strong>reset default settings</strong> with ' + \
            'the users below? </p>'
        output += tupletotable(header=['e-mail address'],
                               tuple=superusers,
                               start=start,
                               extracolumn=extra,
                               end=end,
                               highlight_rows_p=True,
                               alternate_row_colors_p=True)
        if confirm in [1, "1"]:
            res = acca.acc_reset_default_settings(superusers)
            if res:
                output += '<p>successfully reset default settings</p>'
            else:
                output += '<p>sorry, could not reset default settings</p>'
    return index(req=req,
                 title='Reset Default Settings',
                 subtitle='reset settings',
                 body=[output],
                 adminarea=6)
def perform_adddefaultsettings(req, superusers=None, confirm=0):
    """add the default settings, and keep everything else.

    probably nothing will be deleted, except if there has been made changes
    to the defaults.

    superusers -- e-mail address(es) to connect to SUPERADMINROLE; a
                  single string or a list of strings (default: none).
    confirm -- when 1 (or "1"), actually add the default settings.
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    # cleaning input.  The default is None (not []): the original code
    # mutated the shared mutable default argument.
    if superusers is None:
        superusers = []
    elif isinstance(superusers, str):
        superusers = [superusers]
    # Keep only valid e-mail addresses.  A new list is built instead of
    # calling .remove() while iterating, which skipped elements.
    # NOTE(review): check_email is not among this module's visible
    # imports -- verify where it is defined.
    superusers = [email for email in superusers if check_email(email)]
    # instructions
    output = """
    <p>
    before you add the settings, we need some users<br />
    to connect to <strong>%s</strong>.<br />
    enter as many e-mail addresses you want and press <strong>add</strong>.
    <br />
    <strong>confirm add settings</strong> when you have added enough e-mails.
    <br />
    <strong>%s</strong> is added as default.
    </p>""" % (SUPERADMINROLE, CFG_SITE_ADMIN_EMAIL)
    # add more superusers
    output += """
    <p>enter user e-mail addresses: </p>
    <form action="adddefaultsettings" method="POST">"""
    for email in superusers:
        output += ' <input type="hidden" name="superusers" value="%s" />' % (
            email, )
    output += """
    <span class="adminlabel">e-mail</span>
    <input class="admin_wvar" type="text" name="superusers" />
    <input class="adminbutton" type="submit" value="add e-mail" />
    </form>
    """
    if superusers:
        # remove emails
        output += """
        <form action="adddefaultsettings" method="POST">
        have you entered wrong data?
        <input class="adminbutton" type="submit" value="remove all e-mails" />
        </form>
        """
        # superusers confirm table
        start = '<form action="adddefaultsettings" method="POST">'
        extra = ' <input type="hidden" name="confirm" value="1" />'
        for email in superusers:
            extra += '<input type="hidden" name="superusers" value="%s" />' % (
                email, )
        extra += ' <input class="adminbutton" type="submit" ' + \
            'value="confirm to add settings" />'
        end = '</form>'
        output += '<p><strong>add default settings</strong> with the ' + \
            'users below? </p>'
        output += tupletotable(header=['e-mail address'],
                               tuple=superusers,
                               start=start,
                               extracolumn=extra,
                               end=end)
        if confirm in [1, "1"]:
            res = acca.acc_add_default_settings(superusers)
            if res:
                output += '<p>successfully added default settings</p>'
            else:
                output += '<p>sorry, could not add default settings</p>'
    return index(req=req,
                 title='Add Default Settings',
                 subtitle='add settings',
                 body=[output],
                 adminarea=6)
def perform_manageaccounts(req, mtype='', content='', confirm=0):
    """start area for managing accounts.

    mtype -- name of the sub-page to render ("perform_accesspolicy",
    "perform_accountoverview", "perform_createaccount",
    "perform_modifyaccounts", "perform_becomeuser" or "perform_showall"
    for all of them).
    content -- pre-rendered output from that sub-page; when given it is
    embedded directly instead of calling the sub-page again (this is how
    the sub-pages call back into this dispatcher).
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    subtitle = 'Overview'
    fin_output = ''
    # Static menu linking to every sub-page of this area.
    fin_output += """
    <table>
    <tr>
    <td><b>Menu</b></td>
    </tr>
    <tr>
    <td>0. <small><a href="%s/admin/webaccess/webaccessadmin.py/manageaccounts?mtype=perform_showall">Show all</a></small></td>
    <td>1. <small><a href="%s/admin/webaccess/webaccessadmin.py/manageaccounts?mtype=perform_accesspolicy#1">Access policy</a></small></td>
    <td>2. <small><a href="%s/admin/webaccess/webaccessadmin.py/manageaccounts?mtype=perform_accountoverview#2">Account overview</a></small></td>
    <td>3. <small><a href="%s/admin/webaccess/webaccessadmin.py/manageaccounts?mtype=perform_createaccount#3">Create account</a></small></td>
    <td>4. <small><a href="%s/admin/webaccess/webaccessadmin.py/manageaccounts?mtype=perform_modifyaccounts#4">Edit accounts</a></small></td>
    </tr>
    </table>
    """ % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL,
           CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL)
    # For each sub-page: embed the supplied content, or render it fresh
    # when this sub-page (or "show all") was requested.
    if mtype == "perform_accesspolicy" and content:
        fin_output += content
    elif mtype == "perform_accesspolicy" or mtype == "perform_showall":
        fin_output += perform_accesspolicy(req, callback='')
        fin_output += "<br />"
    if mtype == "perform_accountoverview" and content:
        fin_output += content
    elif mtype == "perform_accountoverview" or mtype == "perform_showall":
        fin_output += perform_accountoverview(req, callback='')
        fin_output += "<br />"
    if mtype == "perform_createaccount" and content:
        fin_output += content
    elif mtype == "perform_createaccount" or mtype == "perform_showall":
        fin_output += perform_createaccount(req, callback='')
        fin_output += "<br />"
    if mtype == "perform_modifyaccounts" and content:
        fin_output += content
    elif mtype == "perform_modifyaccounts" or mtype == "perform_showall":
        fin_output += perform_modifyaccounts(req, callback='')
        fin_output += "<br />"
    if mtype == "perform_becomeuser" and content:
        fin_output += content
    elif mtype == "perform_becomeuser" or mtype == "perform_showall":
        fin_output += perform_becomeuser(req, callback='')
        fin_output += "<br />"
    return index(req=req,
                 title='Manage Accounts',
                 subtitle=subtitle,
                 body=[fin_output],
                 adminarea=7,
                 authorized=1)
def perform_accesspolicy(req, callback='yes', confirm=0):
    """Show the current (read-only) account/site access policy.

    Settings themselves live in access_control_config.py; this page only
    displays them.

    callback -- when non-empty, wrap the output in the "Manage Accounts"
    dispatcher page; otherwise return just the admin box.
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    subtitle = """<a name="1"></a>1. Access policy. <small>[<a title="See guide" href="%s/help/admin/webaccess-admin-guide#4">?</a>]</small>""" % CFG_SITE_SECURE_URL
    # Human-readable descriptions of the numeric policy levels.
    account_policy = {
        0: "Users can register new accounts. New accounts automatically activated.",
        1: "Users can register new accounts. Admin users must activate the accounts.",
        2: "Only admin can register new accounts. User cannot edit email address.",
        3: "Only admin can register new accounts. User cannot edit email address or password.",
        4: "Only admin can register new accounts. User cannot edit email address, password or login method.",
        5: "Only admin can register new accounts. User cannot edit email address, password or login method and information about how to get an account is hidden from the login page.",
    }
    site_policy = {
        0: "Normal operation of the site.",
        1: "Read-only site, all write operations temporarily closed.",
        2: "Site fully closed.",
        3: "Site fully closed. Database connection disabled.",
    }
    output = "(Modifications must be done in access_control_config.py)<br />"
    output += "<br /><b>Current settings:</b><br />"
    output += "Site status: %s<br />" % (
        site_policy[CFG_ACCESS_CONTROL_LEVEL_SITE])
    output += "Guest accounts allowed: %s<br />" % (
        CFG_ACCESS_CONTROL_LEVEL_GUESTS == 0 and "Yes" or "No")
    output += "Account policy: %s<br />" % (
        account_policy[CFG_ACCESS_CONTROL_LEVEL_ACCOUNTS])
    output += "Allowed email addresses limited: %s<br />" % (
        CFG_ACCESS_CONTROL_LIMIT_REGISTRATION_TO_DOMAIN and CFG_ACCESS_CONTROL_LIMIT_REGISTRATION_TO_DOMAIN or "Not limited")
    output += "Send email to admin when new account: %s<br />" % (
        CFG_ACCESS_CONTROL_NOTIFY_ADMIN_ABOUT_NEW_ACCOUNTS == 1 and "Yes" or "No")
    output += "Send email to user after creating new account: %s<br />" % (
        CFG_ACCESS_CONTROL_NOTIFY_USER_ABOUT_NEW_ACCOUNT == 1 and "Yes" or "No")
    output += "Send email to user when account is activated: %s<br />" % (
        CFG_ACCESS_CONTROL_NOTIFY_USER_ABOUT_ACTIVATION == 1 and "Yes" or "No")
    output += "Send email to user when account is deleted/rejected: %s<br />" % (
        CFG_ACCESS_CONTROL_NOTIFY_USER_ABOUT_DELETION == 1 and "Yes" or "No")
    output += "<br />"
    output += "<b>Available 'login via' methods:</b><br />"
    # sorted() instead of .keys()/.sort(): dict views have no .sort()
    # under Python 3, and this is equivalent on Python 2.
    methods = sorted(CFG_EXTERNAL_AUTHENTICATION)
    for system in methods:
        output += """%s %s<br />""" % (
            system, (CFG_EXTERNAL_AUTH_DEFAULT == system and "(Default)" or ""))
    output += "<br /><b>Changing the settings:</b><br />"
    output += "Currently, all changes must be done using your favourite editor, and the webserver restarted for changes to take effect. For the settings to change, either look in the guide or in access_control_config.py ."
    body = [output]
    if callback:
        return perform_manageaccounts(req, "perform_accesspolicy", addadminbox(subtitle, body))
    else:
        return addadminbox(subtitle, body)
def perform_accountoverview(req, callback='yes', confirm=0):
    """Show counts of guest, registered, inactive and total accounts.

    callback -- when non-empty, wrap the output in the "Manage Accounts"
    dispatcher page; otherwise return just the admin box.
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    subtitle = """<a name="2"></a>2. Account overview.
    <small>[<a title="See guide"
    href="%s/help/admin/webaccess-admin-guide#4">?</a>]</small>""" % \
        CFG_SITE_SECURE_URL
    output = ""
    res = run_sql("""SELECT COUNT(*) FROM "user" WHERE email='' """)
    output += "Guest accounts: %s<br />" % res[0][0]
    res = run_sql("""SELECT COUNT(*) FROM "user" WHERE email!='' """)
    output += "Registered accounts: %s<br />" % res[0][0]
    # Parenthesize the OR: without it, AND binds tighter and the query
    # becomes (email!='' AND note='0') OR note IS NULL, wrongly counting
    # every guest account with a NULL note as "inactive".
    res = run_sql(
        """SELECT COUNT(*) FROM "user" WHERE email!='' AND (note='0' OR note IS NULL)""")
    output += "Inactive accounts: %s " % res[0][0]
    if res[0][0] > 0:
        output += ' [<a href="modifyaccounts?email_user_pattern=&limit_to=disabled&maxpage=25&page=1">Activate/Reject accounts</a>]'
    res = run_sql("""SELECT COUNT(*) FROM "user" """)
    output += "<br />Total nr of accounts: %s<br />" % res[0][0]
    body = [output]
    if callback:
        return perform_manageaccounts(req, "perform_accountoverview", addadminbox(subtitle, body))
    else:
        return addadminbox(subtitle, body)
def perform_createaccount(req, email='', password='', callback='yes',
                          confirm=0):
    """Create an account with the given email and (optional) password.

    The account is created only when confirm is 1 (or "1") and the email
    is valid and not already taken; the new account is activated
    immediately (note=1) and the owner is optionally notified by mail.

    callback -- when non-empty, wrap the output in the "Manage Accounts"
    dispatcher page; otherwise return just the admin box.
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    subtitle = """<a name="3"></a>3. Create account. <small>[<a title="See guide" href="%s/help/admin/webaccess-admin-guide#4">?</a>]</small>""" % CFG_SITE_SECURE_URL
    output = ""
    text = ' <span class="adminlabel">Email:</span>\n'
    # Escape the echoed request values (quote=True) so quotes cannot
    # break out of the HTML attribute -- the original interpolated the
    # raw, attacker-controlled parameters verbatim.
    text += ' <input class="admin_wvar" type="text" name="email" value="%s" /><br />' % (
        escape(email, True), )
    text += ' <span class="adminlabel">Password:</span>\n'
    text += ' <input class="admin_wvar" type="text" name="password" value="%s" /><br />' % (
        escape(password, True), )
    output += createhiddenform(action="createaccount",
                               text=text,
                               confirm=1,
                               button="Create")
    if confirm in [1, "1"] and email and email_valid_p(email):
        res = run_sql("""SELECT email FROM "user" WHERE email=%s""", (email,))
        if not res:
            from invenio_accounts.models import User
            from invenio.ext.sqlalchemy import db
            # BUGFIX: the original code ran
            # User.query.filter_by(id=1).delete() here, silently
            # destroying the user with id=1 (typically the admin) on
            # every account creation -- removed as leftover debug code.
            u = User(email=email, password=password, note=1)
            db.session.add(u)
            db.session.commit()
            emailsent = False
            if CFG_ACCESS_CONTROL_NOTIFY_USER_ABOUT_NEW_ACCOUNT == 1:
                emailsent = send_new_user_account_warning(
                    email, email, password) == 0
            if password:
                output += '<b><span class="info">Account created with password and activated.</span></b>'
            else:
                output += '<b><span class="info">Account created without password and activated.</span></b>'
            if CFG_ACCESS_CONTROL_NOTIFY_USER_ABOUT_NEW_ACCOUNT == 1:
                if emailsent:
                    output += '<br /><b><span class="info">An email has been sent to the owner of the account.</span></b>'
                else:
                    output += '<br /><b><span class="important">Could not send an email to the owner of the account.</span></b>'
        else:
            output += '<b><span class="info">An account with the same email already exists.</span></b>'
    elif confirm in [1, "1"]:
        output += '<b><span class="info">Please specify an valid email-address.</span></b>'
    body = [output]
    if callback:
        return perform_manageaccounts(req, "perform_createaccount", addadminbox(subtitle, body))
    else:
        return addadminbox(subtitle, body)
def perform_modifyaccountstatus(req, userID, email_user_pattern, limit_to,
                                maxpage, page, callback='yes', confirm=0):
    """set a disabled account to enabled and opposite.

    Toggles the "note" flag of the user row: 0/NULL (inactive) becomes 1
    (active) and vice versa.  On activation the owner may be notified by
    mail.  email_user_pattern, limit_to, maxpage and page are passed
    through so perform_modifyaccounts can restore the admin's listing.
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    res = run_sql(
        """SELECT id, email, note FROM "user" WHERE id=%s""", (userID, ))
    subtitle = ""
    output = ""
    if res:
        # res[0][2] is the "note" flag: 0/NULL = inactive, 1 = active.
        if res[0][2] in [0, "0", None]:
            run_sql("""UPDATE "user" SET note=1 WHERE id=%s""", (userID, ))
            output += """<b><span class="info">The account '%s'
            has been activated.</span></b>""" % res[0][1]
            if CFG_ACCESS_CONTROL_NOTIFY_USER_ABOUT_ACTIVATION == 1:
                # NOTE(review): send_account_activated_message is defined
                # elsewhere in this module; '*****' masks the password.
                emailsent = send_account_activated_message(
                    res[0][1], res[0][1], '*****')
                if emailsent:
                    output += """<br /><b><span class="info">An email has
                    been sent to the owner of the account.</span></b>"""
                else:
                    output += """<br /><b><span class="info">Could not send an email to the owner of the account.</span></b>"""
        elif res[0][2] in [1, "1"]:
            run_sql("""UPDATE "user" SET note=0 WHERE id=%s""", (userID, ))
            output += """<b><span class="info">The account '%s' has been set inactive.</span></b>""" % res[
                0][1]
    else:
        output += '<b><span class="info">The account id given does not exist.</span></b>'
    body = [output]
    if callback:
        return perform_modifyaccounts(req, email_user_pattern, limit_to, maxpage, page, content=output, callback='yes')
    else:
        # Note: subtitle is always empty on this fallback path.
        return addadminbox(subtitle, body)
def perform_editaccount(req, userID, mtype='', content='', callback='yes',
                        confirm=-1):
    """form to modify an account.
    this method is calling other methods which again is calling this and
    sending back the output of the method.
    if callback, the method will call perform_editcollection, if not,
    it will just return its output.
    userID - id of the user
    mtype - the method that called this method.
    content - the output from that method.
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    res = run_sql("""SELECT id, email FROM "user" WHERE id=%s""", (userID, ))
    if not res:
        # No such user: either it was just deleted by the sub-page, or
        # the given id never existed.
        if mtype == "perform_deleteaccount":
            text = """<b><span class="info">The selected account has been deleted, to continue editing, go back to 'Manage Accounts'.</span></b>"""
            if CFG_ACCESS_CONTROL_NOTIFY_USER_ABOUT_DELETION == 1:
                text += """<br /><b><span class="info">An email has been sent to the owner of the account.</span></b>"""
        else:
            text = """<b><span class="info">The selected accounts does not exist, please go back and select an account to edit.</span></b>"""
        return index(req=req,
                     title='Edit Account',
                     subtitle="Edit account",
                     body=[text],
                     adminarea=7,
                     authorized=1)
    # Static menu linking to every sub-page of the account editor.
    fin_output = """
    <table>
    <tr>
    <td><b>Menu</b></td>
    </tr>
    <tr>
    <td>0. <small><a href="%s/admin/webaccess/webaccessadmin.py/editaccount?userID=%s">Show all</a></small></td>
    <td>1. <small><a href="%s/admin/webaccess/webaccessadmin.py/editaccount?userID=%s&mtype=perform_modifylogindata">Modify login-data</a></small></td>
    <td>2. <small><a href="%s/admin/webaccess/webaccessadmin.py/editaccount?userID=%s&mtype=perform_modifypreferences">Modify preferences</a></small></td>
    </tr><tr>
    <td>3. <small><a href="%s/admin/webaccess/webaccessadmin.py/editaccount?userID=%s&mtype=perform_deleteaccount">Delete account</a></small></td>
    <td>4. <small><a href="%s/admin/webaccess/webaccessadmin.py/editaccount?userID=%s&mtype=perform_modifyapikeydata">Edit REST API Key</a></small></td>
    </tr>
    </table>
    """ % (CFG_SITE_SECURE_URL, userID, CFG_SITE_SECURE_URL, userID, CFG_SITE_SECURE_URL, userID, CFG_SITE_SECURE_URL, userID, CFG_SITE_SECURE_URL, userID)
    # For each sub-page: embed the supplied content, or render it fresh
    # when this sub-page was requested (or no mtype was given at all).
    if mtype == "perform_modifylogindata" and content:
        fin_output += content
    elif mtype == "perform_modifylogindata" or not mtype:
        fin_output += perform_modifylogindata(req, userID, callback='')
    if mtype == "perform_modifypreferences" and content:
        fin_output += content
    elif mtype == "perform_modifypreferences" or not mtype:
        fin_output += perform_modifypreferences(req, userID, callback='')
    if mtype == "perform_deleteaccount" and content:
        fin_output += content
    elif mtype == "perform_deleteaccount" or not mtype:
        fin_output += perform_deleteaccount(req, userID, callback='')
    if mtype == "perform_modifyapikeydata" and content:
        fin_output += content
    elif mtype == "perform_modifyapikeydata" or not mtype:
        fin_output += perform_modifyapikeydata(req, userID, callback='')
    return index(req=req,
                 title='Edit Account',
                 subtitle="Edit account '%s'" % res[0][1],
                 body=[fin_output],
                 adminarea=7,
                 authorized=1)
def perform_becomeuser(req, userID='', callback='yes', confirm=0):
    """Switch the current session to the account identified by *userID*.

    On success the session uid is updated and the browser is redirected
    to the site home page; for an unknown id an error box is rendered.
    """
    auth_code, auth_message = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    subtitle = """<a name="5"></a>5. Became user. <small>[<a title="See guide" href="%s/help/admin/webaccess-admin-guide#5">?</a>]</small>""" % CFG_SITE_SECURE_URL
    rows = run_sql("""SELECT email FROM "user" WHERE id=%s""", (userID, ))
    if rows:
        # Take over the selected account and leave the admin interface.
        update_Uid(req, rows[0][0])
        redirect_to_url(req, CFG_SITE_SECURE_URL)
        output = ""
    else:
        output = '<b><span class="info">The account id given does not exist.</span></b>'
    body = [output]
    if not callback:
        return addadminbox(subtitle, body)
    return perform_editaccount(req, userID, mtype='perform_becomeuser',
                               content=addadminbox(subtitle, body),
                               callback='yes')
def perform_modifylogindata(req, userID, nickname='', email='', password='',
                            callback='yes', confirm=0):
    """Modify the nickname, email and password of an account.

    :param req: request object (used for admin authorization)
    :param userID: id of the account to edit
    :param nickname: new nickname (pre-filled from the DB when empty)
    :param email: new email address (pre-filled from the DB when empty)
    :param password: new password; account password left unchanged when empty
    :param callback: when set, wrap the output in the edit-account page
    :param confirm: when 1 (or "1"), apply the changes
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    subtitle = """<a name="1"></a>1. Edit login-data.   <small>
    [<a title="See guide" href="%s/help/admin/webaccess-admin-guide#4">?
    </a>]</small>""" % CFG_SITE_SECURE_URL
    res = run_sql(
        """SELECT id, email, nickname FROM "user" WHERE id=%s""", (userID, ))
    output = ""
    if res:
        # pre-fill the form with the current values on the first visit
        if not email and not password:
            email = res[0][1]
            nickname = res[0][2]
        text = ' <span class="adminlabel">Account id:</span>%s<br />\n' % userID
        # BUGFIX: was "text = ..." which silently discarded the account-id
        # line built just above; append instead.
        text += ' <span class="adminlabel">Nickname:</span>\n'
        text += ' <input class="admin_wvar" type="text" name="nickname" value="%s" /><br />' % (
            nickname, )
        text += ' <span class="adminlabel">Email:</span>\n'
        text += ' <input class="admin_wvar" type="text" name="email" value="%s" /><br />' % (
            email, )
        # NOTE(review): the password is echoed back in a plain text input;
        # consider type="password" — left as-is to preserve behavior.
        text += ' <span class="adminlabel">Password:</span>\n'
        text += ' <input class="admin_wvar" type="text" name="password" value="%s" /><br />' % (
            password, )
        output += createhiddenform(action="modifylogindata",
                                   text=text,
                                   userID=userID,
                                   confirm=1,
                                   button="Modify")
        if confirm in [1, "1"] and email and email_valid_p(email):
            # refuse nicknames already taken by a different account
            res = run_sql(
                """SELECT nickname FROM "user" WHERE nickname=%s AND id<>%s""", (nickname, userID))
            if res:
                output += '<b><span class="info">Sorry, the specified nickname is already used.</span></b>'
            else:
                res = run_sql(
                    """UPDATE "user" SET email=%s WHERE id=%s""", (email, userID))
                if password:
                    from invenio_accounts.models import User
                    from invenio.ext.sqlalchemy import db
                    u = User.query.filter_by(id=userID).first()
                    if u:
                        u.password = password
                        db.session.commit()
                else:
                    output += '<b><span class="info">Password not modified.</span></b> '
                res = run_sql(
                    """UPDATE "user" SET nickname=%s WHERE id=%s""", (nickname, userID))
                output += '<b><span class="info">Nickname/email and/or password  modified.</span></b>'
        elif confirm in [1, "1"]:
            output += '<b><span class="info">Please specify an valid email-address.</span></b>'
    else:
        output += '<b><span class="info">The account id given does not exist.</span></b>'
    body = [output]
    if callback:
        return perform_editaccount(req, userID, mtype='perform_modifylogindata', content=addadminbox(subtitle, body), callback='yes')
    else:
        return addadminbox(subtitle, body)
def perform_modifypreferences(req, userID, login_method='', callback='yes',
                              confirm=0):
    """Modify the default login method stored in an account's preferences.

    :param req: request object (used for admin authorization)
    :param userID: id of the account to edit
    :param login_method: name of the external-authentication system to set
    :param callback: when set, wrap the output in the edit-account page
    :param confirm: when 1 (or "1"), apply the change
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    subtitle = """<a name="2"></a>2. Modify preferences.
    <small>[<a title="See guide"
    href="%s/help/admin/webaccess-admin-guide#4">?</a>]</small>""" % \
        CFG_SITE_SECURE_URL
    res = run_sql("""SELECT id, email FROM "user" WHERE id=%s""", (userID, ))
    output = ""
    if res:
        user_pref = get_user_preferences(userID)
        if confirm in [1, "1"]:
            if login_method:
                user_pref['login_method'] = login_method
                set_user_preferences(userID, user_pref)
        output += "Select default login method:<br />"
        text = ""
        # BUGFIX: dict.keys() returns a view on Python 3, which has no
        # .sort(); sorted() works on both Python 2 and 3.
        methods = sorted(CFG_EXTERNAL_AUTHENTICATION.keys())
        for system in methods:
            text += """<input type="radio" name="login_method" value="%s" %s>%s<br />""" % (
                system, (user_pref['login_method'] == system and "checked" or ""), system)
        output += createhiddenform(action="modifypreferences",
                                   text=text,
                                   confirm=1,
                                   userID=userID,
                                   button="Select")
        if confirm in [1, "1"]:
            if login_method:
                output += """<b><span class="info">The login method has been changed</span></b>"""
            else:
                output += """<b><span class="info">Nothing to update</span></b>"""
    else:
        output += '<b><span class="info">The account id given does not exist.</span></b>'
    body = [output]
    if callback:
        return perform_editaccount(req, userID, mtype='perform_modifypreferences', content=addadminbox(subtitle, body), callback='yes')
    else:
        return addadminbox(subtitle, body)
def perform_deleteaccount(req, userID, callback='yes', confirm=0):
    """Show a confirmation form and, once confirmed, delete an account.

    When CFG_ACCESS_CONTROL_NOTIFY_USER_ABOUT_DELETION is enabled, the
    former owner is notified by email after the deletion.
    """
    auth_code, auth_message = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    subtitle = """<a name="3"></a>3. Delete account. <small>[<a title="See guide" href="%s/help/admin/webaccess-admin-guide#4">?</a>]</small>""" % CFG_SITE_SECURE_URL
    res = run_sql("""SELECT id, email FROM "user" WHERE id=%s""", (userID, ))
    output = ""
    if not res:
        output += '<b><span class="info">The account id given does not exist.</span></b>'
    elif confirm in [0, "0"]:
        # first step: ask the admin to confirm the deletion
        account_email = res[0][1]
        text = '<b><span class="important">Are you sure you want to delete the account with email: "%s"?</span></b>' % account_email
        output += createhiddenform(action="deleteaccount",
                                   text=text,
                                   userID=userID,
                                   confirm=1,
                                   button="Delete")
    elif confirm in [1, "1"]:
        # second step: actually remove the row and optionally notify
        account_email = res[0][1]
        run_sql("""DELETE FROM "user" WHERE id=%s""", (userID, ))
        output += '<b><span class="info">Account deleted.</span></b>'
        if CFG_ACCESS_CONTROL_NOTIFY_USER_ABOUT_DELETION == 1:
            send_account_deleted_message(account_email, account_email)
    body = [output]
    if not callback:
        return addadminbox(subtitle, body)
    return perform_editaccount(req, userID, mtype='perform_deleteaccount', content=addadminbox(subtitle, body), callback='yes')
def perform_modifyapikeydata(req, userID, keyID='', status='', callback='yes',
                             confirm=0):
    """Modify REST API keys of an account.

    The WebAPI-key feature was removed; fail loudly so any remaining
    caller is detected instead of silently doing nothing.
    """
    raise RuntimeError('WebAPI keys have been removed.')
def perform_rejectaccount(req, userID, email_user_pattern, limit_to, maxpage,
                          page, callback='yes', confirm=0):
    """Delete an account and (optionally) email the former owner.

    A "rejected" account (note unset or "0") gets the rejection mail, an
    active one (note "1") gets the deletion mail.  The search parameters
    (email_user_pattern, limit_to, maxpage, page) are passed through so
    the caller returns to the same result page.
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    res = run_sql(
        """SELECT id, email, note FROM "user" WHERE id=%s""", (userID, ))
    output = ""
    subtitle = ""
    if res:
        run_sql("""DELETE FROM "user" WHERE id=%s""", (userID, ))
        output += '<b><span class="info">Account rejected and deleted.</span></b>'
        if CFG_ACCESS_CONTROL_NOTIFY_USER_ABOUT_DELETION == 1:
            # BUGFIX: initialize so an unexpected note value (neither
            # empty/"0" nor "1") no longer raises NameError below.
            emailsent = False
            if not res[0][2] or res[0][2] == "0":
                emailsent = send_account_rejected_message(res[0][1], res[0][1])
            elif res[0][2] == "1":
                emailsent = send_account_deleted_message(res[0][1], res[0][1])
            if emailsent:
                output += """<br /><b><span class="info">An email has been sent to the owner of the account.</span></b>"""
            else:
                output += """<br /><b><span class="info">Could not send an email to the owner of the account.</span></b>"""
    else:
        output += '<b><span class="info">The account id given does not exist.</span></b>'
    body = [output]
    if callback:
        return perform_modifyaccounts(req, email_user_pattern, limit_to, maxpage, page, content=output, callback='yes')
    else:
        return addadminbox(subtitle, body)
def perform_modifyaccounts(req, email_user_pattern='', limit_to=-1,
                           maxpage=MAXPAGEUSERS, page=1, content='',
                           callback='yes', confirm=0):
    """Search, list and manage user accounts.

    Renders a search form (email pattern, status filter, page size) and,
    once a filter is chosen, a paginated table of matching accounts with
    per-row activate/inactivate, reject/delete, edit and become-user links.

    :param req: request object (used for admin authorization)
    :param email_user_pattern: substring/regexp matched against emails
    :param limit_to: 'all', 'enabled', 'disabled' (or -1: no search yet)
    :param maxpage: number of accounts shown per page
    :param page: 1-based page number
    :param content: extra HTML appended below the table
    :param callback: when set, wrap the output in the manage-accounts page
    """
    rlike_op = rlike()
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    subtitle = """<a name="4"></a>4. Edit accounts. <small>[<a title="See guide" href="%s/help/admin/webaccess-admin-guide#4">?</a>]</small>""" % CFG_SITE_SECURE_URL
    output = ""
    # remove letters not allowed in an email
    email_user_pattern = cleanstring_email(email_user_pattern)
    try:
        maxpage = int(maxpage)
    except Exception:
        maxpage = MAXPAGEUSERS
    try:
        page = int(page)
        if page < 1:
            page = 1
    except Exception:
        page = 1
    text = ' <span class="adminlabel">Email (part of):</span>\n'
    text += ' <input class="admin_wvar" type="text" name="email_user_pattern" value="%s" /><br />' % (
        email_user_pattern, )
    text += """<span class="adminlabel">Limit to:</span>
    <select name="limit_to" class="admin_w200">
    <option value="all" %s>All accounts</option>
    <option value="enabled" %s>Active accounts</option>
    <option value="disabled" %s>Inactive accounts</option>
    </select><br />""" % ((limit_to == "all" and "selected" or ""), (limit_to == "enabled" and "selected" or ""), (limit_to == "disabled" and "selected" or ""))
    text += """<span class="adminlabel">Accounts per page:</span>
    <select name="maxpage" class="admin_wvar">
    <option value="25" %s>25</option>
    <option value="50" %s>50</option>
    <option value="100" %s>100</option>
    <option value="250" %s>250</option>
    <option value="500" %s>500</option>
    <option value="1000" %s>1000</option>
    </select><br />""" % ((maxpage == 25 and "selected" or ""), (maxpage == 50 and "selected" or ""), (maxpage == 100 and "selected" or ""), (maxpage == 250 and "selected" or ""), (maxpage == 500 and "selected" or ""), (maxpage == 1000 and "selected" or ""))
    output += createhiddenform(action="modifyaccounts",
                               text=text,
                               button="search for accounts")
    if limit_to not in [-1, "-1"] and maxpage:
        options = []
        # build the filter query; note= encodes account status
        query = """SELECT id,email,note FROM "user" WHERE """
        if limit_to == "enabled":
            query += " email!='' AND note=1"
        elif limit_to == "disabled":
            query += " email!='' AND note=0 OR note IS NULL"
        elif limit_to == "guest":
            query += " email=''"
        else:
            query += " email!=''"
        if email_user_pattern:
            query += " AND email " + rlike_op + " %s"
            options += [email_user_pattern]
        # fetch one row more than a full page so we can detect a next page
        query += " ORDER BY email LIMIT %s"
        options += [maxpage * page + 1]
        try:
            users1 = run_sql(query, tuple(options))
        except OperationalError:
            users1 = ()
        if not users1:
            output += '<b><span class="info">There are no accounts matching the email given.</span></b>'
        else:
            users = []
            if maxpage * (page - 1) > len(users1):
                # BUGFIX: use floor division; "/" yields a float on
                # Python 3 and breaks the slice below.
                page = len(users1) // maxpage + 1
            for (id, email, note) in users1[maxpage * (page - 1):(maxpage * page)]:
                users.append(
                    ['', id, email, (note == "1" and '<strong class="info">Active</strong>' or '<strong class="important">Inactive</strong>')])
                for col in [(((note == "1" and 'Inactivate' or 'Activate'), 'modifyaccountstatus'), ((note == "0" and 'Reject' or 'Delete'), 'rejectaccount'), ),
                            (('Edit account', 'editaccount'), ), ]:
                    users[-1].append('<a href="%s?userID=%s&email_user_pattern=%s&limit_to=%s&maxpage=%s&page=%s">%s</a>' % (
                        col[0][1], id, email_user_pattern, limit_to, maxpage, page, col[0][0]))
                    # renamed from "str" which shadowed the builtin
                    for (label, function) in col[1:]:
                        users[-1][-1] += ' / <a href="%s?userID=%s&email_user_pattern=%s&limit_to=%s&maxpage=%s&page=%s">%s</a>' % (
                            function, id, email_user_pattern, limit_to, maxpage, page, label)
                # BUGFIX: opening quote after href= was missing, producing
                # malformed HTML for the "Become user" link.
                users[-1].append('<a href="%s?userID=%s&email_user_pattern=%s&limit_to=%s&maxpage=%s&page=%s">%s</a>' % (
                    'becomeuser', id, email_user_pattern, limit_to, maxpage, page, 'Become user'))
            last = ""
            next = ""
            if len(users1) > maxpage:
                if page > 1:
                    last += '<b><span class="info"><a href="modifyaccounts?email_user_pattern=%s&limit_to=%s&maxpage=%s&page=%s">Last Page</a></span></b>' % (
                        email_user_pattern, limit_to, maxpage, (page - 1))
                if len(users1[maxpage * (page - 1):(maxpage * page)]) == maxpage:
                    next += '<b><span class="info"><a href="modifyaccounts?email_user_pattern=%s&limit_to=%s&maxpage=%s&page=%s">Next page</a></span></b>' % (
                        email_user_pattern, limit_to, maxpage, (page + 1))
                output += '<b><span class="info">Showing accounts %s-%s:</span></b>' % (
                    1 + maxpage * (page - 1), maxpage * page)
            else:
                output += '<b><span class="info">%s matching account(s):</span></b>' % len(
                    users1)
            output += tupletotable(header=[last, 'id',
                                           'email', 'Status', '', '', next], tuple=users)
    else:
        output += '<b><span class="info">Please select which accounts to find and how many to show per page.</span></b>'
    if content:
        output += "<br />%s" % content
    body = [output]
    if callback:
        return perform_manageaccounts(req, "perform_modifyaccounts", addadminbox(subtitle, body))
    else:
        return addadminbox(subtitle, body)
def perform_delegate_startarea(req):
    """Render the start page for lower-level delegation of rights.

    Guests are refused; full web administrators additionally see a link
    back to the main admin area.
    """
    # refuse access to guest users:
    uid = getUid(req)
    if isGuestUser(uid):
        return index(req=req,
                     title='Delegate Rights',
                     adminarea=0,
                     authorized=0)
    subtitle = 'select what to do'
    pieces = []
    if is_adminuser(req)[0] == 0:
        # full admins get a pointer back to the unrestricted area
        pieces.append("""
    <p>
    You are also allowed to be in the <a href="../webaccessadmin.py">Main Admin Area</a> which gives you<br />
    the access to the full functionality of WebAccess.
    </p>
    """)
    pieces.append("""
    <dl>
    <dt><a href="delegate_adduserrole">Connect users to roles</a></dt>
    <dd>add users to the roles you have delegation rights to.</dd>
    <dt><a href="delegate_deleteuserrole">Remove users from roles</a></dt>
    <dd>remove users from the roles you have delegation rights to.</dd>
    </dl>
    <dl>
    <dt><a href="delegate_adminsetup">Set up delegation rights</a></dt>
    <dd>specialized area to set up the delegation rights used in the areas
    above. <br />
    you need to be a web administrator to access the area.</dd>
    </dl>
    """)
    output = ''.join(pieces)
    return index(req=req,
                 title='Delegate Rights',
                 subtitle=subtitle,
                 body=[output],
                 adminarea=0,
                 authorized=1)
def perform_delegate_adminsetup(req, id_role_admin=0, id_role_delegate=0,
                                confirm=0):
    """let the webadmins set up the delegation rights for the other roles.
    id_role_admin - the role to be given delegation rights
    id_role_delegate - the role over which the delegation rights are given
    confirm - make the connection happen

    Four-step wizard: (1) pick the admin role, (2) pick the role it may
    delegate, (3) confirm, (4) add the authorization on the
    DELEGATEADDUSERROLE action.
    """
    # NOTE(review): unlike the sibling handlers, no is_adminuser() check is
    # performed here — presumably enforced upstream; confirm before reuse.
    subtitle = 'step 1 - select admin role'
    admin_roles = acca.acc_get_all_roles()
    output = """
    <p>
    This is a specialized area to handle a task that also can be handled<br />
    from the "add authorization" interface.
    </p>
    <p>
    By handling the delegation rights here you get the advantage of<br />
    not having to select the correct action <i>(%s)</i> or<br />
    remembering the names of available roles.
    </p>
    """ % (DELEGATEADDUSERROLE, )
    output += createroleselect(id_role=id_role_admin,
                               step=1,
                               button='select admin role',
                               name='id_role_admin',
                               action='delegate_adminsetup',
                               roles=admin_roles)
    if str(id_role_admin) != '0':
        subtitle = 'step 2 - select delegate role'
        name_role_admin = acca.acc_get_role_name(id_role=id_role_admin)
        # roles this admin role can already delegate
        delegate_roles_old = acca.acc_find_delegated_roles(
            id_role_admin=id_role_admin)
        # split all roles into "still selectable" vs "already delegated"
        delegate_roles = []
        delegate_roles_old_names = []
        for role in admin_roles:
            if (role,) not in delegate_roles_old:
                delegate_roles.append(role)
            else:
                delegate_roles_old_names.append(role[1])
        if delegate_roles_old_names:
            delegate_roles_old_names.sort()
            names_str = ''
            for name in delegate_roles_old_names:
                if names_str:
                    names_str += ', '
                names_str += name
            output += '<p>previously selected roles: <strong>%s</strong>.</p>' % (
                names_str, )
            # "extra" (right-hand menu) only exists on this branch; see the
            # try/NameError at the bottom of the function
            extra = """
    <dl>
    <dt><a href="modifyauthorizations?id_role=%s&id_action=%s">Remove delegated roles</a></dt>
    <dd>use the standard administration area to remove delegation rights
    you no longer want to be available.</dd>
    </dl>
    """ % (id_role_admin, acca.acc_get_action_id(name_action=DELEGATEADDUSERROLE))
        else:
            output += '<p>no previously selected roles.</p>'
        output += createroleselect(id_role=id_role_delegate,
                                   step=2,
                                   button='select delegate role',
                                   name='id_role_delegate',
                                   action='delegate_adminsetup',
                                   roles=delegate_roles,
                                   id_role_admin=id_role_admin)
        if str(id_role_delegate) != '0':
            subtitle = 'step 3 - confirm to add delegation right'
            name_role_delegate = acca.acc_get_role_name(
                id_role=id_role_delegate)
            output += """
    <p>
    <span class="warning"><strong>Warning:</strong> don't hand out delegation rights that can harm the system (e.g. delegating superrole).</span>
    </p> """
            output += createhiddenform(action="delegate_adminsetup",
                                       text='let role <strong>%s</strong> delegate rights over role <strong>%s</strong>?' % (
                                           name_role_admin, name_role_delegate),
                                       id_role_admin=id_role_admin,
                                       id_role_delegate=id_role_delegate,
                                       confirm=1)
            if int(confirm):
                subtitle = 'step 4 - confirm delegation right added'
                # res1 = acca.acc_add_role_action_arguments_names(name_role=name_role_admin,
                # name_action=DELEGATEADDUSERROLE,
                # arglistid=-1,
                # optional=0,
                # role=name_role_delegate)
                res1 = acca.acc_add_authorization(name_role=name_role_admin,
                                                  name_action=DELEGATEADDUSERROLE,
                                                  optional=0,
                                                  role=name_role_delegate)
                if res1:
                    output += '<p>confirm: role <strong>%s</strong> delegates role <strong>%s</strong>.' % (
                        name_role_admin, name_role_delegate)
                else:
                    output += '<p>sorry, delegation right could not be added,<br />it probably already exists.</p>'
    # see if right hand menu is available
    # ("extra" is only bound when previously-delegated roles were shown)
    try:
        body = [output, extra]
    except NameError:
        body = [output]
    return index(req=req,
                 title='Delegate Rights',
                 subtitle=subtitle,
                 body=body,
                 adminarea=1)
def perform_delegate_adduserrole(req, id_role=0, email_user_pattern='',
                                 id_user=0, confirm=0):
    """let a lower level web admin add users to a limited set of roles.
    id_role - the role to connect to a user
    id_user - the user to connect to a role
    confirm - make the connection happen

    Five-step wizard: (1) choose one of the roles the current admin is
    allowed to delegate, (2) search users by email pattern, (3) pick a
    user, (4) confirm, (5) attach the user to the role.
    """
    rlike_op = rlike()
    # finding the allowed roles for this user
    id_admin = getUid(req)
    id_action = acca.acc_get_action_id(name_action=DELEGATEADDUSERROLE)
    actions = acca.acc_find_possible_actions_user(
        id_user=id_admin, id_action=id_action)
    # build the list of roles this admin may delegate; actions[0] is the
    # header row, hence the [1:] slice
    allowed_roles = []
    allowed_id_roles = []
    for (id, arglistid, name_role_help) in actions[1:]:
        id_role_help = acca.acc_get_role_id(name_role=name_role_help)
        if id_role_help and \
                [id_role_help, name_role_help, ''] not in allowed_roles:
            allowed_roles.append([id_role_help, name_role_help, ''])
            allowed_id_roles.append(str(id_role_help))
    output = ''
    if not allowed_roles:
        subtitle = 'no delegation rights'
        output += """
        <p>
        You do not have the delegation rights over any roles.<br />
        If you think you should have such rights, contact a WebAccess Administrator.
        </p>"""
        extra = ''
    else:
        subtitle = 'step 1 - select role'
        output += """
        <p>
        Lower level delegation of access rights to roles.<br />
        An administrator with all rights have to give you these rights.
        </p>"""
        email_out = acca.acc_get_user_email(id_user=id_user)
        name_role = acca.acc_get_role_name(id_role=id_role)
        output += createroleselect(id_role=id_role, step=1, name='id_role',
                                   action='delegate_adduserrole', roles=allowed_roles)
        # only proceed when the chosen role is one the admin may delegate
        if str(id_role) != '0' and str(id_role) in allowed_id_roles:
            subtitle = 'step 2 - search for users'
            # remove letters not allowed in an email
            email_user_pattern = cleanstring_email(email_user_pattern)
            text = ' <span class="adminlabel">2. search for user </span>\n'
            text += ' <input class="admin_wvar" type="text" name="email_user_pattern" value="%s" />\n' % (
                email_user_pattern, )
            output += createhiddenform(action="delegate_adduserrole",
                                       text=text,
                                       button="search for users",
                                       id_role=id_role)
            # pattern is entered
            if email_user_pattern:
                # users with matching email-address
                try:
                    users1 = run_sql(
                        """SELECT id, email FROM "user"
                        WHERE email<>'' AND email """ + rlike_op +
                        """ %s ORDER BY email """, (email_user_pattern, ))
                except OperationalError:
                    users1 = ()
                # users that are connected
                try:
                    users2 = run_sql(
                        """SELECT DISTINCT u.id, u.email
                        FROM "user" u
                        LEFT JOIN "user_accROLE" ur ON u.id = ur.id_user
                        WHERE ur."id_accROLE" = %s AND u.email """ +
                        rlike_op + """ %s ORDER BY u.email """,
                        (id_role, email_user_pattern))
                except OperationalError:
                    users2 = ()
                # no users that match the pattern
                if not (users1 or users2):
                    output += '<p>no qualified users, try new search.</p>'
                # too many matching users
                elif len(users1) > MAXSELECTUSERS:
                    output += '<p><strong>%s hits</strong>, too many qualified users, specify more narrow search. (limit %s)</p>' % (
                        len(users1), MAXSELECTUSERS)
                # show matching users
                else:
                    subtitle = 'step 3 - select a user'
                    users = []
                    extrausers = []
                    # already-connected users are listed separately with a
                    # negated id so selecting them can be detected below
                    for (id, email) in users1:
                        if (id, email) not in users2:
                            users.append([id, email, ''])
                    for (id, email) in users2:
                        extrausers.append([-id, email, ''])
                    output += createuserselect(id_user=id_user,
                                               action="delegate_adduserrole",
                                               step=3,
                                               users=users,
                                               extrausers=extrausers,
                                               button="add this user",
                                               id_role=id_role,
                                               email_user_pattern=email_user_pattern)
                    try:
                        id_user = int(id_user)
                    except ValueError:
                        pass
                    # user selected already connected to role
                    if id_user < 0:
                        output += '<p>users in brackets are already attached to the role, try another one...</p>'
                    # a user is selected
                    elif email_out:
                        subtitle = "step 4 - confirm to add user"
                        output += createhiddenform(action="delegate_adduserrole",
                                                   text='add user <strong>%s</strong> to role <strong>%s</strong>?' % (
                                                       email_out, name_role),
                                                   id_role=id_role,
                                                   email_user_pattern=email_user_pattern,
                                                   id_user=id_user,
                                                   confirm=1)
                        # it is confirmed that this user should be added
                        if confirm:
                            # add user
                            result = acca.acc_add_user_role(
                                id_user=id_user, id_role=id_role)
                            if result and result[2]:
                                subtitle = 'step 5 - confirm user added'
                                output += '<p>confirm: user <strong>%s</strong> added to role <strong>%s</strong>.</p>' % (
                                    email_out, name_role)
                            else:
                                subtitle = 'step 5 - user could not be added'
                                output += '<p>sorry, but user could not be added.</p>'
        extra = """
    <dl>
    <dt><a href="delegate_deleteuserrole?id_role=%s">Remove users from role</a></dt>
    <dd>remove users from the roles you have delegating rights to.</dd>
    </dl>
    """ % (id_role, )
    return index(req=req,
                 title='Connect users to roles',
                 subtitle=subtitle,
                 body=[output, extra],
                 adminarea=1,
                 authorized=1)
def perform_delegate_deleteuserrole(req, id_role=0, id_user=0, confirm=0):
    """let a lower level web admin remove users from a limited set of roles.
    id_role - the role to connect to a user
    id_user - the user to connect to a role
    confirm - make the connection happen

    Four-step wizard: (1) choose a role the current admin may delegate,
    (2) pick one of its members, (3) confirm, (4) detach the user.
    """
    subtitle = 'in progress...'
    output = '<p>in progress...</p>'
    # finding the allowed roles for this user
    id_admin = getUid(req)
    id_action = acca.acc_get_action_id(name_action=DELEGATEADDUSERROLE)
    actions = acca.acc_find_possible_actions_user(
        id_user=id_admin, id_action=id_action)
    output = ''
    if not actions:
        subtitle = 'no delegation rights'
        output += """
        <p>
        You do not have the delegation rights over any roles.<br />
        If you think you should have such rights, contact a WebAccess Administrator.
        </p>"""
        extra = ''
    else:
        subtitle = 'step 1 - select role'
        output += """
        <p>
        Lower level delegation of access rights to roles.<br />
        An administrator with all rights have to give you these rights.
        </p>"""
        # NOTE(review): return value unused — call looks vestigial; the
        # email is re-fetched as email_user in step 3 below.
        acca.acc_get_user_email(id_user=id_user)
        name_role = acca.acc_get_role_name(id_role=id_role)
        # create list of allowed roles
        # (actions[0] is the header row, hence the [1:] slice)
        allowed_roles = []
        allowed_id_roles = []
        for (id, arglistid, name_role_help) in actions[1:]:
            id_role_help = acca.acc_get_role_id(name_role=name_role_help)
            if id_role_help and [id_role_help, name_role_help, ''] not in allowed_roles:
                allowed_roles.append([id_role_help, name_role_help, ''])
                allowed_id_roles.append(str(id_role_help))
        output += createroleselect(id_role=id_role, step=1,
                                   action='delegate_deleteuserrole', roles=allowed_roles)
        # only proceed when the chosen role is one the admin may delegate
        if str(id_role) != '0' and str(id_role) in allowed_id_roles:
            subtitle = 'step 2 - select user'
            users = acca.acc_get_role_users(id_role)
            output += createuserselect(id_user=id_user,
                                       step=2,
                                       action='delegate_deleteuserrole',
                                       users=users,
                                       id_role=id_role)
            if str(id_user) != '0':
                subtitle = 'step 3 - confirm delete of user'
                email_user = acca.acc_get_user_email(id_user=id_user)
                output += createhiddenform(action="delegate_deleteuserrole",
                                           text='delete user %s from %s?'
                                           % (headerstrong(user=id_user), headerstrong(role=id_role)),
                                           id_role=id_role,
                                           id_user=id_user,
                                           confirm=1)
                if confirm:
                    res = acca.acc_delete_user_role(
                        id_user=id_user, id_role=id_role)
                    if res:
                        subtitle = 'step 4 - confirm user deleted from role'
                        output += '<p>confirm: deleted user <strong>%s</strong> from role <strong>%s</strong>.</p>' % (
                            email_user, name_role)
                    else:
                        subtitle = 'step 4 - user could not be deleted'
                        output += 'sorry, but user could not be deleted<br />user is probably already deleted.'
        extra = """
    <dl>
    <dt><a href="delegate_adduserrole?id_role=%s">Connect users to role</a></dt>
    <dd>add users to the roles you have delegating rights to.</dd>
    </dl>
    """ % (id_role, )
    return index(req=req,
                 title='Remove users from roles',
                 subtitle=subtitle,
                 body=[output, extra],
                 adminarea=1,
                 authorized=1)
def perform_showactiondetails(req, id_action):
    """Render the action-selection form plus, once an action is chosen,
    its details and a right-hand menu of related admin links."""
    auth_code, auth_message = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    output = createactionselect(id_action=id_action,
                                action="showactiondetails",
                                step=1,
                                actions=acca.acc_get_all_actions(),
                                button="select action")
    if id_action in [0, '0']:
        # nothing selected yet: plain form, no side menu
        output += '<p>no details to show</p>'
        body = [output]
    else:
        output += actiondetails(id_action=id_action)
        extra = """
    <dl>
    <dt><a href="addauthorization?id_action=%s&reverse=1">Add new authorization</a></dt>
    <dd>add an authorization.</dd>
    <dt><a href="modifyauthorizations?id_action=%s&reverse=1">Modify authorizations</a></dt>
    <dd>modify existing authorizations.</dd>
    <dt><a href="deleteroleaction?id_action=%s&reverse=1">Remove role</a></dt>
    <dd>remove all authorizations from action and a role.</dd>
    </dl>
    """ % (id_action, id_action, id_action)
        body = [output, extra]
    return index(req=req,
                 title='Show Action Details',
                 subtitle='show action details',
                 body=body,
                 adminarea=4)
def actiondetails(id_action=0):
    """Return an HTML fragment describing one action and the roles
    authorized on it; a placeholder when no action is selected."""
    if id_action in [0, '0']:
        return '<p>no details to show</p>'
    name_action = acca.acc_get_action_name(id_action=id_action)
    parts = ['<p>action details:</p>']
    parts.append(tupletotable(
        header=['id', 'name', 'description', 'allowedkeywords',
                'optional'],
        tuple=[acca.acc_get_action_details(id_action=id_action)]))
    roleshlp = acca.acc_get_action_roles(id_action=id_action)
    if not roleshlp:
        parts.append('<p>no roles connected to %s.</p>\n' % (
            headerstrong(action=name_action, query=0), ))
        return ''.join(parts)
    # one table row per connected role, each with its own nested
    # authorization-details table
    roles = []
    for (role_id, role_name, dummy) in roleshlp:
        res = acca.acc_find_possible_actions(role_id, id_action)
        if res:
            authorization_details = tupletotable(
                header=res[0], tuple=res[1:])
        else:
            authorization_details = 'no details to show'
        roles.append(
            [role_id,
             '<a href="showroledetails?id_role=%s">%s</a>' %
             (role_id, escape(role_name)),
             authorization_details])
    parts.append('<p>roles connected to %s:</p>\n' % (
        headerstrong(action=name_action, query=0), ))
    parts.append(tupletotable(
        header=['id', 'name', 'authorization details', ''],
        tuple=roles))
    return ''.join(parts)
def perform_addrole(req, id_role=0, name_role='',
                    description='put description here.',
                    firerole_def_src=CFG_ACC_EMPTY_ROLE_DEFINITION_SRC,
                    confirm=0):
    """form to add a new role with these values.
    name_role - name of the new role
    description - optional description of the role

    Three-step flow: show the form, confirm the values (the firewall-like
    role definition is compiled for validation), then insert the role and
    offer follow-up links.
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    name_role = cleanstring(name_role)
    title = 'Add Role'
    subtitle = 'step 1 - give values to the requested fields'
    output = """
    <form action="addrole" method="POST">
    <table><tbody><tr><td align='right' valign='top'>
    <span class="adminlabel">role name </span>
    </td><td>
    <input class="admin_wvar" type="text" name="name_role" value="%s" />
    </td></tr><tr><td align='right' valign='top'>
    <span class="adminlabel">description </span>
    </td><td>
    <textarea class="admin_wvar" rows="6" cols="80" name="description">%s</textarea>
    </td></tr><tr><td align='right' valign='top'>
    <span class="adminlabel">firewall like role definition [<a href="/help/admin/webaccess-admin-guide#6">?</a>]</span>
    </td><td>
    <textarea class="admin_wvar" rows="6" cols="80" name="firerole_def_src">%s</textarea>
    </td></tr>
    <tr><td></td><td>See the <a href="listgroups" target="_blank">list of groups</a> for a hint about which group names you can use.</td></tr>
    <tr><td></td><td>
    <input class="adminbutton" type="submit" value="add role" />
    </td></tr></tbody></table>
    </form>
    """ % (escape(name_role, '"'), escape(description),
           escape(firerole_def_src))
    if name_role:
        # description must be changed before submitting
        subtitle = 'step 2 - confirm to add role'
        # keep the placeholder text out of the stored description
        internaldesc = ''
        if description != 'put description here.':
            internaldesc = description
        try:
            # compile the firewall-like definition now so syntax errors
            # are reported before anything is written to the DB
            firerole_def_ser = serialize(
                compile_role_definition(firerole_def_src))
        except InvenioWebAccessFireroleError as msg:
            output += "<strong>%s</strong>" % msg
        else:
            text = """
            add role with: <br />\n
            name: <strong>%s</strong> <br />""" % (name_role, )
            if internaldesc:
                text += 'description: <strong>%s</strong>?\n' % (description, )
            output += createhiddenform(action="addrole",
                                       text=text,
                                       name_role=escape(name_role, '"'),
                                       description=escape(description, '"'),
                                       firerole_def_src=escape(
                                           firerole_def_src, '"'),
                                       confirm=1)
            if confirm not in ["0", 0]:
                result = acca.acc_add_role(name_role=name_role,
                                           description=internaldesc,
                                           firerole_def_ser=firerole_def_ser,
                                           firerole_def_src=firerole_def_src)
                if result:
                    subtitle = 'step 3 - role added'
                    output += '<p>role added: </p>'
                    result = list(result)
                    result[3] = result[3].replace('\n', '<br/>')
                    result = tuple(result)
                    output += tupletotable(header=['id', 'role name', 'description', 'firewall like role definition'],
                                           tuple=[result])
                else:
                    subtitle = 'step 3 - role could not be added'
                    output += '<p>sorry, could not add role, <br />role with the same name probably exists.</p>'
                id_role = acca.acc_get_role_id(name_role=name_role)
                # follow-up menu only exists once the role was submitted;
                # resolved via the try/NameError below
                extra = """
    <dl>
    <dt><a href="addauthorization?id_role=%s">Add authorization</a></dt>
    <dd>start adding new authorizations to role %s.</dd>
    </dl>
    <dt><a href="adduserrole?id_role=%s">Connect user</a></dt>
    <dd>connect a user to role %s.</dd>
    <dl>
    </dl>""" % (id_role, name_role, id_role, name_role)
    try:
        body = [output, extra]
    except NameError:
        body = [output]
    return index(req=req,
                 title=title,
                 body=body,
                 subtitle=subtitle,
                 adminarea=3)
def perform_modifyrole(req, id_role='0', name_role='',
                       description='put description here.',
                       firerole_def_src='', modified='0', confirm=0):
    """form to add a new role with these values.
    name_role - name of the role to be changed
    description - optional description of the role
    firerole_def_src - optional firerole like definition of the role

    Two-step flow: pre-fill the form from the DB (modified='0'), then on
    re-submission (modified='1') validate the firewall-like definition,
    ask for confirmation and finally update the role.
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    ret = acca.acc_get_role_details(id_role)
    # first visit: load the current values from the database
    if ret and modified == '0':
        name_role = ret[1]
        description = ret[2]
        firerole_def_src = ret[3]
    # fall back to an explicit deny-all definition when none is stored
    if not firerole_def_src or firerole_def_src == '' or \
            firerole_def_src is None:
        firerole_def_src = 'deny any'
    name_role = cleanstring(name_role)
    title = 'Modify Role'
    subtitle = 'step 1 - give values to the requested fields and confirm to modify role'
    output = """
    <form action="modifyrole" method="POST">
    <table><tbody><tr><td align='right' valign='top'>
    <input type="hidden" name="id_role" value="%s" />
    <span class="adminlabel">role name </span>
    </td><td>
    <input class="admin_wvar" type="text" name="name_role" value="%s" /> <br />
    </td></tr><tr><td align='right' valign='top'>
    <span class="adminlabel">description </span>
    </td><td>
    <textarea class="admin_wvar" rows="6" cols="80" name="description">%s</textarea> <br />
    </td></tr><tr><td align='right' valign='top'>
    <span class="adminlabel">firewall like role definition</span> [<a href="/help/admin/webaccess-admin-guide#6">?</a>]
    </td><td>
    <textarea class="admin_wvar" rows="6" cols="80" name="firerole_def_src">%s</textarea><br />
    </td></tr>
    <tr><td></td><td>See the <a href="listgroups" target="_blank">list of groups</a> for a hint about which group names you can use.</td></tr>
    <tr><td></td><td>
    <input class="adminbutton" type="submit" value="modify role" />
    <input type="hidden" name="modified" value="1" />
    </td></tr></tbody></table>
    </form>
    """ % (id_role, escape(name_role), escape(description),
           escape(firerole_def_src))
    if modified in [1, '1']:
        # description must be changed before submitting
        # (the placeholder text is never stored)
        internaldesc = ''
        if description != 'put description here.':
            internaldesc = description
        text = """
        modify role with: <br />\n
        name: <strong>%s</strong> <br />""" % (name_role, )
        if internaldesc:
            text += 'description: <strong>%s</strong>?<br />' % (description, )
        text += 'firewall like role definition: <strong>%s</strong>' % firerole_def_src.replace(
            '\n', '<br />')
        try:
            # compile the definition first so syntax errors are reported
            # before any DB write happens
            firerole_def_ser = serialize(
                compile_role_definition(firerole_def_src))
        except InvenioWebAccessFireroleError as msg:
            subtitle = 'step 2 - role could not be modified'
            output += '<p>sorry, could not modify role because of troubles with its definition:<br />%s</p>' % msg
        else:
            output += createhiddenform(action="modifyrole",
                                       text=text,
                                       id_role=id_role,
                                       name_role=escape(name_role, True),
                                       description=escape(description, True),
                                       firerole_def_src=escape(
                                           firerole_def_src, True),
                                       modified=1,
                                       confirm=1)
            if confirm not in ["0", 0]:
                result = acca.acc_update_role(id_role, name_role=name_role,
                                              description=internaldesc, firerole_def_ser=firerole_def_ser, firerole_def_src=firerole_def_src)
                if result:
                    subtitle = 'step 2 - role modified'
                    output += '<p>role modified: </p>'
                    output += tupletotable(header=['id', 'role name',
                                                   'description', 'firewall like role definition'],
                                           tuple=[(id_role, name_role, description, firerole_def_src.replace('\n', '<br />'))])
                else:
                    subtitle = 'step 2 - role could not be modified'
                    output += '<p>sorry, could not modify role, <br />please contact the administrator.</p>'
    body = [output]
    return index(req=req,
                 title=title,
                 body=body,
                 subtitle=subtitle,
                 adminarea=3)
def perform_deleterole(req, id_role="0", confirm=0):
    """Ask the administrator to pick a role and delete it.

    req     - mod_python request object
    id_role - id of the role that should be removed
    confirm - set once the administrator has confirmed the deletion
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    title = 'Delete role'
    subtitle = 'step 1 - select role to delete'
    name_role = acca.acc_get_role_name(id_role=id_role)
    # step 1: the role selection box is always shown
    output = createroleselect(id_role=id_role,
                              action="deleterole",
                              step=1,
                              roles=acca.acc_get_all_roles(),
                              button="delete role")
    if id_role != "0" and name_role:
        # step 2: show what will be removed and ask for confirmation
        subtitle = 'step 2 - confirm delete of role'
        output += roledetails(id_role=id_role)
        output += createhiddenform(
            action="deleterole",
            text='delete role <strong>%s</strong> and all connections?' % (
                name_role, ),
            id_role=id_role,
            confirm=1)
        if confirm:
            # step 3: perform the deletion and report the outcome
            removed = acca.acc_delete_role(id_role=id_role)
            subtitle = 'step 3 - confirm role deleted'
            if removed:
                output += "<p>confirm: role <strong>%s</strong> deleted.<br />" % (
                    name_role, )
                output += "<strong>%s</strong> entries were removed.</p>" % (
                    removed, )
            else:
                output += "<p>sorry, the role could not be deleted.</p>"
    elif id_role != "0":
        # an id was given but no such role exists (any longer)
        output += '<p>the role has been deleted...</p>'
    return index(req=req,
                 title=title,
                 subtitle=subtitle,
                 body=[output],
                 adminarea=3)
def perform_showroledetails(req, id_role):
    """Show a role's record together with its connected users and actions."""
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    # the role selection box is always shown
    output = createroleselect(id_role=id_role,
                              action="showroledetails",
                              step=1,
                              roles=acca.acc_get_all_roles(),
                              button="select role")
    if id_role in [0, '0']:
        # nothing selected yet
        output += '<p>no details to show</p>'
        body = [output]
    else:
        name_role = acca.acc_get_role_name(id_role=id_role)
        output += roledetails(id_role=id_role)
        # right-hand shortcuts operating on the selected role
        extra = """
        <dl>
        <dt><a href="modifyrole?id_role=%(id_role)s">Modify role</a><dt>
        <dd>modify the role you are seeing</dd>
        <dt><a href="addauthorization?id_role=%(id_role)s">
        Add new authorization</a></dt>
        <dd>add an authorization.</dd>
        <dt><a href="modifyauthorizations?id_role=%(id_role)s">
        Modify authorizations</a></dt>
        <dd>modify existing authorizations.</dd>
        </dl>
        <dl>
        <dt><a href="adduserrole?id_role=%(id_role)s">Connect user</a></dt>
        <dd>connect a user to role %(name_role)s.</dd>
        <dt><a href="deleteuserrole?id_role=%(id_role)s">Remove user</a></dt>
        <dd>remove a user from role %(name_role)s.</dd>
        </dl>
        """ % {'id_role': id_role, 'name_role': name_role}
        body = [output, extra]
    return index(req=req,
                 title='Show Role Details',
                 subtitle='show role details',
                 body=body,
                 adminarea=3)
def roledetails(id_role=0):
    """Build the HTML fragment describing a single role.

    The fragment contains the role's own record, the users attached to
    it and the authorizations (actions plus argument combinations).
    """
    name_role = acca.acc_get_role_name(id_role=id_role)
    # users connected to the role, each with a link to its detail page
    connected_users = []
    for (uid, email, dummy) in acca.acc_get_role_users(id_role):
        connected_users.append(
            [uid, email,
             '<a href="showuserdetails?id_user=%s">show user details</a>' %
             (uid, )])
    usertable = tupletotable(header=['id', 'email'], tuple=connected_users,
                             highlight_rows_p=True,
                             alternate_row_colors_p=True)
    # actions connected to the role, each with its argument combinations
    connected_actions = []
    for (action_id, action_name, dummy) in acca.acc_get_role_actions(id_role):
        possible = acca.acc_find_possible_actions(id_role, action_id)
        if possible:
            authorization_details = tupletotable(header=possible[0],
                                                 tuple=possible[1:])
        else:
            authorization_details = 'no details to show'
        connected_actions.append(
            [action_id, action_name, authorization_details,
             '<a href="showactiondetails?id_role=%s&id_action=%s">'
             'show action details</a>' % (id_role, action_id)])
    actiontable = tupletotable(
        header=['id', 'name', 'parameters', ''], tuple=connected_actions)
    # the role record itself
    details = '<p>role details:</p>'
    role_details = acca.acc_get_role_details(id_role=id_role)
    if role_details[3] is None:
        role_details[3] = ''
    # Hack for preformatting firerole rules
    role_details[3] = role_details[3].replace('\n', '<br />')
    details += tupletotable(header=['id', 'name', 'description',
                                    'firewall like role definition'],
                            tuple=[role_details])
    # connected users section
    details += '<p>users connected to %s:</p>' % (
        headerstrong(role=name_role, query=0), )
    if connected_users:
        details += usertable
    else:
        details += '<p>no users connected.</p>'
    # connected authorizations section
    details += '<p>authorizations for %s:</p>' % (
        headerstrong(role=name_role, query=0), )
    if connected_actions:
        details += actiontable
    else:
        details += '<p>no authorizations connected</p>'
    return details
def perform_adduserrole(req, id_role='0', email_user_pattern='', id_user='0',
                        confirm=0):
    """Wizard to connect a user to a role (role selected first).

    req                - mod_python request object
    id_role            - id of the role to add the user to
    email_user_pattern - pattern used to search for candidate users
    id_user            - id of the user to add to the role (negative ids
                         mark users that are already connected)
    confirm            - set once the administrator confirmed the last step
    """
    rlike_op = rlike()
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    email_out = acca.acc_get_user_email(id_user=id_user)
    name_role = acca.acc_get_role_name(id_role=id_role)
    title = 'Connect user to role '
    subtitle = 'step 1 - select a role'
    # step 1: the role selection box is always shown
    output = createroleselect(id_role=id_role,
                              action="adduserrole",
                              step=1,
                              roles=acca.acc_get_all_roles())
    # role is selected
    if id_role != "0":
        title += name_role
        subtitle = 'step 2 - search for users'
        # remove letters not allowed in an email
        email_user_pattern = cleanstring_email(email_user_pattern)
        text = ' <span class="adminlabel">2. search for user </span>\n'
        text += ' <input class="admin_wvar" type="text" name="email_user_pattern" value="%s" />\n' % (
            email_user_pattern, )
        output += createhiddenform(action="adduserrole",
                                   text=text,
                                   button="search for users",
                                   id_role=id_role)
        # pattern is entered
        if email_user_pattern:
            # users with matching email-address
            try:
                users1 = run_sql("""SELECT id, email FROM "user"
                    WHERE email<>'' AND email """ +
                                 rlike_op + """ %s ORDER BY email """,
                                 (email_user_pattern, ))
            except OperationalError:
                users1 = ()
            # users that are connected to this role already
            try:
                users2 = run_sql("""SELECT DISTINCT u.id, u.email
                    FROM "user" u LEFT JOIN "user_accROLE" ur ON u.id = ur.id_user
                    WHERE ur."id_accROLE" = %s AND u.email """ + rlike_op + """ %s
                    ORDER BY u.email """, (id_role, email_user_pattern))
            except OperationalError:
                users2 = ()
            # no users that match the pattern
            if not (users1 or users2):
                output += '<p>no qualified users, try new search.</p>'
            elif len(users1) > MAXSELECTUSERS:
                output += '<p><strong>%s hits</strong>, too many qualified users, specify more narrow search. (limit %s)</p>' % (
                    len(users1), MAXSELECTUSERS)
            # show matching users
            else:
                subtitle = 'step 3 - select a user'
                users = []
                extrausers = []
                # already-connected users go to the "extra" list with a
                # negated id so selecting one can be detected below
                for (user_id, email) in users1:
                    if (user_id, email) not in users2:
                        users.append([user_id, email, ''])
                for (user_id, email) in users2:
                    extrausers.append([-user_id, email, ''])
                output += createuserselect(id_user=id_user,
                                           action="adduserrole",
                                           step=3,
                                           users=users,
                                           extrausers=extrausers,
                                           button="add this user",
                                           id_role=id_role,
                                           email_user_pattern=email_user_pattern)
                try:
                    id_user = int(id_user)
                except ValueError:
                    pass
                # user selected already connected to role (negative id)
                if id_user < 0:
                    output += '<p>users in brackets are already attached to the role, try another one...</p>'
                # a user is selected
                elif email_out:
                    subtitle = "step 4 - confirm to add user"
                    output += createhiddenform(action="adduserrole",
                                               text='add user <strong>%s</strong> to role <strong>%s</strong>?' % (
                                                   email_out, name_role),
                                               id_role=id_role,
                                               email_user_pattern=email_user_pattern,
                                               id_user=id_user,
                                               confirm=1)
                    # it is confirmed that this user should be added
                    if confirm:
                        # add user
                        result = acca.acc_add_user_role(
                            id_user=id_user, id_role=id_role)
                        if result and result[2]:
                            subtitle = 'step 5 - confirm user added'
                            output += '<p>confirm: user <strong>%s</strong> added to role <strong>%s</strong>.</p>' % (
                                email_out, name_role)
                        else:
                            subtitle = 'step 5 - user could not be added'
                            output += '<p>sorry, but user could not be added.</p>'
    # right-hand shortcuts, extended once a role is selected
    extra = """
    <dl>
    <dt><a href="addrole">Create new role</a></dt>
    <dd>go here to add a new role.</dd>
    </dl>
    """
    if str(id_role) != "0":
        extra += """
    <dl>
    <dt><a href="deleteuserrole?id_role=%s">Remove users</a></dt>
    <dd>remove users from role %s.</dd>
    <dt><a href="showroleusers?id_role=%s">Connected users</a></dt>
    <dd>show all users connected to role %s.</dd>
    </dl>
    <dl>
    <dt><a href="addauthorization?id_role=%s">Add authorization</a></dt>
    <dd>start adding new authorizations to role %s.</dd>
    </dl>
    """ % (id_role, name_role, id_role, name_role, id_role, name_role)
    return index(req=req,
                 title=title,
                 subtitle=subtitle,
                 body=[output, extra],
                 adminarea=3)
def perform_addroleuser(req, email_user_pattern='', id_user='0', id_role='0',
                        confirm=0):
    """Wizard to connect a role to a user (user selected first).

    req                - mod_python request object
    email_user_pattern - pattern used to search for candidate users
    id_user            - id of the user to receive the role
    id_role            - id of the role to add (negative ids mark roles
                         the user already holds)
    confirm            - set once the administrator confirmed the last step
    """
    rlike_op = rlike()
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    email_out = acca.acc_get_user_email(id_user=id_user)
    name_role = acca.acc_get_role_name(id_role=id_role)
    # used to sort roles, and also to determine right side links
    con_roles = []
    not_roles = []
    title = 'Connect user to roles'
    subtitle = 'step 1 - search for users'
    # clean email search string
    email_user_pattern = cleanstring_email(email_user_pattern)
    text = ' <span class="adminlabel">1. search for user </span>\n'
    text += ' <input class="admin_wvar" type="text" name="email_user_pattern" value="%s" />\n' % (
        email_user_pattern, )
    output = createhiddenform(action='addroleuser',
                              text=text,
                              button='search for users',
                              id_role=id_role)
    if email_user_pattern:
        subtitle = 'step 2 - select user'
        try:
            users1 = run_sql("""SELECT id, email FROM "user"
                WHERE email<>'' AND email """ + rlike_op +
                             """ %s ORDER BY email """,
                             (email_user_pattern, ))
        except OperationalError:
            users1 = ()
        users = []
        for (id, email) in users1:
            users.append([id, email, ''])
        # no users
        if not users:
            output += '<p>no qualified users, try new search.</p>'
        # too many users
        elif len(users) > MAXSELECTUSERS:
            output += '<p><strong>%s hits</strong>, too many qualified users, specify more narrow search. (limit %s)</p>' % (
                len(users), MAXSELECTUSERS)
        # ok number of users
        else:
            output += createuserselect(id_user=id_user,
                                       action='addroleuser',
                                       step=2,
                                       users=users,
                                       button='select user',
                                       email_user_pattern=email_user_pattern)
            if int(id_user):
                subtitle = 'step 3 - select role'
                # roles the user is connected to
                role_ids = acca.acc_get_user_roles(id_user=id_user)
                # all the roles, lists are sorted on the background of these...
                all_roles = acca.acc_get_all_roles()
                # sort the roles in connected and not connected roles;
                # connected roles get a negated id so selecting one can be
                # detected below
                for (id, name, description, dummy, dummy) in all_roles:
                    if id in role_ids:
                        con_roles.append([-id, name, description])
                    else:
                        not_roles.append([id, name, description])
                # create roleselect
                output += createroleselect(
                    id_role=id_role,
                    action='addroleuser',
                    step=3,
                    roles=not_roles,
                    extraroles=con_roles,
                    extrastamp='(connected)',
                    button='add this role',
                    email_user_pattern=email_user_pattern,
                    id_user=id_user)
                # negative id: the user already holds this role
                if int(id_role) < 0:
                    name_role = acca.acc_get_role_name(id_role=-int(id_role))
                    output += '<p>role %s already connected to the user, try another one...<p>' % (
                        name_role, )
                elif int(id_role):
                    subtitle = 'step 4 - confirm to add role to user'
                    output += createhiddenform(
                        action='addroleuser',
                        text='add role <strong>%s</strong> to user <strong>%s</strong>?' % (
                            name_role, email_out),
                        email_user_pattern=email_user_pattern,
                        id_user=id_user,
                        id_role=id_role,
                        confirm=1)
                    if confirm:
                        # add role
                        result = acca.acc_add_user_role(
                            id_user=id_user, id_role=id_role)
                        if result and result[2]:
                            subtitle = 'step 5 - confirm role added'
                            output += '<p>confirm: role <strong>%s</strong> added to user <strong>%s</strong>.</p>' % (
                                name_role, email_out)
                        else:
                            subtitle = 'step 5 - role could not be added'
                            output += '<p>sorry, but role could not be added</p>'
    # right-hand shortcuts, extended depending on the current selection
    extra = """
    <dl>
    <dt><a href="addrole">Create new role</a></dt>
    <dd>go here to add a new role.</dd>
    """
    if int(id_user) and con_roles:
        extra += """
    </dl>
    <dl>
    <dt><a href="deleteuserrole?id_user=%s&reverse=1">Remove roles</a></dt>
    <dd>disconnect roles from user %s.</dd>
    </dl>
    """ % (id_user, email_out)
    if int(id_role):
        if int(id_role) < 0:
            id_role = -int(id_role)
        extra += """
    <dl>
    <dt><a href="deleteuserrole?id_role=%s">Remove users</a></dt>
    <dd>disconnect users from role %s.<dd>
    </dl>
    """ % (id_role, name_role)
    return index(req=req,
                 title=title,
                 subtitle=subtitle,
                 body=[output, extra],
                 adminarea=5)
def perform_deleteuserrole(req, id_role='0', id_user='0', reverse=0,
                           confirm=0):
    """Wizard to delete a connection between a role and a user.

    req     - mod_python request object
    id_role - id of the role to disconnect
    id_user - id of the user to disconnect
    reverse - 0: select the role first, otherwise select the user first
    confirm - set once the administrator confirmed the deletion
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    title = 'Remove user from role'
    email_user = acca.acc_get_user_email(id_user=id_user)
    name_role = acca.acc_get_role_name(id_role=id_role)
    output = ''
    if reverse in [0, '0']:
        # role-first direction: pick a role, then one of its users
        adminarea = 3
        subtitle = 'step 1 - select the role'
        output += createroleselect(id_role=id_role,
                                   action="deleteuserrole",
                                   step=1,
                                   roles=acca.acc_get_all_roles())
        if id_role != "0":
            subtitle = 'step 2 - select the user'
            output += createuserselect(id_user=id_user,
                                       action="deleteuserrole",
                                       step=2,
                                       users=acca.acc_get_role_users(
                                           id_role=id_role),
                                       id_role=id_role)
    else:
        # user-first direction
        adminarea = 5
        # show only if user is connected to a role, get users connected to
        # roles
        # NOTE(review): the comparison is against the *string* 'NULL', not
        # SQL NULL -- looks suspicious but kept as-is; verify against schema
        users = run_sql("""SELECT DISTINCT(u.id), u.email, u.note
        FROM "user" u LEFT JOIN "user_accROLE" ur
        ON u.id = ur.id_user
        WHERE ur."id_accROLE" != 'NULL' AND u.email != ''
        ORDER BY u.email """)
        has_roles = 1
        # check if the user is connected to any roles;
        # the for/else runs the else branch only when no break occurred,
        # i.e. the selected user was not found among the connected users
        for (id, email, note) in users:
            if str(id) == str(id_user):
                break
        # user not connected to a role
        else:
            subtitle = 'step 1 - user not connected'
            output += '<p>no need to remove roles from user <strong>%s</strong>,<br />user is not connected to any roles.</p>' % (
                email_user, )
            has_roles, id_user = 0, '0' # stop the rest of the output below...
        # user connected to roles
        if has_roles:
            output += createuserselect(id_user=id_user,
                                       action="deleteuserrole",
                                       step=1,
                                       users=users,
                                       reverse=reverse)
            if id_user != "0":
                subtitle = 'step 2 - select the role'
                role_ids = acca.acc_get_user_roles(id_user=id_user)
                all_roles = acca.acc_get_all_roles()
                roles = []
                # keep only the roles the selected user actually holds
                for (id, name, desc, dummy, dummy) in all_roles:
                    if id in role_ids:
                        roles.append([id, name, desc])
                output += createroleselect(id_role=id_role,
                                           action="deleteuserrole",
                                           step=2,
                                           roles=roles,
                                           id_user=id_user,
                                           reverse=reverse)
    # both sides selected (whichever direction was used): confirm and delete
    if id_role != '0' and id_user != '0':
        subtitle = 'step 3 - confirm delete of user'
        output += createhiddenform(
            action="deleteuserrole",
            text='delete user %s from %s?' % (
                headerstrong(user=id_user), headerstrong(role=id_role)),
            id_role=id_role,
            id_user=id_user,
            reverse=reverse,
            confirm=1)
        if confirm:
            res = acca.acc_delete_user_role(id_user=id_user, id_role=id_role)
            if res:
                subtitle = 'step 4 - confirm delete of user'
                output += '<p>confirm: deleted user <strong>%s</strong> from role <strong>%s</strong>.</p>' % (
                    email_user, name_role)
            else:
                subtitle = 'step 4 - user could not be deleted'
                output += 'sorry, but user could not be deleted<br />user is probably already deleted.'
    # right-hand shortcuts, depending on what is selected
    extra = ''
    if str(id_role) != "0":
        extra += """
    <dl>
    <dt><a href="adduserrole?id_role=%s">Connect user</a></dt>
    <dd>add users to role %s.</dd>
    """ % (id_role, name_role)
        if int(reverse):
            extra += """
    <dt><a href="deleteuserrole?id_role=%s">Remove user</a></dt>
    <dd>remove users from role %s.</dd> """ % (id_role, name_role)
        extra += '</dl>'
    if str(id_user) != "0":
        extra += """
    <dl>
    <dt><a href="addroleuser?email_user_pattern=%s&id_user=%s">Connect role</a></dt>
    <dd>add roles to user %s.</dd>
    """ % (email_user, id_user, email_user)
        if not int(reverse):
            extra += """
    <dt><a href="deleteuserrole?id_user=%s&email_user_pattern=%s&reverse=1">Remove role</a></dt>
    <dd>remove roles from user %s.</dd> """ % (id_user, email_user, email_user)
        extra += '</dl>'
    if extra:
        body = [output, extra]
    else:
        body = [output]
    return index(req=req,
                 title=title,
                 subtitle=subtitle,
                 body=body,
                 adminarea=adminarea)
def perform_showuserdetails(req, id_user=0):
    """Show the roles connected to a single user."""
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    if id_user in [0, '0']:
        # no user selected yet
        body = ['<p>no details to show</p>']
    else:
        output = userdetails(id_user=id_user)
        email_user = acca.acc_get_user_email(id_user=id_user)
        # right-hand shortcuts operating on the selected user
        extra = """
        <dl>
        <dt><a href="addroleuser?id_user=%s&email_user_pattern=%s">Connect role</a></dt>
        <dd>connect a role to user %s.</dd>
        <dt><a href="deleteuserrole?id_user=%s&reverse=1">Remove role</a></dt>
        <dd>remove a role from user %s.</dd>
        </dl>
        """ % (id_user, email_user, email_user, id_user, email_user)
        body = [output, extra]
    return index(req=req,
                 title='Show User Details',
                 subtitle='show user details',
                 body=body,
                 adminarea=5)
def userdetails(id_user=0):
    """Build the HTML fragment listing the roles a user is connected to."""
    email_user = acca.acc_get_user_email(id_user=id_user)
    userroles = acca.acc_get_user_roles(id_user=id_user)
    # keep only the roles the user holds, adding a details link per row
    conn_roles = [
        [role_id, role_name, role_desc,
         '<a href="showroledetails?id_role=%s">show details</a>' %
         (role_id, )]
        for (role_id, role_name, role_desc, dummy1, dummy2)
        in acca.acc_get_all_roles()
        if role_id in userroles]
    if not conn_roles:
        return '<p>no roles connected to user <strong>%s</strong>.</p>' % (
            email_user, )
    details = '<p>roles connected to user <strong>%s</strong></p>' % (
        email_user, )
    details += tupletotable(header=['id', 'name',
                                    'description', ''], tuple=conn_roles)
    return details
def perform_addauthorization(req, id_role="0", id_action="0", optional=0, reverse="0", confirm=0, **keywords):
    """Wizard to create a new authorization connecting a role to an action.

    req       - mod_python request object
    id_role   - id of the role to connect
    id_action - id of the action to connect
    optional  - 'on' (checkbox) or int flag: authorize for any arguments
    reverse   - "0": select the role first, otherwise select the action first
    confirm   - set once the administrator confirmed the final step
    keywords  - values for the action's argument keywords
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    # values that might get used; fall back to the raw id when no name found
    name_role = acca.acc_get_role_name(id_role=id_role) or id_role
    name_action = acca.acc_get_action_name(id_action=id_action) or id_action
    # the "any arguments" checkbox posts 'on'; otherwise an int-like value
    optional = optional == 'on' and 1 or int(optional)
    extra = """
    <dl>
    <dt><a href="addrole">Create new role</a></dt>
    <dd>go here to add a new role.</dd>
    </dl>
    """
    # create the page according to which step the user is on
    # role -> action -> arguments
    if reverse in ["0", 0]:
        adminarea = 3
        subtitle = 'step 1 - select role'
        output = createroleselect(id_role=id_role,
                                  action="addauthorization",
                                  step=1,
                                  roles=acca.acc_get_all_roles(),
                                  reverse=reverse)
        if str(id_role) != "0":
            subtitle = 'step 2 - select action'
            # actions already connected to the role first, the rest as extras
            rolacts = acca.acc_get_role_actions(id_role)
            allhelp = acca.acc_get_all_actions()
            allacts = []
            for r in allhelp:
                if r not in rolacts:
                    allacts.append(r)
            output += createactionselect(id_action=id_action,
                                         action="addauthorization",
                                         step=2,
                                         actions=rolacts,
                                         extraactions=allacts,
                                         id_role=id_role,
                                         reverse=reverse)
    # action -> role -> arguments
    else:
        adminarea = 4
        subtitle = 'step 1 - select action'
        output = createactionselect(id_action=id_action,
                                    action="addauthorization",
                                    step=1,
                                    actions=acca.acc_get_all_actions(),
                                    reverse=reverse)
        if str(id_action) != "0":
            subtitle = 'step 2 - select role'
            # roles already connected to the action first, the rest as extras
            actroles = acca.acc_get_action_roles(id_action)
            allhelp = acca.acc_get_all_roles()
            allroles = []
            for r in allhelp:
                if r not in actroles:
                    allroles.append(r)
            output += createroleselect(id_role=id_role,
                                       action="addauthorization",
                                       step=2,
                                       roles=actroles,
                                       extraroles=allroles,
                                       id_action=id_action,
                                       reverse=reverse)
    # ready for step 3 no matter which direction we took to get here
    if id_action != "0" and id_role != "0":
        # links to adding authorizations in the other direction
        if str(reverse) == "0":
            extra += """
            <dl>
            <dt><a href="addauthorization?id_action=%s&reverse=1">Add authorization</a></dt>
            <dd>add authorizations to action %s.</dd>
            </dl> """ % (id_action, name_action)
        else:
            extra += """
            <dl>
            <dt><a href="addauthorization?id_role=%s">Add authorization</a></dt>
            <dd>add authorizations to role %s.</dd>
            </dl> """ % (id_role, name_role)
        subtitle = 'step 3 - enter values for the keywords\n'
        output += """
        <form action="addauthorization" method="POST">
        <input type="hidden" name="id_role" value="%s">
        <input type="hidden" name="id_action" value="%s">
        <input type="hidden" name="reverse" value="%s">
        """ % (id_role, id_action, reverse)
        # the actions argument keywords
        res_keys = acca.acc_get_action_keywords(id_action=id_action)
        # res_auths is used both to display existing authorizations and to
        # decide whether "create connection without arguments" is offered
        res_auths = acca.acc_find_possible_actions(id_role, id_action)
        if not res_keys:
            # action without arguments
            if not res_auths:
                output += """
                <input type="hidden" name="confirm" value="1">
                create connection between %s?
                <input class="adminbutton" type="submit" value="confirm">
                </form>
                """ % (headerstrong(role=name_role, action=name_action, query=0), )
            else:
                output += '<p><strong>connection without arguments is already created.</strong></p>'
        else:
            # action with arguments
            optionalargs = acca.acc_get_action_is_optional(id_action=id_action)
            output += '<span class="adminlabel">3. authorized arguments</span><br />'
            if optionalargs:
                # optional arguments: offer the "any arguments" radio choice
                output += """
                <p>
                <input type="radio" name="optional" value="1" %s />
                connect %s to %s for any arguments <br />
                <input type="radio" name="optional" value="0" %s />
                connect %s to %s for only these argument cases:
                </p>
                """ % (optional and 'checked="checked"' or '', name_role, name_action, not optional and 'checked="checked"' or '', name_role, name_action)
            # list the arguments; allkeys stays 1 only when every keyword
            # already carries a non-empty value
            allkeys = 1
            for key in res_keys:
                output += '<span class="adminlabel" style="margin-left: 30px;">%s </span>\n <input class="admin_wvar" type="text" name="%s"' % (
                    key, key)
                try:
                    # = cleanstring_argumentvalue(keywords[key])
                    val = keywords[key]
                    if val:
                        output += 'value="%s" ' % (escape(val, True), )
                    else:
                        allkeys = 0
                except KeyError:
                    allkeys = 0
                output += ' /> <br />\n'
            # replace the trailing ' <br />\n' with the submit button
            output = output[
                :-8] + ' <input class="adminbutton" type="submit" value="create authorization -->" />\n'
            output += '</form>\n'
            # ask for confirmation once all keywords are filled in (or the
            # administrator chose "any arguments")
            if str(allkeys) != "0" or optional:
                # list() is required: Python 3 dict views have no reverse();
                # behavior-identical under Python 2
                keys = list(keywords.keys())
                keys.reverse()
                subtitle = 'step 4 - confirm add of authorization\n'
                text = """
                create connection between <br />
                %s <br />
                """ % (headerstrong(role=name_role, action=name_action, query=0), )
                if optional:
                    # BUGFIX: message previously read 'withouth arguments'
                    text += 'without arguments'
                    keywords = {}
                else:
                    for key in keys:
                        text += '<strong>%s</strong>: %s \n' % (
                            escape(key), escape(keywords[key]))
                output += createhiddenform(action="addauthorization",
                                           text=text,
                                           id_role=id_role,
                                           id_action=id_action,
                                           reverse=reverse,
                                           confirm=1,
                                           optional=optional,
                                           **keywords)
        # show existing authorizations (res_auths found further up)
        output += '<p>existing authorizations:</p>'
        if res_auths:
            output += tupletotable(header=res_auths[0], tuple=res_auths[1:])
        # shortcut to modifying authorizations
        extra += """
        <dl>
        <dt><a href="modifyauthorizations?id_role=%s&id_action=%s&reverse=%s">Modify authorizations</a></dt>
        <dd>modify the existing authorizations.</dd>
        </dl> """ % (id_role, id_action, reverse)
    else:
        output += '<p>no details to show</p>'
    # user confirmed to add entries
    if confirm:
        subtitle = 'step 5 - confirm authorization added'
        res1 = acca.acc_add_authorization(name_role=name_role,
                                          name_action=name_action,
                                          optional=optional,
                                          **keywords)
        if res1:
            res2 = acca.acc_find_possible_actions(id_role, id_action)
            arg = res1[0][3]  # the arglistid
            # keep only the rows belonging to the newly created argument list
            new = [res2[0]]
            for row in res2[1:]:
                if int(row[0]) == int(arg):
                    new.append(row)
            newauths = tupletotable(header=new[0], tuple=new[1:])
            newentries = tupletotable(
                header=['role id', 'action id', 'argument id', '#'], tuple=res1)
            st = 'style="vertical-align: top"'
            output += """
            <p>new authorization and entries:</p>
            <table><tr>
            <td class="admintd" %s>%s</td>
            <td class="admintd" %s>%s</td>
            </tr></table> """ % (st, newauths, st, newentries)
        else:
            output += '<p>sorry, authorization could not be added,<br />it probably already exists</p>'
    # trying to put extra link on the right side
    try:
        body = [output, extra]
    except NameError:
        body = [output]
    return index(req=req,
                 title='Create entry for new authorization',
                 subtitle=subtitle,
                 body=body,
                 adminarea=adminarea)
def perform_deleteroleaction(req, id_role="0", id_action="0", reverse=0,
                             confirm=0):
    """Wizard to delete all connections between a role and an action.

    req       - mod_python request object
    id_role   - id of the role
    id_action - id of the action
    reverse   - 0: select the role first
                1: select the action first
    confirm   - set once the administrator confirmed the removal
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    title = 'Remove action from role '
    if reverse in ["0", 0]:
        # select role -> action
        adminarea = 3
        subtitle = 'step 1 - select a role'
        output = createroleselect(id_role=id_role,
                                  action="deleteroleaction",
                                  step=1,
                                  roles=acca.acc_get_all_roles(),
                                  reverse=reverse)
        if id_role != "0":
            rolacts = acca.acc_get_role_actions(id_role=id_role)
            subtitle = 'step 2 - select the action'
            output += createactionselect(id_action=id_action,
                                         action="deleteroleaction",
                                         step=2,
                                         actions=rolacts,
                                         reverse=reverse,
                                         id_role=id_role,
                                         button="remove connection and all authorizations")
    else:
        # select action -> role
        adminarea = 4
        subtitle = 'step 1 - select an action'
        output = createactionselect(id_action=id_action,
                                    action="deleteroleaction",
                                    step=1,
                                    actions=acca.acc_get_all_actions(),
                                    reverse=reverse)
        if id_action != "0":
            actroles = acca.acc_get_action_roles(id_action=id_action)
            subtitle = 'step 2 - select the role'
            output += createroleselect(id_role=id_role,
                                       action="deleteroleaction",
                                       step=2,
                                       roles=actroles,
                                       button="remove connection and all authorizations",
                                       id_action=id_action,
                                       reverse=reverse)
    # both sides selected (whichever direction was used)
    if id_action != "0" and id_role != "0":
        subtitle = 'step 3 - confirm to remove authorizations'
        # ask for confirmation, listing what would be deleted
        res = acca.acc_find_possible_actions(id_role, id_action)
        if res:
            output += '<p>authorizations that will be deleted:</p>'
            output += tupletotable(header=res[0], tuple=res[1:])
            output += createhiddenform(action="deleteroleaction",
                                       text='remove %s from %s' % (
                                           headerstrong(action=id_action), headerstrong(role=id_role)),
                                       confirm=1,
                                       id_role=id_role,
                                       id_action=id_action,
                                       reverse=reverse)
        else:
            output += 'no authorizations'
        # confirmation is given
        if confirm:
            subtitle = 'step 4 - confirm authorizations removed '
            res = acca.acc_delete_role_action(
                id_role=id_role, id_action=id_action)
            if res:
                output += '<p>confirm: removed %s from %s<br />' % (
                    headerstrong(action=id_action), headerstrong(role=id_role))
                output += '<strong>%s</strong> entries were removed.</p>' % (
                    res, )
            else:
                output += '<p>sorry, no entries could be removed.</p>'
    return index(req=req,
                 title=title,
                 subtitle=subtitle,
                 body=[output],
                 adminarea=adminarea)
def perform_modifyauthorizations(req, id_role="0", id_action="0", reverse=0,
                                 confirm=0, errortext='', sel='', authids=[]):
    """Modify the authorizations connecting a role to an action.

    Given the ids of a role and an action, show all argument combinations
    with checkboxes and let the administrator split, merge or delete them.

    req       - mod_python request object
    id_role   - id of the role
    id_action - id of the action
    reverse   - 0: ask for role first
                1: ask for action first
    confirm   - set once the selected modification has been confirmed
    errortext - text to print when no connection exists between role/action
    sel       - which button (modification) was selected
    authids   - ids of checked checkboxes

    NOTE(review): authids uses a mutable default ([]); it is only ever
    rebound here (never mutated in place), so this is harmless as written.
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)
    name_role = acca.acc_get_role_name(id_role)
    name_action = acca.acc_get_action_name(id_action)
    output = ''
    # form values arrive as strings; keep them unchanged if not int-like
    try:
        id_role, id_action, reverse = int(
            id_role), int(id_action), int(reverse)
    except ValueError:
        pass
    # right-hand shortcuts, extended depending on the current selection
    extra = """
    <dl>
    <dt><a href="addrole">Create new role</a></dt>
    <dd>go here to add a new role.</dd>
    </dl>
    """
    if id_role or id_action:
        extra += '\n<dl>\n'
        if id_role and id_action:
            extra += """
            <dt><a href="addauthorization?id_role=%s&id_action=%s&reverse=%s">Add authorizations</a></dt>
            <dd>add an authorization to the existing ones.</dd> """ % \
                (id_role, id_action, reverse)
        if id_role:
            extra += """
            <dt><a href="addauthorization?id_role=%s">Add authorizations</a></dt>
            <dd>add to role %s.</dd> """ % (id_role, name_role)
        if id_action:
            extra += """
            <dt><a href="addauthorization?id_action=%s&reverse=1">Add authorizations</a></dt>
            <dd>add to action %s.</dd> """ % (id_action, name_action)
        extra += '\n</dl>\n'
    if not reverse:
        # role -> action
        adminarea = 3
        subtitle = 'step 1 - select the role'
        output += createroleselect(id_role=str(id_role),
                                   action="modifyauthorizations",
                                   step=1,
                                   roles=acca.acc_get_all_roles(),
                                   reverse=reverse)
        if id_role:
            rolacts = acca.acc_get_role_actions(id_role=id_role)
            subtitle = 'step 2 - select the action'
            output += createactionselect(id_action=str(id_action),
                                         action="modifyauthorizations",
                                         step=2,
                                         actions=rolacts,
                                         id_role=id_role,
                                         reverse=reverse)
    else:
        adminarea = 4
        # action -> role
        subtitle = 'step 1 - select the action'
        output += createactionselect(id_action=str(id_action),
                                     action="modifyauthorizations",
                                     step=1,
                                     actions=acca.acc_get_all_actions(),
                                     reverse=reverse)
        if id_action:
            actroles = acca.acc_get_action_roles(id_action=id_action)
            subtitle = 'step 2 - select the role'
            output += createroleselect(id_role=str(id_role),
                                       action="modifyauthorizations",
                                       step=2,
                                       roles=actroles,
                                       id_action=id_action,
                                       reverse=reverse)
    if errortext:
        output += '<p>%s</p>' % (errortext, )
    if id_role and id_action:
        # adding to main area
        # a single checkbox arrives as a scalar, not a list
        if type(authids) is not list:
            authids = [authids]
        subtitle = 'step 3 - select groups and modification'
        # get info
        res = acca.acc_find_possible_actions(id_role, id_action)
        # clean the authids: for split/merge, drop checkboxes whose first
        # column marks an ungrouped entry
        hiddenids = []
        if sel in ['delete selected']:
            hiddenids = authids[:]
        elif sel in ['split groups', 'merge groups']:
            for authid in authids:
                arghlp = res[int(authid)][0]
                if authid not in hiddenids and arghlp not in [-1, '-1', 0, '0']:
                    hiddenids.append(authid)
            authids = hiddenids[:]
        if confirm:
            # do selected modification and output with new authorizations
            if sel == 'split groups':
                res = splitgroups(id_role, id_action, authids)
            elif sel == 'merge groups':
                res = mergegroups(id_role, id_action, authids)
            elif sel == 'delete selected':
                res = deleteselected(id_role, id_action, authids)
            authids = []
            res = acca.acc_find_possible_actions(id_role, id_action)
            output += 'authorizations after <strong>%s</strong>.<br />\n' % (
                sel, )
        elif sel and authids:
            output += 'confirm choice of authorizations and modification.<br />\n'
        else:
            output += 'select authorizations and perform modification.<br />\n'
        if not res:
            # everything is gone: restart the wizard with an explanation
            errortext = 'all connections deleted, try different '
            if reverse in ["0", 0]:
                return perform_modifyauthorizations(req=req, id_role=id_role, errortext=errortext + 'action.')
            else:
                return perform_modifyauthorizations(req=req, id_action=id_action, reverse=reverse, errortext=errortext + 'role.')
        # display
        output += modifyauthorizationsmenu(
            id_role, id_action, header=res[0], tuple=res[1:], checked=authids, reverse=reverse)
        if sel and authids:
            subtitle = 'step 4 - confirm to perform modification'
            # form with hidden authids
            output += '<form action="%s" method="POST">\n' % (
                'modifyauthorizations', )
            for hiddenid in hiddenids:
                output += '<input type="hidden" name="authids" value="%s" />\n' % (
                    hiddenid, )
            # choose what to do
            if sel == 'split groups':
                output += '<p>split groups containing:</p>'
            elif sel == 'merge groups':
                output += '<p>merge groups containing:</p>'
            elif sel == 'delete selected':
                output += '<p>delete selected entries:</p>'
            extracolumn = '<input type="checkbox" name="confirm" value="1" />\n'
            extracolumn += '<input class="adminbutton" type="submit" value="confirm" />\n'
            # show the entries here...
            output += tupletotable_onlyselected(header=res[0],
                                                tuple=res[1:],
                                                selected=hiddenids,
                                                extracolumn=extracolumn)
            output += '<input type="hidden" name="id_role" value="%s" />\n' \
                % (id_role, )
            output += '<input type="hidden" name="id_action" value="%s" />\n' \
                % (id_action, )
            output += '<input type="hidden" name="sel" value="%s" />\n' \
                % (sel, )
            output += '<input type="hidden" name="reverse" value="%s" />\n' \
                % (reverse, )
            output += '</form>'
        # tried to perform modification without something selected
        elif sel and not authids and not confirm:
            output += '<p>no valid groups selected</p>'
    # trying to put extra link on the right side
    try:
        body = [output, extra]
    except NameError:
        body = [output]
    # Display the page
    return index(req=req,
                 title='Modify Authorizations',
                 subtitle=subtitle,
                 body=body,
                 adminarea=adminarea)
def modifyauthorizationsmenu(id_role, id_action, tuple=[], header=[],
                             checked=[], reverse=0):
    """Build a multiple-choice table of authorizations with checkboxes.

    Wraps the rows in a form that posts back to modifyauthorizations and
    adds one checkbox per row plus the action buttons in a rowspan cell.

    id_role - selected role, passed on as a hidden form value
    id_action - selected action, passed on as a hidden form value
    tuple - all rows to be put in the table (with checkboxes)
    header - column headers, empty cells added at start and end
    checked - ids of rows whose checkboxes should start out checked
    reverse - passed on as a hidden form value
    """
    if not tuple:
        return 'no authorisations...'

    keyword_count = len(acca.acc_get_action_keywords(id_action=id_action))

    # work on shallow copies of the rows so the caller's data is untouched
    rows = [row[:] for row in tuple]
    rows = addcheckboxes(datalist=rows, name='authids', startindex=1,
                         checked=checked)

    hidden_fields = ''
    for field, val in (('id_role', id_role),
                       ('id_action', id_action),
                       ('reverse', reverse)):
        hidden_fields += '<input type="hidden" name="%s" value="%s" /> \n' \
            % (field, val)

    buttons = '<input type="submit" class="adminbutton" ' \
              'value="delete selected" name="sel" />\n'
    # splitting/merging only makes sense for actions with several keywords
    if keyword_count > 1:
        buttons += '<input type="submit" class="adminbutton" ' \
                   'value="split groups" name="sel" />\n'
        buttons += '<input type="submit" class="adminbutton" ' \
                   'value="merge groups" name="sel" />\n'

    header_cells = ''
    for cell in [''] + header + ['']:
        header_cells += ' <th class="adminheader">%s</th>\n' % (cell, )
    if header_cells:
        header_cells = ' <tr>\n%s\n </tr>\n' % (header_cells, )

    output = '<form action="modifyauthorizations" method="POST">\n'
    output += '<table class="admin_wvar_nomargin"> \n'
    output += header_cells
    output += '<tr><td>%s</td></tr>\n' % (hidden_fields, )

    align = ['admintdleft'] * len(rows[0])
    if len(align) > 1:
        align[1] = 'admintdright'

    # the first data row also carries the button cell spanning all rows
    output += '<tr>'
    for col, cell in enumerate(rows[0]):
        output += '<td class="%s">%s</td>\n' % (align[col], cell)
    output += '<td rowspan="%s" style="vertical-align: bottom">\n%s\n</td>\n' \
        % (len(rows), buttons)
    output += '</tr>\n'

    for row in rows[1:]:
        output += ' <tr>\n'
        for col, cell in enumerate(row):
            output += '<td class="%s">%s</td>\n' % (align[col], cell)
        output += ' </tr>\n'

    output += '</table>\n</form>\n'
    return output
def splitgroups(id_role=0, id_action=0, authids=[]):
    """Split the argument groups selected in authids.

    Looks up all possible actions for the role/action pair, collects the
    distinct argument-list ids of the selected rows and asks the access
    control engine to split each of those groups.

    id_role - role the authorizations belong to
    id_action - action the authorizations belong to
    authids - selected row indexes into the possible-actions list
        (1-based; row 0 is the header row)

    Returns 1 if all splits succeeded, 0 on failure or bad input.
    """
    if not id_role or not id_action or not authids:
        return 0

    # find all the actions
    datalist = acca.acc_find_possible_actions(id_role, id_action)

    if type(authids) is str:
        authids = [authids]
    for i in range(len(authids)):
        authids[i] = int(authids[i])

    # argumentlistids of groups to be split (distinct).
    # guard the index BEFORE using it: the original indexed datalist first,
    # which raised IndexError for out-of-range ids.
    splitgrps = []
    for authid in authids:
        if authid not in range(1, len(datalist)):
            continue
        arglistid = datalist[authid][0]
        if arglistid not in splitgrps:
            splitgrps.append(arglistid)

    # split every group; succeed only if *all* splits succeeded.
    # (the original `result = 1 and ...` only reported the last call's status)
    result = 1
    for splitgroup in splitgrps:
        if not acca.acc_split_argument_group(id_role, id_action, splitgroup):
            result = 0
    return result
def mergegroups(id_role=0, id_action=0, authids=[]):
    """Merge the argument groups selected in authids.

    Looks up all possible actions for the role/action pair, collects the
    distinct argument-list ids of the selected rows and asks the access
    control engine to merge them into one group.

    id_role - role the authorizations belong to
    id_action - action the authorizations belong to
    authids - selected row indexes into the possible-actions list
        (1-based; row 0 is the header row)

    Returns 1 on success, 0 on failure or bad input.
    """
    if not id_role or not id_action or not authids:
        return 0

    datalist = acca.acc_find_possible_actions(id_role, id_action)

    if type(authids) is str:
        authids = [authids]
    for i in range(len(authids)):
        authids[i] = int(authids[i])

    # argumentlistids of groups to be merged (distinct).
    # the local is named merge_ids so it no longer shadows this function,
    # and the index is guarded BEFORE use (the original indexed datalist
    # first, raising IndexError for out-of-range ids).
    merge_ids = []
    for authid in authids:
        if authid not in range(1, len(datalist)):
            continue
        arglistid = datalist[authid][0]
        if arglistid not in merge_ids:
            merge_ids.append(arglistid)

    # merge groups and return success or failure
    if acca.acc_merge_argument_groups(id_role, id_action, merge_ids):
        return 1
    return 0
def deleteselected(id_role=0, id_action=0, authids=[]):
    """delete checked authorizations/possible actions, ids in authids.

    id_role - role to delete from
    id_action - action to delete from
    authids - listids for which possible actions to delete.
    """
    if not id_role or not id_action or not authids:
        return 0

    # normalise to a list of ints (the form may send a single value)
    if type(authids) in [str, int]:
        authids = [authids]
    for index, authid in enumerate(authids):
        authids[index] = int(authid)

    return acca.acc_delete_possible_actions(id_role=id_role,
                                            id_action=id_action,
                                            authids=authids)
def headeritalic(**ids):
    """transform keyword=value pairs to string with value in italics.

    For User/Role/Action keys the id is resolved to its email/name by a
    database lookup; any other key is shown with the raw value.

    **ids - a dictionary of pairs to create string from
    """
    output = ''
    value = ''
    table = ''
    for key in ids.keys():
        if key in ['User', 'user']:
            value, table = 'email', 'user'
        elif key in ['Role', 'role']:
            value, table = 'name', 'accROLE'
        elif key in ['Action', 'action']:
            value, table = 'name', 'accACTION'
        else:
            if output:
                output += ' and '
            output += ' %s <i>%s</i>' % (key, ids[key])
            continue

        # The column name cannot be supplied as a bound query parameter:
        # binding would make the query return the literal string ('email' /
        # 'name') instead of the column value.  Wash it and interpolate it
        # into the statement instead; only the id is bound.
        res = run_sql("""SELECT "%s" FROM "%s" WHERE id = %%s"""
                      % (wash_table_column_name(value),
                         wash_table_column_name(table)),
                      (ids[key], ))  # kwalitee: disable=sql
        if res:
            if output:
                output += ' and '
            output += ' %s <i>%s</i>' % (key, res[0][0])
    return output
def headerstrong(query=1, **ids):
    """transform keyword=value pairs to string with value in strong text.

    **ids - a dictionary of pairs to create string from
    query - 1 -> try to find names to ids of role, user and action.
            0 -> do not try to find names, use the value passed on
    """
    output = ''
    value = ''
    table = ''
    for key in ids.keys():
        if key in ['User', 'user']:
            value, table = 'email', 'user'
        elif key in ['Role', 'role']:
            value, table = 'name', 'accROLE'
        elif key in ['Action', 'action']:
            value, table = 'name', 'accACTION'
        else:
            if output:
                output += ' and '
            output += ' %s <strong>%s</strong>' % (key, ids[key])
            continue

        if query:
            # The column name cannot be supplied as a bound query parameter:
            # binding would make the query return the literal string
            # ('email' / 'name') instead of the column value.  Wash it and
            # interpolate it into the statement; only the id is bound.
            res = run_sql(
                """SELECT "%s" FROM "%s" WHERE id = %%s"""
                % (wash_table_column_name(value),
                   wash_table_column_name(table)),
                (ids[key], ))  # kwalitee: disable=sql
            if res:
                if output:
                    output += ' and '
                output += ' %s <strong>%s</strong>' % (key, res[0][0])
        else:
            if output:
                output += ' and '
            output += ' %s <strong>%s</strong>' % (key, ids[key])
    return output
def startpage():
    """create the menu for the startpage.

    Returns a static HTML table linking to each WebAccess admin area
    (roles, actions, users, reset, account management, delegation).
    """
    body = """
<table class="admin_wvar" width="100%" summary="">
<thead>
<tr>
<th class="adminheaderleft">selection for WebAccess Admin</th>
</tr>
</thead>
<tbody>
<tr>
<td>
    <dl>
    <dt><a href="webaccessadmin.py/rolearea">Role Area</a></dt>
    <dd>main area to configure administration rights and authorization rules.</dd>
    <dt><a href="webaccessadmin.py/actionarea">Action Area</a></dt>
    <dd>configure administration rights with the actions as starting point.</dd>
    <dt><a href="webaccessadmin.py/userarea">User Area</a></dt>
    <dd>configure administration rights with the users as starting point.</dd>
    <dt><a href="webaccessadmin.py/resetarea">Reset Area</a></dt>
    <dd>reset roles, actions and authorizations.</dd>
    <dt><a href="webaccessadmin.py/manageaccounts">Manage accounts Area</a></dt>
    <dd>manage user accounts.</dd>
    <dt><a href="webaccessadmin.py/delegate_startarea">Delegate Rights - With Restrictions</a></dt>
    <dd>delegate your rights for some roles.</dd>
    </dl>
</td>
</tr>
</tbody>
</table>"""
    return body
def rankarea():
    """Rank area placeholder page body."""
    body = "Rankmethod area"
    return body
def perform_simpleauthorization(req, id_role=0, id_action=0):
    """Show a page with overview of auths.

    show a page with simple overview of authorizations between a
    connected role and action.
    """
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)

    details = acca.acc_find_possible_actions(id_role, id_action)
    if not details:
        output = 'no details to show'
    else:
        # extra column with a shortcut form to the modification page
        modify_form = createhiddenform(action='modifyauthorizations',
                                       button='modify authorizations',
                                       id_role=id_role,
                                       id_action=id_action)
        output = '<p>authorizations for %s:</p>' % \
            (headerstrong(action=id_action, role=id_role), )
        output += tupletotable(header=details[0], tuple=details[1:],
                               extracolumn=modify_form)

    return index(req=req,
                 title='Simple authorization details',
                 subtitle='simple authorization details',
                 body=[output],
                 adminarea=3)
def perform_showroleusers(req, id_role=0):
    """show a page with simple overview of a role and connected users."""
    (auth_code, auth_message) = is_adminuser(req)
    if auth_code != 0:
        return mustloginpage(req, auth_message)

    connected = acca.acc_get_role_users(id_role=id_role)
    name_role = acca.acc_get_role_name(id_role=id_role)

    if not connected:
        output = 'no users connected to role <strong>%s</strong>' \
            % (name_role, )
    else:
        users = [[user_id, email,
                  '<a href="showuserdetails?'
                  'id_user=%s">show user details</a>' % (user_id, )]
                 for (user_id, email, dummy) in connected]
        output = '<p>users connected to %s:</p>' \
            % (headerstrong(role=id_role), )
        output += tupletotable(header=['id', 'name', ''], tuple=users)

    extra = """
    <dl>
    <dt><a href="adduserrole?id_role=%s">Connect user</a></dt>
    <dd>connect users to the role.</dd>
    </dl>
    """ % (id_role, )

    return index(req=req,
                 title='Users connected to role %s' % (name_role, ),
                 subtitle='simple details',
                 body=[output, extra],
                 adminarea=3)
def createselect(id_input="0", label="", step=0, name="",
                 action="", list=[], extralist=[], extrastamp='',
                 button="", **hidden):
    """create form with select and hidden values.

    id_input - the one to choose as selected if exists
    label - label shown to the left of the select
    step - optional step number shown before the label
    name - the name of the select on which to reference it
    action - the form's action url
    list - primary list of (id, text) pairs to select from
    extralist - list of options to be put in paranthesis
    extrastamp - stamp extralist entries with this if not ''
        usually paranthesis around the entry
    button - the value/text to be put on the button
    **hidden - name=value pairs to be put as hidden in the form.
    """
    step = step and '%s. ' % step or ''

    output = '<form action="%s" method="POST">\n' % (action, )
    output += ' <span class="adminlabel">%s</span>\n' % (step + label, )
    output += ' <select name="%s" class="admin_w200">\n' % (name, )
    if not list and not extralist:
        output += ' <option value="0">*** no %ss to select from ***' \
            '</option>\n' % (label.split()[-1], )
    else:
        output += ' <option value="0">*** %s ***</option>\n' % (label, )
    for elem in list:
        elem_id = elem[0]
        email = elem[1]
        if str(elem_id) == id_input:
            output += ' <option value="%s" selected="selected">' \
                '%s</option>\n' % (elem_id, email)
        else:
            output += ' <option value="%s">%s</option>\n' \
                % (elem_id, email)
    for elem in extralist:
        elem_id = elem[0]
        email = elem[1]
        if str(elem_id) == id_input:
            if not extrastamp:
                output += ' <option value="%s" selected="selected">' \
                    '(%s)</option>\n' % (elem_id, email)
            else:
                # bug fix: a matching stamped entry must also carry the
                # selected attribute (the original omitted it here, so the
                # current choice was not pre-selected in the dropdown)
                output += ' <option value="%s" selected="selected">' \
                    '%s %s</option>\n' % (elem_id, email, extrastamp)
        elif not extrastamp:
            output += ' <option value="%s">(%s)</option>\n' \
                % (elem_id, email)
        else:
            output += ' <option value="%s">%s %s</option>\n' \
                % (elem_id, email, extrastamp)
    output += ' </select>\n'
    for key in hidden.keys():
        output += ' <input type="hidden" name="%s" value="%s" />\n' \
            % (key, hidden[key])
    output += ' <input class="adminbutton" type="submit" value="%s" />\n' \
        % (button, )
    output += '</form>\n'
    return output
def createactionselect(id_action="0", label="select action", step=0,
                       name="id_action", action="", actions=[],
                       extraactions=[], extrastamp='',
                       button="select action", **hidden):
    """create a select for actions in a form. see createselect."""
    # thin wrapper: delegates to createselect with action-specific defaults
    return createselect(id_input=id_action, label=label, step=step, name=name,
                        action=action, list=actions, extralist=extraactions,
                        extrastamp=extrastamp, button=button, **hidden)
def createroleselect(id_role="0", label="select role", step=0, name="id_role",
                     action="", roles=[], extraroles=[], extrastamp='',
                     button="select role", **hidden):
    """create a select for roles in a form. see createselect."""
    # thin wrapper: delegates to createselect with role-specific defaults
    return createselect(id_input=id_role, label=label, step=step, name=name,
                        action=action, list=roles, extralist=extraroles,
                        extrastamp=extrastamp, button=button, **hidden)
def createuserselect(id_user="0", label="select user", step=0, name="id_user",
                     action="", users=[], extrausers=[],
                     extrastamp='(connected)', button="select user", **hidden):
    """create a select for users in a form. see createselect."""
    # thin wrapper: delegates to createselect with user-specific defaults
    # (extrausers are stamped '(connected)' by default)
    return createselect(id_input=id_user, label=label, step=step, name=name,
                        action=action, list=users, extralist=extrausers,
                        extrastamp=extrastamp, button=button, **hidden)
def cleanstring(txt='', comma=0):
    """clean all the strings before submitting to access control admin.

    remove characters not letter, number or underscore, also remove leading
    underscores and numbers. return cleaned string.

    txt - string to be cleaned
    comma - 1 -> allow the comma to divide multiple arguments
            0 -> wash commas as well
    """
    # keep only letters, digits, underscores and commas
    washed = re.sub(r'[^a-zA-Z0-9_,]', '', txt)

    result = ''
    for piece in washed.split(','):
        if not piece:
            continue
        if comma and result:
            result += ','
        # strip leading digits/underscores so each piece is a valid name
        result += re.sub(r'^([0-9_])*', '', piece)
    return result
def cleanstring_argumentvalue(txt=''):
    """clean the value of an argument before submitting it.

    allowed characters: a-z A-Z 0-9 _ * . and space

    txt - string to be cleaned
    """
    # drop every character outside the allowed set
    washed = re.sub(r'[^a-zA-Z0-9_ *.]', '', txt)
    # trim leading and ending spaces
    return washed.strip(' ')
def cleanstring_email(txt=''):
    """clean the string and return a valid email address.

    txt - string to be cleaned
    """
    # keep only characters that may appear in an email address
    return re.sub(r'[^a-zA-Z0-9_.@-]', '', txt)
def check_email(txt=''):
    """control that submitted emails are correct.

    this little check is not very good, but better than nothing.
    """
    pattern = re.compile(r'(.)+\@(.)+\.(.)+')
    if pattern.match(txt):
        return 1
    return 0
def send_account_activated_message(account_email, send_to, password,
                                   ln=CFG_SITE_LANG):
    """Send account activated message.

    Send an email to the address given by send_to about the new activated
    account.
    """
    _ = gettext_set_language(ln)
    subject = _("Your account on '%(x_name)s' has been activated",
                x_name=CFG_SITE_NAME)
    # never put the real password in the mail body
    masked_password = "*" * len(str(password))
    body = _("Your account earlier created on '%(x_name)s' has "
             "been activated:", x_name=CFG_SITE_NAME)
    body += '\n\n'
    body += ' ' + _("Username/Email:") + " %s\n" % account_email
    body += ' ' + _("Password:") + " %s\n" % masked_password
    body += "\n---------------------------------"
    body += "\n%s" % CFG_SITE_NAME
    return send_email(CFG_SITE_SUPPORT_EMAIL, send_to, subject, body,
                      header='')
def send_new_user_account_warning(new_account_email, send_to, password,
                                  ln=CFG_SITE_LANG):
    """Send email to new user account.

    Send an email to the address given by send_to about the new account
    new_account_email.
    """
    _ = gettext_set_language(ln)
    subject = _("Account created on '%(x_name)s'", x_name=CFG_SITE_NAME)
    # never put the real password in the mail body
    masked_password = "*" * len(str(password))
    body = _("An account has been created for you on '%(x_name)s':",
             x_name=CFG_SITE_NAME)
    body += '\n\n'
    body += ' ' + _("Username/Email:") + " %s\n" % new_account_email
    body += ' ' + _("Password:") + " %s\n" % masked_password
    body += "\n---------------------------------"
    body += "\n%s" % CFG_SITE_NAME
    return send_email(CFG_SITE_SUPPORT_EMAIL, send_to, subject, body,
                      header='')
def send_account_rejected_message(new_account_email, send_to,
                                  ln=CFG_SITE_LANG):
    """Send email of rejection.

    Send an email to the address given by send_to about the rejected account
    new_account_email.
    """
    _ = gettext_set_language(ln)
    subject = _("Account rejected on '%(x_name)s'", x_name=CFG_SITE_NAME)
    body = _("Your request for an account has been rejected on '%(x_name)s':",
             x_name=CFG_SITE_NAME)
    body += '\n\n'
    body += ' ' \
        + _("Username/Email: %(x_email)s", x_email=new_account_email) + "\n"
    body += "\n---------------------------------"
    body += "\n%s" % CFG_SITE_NAME
    return send_email(CFG_SITE_SUPPORT_EMAIL, send_to, subject, body,
                      header='')
def send_account_deleted_message(new_account_email, send_to,
                                 ln=CFG_SITE_LANG):
    """Send email of deletion.

    Send an email to the address given by send_to about the deleted account
    new_account_email.
    """
    _ = gettext_set_language(ln)
    subject = _("Account deleted on '%(x_name)s'", x_name=CFG_SITE_NAME)
    body = _("Your account on '%(x_name)s' has been deleted:",
             x_name=CFG_SITE_NAME)
    body += '\n\n'
    body += ' ' + _("Username/Email:") + " %s\n" % new_account_email
    body += "\n---------------------------------"
    body += "\n%s" % CFG_SITE_NAME
    return send_email(CFG_SITE_SUPPORT_EMAIL, send_to, subject, body,
                      header='')
|
ludmilamarian/invenio
|
invenio/modules/access/admin_lib.py
|
Python
|
gpl-2.0
| 154,307
|
# Copyright 2019 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Protocol for object that have measurement keys."""
from typing import AbstractSet, Any, Dict, FrozenSet, Optional, Tuple, TYPE_CHECKING
from typing_extensions import Protocol
from cirq import value
from cirq._doc import doc_private
if TYPE_CHECKING:
import cirq
# Special sentinel used by the key-retrieval functions below
# (measurement_key_obj / measurement_key_name) to detect whether the caller
# provided a 'default' argument.  It is compared by identity (`is`), never by
# equality, so it can never collide with a caller-supplied default.
RaiseTypeErrorIfNotProvided: Any = ([],)
class SupportsMeasurementKey(Protocol):
    r"""An object that is a measurement and has a measurement key or keys.

    Measurement keys are used in referencing the results of a measurement.

    Users are free to implement one of the following. Do not implement multiple
    of these returning different values. The protocol behavior will be
    unexpected in such a case.

    1. `_measurement_key_objs_` returning an iterable of `MeasurementKey`s
    2. `_measurement_key_obj_` returning one `MeasurementKey`
    3. `_measurement_key_names_` returning an iterable of strings
    4. `_measurement_key_name_` returning one string

    Implementations may also return `NotImplemented` from any of these
    methods; the module-level protocol functions in this file treat that the
    same as the method being absent.

    Note: Measurements, in contrast to general quantum channels, are
    distinguished by the recording of the quantum operation that occurred.
    That is a general quantum channel may enact the evolution
    $$
    \rho \rightarrow \sum_k A_k \rho A_k^\dagger
    $$
    where as a measurement enacts the evolution
    $$
    \rho \rightarrow A_k \rho A_k^\dagger
    $$
    conditional on the measurement outcome being $k$.
    """

    @doc_private
    def _is_measurement_(self) -> bool:
        """Return if this object is (or contains) a measurement."""

    @doc_private
    def _measurement_key_obj_(self) -> 'cirq.MeasurementKey':
        """Return the key object that will be used to identify this measurement.

        When a measurement occurs, either on hardware, or in a simulation,
        this is the key value under which the results of the measurement
        will be stored.
        """

    @doc_private
    def _measurement_key_objs_(self) -> AbstractSet['cirq.MeasurementKey']:
        """Return the key objects for measurements performed by the receiving object.

        When a measurement occurs, either on hardware, or in a simulation,
        these are the key values under which the results of the measurements
        will be stored.
        """

    @doc_private
    def _measurement_key_name_(self) -> str:
        """Return the string key that will be used to identify this measurement.

        When a measurement occurs, either on hardware, or in a simulation,
        this is the key value under which the results of the measurement
        will be stored.
        """

    @doc_private
    def _measurement_key_names_(self) -> AbstractSet[str]:
        """Return the string keys for measurements performed by the receiving object.

        When a measurement occurs, either on hardware, or in a simulation,
        these are the key values under which the results of the measurements
        will be stored.
        """

    @doc_private
    def _with_measurement_key_mapping_(self, key_map: Dict[str, str]):
        """Return a copy of this object with the measurement keys remapped.

        This method allows measurement keys to be reassigned at runtime.
        """
def measurement_key_obj(val: Any, default: Any = RaiseTypeErrorIfNotProvided):
    """Get the single measurement key object for the given value.

    Args:
        val: The value which has one measurement key.
        default: Determines the fallback behavior when `val` doesn't have
            a measurement key. If `default` is not set, a TypeError is raised.
            If default is set to a value, that value is returned if the value
            does not have `_measurement_key_name_`.

    Returns:
        If `val` has a `_measurement_key_obj[s]_` method and its result is not
        `NotImplemented`, that result is returned. Otherwise, if a default
        value was specified, the default value is returned.

    Raises:
        TypeError: `val` doesn't have a _measurement_key_obj[s]_ method (or that method
            returned NotImplemented) and also no default value was specified.
        ValueError: `val` has multiple measurement keys.
    """
    keys = measurement_key_objs(val)
    if len(keys) > 1:
        raise ValueError(f'Got multiple measurement keys ({keys!r}) from {val!r}.')
    if len(keys) == 1:
        return next(iter(keys))
    # no keys at all: fall back to the default, if one was supplied
    if default is RaiseTypeErrorIfNotProvided:
        raise TypeError(f"Object of type '{type(val)}' had no measurement keys.")
    return default
def measurement_key_name(val: Any, default: Any = RaiseTypeErrorIfNotProvided):
    """Get the single measurement key for the given value.

    Args:
        val: The value which has one measurement key.
        default: Determines the fallback behavior when `val` doesn't have
            a measurement key. If `default` is not set, a TypeError is raised.
            If default is set to a value, that value is returned if the value
            does not have `_measurement_key_name_`.

    Returns:
        If `val` has a `_measurement_key_name_` method and its result is not
        `NotImplemented`, that result is returned. Otherwise, if a default
        value was specified, the default value is returned.

    Raises:
        TypeError: `val` doesn't have a _measurement_key_name_ method (or that method
            returned NotImplemented) and also no default value was specified.
        ValueError: `val` has multiple measurement keys.
    """
    names = measurement_key_names(val)
    if len(names) > 1:
        raise ValueError(f'Got multiple measurement keys ({names!r}) from {val!r}.')
    if len(names) == 1:
        return next(iter(names))
    # no keys at all: fall back to the default, if one was supplied
    if default is RaiseTypeErrorIfNotProvided:
        raise TypeError(f"Object of type '{type(val)}' had no measurement keys.")
    return default
def _measurement_key_objs_from_magic_methods(
    val: Any,
) -> Optional[AbstractSet['cirq.MeasurementKey']]:
    """Uses the measurement key related magic methods to get the `MeasurementKey`s for this
    object."""
    # Prefer the plural magic method, falling back to the singular one.
    plural = getattr(val, '_measurement_key_objs_', None)
    keys = NotImplemented if plural is None else plural()
    if keys is not NotImplemented and keys is not None:
        return set(keys)
    singular = getattr(val, '_measurement_key_obj_', None)
    key = NotImplemented if singular is None else singular()
    if key is not NotImplemented and key is not None:
        return {key}
    # NotImplemented (or None) signals that neither magic method applied
    return key
def _measurement_key_names_from_magic_methods(val: Any) -> Optional[AbstractSet[str]]:
    """Uses the measurement key related magic methods to get the key strings for this object."""
    # Prefer the plural magic method, falling back to the singular one.
    plural = getattr(val, '_measurement_key_names_', None)
    names = NotImplemented if plural is None else plural()
    if names is not NotImplemented and names is not None:
        return set(names)
    singular = getattr(val, '_measurement_key_name_', None)
    name = NotImplemented if singular is None else singular()
    if name is not NotImplemented and name is not None:
        return {name}
    # NotImplemented (or None) signals that neither magic method applied
    return name
def measurement_key_objs(val: Any) -> AbstractSet['cirq.MeasurementKey']:
    """Gets the measurement key objects of measurements within the given value.

    Args:
        val: The value which has the measurement key.

    Returns:
        The measurement key objects of the value. If the value has no measurement,
        the result is the empty set.
    """
    objs = _measurement_key_objs_from_magic_methods(val)
    if objs is not None and objs is not NotImplemented:
        return objs
    # fall back to string keys and parse them into MeasurementKey objects
    names = _measurement_key_names_from_magic_methods(val)
    if names is not None and names is not NotImplemented:
        return {value.MeasurementKey.parse_serialized(name) for name in names}
    return set()
def measurement_key_names(val: Any) -> AbstractSet[str]:
    """Gets the measurement key strings of measurements within the given value.

    Args:
        val: The value which has the measurement key.

    Returns:
        The measurement keys of the value. If the value has no measurement,
        the result is the empty set.
    """
    # (the previous docstring documented an `allow_decompose` argument that
    # this function does not accept; it has been removed)
    result = _measurement_key_names_from_magic_methods(val)
    if result is not NotImplemented and result is not None:
        return result
    # fall back to key objects and stringify them
    key_objs = _measurement_key_objs_from_magic_methods(val)
    if key_objs is not NotImplemented and key_objs is not None:
        return {str(key_obj) for key_obj in key_objs}
    return set()
def _is_measurement_from_magic_method(val: Any) -> Optional[bool]:
    """Uses `is_measurement` magic method to determine if this object is a measurement."""
    checker = getattr(val, '_is_measurement_', None)
    if checker is None:
        return NotImplemented
    return checker()
def is_measurement(val: Any) -> bool:
    """Determines whether or not the given value is a measurement (or contains one).

    Measurements are identified by the fact that any of them may have an `_is_measurement_` method
    or `cirq.measurement_keys` returns a non-empty result for them.

    Args:
        val: The value which to evaluate.
    """
    # (the previous docstring documented an `allow_decompose` argument that
    # this function does not accept; it has been removed)
    result = _is_measurement_from_magic_method(val)
    if isinstance(result, bool):
        return result
    # no explicit answer: fall back to "has at least one measurement key"
    keys = measurement_key_objs(val)
    return keys is not NotImplemented and bool(keys)
def with_measurement_key_mapping(val: Any, key_map: Dict[str, str]):
    """Remaps the target's measurement keys according to the provided key_map.

    This method can be used to reassign measurement keys at runtime, or to
    assign measurement keys from a higher-level object (such as a Circuit).
    """
    remapper = getattr(val, '_with_measurement_key_mapping_', None)
    if remapper is None:
        return NotImplemented
    return remapper(key_map)
def with_key_path(val: Any, path: Tuple[str, ...]):
    """Adds the path to the target's measurement keys.

    The path usually refers to an identifier or a list of identifiers from a subcircuit that
    used to contain the target. Since a subcircuit can be repeated and reused, these paths help
    differentiate the actual measurement keys.
    """
    handler = getattr(val, '_with_key_path_', None)
    if handler is None:
        return NotImplemented
    return handler(path)
def with_key_path_prefix(val: Any, prefix: Tuple[str, ...]):
    """Prefixes the path to the target's measurement keys.

    The path usually refers to an identifier or a list of identifiers from a subcircuit that
    used to contain the target. Since a subcircuit can be repeated and reused, these paths help
    differentiate the actual measurement keys.

    Args:
        val: The value whose path to prefix.
        prefix: The prefix to apply to the value's path.
    """
    handler = getattr(val, '_with_key_path_prefix_', None)
    if handler is None:
        return NotImplemented
    return handler(prefix)
def with_rescoped_keys(
    val: Any,
    path: Tuple[str, ...],
    bindable_keys: Optional[FrozenSet['cirq.MeasurementKey']] = None,
):
    """Rescopes any measurement and control keys to the provided path, given the existing keys.

    The path usually refers to an identifier or a list of identifiers from a subcircuit that
    used to contain the target. Since a subcircuit can be repeated and reused, these paths help
    differentiate the actual measurement keys.

    This function is generally for internal use in decomposing or iterating subcircuits.

    Args:
        val: The value to rescope.
        path: The prefix to apply to the value's path.
        bindable_keys: The keys that can be bound to at the current scope.
            Defaults to the empty frozenset.
    """
    # fix: the parameter defaults to None, so its annotation must be
    # Optional[...] (implicit-Optional is disallowed by PEP 484)
    getter = getattr(val, '_with_rescoped_keys_', None)
    result = NotImplemented if getter is None else getter(path, bindable_keys or frozenset())
    # objects without the magic method (or returning NotImplemented) pass
    # through unchanged
    return result if result is not NotImplemented else val
|
quantumlib/Cirq
|
cirq-core/cirq/protocols/measurement_key_protocol.py
|
Python
|
apache-2.0
| 13,195
|
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
# Dotted path to the AppConfig class Django should use for this application.
default_app_config = 'pdc.apps.repository.apps.RepositoryConfig'
|
tzhaoredhat/automation
|
pdc/apps/repository/__init__.py
|
Python
|
mit
| 174
|
from graphene_sqlalchemy import SQLAlchemyObjectType
from graphene import relay
from .model import Audit
class AuditSchema(SQLAlchemyObjectType):
    """GraphQL object type exposing the ``Audit`` SQLAlchemy model.

    Implements the Relay ``Node`` interface so entries get global ids and
    can be refetched through the standard ``node`` field.
    """

    class Meta:
        # map this type onto the Audit model; columns become fields
        model = Audit
        interfaces = (relay.Node, )
|
maschinendeck/frauk
|
backend/audit/schema.py
|
Python
|
mit
| 221
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 14 23:05:26 2015
@author: Wasit
"""
import random
# Number-guessing game (Python 2 syntax: raw_input / print statement).
# y starts as -1, a value randint can never produce, so the loop is entered.
y=-1
# secret number; randint is inclusive on both ends (0..32)
x=random.randint(0,32)
# keep prompting until the guess matches the secret number
while x!=y:
    y=int(raw_input("guess a number:"))
    if y<x:
        print y," is LESS than the corrected number"
    if x<y:
        print y," is LARGER than the corrected number"
print y," is correct!"
|
wasit7/tutorials
|
iddcdt_day2/01guess.py
|
Python
|
mit
| 334
|
# Copyright (c) 2010-2015 Advanced Micro Devices, Inc.
# All rights reserved.
#
# For use for simulation and test purposes only
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Brad Beckmann
from __future__ import print_function
import m5
from m5.objects import *
from m5.defines import buildEnv
from m5.util import addToPath
import os, optparse, sys
addToPath('../')
from common import Options
from ruby import Ruby
# This flat script drives the Ruby random tester against the GPU_RfO Ruby
# protocol: it builds a tester-only System (no real CPUs), ties the tester to
# every Ruby sequencer port, and simulates until `--maxloads` checks complete.
# Get paths we might need.
config_path = os.path.dirname(os.path.abspath(__file__))
config_root = os.path.dirname(config_path)
m5_root = os.path.dirname(config_root)
parser = optparse.OptionParser()
Options.addNoISAOptions(parser)
# Tester-specific knobs.
parser.add_option("--maxloads", metavar="N", default=100,
                  help="Stop after N loads")
parser.add_option("-f", "--wakeup_freq", metavar="N", default=10,
                  help="Wakeup every N cycles")
parser.add_option("-u", "--num-compute-units", type="int", default=1,
                  help="number of compute units in the GPU")
parser.add_option("--num-cp", type="int", default=0,
                  help="Number of GPU Command Processors (CP)")
# not super important now, but to avoid putting the number 4 everywhere, make
# it an option/knob
parser.add_option("--cu-per-sqc", type="int", default=4, help="number of CUs \
                  sharing an SQC (icache, and thus icache TLB)")
parser.add_option("--simds-per-cu", type="int", default=4, help="SIMD units" \
                  "per CU")
parser.add_option("--wf-size", type="int", default=64,
                  help="Wavefront size(in workitems)")
parser.add_option("--wfs-per-simd", type="int", default=10, help="Number of " \
                  "WF slots per SIMD")
#
# Add the ruby specific and protocol specific options
#
Ruby.define_options(parser)
# NOTE(review): execfile() is a Python-2-only builtin, so this script cannot
# run under Python 3 as written; gem5 later replaced such calls with exec().
execfile(os.path.join(config_root, "common", "Options.py"))
(options, args) = parser.parse_args()
#
# Set the default cache size and associativity to be very small to encourage
# races between requests and writebacks.
#
options.l1d_size="256B"
options.l1i_size="256B"
options.l2_size="512B"
options.l3_size="1kB"
options.l1d_assoc=2
options.l1i_assoc=2
options.l2_assoc=2
options.l3_assoc=2
# This file can support multiple compute units
assert(options.num_compute_units >= 1)
n_cu = options.num_compute_units
# One SQC (shared instruction cache) serves every `cu_per_sqc` CUs; round up
# so a partial group still gets an SQC.
options.num_sqc = int((n_cu + options.cu_per_sqc - 1) / options.cu_per_sqc)
if args:
    print("Error: script doesn't take any positional arguments")
    sys.exit(1)
#
# Create the ruby random tester
#
# Check to for the GPU_RfO protocol. Other GPU protocols are non-SC and will
# not work with the Ruby random tester.
assert(buildEnv['PROTOCOL'] == 'GPU_RfO')
# The GPU_RfO protocol does not support cache flushes
check_flush = False
tester = RubyTester(check_flush=check_flush,
                    checks_to_complete=options.maxloads,
                    wakeup_frequency=options.wakeup_freq,
                    deadlock_threshold=1000000)
#
# Create the M5 system. Note that the Memory Object isn't
# actually used by the rubytester, but is included to support the
# M5 memory size == Ruby memory size checks
#
system = System(cpu=tester, mem_ranges=[AddrRange(options.mem_size)])
# Create a top-level voltage domain and clock domain
system.voltage_domain = VoltageDomain(voltage=options.sys_voltage)
system.clk_domain = SrcClockDomain(clock=options.sys_clock,
                                   voltage_domain=system.voltage_domain)
Ruby.create_system(options, False, system)
# Create a seperate clock domain for Ruby
system.ruby.clk_domain = SrcClockDomain(clock=options.ruby_clock,
                                        voltage_domain=system.voltage_domain)
# The tester presents one "CPU" per Ruby sequencer port created above.
tester.num_cpus = len(system.ruby._cpu_ports)
#
# The tester is most effective when randomization is turned on and
# artifical delay is randomly inserted on messages
#
system.ruby.randomization = True
for ruby_port in system.ruby._cpu_ports:
    #
    # Tie the ruby tester ports to the ruby cpu read and write ports
    #
    if ruby_port.support_data_reqs and ruby_port.support_inst_reqs:
        tester.cpuInstDataPort = ruby_port.slave
    elif ruby_port.support_data_reqs:
        tester.cpuDataPort = ruby_port.slave
    elif ruby_port.support_inst_reqs:
        tester.cpuInstPort = ruby_port.slave
    # Do not automatically retry stalled Ruby requests
    ruby_port.no_retry_on_stall = True
    #
    # Tell each sequencer this is the ruby tester so that it
    # copies the subblock back to the checker
    #
    ruby_port.using_ruby_tester = True
# -----------------------
# run simulation
# -----------------------
root = Root( full_system = False, system = system )
root.system.mem_mode = 'timing'
# Not much point in this being higher than the L1 latency
m5.ticks.setGlobalFrequency('1ns')
# instantiate configuration
m5.instantiate()
# simulate until program terminates
exit_event = m5.simulate(options.abs_max_tick)
print('Exiting @ tick', m5.curTick(), 'because', exit_event.getCause())
|
vineodd/PIMSim
|
GEM5Simulation/gem5/configs/example/ruby_gpu_random_test.py
|
Python
|
gpl-3.0
| 6,436
|
# Copyright 2019 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Transpose convolutional module."""
from typing import Optional, Sequence, Union
import numpy as np
from sonnet.src import base
from sonnet.src import initializers
from sonnet.src import once
from sonnet.src import types
from sonnet.src import utils
import tensorflow as tf
def smart_concat(v1, v2):
  """Joins two shape fragments, staying symbolic when either is a tensor.

  When either argument is a `tf.Tensor` the pieces are concatenated along
  axis 0 with `tf.concat`; otherwise plain Python sequence addition is used.
  """
  either_is_tensor = isinstance(v1, tf.Tensor) or isinstance(v2, tf.Tensor)
  if not either_is_tensor:
    return v1 + v2
  return tf.concat([v1, v2], 0)
def smart_lambda(func, v1, v2):
  """Applies `func` to v1/v2 directly for tensors, else elementwise.

  For `tf.Tensor` inputs, `func` is applied once to the whole tensors; for
  plain sequences it is applied pairwise over zipped elements.
  """
  either_is_tensor = isinstance(v1, tf.Tensor) or isinstance(v2, tf.Tensor)
  if either_is_tensor:
    return func(v1, v2)
  return [func(a, b) for a, b in zip(v1, v2)]
class ConvNDTranspose(base.Module):
  """An N-dimensional transpose convolutional module.
  Attributes:
    w: Weight variable. Note is `None` until module is connected.
    b: Biases variable. Note is `None` until module is connected.
    input_shape: The input shape of the first set of inputs. Note is `None`
      until module is connected.
  """
  def __init__(self,
               num_spatial_dims: int,
               output_channels: int,
               kernel_shape: Union[int, Sequence[int]],
               output_shape: Optional[types.ShapeLike] = None,
               stride: Union[int, Sequence[int]] = 1,
               rate: Union[int, Sequence[int]] = 1,
               padding: str = "SAME",
               with_bias: bool = True,
               w_init: Optional[initializers.Initializer] = None,
               b_init: Optional[initializers.Initializer] = None,
               data_format: Optional[str] = None,
               name: Optional[str] = None):
    """Constructs a `ConvNDTranspose` module.
    Args:
      num_spatial_dims: Number of spatial dimensions of the input.
      output_channels: Number of output channels.
      kernel_shape: Sequence of integers (of length num_spatial_dims), or an
        integer representing kernel shape. `kernel_shape` will be expanded to
        define a kernel size in all dimensions.
      output_shape: Output shape of the spatial dimensions of a transpose
        convolution. Can be either an iterable of integers or a
        `TensorShape` of length `num_spatial_dims`. If a `None` value is given,
        a default shape is automatically calculated.
      stride: Sequence of integers (of length num_spatial_dims), or an integer.
        `stride` will be expanded to define stride in all dimensions.
      rate: Sequence of integers (of length num_spatial_dims), or integer that
        is used to define dilation rate in all dimensions. 1 corresponds to
        standard ND convolution, `rate > 1` corresponds to dilated convolution.
      padding: Padding algorithm, either "SAME" or "VALID".
      with_bias: Boolean, whether to include bias parameters. Default `True`.
      w_init: Optional initializer for the weights. By default the weights are
        initialized truncated random normal values with a standard deviation of
        `1 / sqrt(input_feature_size)`, which is commonly used when the
        inputs are zero centered (see https://arxiv.org/abs/1502.03167v3).
      b_init: Optional initializer for the bias. By default the bias is
        initialized to zero.
      data_format: The data format of the input.
      name: Name of the module.
    """
    super().__init__(name=name)
    if not 1 <= num_spatial_dims <= 3:
      raise ValueError(
          "We only support transpose convolution operations for "
          "num_spatial_dims=1, 2 or 3, received num_spatial_dims={}.".format(
              num_spatial_dims))
    self._num_spatial_dims = num_spatial_dims
    self._output_channels = output_channels
    self._kernel_shape = kernel_shape
    self._output_shape = output_shape
    self._stride = stride
    self._rate = rate
    # Padding is restricted to the two string algorithms accepted by
    # tf.nn.conv_transpose; explicit per-dimension padding is not supported.
    if padding == "SAME" or padding == "VALID":
      self._padding = padding
    else:
      raise TypeError("ConvNDTranspose only takes string padding, please "
                      "provide either `SAME` or `VALID`.")
    self._data_format = data_format
    # _channel_index is 1 for channels-first formats, -1 for channels-last.
    self._channel_index = utils.get_channel_index(data_format)
    self._with_bias = with_bias
    self._w_init = w_init
    if with_bias:
      self._b_init = b_init if b_init is not None else initializers.Zeros()
    elif b_init is not None:
      raise ValueError("When not using a bias the b_init must be None.")
  def __call__(self, inputs):
    """Applies the transpose convolution (plus optional bias) to `inputs`."""
    self._initialize(inputs)
    if self._output_shape is None:
      # No user-fixed shape: derive the spatial output shape from the input,
      # then splice in the channel count on the side _channel_index dictates.
      output_shape = self._get_output_shape(inputs)
      if self._channel_index == 1:
        output_shape = smart_concat([self._output_channels], output_shape)
      else:
        output_shape = smart_concat(output_shape, [self._output_channels])
    else:
      # _initialize already folded the channel count into _output_shape.
      output_shape = self._output_shape
    # Prepend the (possibly dynamic) batch dimension.
    output_shape = smart_concat([tf.shape(inputs)[0]], output_shape)
    outputs = tf.nn.conv_transpose(
        input=inputs,
        filters=self.w,
        output_shape=output_shape,
        strides=self._stride,
        padding=self._padding,
        data_format=self._data_format,
        dilations=self._rate,
        name=None)
    if self._with_bias:
      outputs = tf.nn.bias_add(outputs, self.b, data_format=self._data_format)
    return outputs
  @once.once
  def _initialize(self, inputs):
    """One-time setup: validates shapes and creates the w/b variables."""
    utils.assert_rank(inputs, self._num_spatial_dims + 2)
    self.input_channels = inputs.shape[self._channel_index]
    if self.input_channels is None:
      raise ValueError("The number of input channels must be known")
    self._dtype = inputs.dtype
    if self._output_shape is not None:
      if len(self._output_shape) != self._num_spatial_dims:
        raise ValueError(
            "The output_shape must be of length {} but instead was {}.".format(
                self._num_spatial_dims, len(self._output_shape)))
      # Fold the channel count into the stored shape once, so __call__ only
      # needs to prepend the batch dimension.
      if self._channel_index == 1:
        self._output_shape = [self._output_channels] + list(self._output_shape)
      else:
        self._output_shape = list(self._output_shape) + [self._output_channels]
    self.w = self._make_w()
    if self._with_bias:
      self.b = tf.Variable(
          self._b_init((self._output_channels,), self._dtype), name="b")
  def _make_w(self):
    """Makes and returns the variable representing the weight."""
    kernel_shape = utils.replicate(self._kernel_shape, self._num_spatial_dims,
                                   "kernel_shape")
    # Transpose-convolution kernels are laid out [*spatial, out, in].
    weight_shape = kernel_shape + (self._output_channels, self.input_channels)
    if self._w_init is None:
      # See https://arxiv.org/abs/1502.03167v3.
      fan_in_shape = kernel_shape + (self.input_channels,)
      stddev = 1 / np.sqrt(np.prod(fan_in_shape))
      self._w_init = initializers.TruncatedNormal(stddev=stddev)
    return tf.Variable(self._w_init(weight_shape, self._dtype), name="w")
  def _get_output_shape(self, inputs):
    """Computes the default spatial output shape for SAME/VALID padding."""
    # Fall back to dynamic tf.shape when the static shape is incomplete.
    input_shape = inputs.shape if inputs.shape.is_fully_defined() else tf.shape(
        inputs)
    if self._channel_index == 1:
      input_size = input_shape[2:]
    else:
      input_size = input_shape[1:-1]
    stride = utils.replicate(self._stride, self._num_spatial_dims, "stride")
    # SAME padding: output size is input * stride per spatial dimension.
    output_shape = smart_lambda(lambda x, y: x * y, input_size, stride)
    if self._padding == "VALID":
      # VALID additionally grows each dimension by the (dilated) kernel
      # extent minus one.
      kernel_shape = utils.replicate(self._kernel_shape, self._num_spatial_dims,
                                     "kernel_shape")
      rate = utils.replicate(self._rate, self._num_spatial_dims, "rate")
      effective_kernel_shape = [
          (shape - 1) * rate + 1 for (shape, rate) in zip(kernel_shape, rate)
      ]
      output_shape = smart_lambda(lambda x, y: x + y - 1, output_shape,
                                  effective_kernel_shape)
    return output_shape
class Conv1DTranspose(ConvNDTranspose):
  """A 1D transpose convolutional module."""

  def __init__(self,
               output_channels: int,
               kernel_shape: Union[int, Sequence[int]],
               output_shape: Optional[types.ShapeLike] = None,
               stride: Union[int, Sequence[int]] = 1,
               rate: Union[int, Sequence[int]] = 1,
               padding: str = "SAME",
               with_bias: bool = True,
               w_init: Optional[initializers.Initializer] = None,
               b_init: Optional[initializers.Initializer] = None,
               data_format: str = "NWC",
               name: Optional[str] = None):
    """Constructs a `Conv1DTranspose` module.

    Args:
      output_channels: Number of output channels.
      kernel_shape: Kernel size: an integer, or a sequence of length 1.
      output_shape: Desired spatial output shape (length 1). When `None`, a
        default shape is derived from the input shape, stride and padding.
      stride: Stride: an integer, or a sequence of length 1.
      rate: Dilation rate: an integer, or a sequence of length 1. A value of
        1 is standard 1D convolution; values above 1 dilate the kernel.
      padding: Padding algorithm, either "SAME" or "VALID".
      with_bias: Whether to add a learned bias. Defaults to `True`.
      w_init: Optional weight initializer; defaults to a truncated normal
        with stddev `1 / sqrt(input_feature_size)`, appropriate for
        zero-centered inputs (see https://arxiv.org/abs/1502.03167v3).
      b_init: Optional bias initializer; defaults to zeros.
      data_format: The data format of the input (default "NWC").
      name: Name of the module.
    """
    # Bundle the arguments shared with the N-D base class; the only
    # 1D-specific piece of information is num_spatial_dims.
    shared = dict(
        output_channels=output_channels,
        kernel_shape=kernel_shape,
        output_shape=output_shape,
        stride=stride,
        rate=rate,
        padding=padding,
        with_bias=with_bias,
        w_init=w_init,
        b_init=b_init,
        data_format=data_format,
        name=name)
    super().__init__(num_spatial_dims=1, **shared)
class Conv2DTranspose(ConvNDTranspose):
  """A 2D transpose convolutional module."""

  def __init__(self,
               output_channels: int,
               kernel_shape: Union[int, Sequence[int]],
               output_shape: Optional[types.ShapeLike] = None,
               stride: Union[int, Sequence[int]] = 1,
               rate: Union[int, Sequence[int]] = 1,
               padding: str = "SAME",
               with_bias: bool = True,
               w_init: Optional[initializers.Initializer] = None,
               b_init: Optional[initializers.Initializer] = None,
               data_format: str = "NHWC",
               name: Optional[str] = None):
    """Constructs a `Conv2DTranspose` module.

    Args:
      output_channels: Number of output channels.
      kernel_shape: Kernel size: an integer, or a sequence of length 2.
      output_shape: Desired spatial output shape (length 2). When `None`, a
        default shape is derived from the input shape, stride and padding.
      stride: Stride: an integer, or a sequence of length 2.
      rate: Dilation rate: an integer, or a sequence of length 2. A value of
        1 is standard 2D convolution; values above 1 dilate the kernel.
      padding: Padding algorithm, either "SAME" or "VALID".
      with_bias: Whether to add a learned bias. Defaults to `True`.
      w_init: Optional weight initializer; defaults to a truncated normal
        with stddev `1 / sqrt(input_feature_size)`, appropriate for
        zero-centered inputs (see https://arxiv.org/abs/1502.03167v3).
      b_init: Optional bias initializer; defaults to zeros.
      data_format: The data format of the input (default "NHWC").
      name: Name of the module.
    """
    # Bundle the arguments shared with the N-D base class; the only
    # 2D-specific piece of information is num_spatial_dims.
    shared = dict(
        output_channels=output_channels,
        kernel_shape=kernel_shape,
        output_shape=output_shape,
        stride=stride,
        rate=rate,
        padding=padding,
        with_bias=with_bias,
        w_init=w_init,
        b_init=b_init,
        data_format=data_format,
        name=name)
    super().__init__(num_spatial_dims=2, **shared)
class Conv3DTranspose(ConvNDTranspose):
  """A 3D transpose convolutional module."""

  def __init__(self,
               output_channels: int,
               kernel_shape: Union[int, Sequence[int]],
               output_shape: Optional[types.ShapeLike] = None,
               stride: Union[int, Sequence[int]] = 1,
               rate: Union[int, Sequence[int]] = 1,
               padding: str = "SAME",
               with_bias: bool = True,
               w_init: Optional[initializers.Initializer] = None,
               b_init: Optional[initializers.Initializer] = None,
               data_format: str = "NDHWC",
               name: Optional[str] = None):
    """Constructs a `Conv3DTranspose` module.

    Args:
      output_channels: Number of output channels.
      kernel_shape: Kernel size: an integer, or a sequence of length 3.
      output_shape: Desired spatial output shape (length 3). When `None`, a
        default shape is derived from the input shape, stride and padding.
      stride: Stride: an integer, or a sequence of length 3.
      rate: Dilation rate: an integer, or a sequence of length 3. A value of
        1 is standard 3D convolution; values above 1 dilate the kernel.
      padding: Padding algorithm, either "SAME" or "VALID".
      with_bias: Whether to add a learned bias. Defaults to `True`.
      w_init: Optional weight initializer; defaults to a truncated normal
        with stddev `1 / sqrt(input_feature_size)`, appropriate for
        zero-centered inputs (see https://arxiv.org/abs/1502.03167v3).
      b_init: Optional bias initializer; defaults to zeros.
      data_format: The data format of the input (default "NDHWC").
      name: Name of the module.
    """
    # Bundle the arguments shared with the N-D base class; the only
    # 3D-specific piece of information is num_spatial_dims.
    shared = dict(
        output_channels=output_channels,
        kernel_shape=kernel_shape,
        output_shape=output_shape,
        stride=stride,
        rate=rate,
        padding=padding,
        with_bias=with_bias,
        w_init=w_init,
        b_init=b_init,
        data_format=data_format,
        name=name)
    super().__init__(num_spatial_dims=3, **shared)
|
deepmind/sonnet
|
sonnet/src/conv_transpose.py
|
Python
|
apache-2.0
| 16,137
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def printHelloWorld():
    """Print the classic greeting to stdout.

    Uses the parenthesized form, which is a function call under Python 3 and
    a harmless parenthesized expression under Python 2's print statement, so
    the module works on both interpreters (the original `print '...'` syntax
    is a SyntaxError on Python 3).
    """
    print('Hello World')
|
pdehaye/edx-presenter
|
presentations/skeleton/src/hello.py
|
Python
|
agpl-3.0
| 92
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Adds a `periodo` foreign key (-> master.Periodo) to the imported
    # VentaCompleta model. Auto-generated by `makemigrations`; migration
    # files should not be edited by hand once applied.
    # NOTE(review): `default=b''` is an odd default for a ForeignKey to a
    # Periodo row — presumably captured verbatim by makemigrations; confirm
    # existing rows were backfilled with real Periodo references.
    dependencies = [
        ('master', '0028_localesnulos'),
        ('imports', '0006_ventacompleta'),
    ]
    operations = [
        migrations.AddField(
            model_name='ventacompleta',
            name='periodo',
            field=models.ForeignKey(default=b'', to='master.Periodo'),
            preserve_default=True,
        ),
    ]
|
Teino1978-Corp/Teino1978-Corp-light_.gitignore
|
light_imports_migrations_0007_ventacompleta_periodo.py
|
Python
|
mit
| 499
|
import sys
import signal
import serial
import threading
import time
import RPi.GPIO as GPIO
import Adafruit_CharLCD as LCD
from screens import SCREENS
class Ammonia(object):
    """Class to interact with EC, ORP, and TEMP probes and an HD44780 character LCD display."""
    # debounce time (seconds a button state must hold before it is accepted)
    DEBOUNCE = 0.1
    # channel selection pins (drive an external 2-bit channel multiplexer)
    A_PIN = 16
    B_PIN = 18
    # plate buttons
    BUTTONS = (LCD.LEFT, LCD.RIGHT, LCD.UP, LCD.DOWN, LCD.SELECT)
    # probes channels
    EC_CHANNEL = 1
    ORP_CHANNEL = 2
    TEMP_CHANNEL = 3
    # initialization screen (key into SCREENS)
    INIT_SCREEN = 'welcome'
    # custom characters (5x8 pixel bitmaps uploaded to the HD44780 CGRAM)
    RIGHT_ARROW_CHAR = 0
    RIGHT_ARROW_CHAR_BITMAP = (
        0b00000000,
        0b00001000,
        0b00001100,
        0b00001110,
        0b00001100,
        0b00001000,
        0b00000000,
        0b00000000
    )
    DOUBLE_ARROW_CHAR = 1
    DOUBLE_ARROW_CHAR_BITMAP = [
        0b00000100,
        0b00001110,
        0b00011111,
        0b00000000,
        0b00011111,
        0b00001110,
        0b00000100,
        0b00000000
    ]
    def __init__(self):
        """Initializes serial link, LCD plate and GPIO, and debounce state."""
        self._setup_serial()
        self._setup_LCD()
        self._setup_GPIO()
        self.current_screen_daemon = None
        self.daemon_should_run = False
        # Per-button debounce bookkeeping: '<btn>_prev' holds the last raw
        # reading (buttons idle HIGH), '<btn>_time' the last change time.
        prev_keys = ['%s_prev' % x for x in self.BUTTONS]
        time_keys = ['%s_time' % x for x in self.BUTTONS]
        self.inputs_state = dict(zip(prev_keys + time_keys, [GPIO.HIGH]*5 + [0]*5))
    def _setup_serial(self):
        """Opens the serial link to the probe board at 38400 baud."""
        self.serial = serial.Serial('/dev/ttyAMA0', 38400)
    def _setup_LCD(self):
        """Initializes the LCD plate and uploads the custom glyphs."""
        self.lcd = LCD.Adafruit_CharLCDPlate()
        self._create_custom_char(self.RIGHT_ARROW_CHAR, self.RIGHT_ARROW_CHAR_BITMAP)
        self._create_custom_char(self.DOUBLE_ARROW_CHAR, self.DOUBLE_ARROW_CHAR_BITMAP)
        self.lcd.clear()
    def _setup_GPIO(self):
        """Configures the channel-selector pins as outputs (BOARD numbering)."""
        GPIO.setmode(GPIO.BOARD)
        # channel selector pins
        GPIO.setup(self.A_PIN, GPIO.OUT)
        GPIO.setup(self.B_PIN, GPIO.OUT)
    def _create_custom_char(self, location, bitmap):
        """Writes an 8-row bitmap into CGRAM slot `location` (0-7)."""
        self.lcd.write8(LCD.LCD_SETCGRAMADDR | ((location & 7) << 3), False)
        for line in bitmap:
            self.lcd.write8(line, True)
        # Restore DDRAM addressing so subsequent writes go to the display.
        self.lcd.write8(LCD.LCD_SETDDRAMADDR, False)
    def read_message(self):
        """Reads one carriage-return-terminated message from the serial port.

        Blocks byte-by-byte until '\\r'; the terminator is consumed and not
        included in the returned string. (Python 2: serial.read() returns str.)
        """
        message = ''
        data = self.serial.read()
        while data != "\r":
            message = message + data
            data = self.serial.read()
        return message
    def select_channel(self, number):
        """Selects probe channel 1-4 by driving the A/B multiplexer pins.

        The 1-based channel number is converted to a 2-bit value: B is the
        low bit, A the high bit. NOTE(review): relies on Python 2 integer
        division (`number / 2`); would need `//` under Python 3.
        """
        number = number - 1
        b_value = number % 2
        number = number / 2
        a_value = number % 2
        GPIO.output(self.A_PIN, GPIO.LOW if a_value == 0 else GPIO.HIGH)
        GPIO.output(self.B_PIN, GPIO.LOW if b_value == 0 else GPIO.HIGH)
    def _call_method(self, name, args=()):
        """Dispatches to the private method `_<name>` with `args`."""
        return getattr(self, '_%s' % name)(*args)
    def _get_screen_class(self, target):
        """Looks up a screen class by name in the SCREENS registry."""
        return SCREENS[target]
    def _handle_input(self):
        """Polls the current screen's buttons with software debouncing.

        A press is acted on only after the raw reading has been stable for
        DEBOUNCE seconds; the triggered action comes from the screen's
        action() mapping and is dispatched via _call_method.
        """
        for button in self.current_screen_instance.buttons():
            curr_key = '%s_curr' % button
            prev_key = '%s_prev' % button
            time_key = '%s_time' % button
            self.inputs_state[curr_key] = self.lcd._mcp.input(button)
            # if the switch changed, due to bounce or pressing...
            if self.inputs_state[curr_key] != self.inputs_state[prev_key]:
                # reset the debouncing timer
                self.inputs_state[time_key] = time.time()
            if time.time() - self.inputs_state[time_key] > self.DEBOUNCE:
                # whatever the switch is at, its been there for a long time so
                # lets settle on it!
                if self.lcd.is_pressed(button):
                    action = self.current_screen_instance.action(button)
                    self._call_method(action['method'], action['args'])
            self.inputs_state[prev_key] = self.inputs_state[curr_key]
    def _transition_to(self, target):
        """Switches to screen `target`, stopping the old update thread first.

        If the new screen defines screen_update(), it is run in a daemon
        thread gated by self.daemon_should_run.
        """
        # wait for current thread to join
        if self.current_screen_daemon:
            self.daemon_should_run = False
            self.lcd.clear()
            self.lcd.message("Please wait...")
            # NOTE(review): join() uses a 10s timeout but the result is not
            # checked — a stuck screen_update thread would linger silently.
            self.current_screen_daemon.join(10)
        # create a new instance of target screen class
        self.current_screen = target
        TargetScreenClass = self._get_screen_class(target)
        self.current_screen_instance = TargetScreenClass(self)
        self.current_screen_instance.screen_init()
        # start update thread if screen_update method is defined
        target_method = getattr(self.current_screen_instance, 'screen_update', False)
        if target_method:
            self.daemon_should_run = True
            self.current_screen_daemon = threading.Thread(target=target_method)
            self.current_screen_daemon.daemon = True
            self.current_screen_daemon.start()
    def _transition_to_item(self):
        """Transitions to the screen named by the current menu selection."""
        target = self.current_screen_instance.current_item_name()
        self._transition_to(target)
    def _select_next_item(self):
        # Menu navigation: move selection forward on the current screen.
        self.current_screen_instance.select_next_item()
    def _select_prev_item(self):
        # Menu navigation: move selection backward on the current screen.
        self.current_screen_instance.select_prev_item()
    def _calibrate_selected_probe(self):
        # Probe calibration is not implemented yet.
        # TODO
        pass
    def start(self):
        """Shows the initial screen, then polls input forever.

        NOTE(review): `time_stamp` is assigned but never used, and the loop
        busy-waits with no sleep — it will peg one CPU core; consider a short
        time.sleep() per iteration.
        """
        self._transition_to(self.INIT_SCREEN)
        time_stamp = time.time()
        while True:
            self._handle_input()
def signal_handler(signal, frame):
    """SIGINT handler: release the GPIO pins and exit cleanly.

    Note: the `signal` parameter (the POSIX signal number) shadows the
    module-level `signal` import inside this function; the name is kept for
    backward compatibility with the registration in the main guard.
    """
    # Parenthesized print works on both Python 2 (expression) and Python 3
    # (function call); the original bare print statement is Py2-only.
    print('Bye')
    GPIO.cleanup()
    sys.exit(0)
if __name__ == '__main__':
    # Parenthesized print keeps this line valid on Python 2 and Python 3;
    # the original bare print statement is Py2-only.
    print('Ammonia RPI')
    # register custom handler so Ctrl+C releases GPIO pins before exiting
    signal.signal(signal.SIGINT, signal_handler)
    ammonia = Ammonia()
    ammonia.start()
|
potomak/ammonia-rpi
|
ammonia.py
|
Python
|
mit
| 5,697
|
# pylint: disable=C0301
import decimal
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.utils import timezone
from django.contrib.auth import authenticate, login, logout
from mock import Mock
from ..middleware import ActiveSubscriptionMiddleware, URLS
from ..models import Customer, CurrentSubscription
from ..utils import get_user_model
class DummySession(dict):
    """Minimal dict-backed stand-in for a Django session object.

    Provides just enough of the session API (`cycle_key`, `flush`) for
    `django.contrib.auth.login`/`logout` to run inside these tests; both
    operations are deliberate no-ops.
    """

    def cycle_key(self):
        # Key rotation on login is irrelevant for a throwaway test session.
        return None

    def flush(self):
        # Nothing persistent to discard on logout.
        return None
class ActiveSubscriptionMiddlewareTests(TestCase):
    """Exercises ActiveSubscriptionMiddleware.process_request across every
    combination of auth state, Customer record, subscription and exempt URL.

    The middleware contract under test: `None` means "let the request pass",
    a 302 to SUBSCRIPTION_REQUIRED_REDIRECT means "subscription required".
    """
    def setUp(self):
        # Build a mock request carrying a real, authenticated user; the
        # DummySession provides the minimal session API login() needs.
        self.middleware = ActiveSubscriptionMiddleware()
        self.request = Mock()
        self.request.session = DummySession()
        user = get_user_model().objects.create_user(username="patrick")
        user.set_password("eldarion")
        user.save()
        user = authenticate(username="patrick", password="eldarion")
        login(self.request, user)
    def test_authed_user_with_no_customer_redirects_on_non_exempt_url(self):
        self.request.path = "/the/app/"
        response = self.middleware.process_request(self.request)
        self.assertEqual(response.status_code, 302)
        # NOTE(review): response._headers is a pre-Django-3.2 internal;
        # confirm it matches the Django version this project pins.
        self.assertEqual(
            response._headers["location"][1],  # pylint: disable=W0212
            reverse(settings.SUBSCRIPTION_REQUIRED_REDIRECT)
        )
    def test_authed_user_with_no_customer_passes_with_exempt_url(self):
        # NOTE(review): appending to the module-level URLS list mutates
        # shared state that is never reset between tests; the exempt list
        # accumulates entries across this whole test class.
        URLS.append("/accounts/signup/")
        self.request.path = "/accounts/signup/"
        response = self.middleware.process_request(self.request)
        self.assertIsNone(response)
    def test_authed_user_with_no_active_subscription_passes_with_exempt_url(self):
        # Customer exists but has no CurrentSubscription rows.
        Customer.objects.create(stripe_id="cus_1", user=self.request.user)
        URLS.append("/accounts/signup/")
        self.request.path = "/accounts/signup/"
        response = self.middleware.process_request(self.request)
        self.assertIsNone(response)
    def test_authed_user_with_no_active_subscription_redirects_on_non_exempt_url(self):
        Customer.objects.create(stripe_id="cus_1", user=self.request.user)
        URLS.append("/accounts/signup/")
        self.request.path = "/the/app/"
        response = self.middleware.process_request(self.request)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(
            response._headers["location"][1],  # pylint: disable=W0212
            reverse(settings.SUBSCRIPTION_REQUIRED_REDIRECT)
        )
    def test_authed_user_with_active_subscription_redirects_on_non_exempt_url(self):
        # NOTE(review): despite the name, this asserts the request PASSES
        # (no redirect) because the subscription is active — consider
        # renaming to ..._passes_on_non_exempt_url.
        customer = Customer.objects.create(
            stripe_id="cus_1",
            user=self.request.user
        )
        CurrentSubscription.objects.create(
            customer=customer,
            plan="pro",
            quantity=1,
            start=timezone.now(),
            status="active",
            cancel_at_period_end=False,
            amount=decimal.Decimal("19.99")
        )
        URLS.append("/accounts/signup/")
        self.request.path = "/the/app/"
        response = self.middleware.process_request(self.request)
        self.assertIsNone(response)
    def test_unauthed_user_passes(self):
        # Anonymous traffic is never gated by the middleware.
        logout(self.request)
        URLS.append("/accounts/signup/")
        self.request.path = "/the/app/"
        response = self.middleware.process_request(self.request)
        self.assertIsNone(response)
    def test_staff_user_passes(self):
        # Staff accounts bypass the subscription requirement entirely.
        self.request.user.is_staff = True
        URLS.append("/accounts/signup/")
        self.request.path = "/the/app/"
        response = self.middleware.process_request(self.request)
        self.assertIsNone(response)
|
wahuneke/django-stripe-payments
|
payments/tests/test_middleware.py
|
Python
|
bsd-3-clause
| 3,692
|
import os
import re
import shutil
import subprocess
import sys
import unittest
from types import SimpleNamespace as Empty
import flextGLgen
# https://stackoverflow.com/a/12867228
_camelcase_to_snakecase = re.compile('((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))')
# The test files are automatically detected from derived class name and
# filesystem location. For a `test_generate.VkRelease` class, it will look
# for the `generate_vk_release` directory. If the class name is equivalent to
# the filename (e.g. `test_generate.Generate`), then it will be looking for
# just `page` instead of `page_page`. If needed, the directory name can be
# overriden by passing it via dir to __init__().
class BaseTestCase(unittest.TestCase):
    """Common scaffolding for flextGLgen generation tests.

    Locates the per-test data directory (derived from the test module name
    and the subclass name, see the module comment above), runs the generator
    into `<dir>/generated`, and diffs generated files against checked-in
    expectations.
    """
    def __init__(self, *args, dir=None, **kwargs):
        unittest.TestCase.__init__(self, *args, **kwargs)
        # Get the test filename from the derived class module file. If path is
        # not supplied, get it from derived class name converted to snake_case
        path = sys.modules[self.__class__.__module__].__file__
        if not dir: dir = _camelcase_to_snakecase.sub('_\\1', self.__class__.__name__).lower()
        # Full directory name (for test_something.py the directory is
        # something_{dir}
        dir_prefix = os.path.splitext(os.path.basename(path))[0][5:]
        if dir and dir_prefix != dir:
            dirname = dir_prefix + '_' + dir
        else:
            dirname = dir_prefix
        # Absolute path to this directory
        self.path = os.path.join(os.path.dirname(os.path.realpath(path)), dirname)
        self.cwd = os.path.dirname(self.path)
        self.root = os.path.dirname(self.cwd)
        if not os.path.exists(self.path):
            raise AssertionError("autodetected path {} doesn't exist".format(self.path))
        # Display ALL THE DIFFS
        self.maxDiff = None
    def setUp(self):
        """Removes any stale `generated` output from a previous run."""
        if os.path.exists(os.path.join(self.path, 'generated')):
            shutil.rmtree(os.path.join(self.path, 'generated'))
    def run_flextglgen(self, template_dir=None):
        """Invokes flextGLgen on this test's profile.txt.

        Output goes to `<self.path>/generated`; templates come from
        `template_dir` when given, else from the test directory itself.
        """
        args = Empty()
        args.download = False
        args.outdir = os.path.join(self.path, 'generated')
        args.template_dir = template_dir if template_dir else self.path
        flextGLgen.main('-D generated -t somepath profile.txt', args, os.path.join(self.path, 'profile.txt'))
    def actual_expected_contents(self, actual, expected=None, replace=None):
        """Returns (actual, expected) file contents for assertEqual(*...).

        `actual` is read from the `generated` directory, `expected` (same
        name by default) from the test directory. `replace` is an optional
        (regex, replacement) pair applied to the actual contents only, e.g.
        to normalize version macros.
        """
        if not expected: expected = actual
        with open(os.path.join(self.path, expected)) as f:
            expected_contents = f.read().strip()
        with open(os.path.join(self.path, 'generated', actual)) as f:
            actual_contents = f.read().strip()
        # Not replacing expected_contents, there it should be done already.
        # That also prevents accidents of replacing something unwanted.
        if replace: actual_contents = re.sub(replace[0], replace[1], actual_contents)
        return actual_contents, expected_contents
class Generate(BaseTestCase):
    """GL generation from test/generate/profile.txt must reproduce flextGL.h."""
    # The boilerplate __init__ that only forwarded *args/**kwargs was removed;
    # Python falls back to BaseTestCase.__init__ automatically.

    def test(self):
        self.run_flextglgen()
        self.assertEqual(*self.actual_expected_contents('flextGL.h'))
class Es(BaseTestCase):
    """OpenGL ES profile generation must reproduce the checked-in flextGL.h."""
    # No-op forwarding __init__ removed; BaseTestCase.__init__ is inherited.

    def test(self):
        self.run_flextglgen()
        self.assertEqual(*self.actual_expected_contents('flextGL.h'))
class EsExtraspec(BaseTestCase):
    """ES generation with an extra spec file must reproduce flextGL.h."""
    # No-op forwarding __init__ removed; BaseTestCase.__init__ is inherited.

    def test(self):
        self.run_flextglgen()
        self.assertEqual(*self.actual_expected_contents('flextGL.h'))
class Vk(BaseTestCase):
    """Vulkan generation; the header version macro is normalized before diffing."""
    # No-op forwarding __init__ removed; BaseTestCase.__init__ is inherited.

    def test(self):
        self.run_flextglgen()
        # Raw string for the regex: '\d' in a plain literal is an invalid
        # escape sequence (deprecated since Python 3.6).
        self.assertEqual(*self.actual_expected_contents('flextVk.h',
            replace=(r'#define VK_HEADER_VERSION \d+',
                     '#define VK_HEADER_VERSION 00')))
class VkRelease(BaseTestCase):
    """Vulkan generation against the pinned v1.1.73 spec shipped in the repo."""
    # No-op forwarding __init__ removed; BaseTestCase.__init__ is inherited.

    def test(self):
        self.run_flextglgen(template_dir=os.path.join(self.cwd, 'generate_vk'))
        self.assertTrue(os.path.exists(os.path.join(self.root, "spec/vk.v1.1.73.xml")))
        self.assertEqual(*self.actual_expected_contents('flextVk.h'))
class VkDuplicateExtensionInteraction(BaseTestCase):
    """Duplicate extension interactions must not corrupt the flextVk.h output."""
    # No-op forwarding __init__ removed; BaseTestCase.__init__ is inherited.

    def test(self):
        self.run_flextglgen()
        self.assertEqual(*self.actual_expected_contents('flextVk.h'))
class VkExtendEmptyFlagBits(BaseTestCase):
    """Extending an empty FlagBits enum must produce the expected flextVk.h."""
    # No-op forwarding __init__ removed; BaseTestCase.__init__ is inherited.

    def test(self):
        self.run_flextglgen()
        self.assertEqual(*self.actual_expected_contents('flextVk.h'))
class VkExtensionInteractionReorder(BaseTestCase):
    """Reordered extension interactions must produce the expected flextVk.h."""
    # No-op forwarding __init__ removed; BaseTestCase.__init__ is inherited.

    def test(self):
        self.run_flextglgen()
        self.assertEqual(*self.actual_expected_contents('flextVk.h'))
class VkPromotedEnum(BaseTestCase):
    """Promoted enums must generate correctly (header version normalized)."""
    # No-op forwarding __init__ removed; BaseTestCase.__init__ is inherited.

    def test(self):
        self.run_flextglgen()
        # Raw string for the regex: '\d' in a plain literal is an invalid
        # escape sequence (deprecated since Python 3.6).
        self.assertEqual(*self.actual_expected_contents('flextVk.h',
            replace=(r'#define VK_HEADER_VERSION \d+',
                     '#define VK_HEADER_VERSION 00')))
class VkDuplicateEnum(BaseTestCase):
    """Duplicate enums must generate correctly (header version normalized)."""
    # No-op forwarding __init__ removed; BaseTestCase.__init__ is inherited.

    def test(self):
        self.run_flextglgen()
        # Raw string for the regex: '\d' in a plain literal is an invalid
        # escape sequence (deprecated since Python 3.6).
        self.assertEqual(*self.actual_expected_contents('flextVk.h',
            replace=(r'#define VK_HEADER_VERSION \d+',
                     '#define VK_HEADER_VERSION 00')))
class VkEnumAliasWithDependency(BaseTestCase):
    """Enum aliases with a dependency must produce the expected flextVk.h."""
    # No-op forwarding __init__ removed; BaseTestCase.__init__ is inherited.

    def test(self):
        self.run_flextglgen()
        self.assertEqual(*self.actual_expected_contents('flextVk.h'))
class VkEnumAliasWithoutDependency(BaseTestCase):
    """Enum aliases without a dependency must produce the expected flextVk.h."""
    # No-op forwarding __init__ removed; BaseTestCase.__init__ is inherited.

    def test(self):
        self.run_flextglgen()
        self.assertEqual(*self.actual_expected_contents('flextVk.h'))
class NotADirectory(BaseTestCase):
    """flextGLgen has to exit with an error when the template directory or
    the output path is not a usable directory."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, dir='generate', **kwargs)

    def _run_with(self, template_dir, outdir):
        # Build the minimal argument object flextGLgen.main() expects and
        # run the generator against this test's profile.
        args = Empty()
        args.download = False
        args.outdir = outdir
        args.template_dir = template_dir
        flextGLgen.main('', args, os.path.join(self.cwd, 'profile.txt'))

    def test_template(self):
        # A nonexistent template directory has to be rejected.
        with self.assertRaises(SystemExit):
            self._run_with(os.path.join(self.cwd, 'nonexistent'),
                           os.path.join(self.path, 'generated'))

    def test_out(self):
        # An output path pointing at a file (not a directory) has to be
        # rejected.
        with self.assertRaises(SystemExit):
            self._run_with(os.path.join(self.root, 'templates'),
                           os.path.join(self.cwd, 'profile.txt'))
|
mosra/flextgl
|
test/test_generate.py
|
Python
|
mit
| 7,126
|
# coding=utf-8
from threading import Lock
import time
class Resource:
    """A named, lockable robot resource.

    Note the inverted ``with`` semantics: entering a ``with resource:``
    block *releases* the lock and exiting re-acquires it -- see
    ``__enter__`` for the rationale.
    """

    def __init__(self, name = ""):
        self.lock = Lock()
        self.name = name
        self.owner = None  # identifier of the current lock holder, or None

    def __str__(self):
        if self.owner:
            return "%s (currently owned by <%s>)" % (self.name, self.owner)
        return self.name + " (not currently owned)"

    def __enter__(self):
        """Temporarily hand the lock over to a sub-action.

        Entering a 'resource' block *releases* the lock, which may seem
        counter-intuitive.  It is meant to be used inside an action that
        already locks the resource, to temporarily transfer ownership to a
        sub-action::

            @action
            @lock(WHEELS)
            def move(...):
                ...

            @action
            @lock(WHEELS)
            def goto(...):
                with WHEELS:
                    move(...)

        Here ``goto()`` calls ``move()`` by first releasing the lock on
        ``WHEELS``, executing ``move()`` and re-acquiring the lock, also
        if ``move()`` raises an exception.
        """
        self.release()

    def __exit__(self, exc_type, exc_value, traceback):
        # Re-take the lock; any exception keeps propagating automatically.
        self.acquire()

    def acquire(self, wait = True, acquirer = "unknown"):
        if wait:
            # Active polling (instead of a blocking acquire) so that actions
            # waiting for the resource can still be cancelled cleanly.
            while not self.lock.acquire(False):
                time.sleep(0.1)
            self.owner = acquirer
            return True
        if not self.lock.acquire(False):
            return False
        self.owner = acquirer
        return True

    def release(self):
        self.lock.release()
        self.owner = None
class CompoundResource:
    """A resource composed of several sub-resources that must be held
    together.

    The same inverted ``with`` semantics as :class:`Resource` apply:
    entering the context releases all sub-resources, exiting re-acquires
    them.
    """

    def __init__(self, *args, **kwargs):
        self.resources = args
        self.name = kwargs.get("name", "")
        self.owner = None  # identifier of the current lock holder, or None

    def __str__(self):
        return self.name + ((" (currently owned by <%s>)" % self.owner) if self.owner else " (not currently owned)")

    def __enter__(self):
        """ cf doc of Resource.__enter__.
        """
        self.release()

    def __exit__(self, exc_type, exc_value, traceback):
        """ cf doc of Resource.__exit__.
        """
        self.acquire()
        # here, the exception, if any, is automatically propagated

    def acquire(self, wait = True, acquirer = "unknown"):
        """Acquire every sub-resource; return True once all are held.

        With ``wait=False``, returns False as soon as one sub-resource is
        unavailable, first releasing any sub-resource acquired so far so
        that no partial hold is leaked.  (The previous implementation
        returned None on success, kept acquiring after a failure and left
        already-acquired sub-resources locked.)
        """
        acquired = []
        for res in self.resources:
            if res.acquire(wait, acquirer):
                acquired.append(res)
            else:
                # Roll back: never keep a partial hold on the compound.
                for held in acquired:
                    held.release()
                return False
        self.owner = acquirer
        return True

    def release(self):
        for res in self.resources:
            res.release()
        self.owner = None
|
chili-epfl/pyrobots
|
src/robots/resources/resources.py
|
Python
|
isc
| 2,851
|
from credentials_hana import db_HOST, db_PORT, db_USER, db_PASSWORD
import pyhdb
class DatabaseConnection:
    """Lazily-connected handle to a SAP HANA database for a named query."""

    def __init__(self, queryName):
        self.path = queryName   # name/path identifying the query
        self._connector = None  # pyhdb connection, created on first access

    def __str__(self):
        return str(self.path)

    def __eq__(self, name):
        """Compare against a plain string (matches the query name) or
        against another DatabaseConnection (matches all state).

        The previous implementation compared the *bound method*
        ``self.__str__`` to the string (always False) and used the
        Python-2-only ``print`` statement, which is a SyntaxError on
        Python 3.
        """
        if type(name) == str:
            return str(self) == name
        else:
            return self.__dict__ == name.__dict__

    @property
    def connector(self):
        """The pyhdb connection, opened lazily on first access."""
        if self._connector is None:
            self._connector = pyhdb.connect(
                host = db_HOST,
                port = db_PORT,
                user = db_USER,
                password = db_PASSWORD)
        return self._connector
|
mockingbird2/EPIC-ness
|
flask_app/database_connection.py
|
Python
|
mit
| 627
|
# Copyright (C) 2011 Aaron Lindsay <aaron@aclindsay.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sqlite3
import logging
RETRIES=10  # maximum attempts for a query before the sqlite3 error is re-raised
def cursor_generator(cursor):
    """Yield rows one at a time from a DB cursor, fetching in batches."""
    while True:
        batch = cursor.fetchmany()
        if not batch:
            return
        for row in batch:
            yield row
class Database:
    """Thin wrapper around the asink sqlite3 event database.

    Handles connection setup, first-run schema installation, and retrying
    of transient ``sqlite3.OperationalError`` failures (up to RETRIES
    attempts).
    """

    def __init__(self):
        self.connect()
        self.ensure_installed()

    def connect(self):
        # isolation_level=None puts sqlite3 into autocommit mode; the
        # explicit commit()/rollback() calls below manage transactions.
        self.conn = sqlite3.connect("asink.db", isolation_level = None)

    def execute(self, query, args=()):
        """Execute query with args, retrying transient OperationalErrors.

        Returns a generator over the result rows, or re-raises the last
        OperationalError after RETRIES failed attempts.
        """
        for i in range(RETRIES):
            try:
                # Keep the cursor on self so lastrowid() can reach it.
                # (Previously the cursor was only stored in a local, so
                # cursor_generator(self.cursor) raised AttributeError.)
                self.cursor = self.conn.execute(query, args)
                self.commit()
                return cursor_generator(self.cursor)
            except sqlite3.OperationalError:
                # '==', not 'is': identity comparison of ints is unreliable.
                if i == RETRIES-1:
                    raise
                logging.error("sqlite3.OperationalError while running query:\n"+
                        query+" with args "+str(args))
                self.rollback()
                self.conn.interrupt()

    def lastrowid(self):
        """Rowid produced by the most recent execute()."""
        return self.cursor.lastrowid

    def commit(self):
        self.conn.commit()

    def rollback(self):
        self.conn.rollback()

    def close(self):
        self.conn.close()

    def ensure_installed(self):
        """Create the events table and its indices on first run."""
        cursor = self.conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='events';")
        if cursor.fetchone() is None:
            self.execute("""CREATE TABLE events (
                    rev INTEGER PRIMARY KEY,
                    user INTEGER,
                    type INTEGER,
                    hash TEXT,
                    localpath TEXT,
                    modified INTEGER,
                    storagekey TEXT,
                    permissions INTEGER)""")
            #make index on rev and localpath
            self.execute("CREATE INDEX IF NOT EXISTS revidx on events (rev)")
            self.execute("CREATE INDEX IF NOT EXISTS pathidx on events (localpath)")
|
aclindsa/asink-python
|
src/server/database.py
|
Python
|
gpl-2.0
| 2,596
|
'''
:module: requests (pyCurl based Requests)
a lightweight small footprint interface to pyCurl provides the base for twtCurl
.. Warning:: although classes defined here can possibly be used for generic http(s)
requests those have only been tested for requests
to twitter REST and streaming API
'''
import simplejson
import pycurl
import urllib
import urlparse
import logging
from datetime import datetime
from twtPyCurl import __version__, path
from twtPyCurl.py.utilities import (dict_encode, DotDot, seconds_to_DHMS, format_header)
from twtPyCurl.py.oauth import OAuth1, OAuth2
# Module-level logger; handler configuration is left to the application.
LOG = logging.getLogger(__name__)
# LOG.addHandler(logging.NullHandler())
LOG.debug("loading module: " + __name__)
class ErrorRq(Exception):
    """Base class for all request-layer exceptions in this module."""
class ErrorRqMissingKeys(ErrorRq):
    """Raised when a credentials dictionary lacks required keys."""
    pass
class ErrorRqCredentialsNotValid(ErrorRq):
    """Raised when a Credentials instance can't be built from the given kwargs."""
    pass
class ErrorRqHttp(ErrorRq):
    """HTTP error (carries the HTTP status code and an optional message)."""
    def __init__(self, http_code, msg=''):
        super(ErrorRqHttp, self).__init__(http_code, msg)
class ErrorRqCurl(ErrorRq):
    """Exceptions raised by Curl (wraps a curl error number and message)."""
    def __init__(self, err_number, msg):
        super(ErrorRqCurl, self).__init__(err_number, msg)
class CredentialsProvider(object):
    """Generic oAuth credentials provider class.

    Subclasses implement :meth:`get_credentials`; :meth:`validate` checks a
    credentials dictionary for the required keys.
    """
    # keys required for application-level credentials
    appl_keys = ['id_appl', 'consumer_access_token']
    # additional keys required when 'id_user' is present (user credentials)
    user_keys = ['id_user', 'user_name', 'consumer_key', 'consumer_secret',
                 'access_token_key', 'access_token_secret']

    @classmethod
    def get_credentials(cls, id_appl, id_user=None):
        """must return a dictionary with all appl_keys and user_keys
        classes inherited from this base class must implement this method
        """
        raise NotImplementedError

    @classmethod
    def on_revoke_credentials(cls, appl_id, user_id):
        """inherited classes should handle this to inform the application """
        # 'cls' (not 'self'): these are classmethods, consistent with
        # get_credentials above.
        raise NotImplementedError

    @classmethod
    def validate(cls, credentials_dict):
        """validates credentials dictionary against missing keys

        :returns: credentials_dict unchanged if valid
        :raises ErrorRqMissingKeys: listing any missing keys
        """
        lsr_cr_keys = list(credentials_dict.keys())
        rt = [i for i in cls.appl_keys if i not in lsr_cr_keys]
        if 'id_user' in lsr_cr_keys:  # it is user credentials
            rt.extend([i for i in cls.user_keys if i not in lsr_cr_keys])
        if rt:
            raise ErrorRqMissingKeys("missing keys: {}".format(",".join(rt)))
        return credentials_dict
class CredentialsProviderFile(CredentialsProvider):
    """Simple file-based credentials provider: reads credentials from the
    contents of a json file.

    .. seealso::
        - a sample file with user credentials at: twt_data/sample_credentials_user.json
        - a sample file with application credentials at: twt_data/sample_credentials_application.json
    """

    def __call__(self, *args, **kwargs):
        # Calling the instance is a shortcut for fetch-then-validate.
        return self.validate(self.get_credentials(*args, **kwargs))

    @classmethod
    def get_credentials(cls, file_path=None):
        """
        :param str file_path: full path name to a file, defaults to credentials.json in user's home directory
        :returns: a validated credentials dictionary
        :raises: IOError on file error
        """
        if file_path is None:
            # default: credentials.json in the user's home directory
            file_path = "{}/credentials.json".format(path.expanduser("~"))
        with open(file_path, "r") as fin:
            try:
                crd_dict = simplejson.load(fin)
            except IOError:
                raise
        return cls.validate(crd_dict)
class Credentials(object):
    """stores OAuth1 or OAuth2 credentials and provides OAuth headers

    kwargs must include 'id_appl'; with an 'id_user' present the kwargs
    are handed to OAuth1 (user credentials), otherwise to OAuth2
    (application credentials).
    """
    def __init__(self, **kwargs):
        kwargs = DotDot(kwargs)
        self.id_user = kwargs.get('id_user')  # defaults to None (application credentials)
        self.user_name = kwargs.get('user_name', self.id_user)  # defaults to id_user
        self.id_appl = kwargs.id_appl  # required
        if len(list(kwargs.keys())) > 1:
            if self.id_user is None:
                # no user id -> application credentials, handled by OAuth2
                self._is_appl = True
                self.OAuth = OAuth2(**kwargs)
            else:
                # user credentials, handled by OAuth1
                self._is_appl = False
                self.OAuth = OAuth1(**kwargs)
            self._id = (self.id_appl, self.id_user)
            self._id_str = "{}/{}".format(*self._id)
        else:
            # only id_appl was provided - not enough to authenticate
            raise ErrorRqCredentialsNotValid()
    def is_appl(self):
        """
        :returns: Boolean: True if credentials belong to an application False if belong to an application user
        """
        return self._is_appl
    @property
    def id(self):
        """
        :returns: tuple: (application id, user id)
        """
        return self._id
    @property
    def id_str(self):
        """
        :returns: (str) representation of instance's id
        """
        return self._id_str
    def get_oath_header(self, *args):
        """
        :returns: str: the OAuth header to be used by a request
        """
        return self.OAuth.get_oath_header(*args)
    def on_revoke_credantials(self):
        """descendants can override to handle revoking credentials"""
        # NOTE(review): method name misspells 'credentials'; kept as-is
        # since renaming would break existing callers.
        raise NotImplementedError
    def __repr__(self):
        return '<{:s}:{:s}>'.format(self.__class__.__name__, self.id_str)
    def __str__(self):
        return self.__repr__()
class Response(object):
    """Lightweight HTTP response container, kept minimal for speed."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all per-request state so this instance can be reused
        between requests instead of being re-created each time."""
        # status_provisional is only available if we actually hit a server
        # AND it sent proper headers.
        self.headers_raw = []
        self.data = ''
        self.status_http = None         # int status from curl, known only well after perform
        self.status_provisional = None  # int status derived early from the first header line
        self._headers = None
        self.err_curl = None

    def write_headers(self, headers_data):
        # The very first header record is the status line; try to pull the
        # provisional status code out of it.
        if not self.headers_raw:
            try:
                self.status_provisional = int(headers_data.split(" ")[1])
            except (ValueError, IndexError):
                pass
        self.headers_raw.append(headers_data.strip())

    @property
    def headers(self):
        """Build (on demand, only once) and return the headers dictionary.

        Constructing the dictionary is not cheap, so it is deferred until
        this property is first accessed.
        """
        if self._headers is None:
            raw = self.headers_raw
            pairs = [line.split(': ') for line in raw
                     if line and not line.startswith('HTTP/')]
            self._headers = DotDot((pair[0].lower(), pair[1]) for pair in pairs)
            if raw:
                self._headers.status_raw = raw[0]
        return self._headers
class Client(object):
    """this is a minimal class to execute HTTP Requests via curl/pycurl,
    for efficiency urls are NOT url encoded since it is not necessary for our use case.
    all arguments are optional
    :param tuple request: (url, method, parms) if specified request will be executed following instance creation
        see :func:`request`
    :param Credentials credentials: an instance of :class:`Credentials`
    :param function on_data_cb: a call back with a single parameter to execute when data from request are ready,
        if missing or None instance's :func:`on_data_default` will be called instead
    :param str user_agent: a user agent string to use in request header (defaults to class name + 'v '+ __version)
    :param str name: name for this instance if missing a default based on instance's id is provided see: :func:`name`
    :param bool allow_retries: if True allows instance to perform retries to recover from an error if possible (defaults to True)
    :param bool allow_redirects: if True allows automatic redirects (defaults to False)
    :param int verbose: set to 0 for silent mode 1 to turn curl verbose and progress on, 2 to turn curl debug mode on (defaults to 0)
    :example:
        >>> client = Client()
        >>> response = client.request(url="https://www.yandex.com/", method='GET')
        >>> response.data
        '<!DOCTYPE html><html class="i-ua_js_no i-ua_css_standart i-ua_browser_unknown" lang="en">.......'
        >>> response.status_http
        200
    """
    # format string used by on_progress_change for progress reporting
    format_progress = "|progress |download:{:6.2f}%| upload:{:6.2f}%|"

    def __init__(
        self,
        request=None,          # a request in the form (url, method, parameters_dictionary)
        credentials=None,      # credentials (see credentials class)
        on_data_cb=None,       # a function to execute when data arrive
        user_agent=None,       # a user agent string to use in request
        name=None,             # a name to distinguish the instance (defaults to str(id(instance))[-4:])
        allow_retries=True,    # allows instance to perform retries
        verbose=0,             # 0 for silent mode 1 to turn curl verbose on, 2 to turn curl debug mode on
        allow_redirects=False  # if True allows automatic redirects
            ):
        self._curl_options = DotDot()
        self._vars = DotDot({'last_progress': None})
        self._last_req = DotDot()
        self._state = DotDot()  # keeps state of retries etc.
        self.on_data = on_data_cb if on_data_cb else self.on_data_default
        self.handle = None
        self.response = Response()
        self.credentials = credentials
        self.request_headers = []
        self.verbose = verbose
        self.set_user_agent(user_agent)
        self.name = name
        self.allow_retries = allow_retries
        self._allow_redirects = allow_redirects
        if request:
            self.request(request[0], request[1], request[2])

    @property
    def name(self):
        """
        :returns: instance's name
        """
        return self._name

    @name.setter
    def name(self, name=None):
        # default name: last 4 digits of the instance's id
        self._name = name if name is not None else str(id(self))[-4:]

    @property
    def request_headers(self):
        """
        :returns: current request headers
        """
        return self._request_headers

    @request_headers.setter
    def request_headers(self, request_headers):
        """sets request headers
        :param list request_headers: request headers i.e:['Accept: text/html', 'Max-Forwards : 2']
        """
        self._request_headers = request_headers

    @property
    def credentials(self):
        """
        :returns: the :class:`Credentials` instance in use (or None)
        """
        return self._credentials

    @credentials.setter
    def credentials(self, credentials):
        '''a Credentials class instance'''
        self._credentials = credentials

    def _handle_init(self):  # @Todo any reason why we don't call it from __init__ ?
        """initializes pycurl handle, override for any special set up
        `for options details see <http://curl.haxx.se/libcurl/c/curl_easy_setopt.html>`_
        """
        self.handle = pycurl.Curl()
        if self._allow_redirects is True:
            self.handle.setopt(pycurl.FOLLOWLOCATION, True)
        self.handle.setopt(pycurl.USERAGENT, self.user_agent)
        self.handle.setopt(pycurl.ENCODING, 'deflate, gzip')
        self.handle.setopt(pycurl.HEADERFUNCTION, self.handle_on_headers)
        self.handle.setopt(pycurl.WRITEFUNCTION, self.handle_on_write)
        self.handle.setopt(pycurl.PROGRESSFUNCTION, self.on_progress)
        self.handle.setopt(pycurl.NOPROGRESS, 0 if self.verbose else 1)
        # self.handle.setopt(pycurl.NOSIGNAL, 1)
        # self.handle.setopt(pycurl.CONNECTTIMEOUT, 10)
        # self.handle.setopt(pycurl.TIMEOUT, 50)  # defaults to 300 ?
        self.curl_verbose = 1 if self.verbose > 0 else 0
        self.curl_noprogress = 1 if self.verbose < 1 else 0  # defaults to Not verbose
        if self.verbose == 2:
            self.handle.setopt(pycurl.DEBUGFUNCTION, self.handle_on_debug)
        self._handle_init_end()

    def _handle_init_end(self):
        """modify in descendants if additional initialization requirements"""

    def _raise(self, err_class, *args):
        """use this mechanism to raise critical exceptions
        useful to notify applications before raising the exception and maybe try a remedy in application level
        especially useful in a threading environment to notify main thread before raising
        it calls _on_exception and raises the exception only if it returns True
        """
        LOG.exception("exception {:s}{:s}".format(err_class, args))
        if self._on_exception(err_class, *args):
            raise err_class(*args)

    def _on_exception(self, err_class, *args):
        """descendants can specify any special handling
        """
        return True

    def handle_set(self, url, method, request_parms, multipart=False):  # multipart relevant only for POST
        """
        :param str url: url to be used by request
        :param str method: method to be used by request
        :param dict request_parms: request's parameters
        :param boolean multipart: defaults to False, specify True for a multipart request
        :raises: KeyError: if method is not one of GET POST or HEAD
        """
        self._last_req.parms = (url, method, request_parms, multipart)
        if self.handle is None:
            self._handle_init()
        headers = [i for i in self.request_headers]  # @Note add copy of standard headers
        if self.credentials is not None:
            # although not needed if authorization type is application
            # set it any way, so credentials can be reseted on the fly
            self._last_req.url_parsed = urlparse.urlparse(url)
            self._last_req.subdomain = self._last_req.url_parsed.netloc.split('.')[0]
            headers.append('Host: %s' % (self._last_req.url_parsed.netloc))
            headers.append(self.credentials.get_oath_header(url, method, {} if multipart else request_parms))
        if method == 'GET' or method == 'HEAD':
            tmp = urllib.urlencode(request_parms)
            tmp = "%s%s%s" % (url, "?" if tmp else '', tmp)
            self.handle.setopt(pycurl.URL, tmp)
            self.handle.setopt(pycurl.HTTPGET, 1)
        elif method == 'POST':
            self.handle.setopt(pycurl.URL, url)
            if multipart:
                self.handle.setopt(pycurl.HTTPPOST, list(request_parms.items()))
                self.handle.setopt(pycurl.CUSTOMREQUEST, "POST")
                # http://pycurl.cvs.sourceforge.net/pycurl/pycurl/tests/test_post2.py?view=markup
            else:
                self.handle.setopt(pycurl.POSTFIELDS, urllib.urlencode(request_parms))
                # no need to setopt(pycurl.POST, 1) POSTFIELDS sets it to POST anyway
                # headers.append("Content-Transfer-Encoding: base64") do we need it ?
        else:
            raise KeyError('method:[%s] is not supported' % method)
        self.handle.setopt(pycurl.HTTPHEADER, headers)

    def curl_set_option(self, option, value):
        '''used for general options like verbose, noprogress etc,
        we store values internally so we can query for option status
        `for options details see <http://curl.haxx.se/libcurl/c/curl_easy_setopt.html>`_
        '''
        if self.handle is not None:
            self.handle.setopt(option, value)
            self._curl_options[option] = value
            return value
        else:
            raise ErrorRq({'msg': 'pycurl handle has not been set'})

    def curl_get_option(self, option):
        """returns the locally cached value of a curl option (or None if never set)"""
        return self._curl_options.get(option)

    @property
    def curl_noprogress(self):
        return self._curl_options[pycurl.NOPROGRESS]

    @curl_noprogress.setter
    def curl_noprogress(self, zero_or_one):
        self.curl_set_option(pycurl.NOPROGRESS, zero_or_one)

    @property
    def curl_verbose(self):
        return self._curl_options[pycurl.VERBOSE]

    @curl_verbose.setter
    def curl_verbose(self, zero_or_one):
        self.curl_set_option(pycurl.VERBOSE, zero_or_one)

    @property
    def curl_low_speed(self):
        return (self._curl_options[pycurl.LOW_SPEED_LIMIT], self._curl_options[pycurl.LOW_SPEED_TIME])

    @curl_low_speed.setter
    def curl_low_speed(self, speed_time_tuple):
        """sets low speed parameters raises curl Error pycurl.E_OPERATION_TIMEDOUT (28) if limits exceeded
        useful for discovering network connection breaks
        `see libcurl <http://curl.haxx.se/libcurl/c/CURLOPT_LOW_SPEED_TIME.html>`_
        :Parameters:
            - speed_time_tuple (tuple) (limit bytes, seconds)
        """
        self.curl_set_option(pycurl.LOW_SPEED_LIMIT, speed_time_tuple[0])
        self.curl_set_option(pycurl.LOW_SPEED_TIME, speed_time_tuple[1])

    @property
    def request_abort(self):
        # tuple: (None,) when no abort requested, else (-1, reason_num, reason_msg)
        return self._request_abort

    def request_abort_set(self, reason_num=None, reason_msg=None):
        """Raise or reset _request_abort property
        if reason_num is not None aborts current request by returning -1 while
        on accepting data or headers
        effectively server sees an (104 Connection reset by peer) or (32 broken pipe)
        thats the only way to disconnect a connection
        its use makes more sense for streaming data connection
        :param int reason_num: None or an integer that defines the reason we want to abort current request
        :param str reason_msg: a string that describes the reason we want to abort current request
        :Usage: set it to a Not None value to abort current request
            main purpose is controlled exit from a streaming request
        """
        self._request_abort = (None,) if reason_num is None else (-1, reason_num, reason_msg)

    def on_progress(self, *args):
        """pycurl on_progress callback gives progress statistics"""
        sm = sum(args)
        if sm != self._vars.last_progress:
            self._vars.last_progress = sm
            self.on_progress_change(*args)
        return None  # all callbacks should return None - otherwise aborts

    def on_progress_change(self, download_t, download_d, upload_t, upload_d):
        """called by :func:`on_progress` if it senses a change in progress (to avoid endless progress reports)"""
        # NOTE(review): the guards test the transferred amounts rather than
        # the totals being divided by; looks fragile if a total is 0 while
        # the amount is non-zero - confirm against pycurl's callback contract.
        upload_perc = (upload_d / upload_t) * 100 if upload_d != 0 else 0
        download_perc = (download_d / download_t) * 100 if download_t != 0 and upload_d != 0 else 0
        if upload_perc + download_perc:
            print (self.format_progress.format(download_perc, upload_perc))
        return None

    def on_request_start(self):
        '''called when a request starts override in descendants as needed'''
        pass

    def on_request_end(self):
        '''called when a request ends override in descendants as needed'''
        pass

    def on_request_error_curl(self, err):
        """default error handling, for curl (connection) Errors override method for any special handling
        `see libcurl error codes <http://curl.haxx.se/libcurl/c/libcurl-errors.html>`_
        return True to auto retry request, raise an exception or return False to abort
        """
        if err[0] == pycurl.E_WRITE_ERROR and self._request_abort[0] is not None:  # 23
            return False  # normal termination requested by us
        raise ErrorRqCurl(err[0], err[1])

    def on_request_error_http(self, err):
        """default error handling, for HTTP Errors override method for any special handling
        return True to auto retry request, raise an exception or return False to abort
        """
        # NOTE(review): err already *is* the http status when called from
        # request(), so the exception carries the status twice - confirm
        # intent before changing.
        raise ErrorRqHttp(err, self.response.status_http)

    def request(self, url, method, parms={}, multipart=False):
        """
        .. Warning::
            - Currently we don't url-encode the url, clients should encode it if needed before making a call.
            - Response object returned is hot i.e a reference to client.response will be invalid
              after next request. Clients should copy it if they intend to reuse it in future.
        :param str url: requests' url
        :param str method: request method
        :param dict parms: parameters dictionary to pass to twitter
        :return: an instance of :class:`~.Response`
        :Raises: proper HTTP or pyCurl errors
        """
        parms = dict_encode(parms)
        self.on_request_start()
        self._state.retries_curl = 0
        self._state.retries_http = 0
        retry = True
        while retry:
            self._state.retries_curl += 1
            self._state.retries_http += 1
            retry = False
            self.request_abort_set(None)
            self.response.reset()
            self.handle_set(url, method, parms, multipart)
            # we must call handle_set it every time to get fresh credentials
            # (Out-of-sync timestamp in case we retry after long time)
            self._before_perform()
            try:
                self.handle.perform()
            except pycurl.error as err:
                self.response.err_curl = err
                retry = self.on_request_error_curl(err) if self.allow_retries else False
                # LOG.info("retry _SBOU =" + str(retry))
            finally:
                self.response.status_http = self.handle.getinfo(pycurl.HTTP_CODE)
                if self.response.status_http > 299:
                    if self.allow_retries:
                        retry = self.on_request_error_http(self.response.status_http)
                    else:
                        retry = False
        self.on_request_end()
        return self.response

    def _before_perform(self):
        # hook for descendants, called just before handle.perform()
        pass

    def del_request(self, url, method, parms={}, multipart=False):
        # NOTE(review): self._perform is not defined on this class (see also
        # request_repeat); calling this raises AttributeError unless a
        # subclass provides _perform - confirm before use.
        self.handle_set(url, method, parms, multipart)
        return self._perform()

    def request_repeat(self):
        """repeat last request, override in subclasses to yield cursor results by modifying parts of pycurl options"""
        return self._perform()

    def get(self, url, request_parms={}):
        """shortcut to a GET request"""
        return self.request(url, 'GET', request_parms)

    def post(self, url, request_parms={}):
        """shortcut to a POST request"""
        return self.request(url, 'POST', request_parms)

    def head(self, url, request_parms={}):
        """shortcut to a HEAD request"""
        return self.request(url, 'HEAD', request_parms)

    def set_user_agent(self, user_agent_str=None):
        """sets user agent header string
        :param str user_agent_str: user agent string defaults class name + version
        """
        if user_agent_str is None:
            user_agent_str = "%s v %s" % (self.__class__.__name__, __version__)
        self.user_agent = user_agent_str
        if self.handle:
            self.handle.setopt(pycurl.USERAGENT, self.user_agent)
        return user_agent_str

    def handle_on_headers(self, header_data):
        # first header is always the status line
        # last header_data is always a "\r\n"
        self.response.write_headers(header_data)
        if len(self.response.headers_raw) == 1:
            if self.response.status_provisional is not None:
                self._state.retries_curl = 0   # successful connection
                self._state.retries_extra = 0  # extra counter provision to be used by descendants
                if self.response.status_provisional < 300:
                    self._state.retries_http = 0  # successful http status
        return self._request_abort[0]  # disconnect if an abort

    def handle_on_write(self, data):
        """this must return None or number of bytes received else connection terminates"""
        self.response.data += data
        self.on_data(data)
        return None

    def on_data_default(self, data):
        """ default function to process data, i.e. return json.loads(data),
        override it or provide an on_data_cb function on init
        """
        pass

    def handle_on_ioctl(self, ioctl, cmd):
        # not implemented: no request type here needs ioctl callbacks
        raise NotImplementedError

    def handle_on_debug(self, msg_type, msg_str):
        """pyCurl's handle on debug call back"""
        if msg_type == pycurl.INFOTYPE_TEXT:
            pass
        elif msg_type == pycurl.INFOTYPE_HEADER_IN:
            LOG.debug("Header From Peer: %r" % msg_str)
        elif msg_type == pycurl.INFOTYPE_HEADER_OUT:
            LOG.debug("Header Sent to Peer: %r" % msg_str)
        elif msg_type == pycurl.INFOTYPE_DATA_IN:
            pass
        elif msg_type == pycurl.INFOTYPE_DATA_OUT:
            pass

    def handle_reset(self):
        """resets the underlying pycurl handle (if any)"""
        if self.handle:
            self.handle.reset()

    def handle_close(self):
        """closes and discards the underlying pycurl handle (if any)"""
        if hasattr(self, 'handle') and self.handle:
            self.handle.close()
            self.handle = None

    def __del__(self):
        self.handle_close()
class ClientStream(Client):
    """
    :param str data_separator: string used by server to separate data
    :param int stats_every: report statistics every n data packets (specify 0 to suppress stats)
    :param dict kwargs: any other argument(s) as specified in :class:`Client`
    """
    # format strings for printing statistics
    format_stream_stats = "|{name:8s}|{DHMS:12s}|{chunks:15,d}|{data:14,d}|{avg_per_sec:12,.2f}|"
    format_stream_stats_header = format_header(format_stream_stats)

    def __init__(self,
                 data_separator="\r\n",
                 stats_every=10000,  # output statistics every N data packets 0 or None disables
                 **kwargs):
        self.data_separator = data_separator
        self.data_separator_len = len(data_separator)
        self.stats_every = stats_every
        self.stream_started = False
        # NOTE(review): self.name is read here *before* Client.__init__ has
        # set _name via the name property - this looks like it would raise
        # AttributeError on construction; confirm against actual usage.
        self.counters = DotDot({'name': self.name[:4], 'chunks': 0,
                                'DHMS': '', 'avg_per_sec': 0,
                                'data': 0})
        super(ClientStream, self).__init__(**kwargs)

    def handle_on_write(self, data_chunk):
        '''data call back receives chunks of data from server and
        this must return None or number of bytes received else connection terminates
        '''
        # LOG.debug("counters.chunks= {:d} chunk [{:s}]".format(self.counters.chunks, data_chunk))
        # @Note:this piece of code is super critical for speed, since it is the main loop executed all the time
        # data comes in.
        # currently it uses string concatenation to amend data
        # tried it with a list buffer and join with very marginal efficiency improvements
        # when actual data/chunks ratio is close to 1.
        # Also cstringIO can't be used since it complicates things due to utf data handling
        # @Note:descented classes can check len(self.resp_buffer) to protect
        # from buffer overruns (not properly delimited streams)
        self.counters.chunks += 1
        self.resp_buffer += data_chunk
        if self.resp_buffer.endswith(self.data_separator):
            # a complete record arrived: strip the separator and dispatch it
            self.resp_buffer = self.resp_buffer[:-self.data_separator_len]
            if self.resp_buffer:  # @Note:ignore keep_alives strings ('')
                self.counters.data += 1
                self.on_data(self.resp_buffer)
            self.resp_buffer = ''
            if self.stats_every and self.counters.data % self.stats_every == 0:
                if self.stats_every == self.counters.data:
                    # first stats line: print the table header once
                    print (self.format_stream_stats_header)
                self.print_stats()
        return self._request_abort[0]

    def on_data_default(self, data):
        '''this is where actual data comes after data chunks cleansing,
        if you don't specify an on_data_cb function on init
        Override it in descendants for your use case or specify an on_data_cb function
        '''

    def _reset_counters(self, counters_dict):
        # zero the numeric counters, keep the textual 'name' and 'DHMS' fields
        for k in list(counters_dict.keys()):
            if k not in ['name', 'DHMS']:
                self.counters[k] = 0

    def on_request_start(self):
        self._reset_counters(self.counters)
        self.resp_buffer = ''  # for streams we don't output to response object for efficiency
        self.dt_start = datetime.utcnow()

    def _before_perform(self):
        # fresh buffer just before curl starts delivering chunks
        self.resp_buffer = ''

    def on_request_end(self):
        pass

    def time_since_start(self):
        """:returns: timedelta elapsed since the current request started"""
        return datetime.utcnow() - self.dt_start

    def stats_str(self):
        """
        :returns: a string containing operation(s) statistics
        """
        tmp = self.time_since_start().total_seconds()
        self.counters.avg_per_sec = (self.counters.data / tmp)
        self.counters.DHMS = seconds_to_DHMS(tmp)
        return self.format_stream_stats.format(**self.counters)

    def print_stats(self):
        """prints a string containing operation(s) statistics"""
        print (self.stats_str())
|
nickmilon/twtPyCurl
|
twtPyCurl/py/requests.py
|
Python
|
lgpl-3.0
| 29,234
|
from __future__ import print_function
import os
import sys
from argparse import ArgumentParser, SUPPRESS
from genomic_neuralnet.config import data
from subprocess import call
# Command-line interface shared by the genomic_neuralnet entry points.
_parser = ArgumentParser()
_parser.add_argument('-s', '--species', default='arabidopsis', choices=data.keys(), help='Species')
_parser.add_argument('-t', '--trait', default='flowering', help='Trait')
_parser.add_argument('--list', action='store_true', help='Print a list of traits for the chosen species and exit')
_parser.add_argument('-v', '--verbose', action='store_true', help='Print more information')
_parser.add_argument('-f', '--force', action='store_true', help='Force re-fitting of model')
_parser.add_argument('--dryrun', action='store_true', help='Run a single fit, without saving results')
_parser.add_argument('--stats', action='store_true', help='Print stats about dataset and exit')
_parser.add_argument('--gpu', action='store_true', help='Run on GPU if available')
_parser.add_argument('--gpux', action='store_true', help=SUPPRESS)  # Hidden argument for GPU parallelism.
_parser.add_argument('--time-stats', action='store_true', help='Print json timing stats during a dry run')
_parser.add_argument('--plot', action='store_true', help='Create many convergence plots during a dry run')
_parser.add_argument('--use-celery', action='store_true', help='Use celery backend')
_parser.add_argument('--celery-gpu', action='store_true', help='Tell celery to run GPU training.')
_parser.add_argument('--reuse-celery-cache', action='store_true', help='Pick up celery cache where it left off')
_parser.add_argument('--timing-size', default=None, choices=['small', 'large'], help='Shape of network for timing run.')
# Lazily-populated singleton holding the parsed arguments (see get_arguments).
_arguments = None
def get_arguments():
    """Parse and validate the command-line arguments (memoized).

    The first call parses sys.argv, handles the --list/--stats early-exit
    options, validates the trait name against the chosen species, may
    re-execute the process with GPU flags set, and checks --dryrun
    sub-options; subsequent calls return the cached result.
    """
    global _arguments
    if _arguments is None:
        _arguments = _parser.parse_args()
        _handle_list_option(_arguments)
        _handle_show_stats_option(_arguments)
        allowed_traits = data[_arguments.species].keys()
        if not _arguments.trait in allowed_traits:
            msg = 'Trait not found. Expected one in list: [{}].'
            print(msg.format(', '.join(allowed_traits)))
            exit()
        _maybe_set_parallel_args(_arguments)
        _check_dryrun_subcommands(_arguments)
    return _arguments
def _check_dryrun_subcommands(args):
    """Reject switches that are only meaningful together with --dryrun."""
    template = "You cannot use the {} switch in a normal run. " \
               "Use the '--dryrun' flag with this option."
    if not args.dryrun:
        if args.time_stats:
            _parser.error(template.format('--time-stats'))
        if args.plot:
            _parser.error(template.format('--plot'))
def _maybe_set_parallel_args(args):
    """
    You cannot add Theano gpu parallelism flags
    after importing Theano, and we don't know when
    it will get imported. This adds them to
    the environment and re-calls this same command/script.
    This provides a guarantee that we use the GPU
    parallelism option no matter when Theano is
    imported.
    """
    # This is a good guess to tell if the current process is
    # actually a slave worker.
    is_celery = 'celery_slave.py' in ' '.join(sys.argv)
    # Only the master process with --gpu (and not already re-executed with
    # --gpux) needs the environment set up.
    if (not is_celery) and args.gpu and (not args.gpux):
        # Set the GPU environment.
        os.environ['THEANO_FLAGS'] = 'floatX=float32,device=gpu,' \
                                     'lib.cnmem=0.9,nvcc.fastmath=True,' \
                                     'mode=FAST_RUN,blas.ldflags="-lblas -llapack"'
        # Re-execute this process with the new environment.
        # ``--gpux`` marks the child as already configured; exit() forwards
        # the child's return code as our own.
        exit(call([sys.executable] + sys.argv + ['--gpux']))
def _handle_list_option(args):
    """If --list was given, print the traits available for the species and exit."""
    if not args.list:
        return
    species, _ = get_species_and_trait()
    print(' '.join(data[species].keys()))
    exit()
def _handle_show_stats_option(args):
    """If --stats was given, print the dataset's dimensions and exit.

    (The previous docstring was the truncated placeholder ``Should .``.)
    """
    if not args.stats:
        return
    species, trait = get_species_and_trait()
    definition = data[species][trait]
    markers = len(definition.markers)
    samples = len(definition.pheno)
    print('{} {} = {} markers X {} samples'.format(species, trait, markers, samples))
    exit()
def get_timing_size():
    """Return the requested timing-network size ('small', 'large', or None)."""
    args = get_arguments()
    # --timing-size only makes sense while collecting timing stats.
    if isinstance(args.timing_size, str) and not args.time_stats:
        _parser.error('Must use timing stats option when setting network size.')
    return args.timing_size
def get_celery_gpu():
    """Whether celery workers should run GPU training."""
    args = get_arguments()
    # GPU training through celery requires the celery backend itself.
    if args.celery_gpu and not args.use_celery:
        _parser.error('Must use celery backend when using celery gpu training option.')
    return args.celery_gpu
def get_reuse_celery_cache():
    """ Should we resume from the existing celery cache. """
    args = get_arguments()
    # Reusing the cache only makes sense when the celery backend is active.
    if args.reuse_celery_cache and (not args.use_celery):
        msg = 'Must use celery backend when using celery cache option.'
        _parser.error(msg)
    return args.reuse_celery_cache
def get_use_celery():
    """Whether to dispatch training through the celery backend."""
    return get_arguments().use_celery
def get_should_force():
    """Whether to force re-training of the model (--force)."""
    return get_arguments().force
def get_is_time_stats():
    """Whether to print JSON timing statistics for the run."""
    args = get_arguments()
    # Timing stats require a chosen network size.
    if args.time_stats and not isinstance(args.timing_size, str):
        _parser.error('Must specify timing_size (small, large) option when collecting timing stats.')
    return args.time_stats
def get_should_plot():
    """Whether to create convergence plots during a dry run (--plot)."""
    return get_arguments().plot
def get_is_on_gpu():
    """
    True when the process was re-executed with --gpux, i.e. the GPU
    environment flags are already in place.
    """
    return get_arguments().gpux
def get_is_dryrun():
    """
    True for a dry run: train one model with one set of params
    and do not save the results.
    """
    return get_arguments().dryrun
def get_markers_and_pheno(species, trait):
    """Return (markers, pheno) for the dataset identified by species/trait.

    Species and trait are explicit parameters so worker processes can
    call this without re-parsing the command line.
    """
    definition = data[species][trait]
    return definition.markers, definition.pheno
def get_species_and_trait():
    """Return the (species, trait) pair chosen on the command line."""
    parsed = get_arguments()
    return parsed.species, parsed.trait
def get_verbose():
    """Whether --verbose was passed on the command line."""
    return get_arguments().verbose
|
rileymcdowell/genomic-neuralnet
|
genomic_neuralnet/util/param_parser.py
|
Python
|
mit
| 6,606
|
def propertycached(fn):
    """Like @property, but computes ``fn(self)`` at most once per instance.

    The value is memoized on the instance under ``_cached_<name>`` and
    returned directly on subsequent accesses. Unlike the previous version,
    the wrapped function's docstring is preserved on the resulting property
    (``property(..., doc=fn.__doc__)``).
    """
    attr_name = "_cached_" + fn.__name__
    def _propertycached(self):
        # Compute and store on first access only.
        if not hasattr(self, attr_name):
            setattr(self, attr_name, fn(self))
        return getattr(self, attr_name)
    return property(_propertycached, doc=fn.__doc__)
|
koyadovic/Dia
|
predictive/systems/statistical/analysis/tools/property.py
|
Python
|
gpl-2.0
| 271
|
#!/usr/bin/env python
import os
import sys
# Django management entry point: forwards CLI arguments to django-admin.
if __name__ == "__main__":
    # Fall back to the project's local settings unless already configured.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pontocerto.settings.local")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
willemarcel/pontocerto
|
pontocerto/manage.py
|
Python
|
agpl-3.0
| 259
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import logging
import numbers
from google.appengine.api import taskqueue
from google.appengine.ext import ndb
from dashboard.pinpoint import mann_whitney_u
from dashboard.pinpoint.models import attempt as attempt_module
from dashboard.pinpoint.models import change as change_module
from dashboard.pinpoint.models import quest as quest_module
# We want this to be fast to minimize overhead while waiting for tasks to
# finish, but don't want to consume too many resources.
_TASK_INTERVAL = 10
# Default number of Attempts automatically run per Change.
_DEFAULT_MAX_ATTEMPTS = 2
# Mann-Whitney U p-value threshold below which two samples are "different".
_SIGNIFICANCE_LEVEL = 0.5
# Comparison outcomes returned by _JobState._Compare / _CompareResults.
_DIFFERENT = 'different'
_PENDING = 'pending'
_SAME = 'same'
_UNKNOWN = 'unknown'
def JobFromId(job_id):
    """Get a Job object from its ID. Its ID is just its urlsafe Datastore key.

    Users of Job should not have to import ndb; this keeps the Datastore
    details behind a single helper.
    """
    return ndb.Key(urlsafe=job_id).get()
class Job(ndb.Model):
    """A Pinpoint job."""
    created = ndb.DateTimeProperty(required=True, auto_now_add=True)
    updated = ndb.DateTimeProperty(required=True, auto_now=True)
    # The name of the Task Queue task this job is running on. If it's not present,
    # the job isn't running.
    task = ndb.StringProperty()
    # Request parameters.
    configuration = ndb.StringProperty(required=True)
    test_suite = ndb.StringProperty()
    test = ndb.StringProperty()
    metric = ndb.StringProperty()
    # If True, the service should pick additional Changes to run (bisect).
    # If False, only run the Changes explicitly added by the user.
    auto_explore = ndb.BooleanProperty(required=True)
    # Pickled _JobState holding the quests, Changes and Attempts.
    state = ndb.PickleProperty(required=True)
    @classmethod
    def New(cls, configuration, test_suite, test, metric, auto_explore):
        """Alternate constructor: builds the quest list and the initial state."""
        # Get list of quests.
        quests = [quest_module.FindIsolated(configuration)]
        if test_suite:
            quests.append(quest_module.RunTest(configuration, test_suite, test))
        if metric:
            quests.append(quest_module.ReadValue(metric))
        # Create job.
        return cls(
            configuration=configuration,
            test_suite=test_suite,
            test=test,
            metric=metric,
            auto_explore=auto_explore,
            state=_JobState(quests, _DEFAULT_MAX_ATTEMPTS))
    @property
    def job_id(self):
        # A Job's id is its urlsafe Datastore key (see JobFromId).
        return self.key.urlsafe()
    @property
    def running(self):
        # A job is "running" exactly when it has a Task Queue task name.
        return bool(self.task)
    def AddChange(self, change):
        """Add a Change to try; delegates to the pickled _JobState."""
        self.state.AddChange(change)
    def Start(self):
        """Enqueue the next Task Queue tick for this job."""
        task = taskqueue.add(queue_name='job-queue', url='/run/' + self.job_id,
                             countdown=_TASK_INTERVAL)
        self.task = task.name
    def Run(self):
        """Run one iteration: optionally bisect, schedule work, re-enqueue."""
        if self.auto_explore:
            self.state.Explore()
        work_left = self.state.ScheduleWork()
        # Schedule moar task.
        if work_left:
            self.Start()
        else:
            # Clearing the task name marks the job as completed.
            self.task = None
    def AsDict(self):
        """Return a JSON-serializable summary of this job for the UI/API."""
        if self.running:
            status = 'RUNNING'
        else:
            status = 'COMPLETED'
        return {
            'job_id': self.job_id,
            'configuration': self.configuration,
            'test_suite': self.test_suite,
            'test': self.test,
            'metric': self.metric,
            'auto_explore': self.auto_explore,
            'created': self.created.strftime('%Y-%m-%d %H:%M:%S %Z'),
            'updated': self.updated.strftime('%Y-%m-%d %H:%M:%S %Z'),
            'status': status,
            'state': self.state.AsDict(),
        }
class _JobState(object):
    """The internal state of a Job.
    Wrapping the entire internal state of a Job in a PickleProperty allows us to
    use regular Python objects, with constructors, dicts, and object references.
    We lose the ability to index and query the fields, but it's all internal
    anyway. Everything queryable should be on the Job object.
    """
    def __init__(self, quests, max_attempts):
        """Create a _JobState.
        Args:
          quests: A sequence of quests to run on each Change.
          max_attempts: The max number of attempts to automatically run per Change.
        """
        # _quests is mutable. Any modification should mutate the existing list
        # in-place rather than assign a new list, because every Attempt references
        # this object and will be updated automatically if it's mutated.
        self._quests = list(quests)
        # _changes can be in arbitrary order. Client should not assume that the
        # list of Changes is sorted in any particular order.
        self._changes = []
        # A mapping from a Change to a list of Attempts on that Change.
        self._attempts = {}
        self._max_attempts = max_attempts
    def AddAttempt(self, change):
        """Append one more Attempt for a Change already known to this job."""
        assert change in self._attempts
        self._attempts[change].append(attempt_module.Attempt(self._quests, change))
    def AddChange(self, change, index=None):
        """Add a Change (optionally inserted at a bisection index) and seed
        it with a first Attempt."""
        # NOTE(review): ``if index:`` treats index 0 the same as None (append).
        # Explore() only ever passes index >= 1, so this is latent — confirm
        # before any other caller passes 0.
        if index:
            self._changes.insert(index, change)
        else:
            self._changes.append(change)
        self._attempts[change] = []
        self.AddAttempt(change)
    def Explore(self):
        """Compare Changes and bisect by adding additional Changes as needed.
        For every pair of adjacent Changes, compare their results as probability
        distributions. If more information is needed to establish statistical
        confidence, add an additional Attempt. If the results are different, find
        the midpoint of the Changes and add it to the Job.
        The midpoint can only be added if the second Change represents a commit that
        comes after the first Change. Otherwise, this method won't explore further.
        For example, if Change A is repo@abc, and Change B is repo@abc + patch,
        there's no way to pick additional Changes to try.
        """
        # Compare every pair of Changes.
        # TODO: The list may Change while iterating through it.
        for index in xrange(1, len(self._changes)):
            change_a = self._changes[index - 1]
            change_b = self._changes[index]
            comparison_result = self._Compare(change_a, change_b)
            if comparison_result == _DIFFERENT:
                # Different: Bisect and add an additional Change to the job.
                try:
                    midpoint = change_module.Change.Midpoint(change_a, change_b)
                except change_module.NonLinearError:
                    midpoint = None
                if midpoint:
                    logging.info('Adding Change %s.', midpoint)
                    self.AddChange(midpoint, index)
            elif comparison_result == _SAME:
                # The same: Do nothing.
                continue
            elif comparison_result == _UNKNOWN:
                # Unknown: Add an Attempt to the Change with the fewest Attempts.
                change = min(change_a, change_b, key=lambda c: len(self._attempts[c]))
                self.AddAttempt(change)
    def ScheduleWork(self):
        """Schedule every incomplete Attempt; return True if any work remains."""
        work_left = False
        for attempts in self._attempts.itervalues():
            for attempt in attempts:
                if attempt.completed:
                    continue
                attempt.ScheduleWork()
                work_left = True
        return work_left
    def AsDict(self):
        """Serialize quests, changes, pairwise comparisons and result values."""
        comparisons = []
        for index in xrange(1, len(self._changes)):
            change_a = self._changes[index - 1]
            change_b = self._changes[index]
            comparisons.append(self._Compare(change_a, change_b))
        # result_values is a 3D array. result_values[change][quest] is a list of
        # all the result values for that Change and Quest.
        result_values = []
        for change in self._changes:
            change_result_values = []
            change_results_per_quest = _CombineResultsPerQuest(self._attempts[change])
            for quest in self._quests:
                change_result_values.append(map(str, change_results_per_quest[quest]))
            result_values.append(change_result_values)
        return {
            'quests': map(str, self._quests),
            'changes': map(str, self._changes),
            'comparisons': comparisons,
            'result_values': result_values,
        }
    def _Compare(self, change_a, change_b):
        """Return _PENDING, _DIFFERENT, _SAME or _UNKNOWN for two Changes."""
        attempts_a = self._attempts[change_a]
        attempts_b = self._attempts[change_b]
        if any(not attempt.completed for attempt in attempts_a + attempts_b):
            return _PENDING
        results_a = _CombineResultsPerQuest(attempts_a)
        results_b = _CombineResultsPerQuest(attempts_b)
        if any(_CompareResults(results_a[quest], results_b[quest]) == _DIFFERENT
               for quest in self._quests):
            return _DIFFERENT
        # Here, "the same" means that we fail to reject the null hypothesis. We can
        # never be completely sure that the two Changes have the same results, but
        # we've run everything that we planned to, and didn't detect any difference.
        if (len(attempts_a) >= self._max_attempts and
                len(attempts_b) >= self._max_attempts):
            return _SAME
        return _UNKNOWN
def _CombineResultsPerQuest(attempts):
aggregate_results = collections.defaultdict(list)
for attempt in attempts:
if not attempt.completed:
continue
for quest, results in attempt.result_values.iteritems():
aggregate_results[quest] += results
return aggregate_results
def _CompareResults(results_a, results_b):
    """Mann-Whitney U test on two result samples; _DIFFERENT or _UNKNOWN."""
    # With an empty sample on either side there is nothing to test.
    if not results_a or not results_b:
        return _UNKNOWN
    samples_a = map(_ConvertToNumber, results_a)
    samples_b = map(_ConvertToNumber, results_b)
    try:
        p_value = mann_whitney_u.MannWhitneyU(samples_a, samples_b)
    except ValueError:
        return _UNKNOWN
    return _DIFFERENT if p_value < _SIGNIFICANCE_LEVEL else _UNKNOWN
def _ConvertToNumber(obj):
# We want the results_values to provide both a message that can be shown to
# the user for why something failed, and also something comparable that can
# be used for bisect. Therefore, they contain the thrown Exceptions. This
# function then converts them into comparable numbers for bisect.
if isinstance(obj, numbers.Number):
return obj
elif isinstance(obj, Exception):
return hash(obj.__class__)
else:
return hash(obj)
|
sahiljain/catapult
|
dashboard/dashboard/pinpoint/models/job.py
|
Python
|
bsd-3-clause
| 9,842
|
from numba import cuda, int32, float64, void
from numba.core.errors import TypingError
from numba.cuda.testing import unittest, CUDATestCase, skip_on_cudasim
import numpy as np
from numba.np import numpy_support as nps
from .extensions_usecases import test_struct_model_type, TestStruct
# Record dtype with a scalar int field and a 3x2 float32 sub-array; used by
# the tests below to exercise shared memory with structured (record) arrays.
recordwith2darray = np.dtype([('i', np.int32),
                              ('j', np.float32, (3, 2))])
class TestSharedMemoryIssue(CUDATestCase):
    """Regression tests for previously-reported shared memory bugs."""
    def test_issue_953_sm_linkage_conflict(self):
        # Declaring shared arrays in both a device function and its caller
        # must not produce a linkage conflict (issue #953).
        @cuda.jit(device=True)
        def inner():
            inner_arr = cuda.shared.array(1, dtype=int32)  # noqa: F841
        @cuda.jit
        def outer():
            outer_arr = cuda.shared.array(1, dtype=int32)  # noqa: F841
            inner()
        outer[1, 1]()
    def _check_shared_array_size(self, shape, expected):
        # Helper: a kernel reports the .size of a shared array of `shape`.
        @cuda.jit
        def s(a):
            arr = cuda.shared.array(shape, dtype=int32)
            a[0] = arr.size
        result = np.zeros(1, dtype=np.int32)
        s[1, 1](result)
        self.assertEqual(result[0], expected)
    def test_issue_1051_shared_size_broken_1d(self):
        self._check_shared_array_size(2, 2)
    def test_issue_1051_shared_size_broken_2d(self):
        self._check_shared_array_size((2, 3), 6)
    def test_issue_1051_shared_size_broken_3d(self):
        self._check_shared_array_size((2, 3, 4), 24)
    def test_issue_2393(self):
        """
        Test issue of warp misalign address due to nvvm not knowing the
        alignment(? but it should have taken the natural alignment of the type)
        """
        num_weights = 2
        num_blocks = 48
        examples_per_block = 4
        threads_per_block = 1
        @cuda.jit
        def costs_func(d_block_costs):
            s_features = cuda.shared.array((examples_per_block, num_weights),
                                           float64)
            s_initialcost = cuda.shared.array(7, float64)  # Bug
            threadIdx = cuda.threadIdx.x
            prediction = 0
            for j in range(num_weights):
                prediction += s_features[threadIdx, j]
            d_block_costs[0] = s_initialcost[0] + prediction
        block_costs = np.zeros(num_blocks, dtype=np.float64)
        d_block_costs = cuda.to_device(block_costs)
        costs_func[num_blocks, threads_per_block](d_block_costs)
        cuda.synchronize()
class TestSharedMemory(CUDATestCase):
    """Tests of static and dynamic shared memory arrays, including slicing."""
    def _test_shared(self, arr):
        # Use a kernel that copies via shared memory to check loading and
        # storing different dtypes with shared memory. All threads in a block
        # collaborate to load in values, then the output values are written
        # only by the first thread in the block after synchronization.
        nelem = len(arr)
        nthreads = 16
        nblocks = int(nelem / nthreads)
        dt = nps.from_dtype(arr.dtype)
        @cuda.jit
        def use_sm_chunk_copy(x, y):
            sm = cuda.shared.array(nthreads, dtype=dt)
            tx = cuda.threadIdx.x
            bx = cuda.blockIdx.x
            bd = cuda.blockDim.x
            # Load this block's chunk into shared
            i = bx * bd + tx
            if i < len(x):
                sm[tx] = x[i]
            cuda.syncthreads()
            # One thread per block writes this block's chunk
            if tx == 0:
                for j in range(nthreads):
                    y[bd * bx + j] = sm[j]
        d_result = cuda.device_array_like(arr)
        use_sm_chunk_copy[nblocks, nthreads](arr, d_result)
        host_result = d_result.copy_to_host()
        np.testing.assert_array_equal(arr, host_result)
    def test_shared_recarray(self):
        # Round-trip a structured (record) array through shared memory.
        arr = np.recarray(128, dtype=recordwith2darray)
        for x in range(len(arr)):
            arr[x].i = x
            j = np.arange(3 * 2, dtype=np.float32)
            arr[x].j = j.reshape(3, 2) * x
        self._test_shared(arr)
    def test_shared_bool(self):
        # Round-trip a boolean array through shared memory.
        arr = np.random.randint(2, size=(1024,), dtype=np.bool_)
        self._test_shared(arr)
    def _test_dynshared_slice(self, func, arr, expected):
        # Check that slices of shared memory are correct
        # (See Bug #5073 - prior to the addition of these tests and
        # corresponding fix, slices of dynamic shared arrays all aliased each
        # other)
        nshared = arr.size * arr.dtype.itemsize
        func[1, 1, 0, nshared](arr)
        np.testing.assert_array_equal(expected, arr)
    def test_dynshared_slice_write(self):
        # Test writing values into disjoint slices of dynamic shared memory
        @cuda.jit
        def slice_write(x):
            dynsmem = cuda.shared.array(0, dtype=int32)
            sm1 = dynsmem[0:1]
            sm2 = dynsmem[1:2]
            sm1[0] = 1
            sm2[0] = 2
            x[0] = dynsmem[0]
            x[1] = dynsmem[1]
        arr = np.zeros(2, dtype=np.int32)
        expected = np.array([1, 2], dtype=np.int32)
        self._test_dynshared_slice(slice_write, arr, expected)
    def test_dynshared_slice_read(self):
        # Test reading values from disjoint slices of dynamic shared memory
        @cuda.jit
        def slice_read(x):
            dynsmem = cuda.shared.array(0, dtype=int32)
            sm1 = dynsmem[0:1]
            sm2 = dynsmem[1:2]
            dynsmem[0] = 1
            dynsmem[1] = 2
            x[0] = sm1[0]
            x[1] = sm2[0]
        arr = np.zeros(2, dtype=np.int32)
        expected = np.array([1, 2], dtype=np.int32)
        self._test_dynshared_slice(slice_read, arr, expected)
    def test_dynshared_slice_diff_sizes(self):
        # Test reading values from disjoint slices of dynamic shared memory
        # with different sizes
        @cuda.jit
        def slice_diff_sizes(x):
            dynsmem = cuda.shared.array(0, dtype=int32)
            sm1 = dynsmem[0:1]
            sm2 = dynsmem[1:3]
            dynsmem[0] = 1
            dynsmem[1] = 2
            dynsmem[2] = 3
            x[0] = sm1[0]
            x[1] = sm2[0]
            x[2] = sm2[1]
        arr = np.zeros(3, dtype=np.int32)
        expected = np.array([1, 2, 3], dtype=np.int32)
        self._test_dynshared_slice(slice_diff_sizes, arr, expected)
    def test_dynshared_slice_overlap(self):
        # Test reading values from overlapping slices of dynamic shared memory
        @cuda.jit
        def slice_overlap(x):
            dynsmem = cuda.shared.array(0, dtype=int32)
            sm1 = dynsmem[0:2]
            sm2 = dynsmem[1:4]
            dynsmem[0] = 1
            dynsmem[1] = 2
            dynsmem[2] = 3
            dynsmem[3] = 4
            x[0] = sm1[0]
            x[1] = sm1[1]
            x[2] = sm2[0]
            x[3] = sm2[1]
            x[4] = sm2[2]
        arr = np.zeros(5, dtype=np.int32)
        expected = np.array([1, 2, 2, 3, 4], dtype=np.int32)
        self._test_dynshared_slice(slice_overlap, arr, expected)
    def test_dynshared_slice_gaps(self):
        # Test writing values to slices of dynamic shared memory doesn't write
        # outside the slice
        @cuda.jit
        def slice_gaps(x):
            dynsmem = cuda.shared.array(0, dtype=int32)
            sm1 = dynsmem[1:3]
            sm2 = dynsmem[4:6]
            # Initial values for dynamic shared memory, some to be overwritten
            dynsmem[0] = 99
            dynsmem[1] = 99
            dynsmem[2] = 99
            dynsmem[3] = 99
            dynsmem[4] = 99
            dynsmem[5] = 99
            dynsmem[6] = 99
            sm1[0] = 1
            sm1[1] = 2
            sm2[0] = 3
            sm2[1] = 4
            x[0] = dynsmem[0]
            x[1] = dynsmem[1]
            x[2] = dynsmem[2]
            x[3] = dynsmem[3]
            x[4] = dynsmem[4]
            x[5] = dynsmem[5]
            x[6] = dynsmem[6]
        arr = np.zeros(7, dtype=np.int32)
        expected = np.array([99, 1, 2, 99, 3, 4, 99], dtype=np.int32)
        self._test_dynshared_slice(slice_gaps, arr, expected)
    def test_dynshared_slice_write_backwards(self):
        # Test writing values into disjoint slices of dynamic shared memory
        # with negative steps
        @cuda.jit
        def slice_write_backwards(x):
            dynsmem = cuda.shared.array(0, dtype=int32)
            sm1 = dynsmem[1::-1]
            sm2 = dynsmem[3:1:-1]
            sm1[0] = 1
            sm1[1] = 2
            sm2[0] = 3
            sm2[1] = 4
            x[0] = dynsmem[0]
            x[1] = dynsmem[1]
            x[2] = dynsmem[2]
            x[3] = dynsmem[3]
        arr = np.zeros(4, dtype=np.int32)
        expected = np.array([2, 1, 4, 3], dtype=np.int32)
        self._test_dynshared_slice(slice_write_backwards, arr, expected)
    def test_dynshared_slice_nonunit_stride(self):
        # Test writing values into slice of dynamic shared memory with
        # non-unit stride
        @cuda.jit
        def slice_nonunit_stride(x):
            dynsmem = cuda.shared.array(0, dtype=int32)
            sm1 = dynsmem[::2]
            # Initial values for dynamic shared memory, some to be overwritten
            dynsmem[0] = 99
            dynsmem[1] = 99
            dynsmem[2] = 99
            dynsmem[3] = 99
            dynsmem[4] = 99
            dynsmem[5] = 99
            sm1[0] = 1
            sm1[1] = 2
            sm1[2] = 3
            x[0] = dynsmem[0]
            x[1] = dynsmem[1]
            x[2] = dynsmem[2]
            x[3] = dynsmem[3]
            x[4] = dynsmem[4]
            x[5] = dynsmem[5]
        arr = np.zeros(6, dtype=np.int32)
        expected = np.array([1, 99, 2, 99, 3, 99], dtype=np.int32)
        self._test_dynshared_slice(slice_nonunit_stride, arr, expected)
    def test_dynshared_slice_nonunit_reverse_stride(self):
        # Test writing values into slice of dynamic shared memory with
        # reverse non-unit stride
        @cuda.jit
        def slice_nonunit_reverse_stride(x):
            dynsmem = cuda.shared.array(0, dtype=int32)
            sm1 = dynsmem[-1::-2]
            # Initial values for dynamic shared memory, some to be overwritten
            dynsmem[0] = 99
            dynsmem[1] = 99
            dynsmem[2] = 99
            dynsmem[3] = 99
            dynsmem[4] = 99
            dynsmem[5] = 99
            sm1[0] = 1
            sm1[1] = 2
            sm1[2] = 3
            x[0] = dynsmem[0]
            x[1] = dynsmem[1]
            x[2] = dynsmem[2]
            x[3] = dynsmem[3]
            x[4] = dynsmem[4]
            x[5] = dynsmem[5]
        arr = np.zeros(6, dtype=np.int32)
        expected = np.array([99, 3, 99, 2, 99, 1], dtype=np.int32)
        self._test_dynshared_slice(slice_nonunit_reverse_stride, arr, expected)
    def test_issue_5073(self):
        # An example with which Bug #5073 (slices of dynamic shared memory all
        # alias) was discovered. The kernel uses all threads in the block to
        # load values into slices of dynamic shared memory. One thread per
        # block then writes the loaded values back to a global array after
        # syncthreads().
        arr = np.arange(1024)
        nelem = len(arr)
        nthreads = 16
        nblocks = int(nelem / nthreads)
        dt = nps.from_dtype(arr.dtype)
        nshared = nthreads * arr.dtype.itemsize
        chunksize = int(nthreads / 2)
        @cuda.jit
        def sm_slice_copy(x, y, chunksize):
            dynsmem = cuda.shared.array(0, dtype=dt)
            sm1 = dynsmem[0:chunksize]
            sm2 = dynsmem[chunksize:chunksize * 2]
            tx = cuda.threadIdx.x
            bx = cuda.blockIdx.x
            bd = cuda.blockDim.x
            # load this block's chunk into shared
            i = bx * bd + tx
            if i < len(x):
                if tx < chunksize:
                    sm1[tx] = x[i]
                else:
                    sm2[tx - chunksize] = x[i]
            cuda.syncthreads()
            # one thread per block writes this block's chunk
            if tx == 0:
                for j in range(chunksize):
                    y[bd * bx + j] = sm1[j]
                    y[bd * bx + j + chunksize] = sm2[j]
        d_result = cuda.device_array_like(arr)
        sm_slice_copy[nblocks, nthreads, 0, nshared](arr, d_result, chunksize)
        host_result = d_result.copy_to_host()
        np.testing.assert_array_equal(arr, host_result)
    @skip_on_cudasim("Can't check typing in simulator")
    def test_invalid_array_type(self):
        # Unsupported dtypes must fail typing with a clear message.
        rgx = ".*Cannot infer the type of variable 'arr'.*"
        def unsupported_type():
            arr = cuda.shared.array(10, dtype=np.dtype('O'))  # noqa: F841
        with self.assertRaisesRegex(TypingError, rgx):
            cuda.jit(void())(unsupported_type)
        rgx = ".*Invalid NumPy dtype specified: 'int33'.*"
        def invalid_string_type():
            arr = cuda.shared.array(10, dtype='int33')  # noqa: F841
        with self.assertRaisesRegex(TypingError, rgx):
            cuda.jit(void())(invalid_string_type)
    @skip_on_cudasim("Struct model array unsupported in simulator")
    def test_struct_model_type_static(self):
        nthreads = 64
        @cuda.jit(void(int32[::1], int32[::1]))
        def write_then_reverse_read_static(outx, outy):
            # Test creation
            arr = cuda.shared.array(nthreads, dtype=test_struct_model_type)
            i = cuda.grid(1)
            ri = nthreads - i - 1
            if i < len(outx) and i < len(outy):
                # Test set to arr
                obj = TestStruct(int32(i), int32(i * 2))
                arr[i] = obj
                cuda.syncthreads()
                # Test get from arr
                outx[i] = arr[ri].x
                outy[i] = arr[ri].y
        arrx = np.zeros((nthreads,), dtype="int32")
        arry = np.zeros((nthreads,), dtype="int32")
        write_then_reverse_read_static[1, nthreads](arrx, arry)
        for i, x in enumerate(arrx):
            self.assertEqual(x, nthreads - i - 1)
        for i, y in enumerate(arry):
            self.assertEqual(y, (nthreads - i - 1) * 2)
# Allow running this test module directly (outside the test runner).
if __name__ == '__main__':
    unittest.main()
|
IntelLabs/numba
|
numba/cuda/tests/cudapy/test_sm.py
|
Python
|
bsd-2-clause
| 14,079
|
"""Implementation of the Range type and adaptation
"""
# psycopg/_range.py - Implementation of the Range type and adaptation
#
# Copyright (C) 2012 Daniele Varrazzo <daniele.varrazzo@gmail.com>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
import re
from psycopg2._psycopg import ProgrammingError, InterfaceError
from psycopg2.extensions import ISQLQuote, adapt, register_adapter
from psycopg2.extensions import new_type, new_array_type, register_type
class Range(object):
    """Python representation for a PostgreSQL |range|_ type.
    :param lower: lower bound for the range. `!None` means unbound
    :param upper: upper bound for the range. `!None` means unbound
    :param bounds: one of the literal strings ``()``, ``[)``, ``(]``, ``[]``,
        representing whether the lower or upper bounds are included
    :param empty: if `!True`, the range is empty
    """
    # Fixed slots keep instances small; __getstate__/__setstate__ below make
    # the slotted class picklable.
    __slots__ = ('_lower', '_upper', '_bounds')
    def __init__(self, lower=None, upper=None, bounds='[)', empty=False):
        if not empty:
            if bounds not in ('[)', '(]', '()', '[]'):
                raise ValueError("bound flags not valid: %r" % bounds)
            self._lower = lower
            self._upper = upper
            self._bounds = bounds
        else:
            # The empty range is encoded with all three slots set to None.
            self._lower = self._upper = self._bounds = None
    def __repr__(self):
        if self._bounds is None:
            return "%s(empty=True)" % self.__class__.__name__
        else:
            return "%s(%r, %r, %r)" % (self.__class__.__name__,
                                       self._lower, self._upper, self._bounds)
    @property
    def lower(self):
        """The lower bound of the range. `!None` if empty or unbound."""
        return self._lower
    @property
    def upper(self):
        """The upper bound of the range. `!None` if empty or unbound."""
        return self._upper
    @property
    def isempty(self):
        """`!True` if the range is empty."""
        return self._bounds is None
    @property
    def lower_inf(self):
        """`!True` if the range doesn't have a lower bound."""
        if self._bounds is None:
            return False
        return self._lower is None
    @property
    def upper_inf(self):
        """`!True` if the range doesn't have an upper bound."""
        if self._bounds is None:
            return False
        return self._upper is None
    @property
    def lower_inc(self):
        """`!True` if the lower bound is included in the range."""
        if self._bounds is None or self._lower is None:
            return False
        return self._bounds[0] == '['
    @property
    def upper_inc(self):
        """`!True` if the upper bound is included in the range."""
        if self._bounds is None or self._upper is None:
            return False
        return self._bounds[1] == ']'
    def __contains__(self, x):
        # The empty range contains nothing.
        if self._bounds is None:
            return False
        if self._lower is not None:
            if self._bounds[0] == '[':
                # Inclusive lower bound.
                if x < self._lower:
                    return False
            else:
                # Exclusive lower bound.
                if x <= self._lower:
                    return False
        if self._upper is not None:
            if self._bounds[1] == ']':
                # Inclusive upper bound.
                if x > self._upper:
                    return False
            else:
                # Exclusive upper bound.
                if x >= self._upper:
                    return False
        return True
    def __nonzero__(self):
        # Python 2 truth protocol: the empty range is falsy.
        return self._bounds is not None
    def __eq__(self, other):
        if not isinstance(other, Range):
            return False
        return (self._lower == other._lower
            and self._upper == other._upper
            and self._bounds == other._bounds)
    def __ne__(self, other):
        return not self.__eq__(other)
    def __hash__(self):
        return hash((self._lower, self._upper, self._bounds))
    # as the postgres docs describe for the server-side stuff,
    # ordering is rather arbitrary, but will remain stable
    # and consistent.
    def __lt__(self, other):
        if not isinstance(other, Range):
            return NotImplemented
        # Lexicographic comparison over the slots; None sorts first.
        for attr in ('_lower', '_upper', '_bounds'):
            self_value = getattr(self, attr)
            other_value = getattr(other, attr)
            if self_value == other_value:
                pass
            elif self_value is None:
                return True
            elif other_value is None:
                return False
            else:
                return self_value < other_value
        return False
    def __le__(self, other):
        if self == other:
            return True
        else:
            return self.__lt__(other)
    def __gt__(self, other):
        # Delegate to the mirrored __lt__ to keep the ordering consistent.
        if isinstance(other, Range):
            return other.__lt__(self)
        else:
            return NotImplemented
    def __ge__(self, other):
        if self == other:
            return True
        else:
            return self.__gt__(other)
    def __getstate__(self):
        # __slots__ classes have no __dict__, so pickling is done explicitly.
        return dict(
            (slot, getattr(self, slot))
            for slot in self.__slots__
            if hasattr(self, slot)
        )
    def __setstate__(self, state):
        for slot, value in state.items():
            setattr(self, slot, value)
def register_range(pgrange, pyrange, conn_or_curs, globally=False):
    """Create and register an adapter and the typecasters to convert between
    a PostgreSQL |range|_ type and a PostgreSQL `Range` subclass.
    :param pgrange: the name of the PostgreSQL |range| type. Can be
        schema-qualified
    :param pyrange: a `Range` strict subclass, or just a name to give to a new
        class
    :param conn_or_curs: a connection or cursor used to find the oid of the
        range and its subtype; the typecaster is registered in a scope limited
        to this object, unless *globally* is set to `!True`
    :param globally: if `!False` (default) register the typecaster only on
        *conn_or_curs*, otherwise register it globally
    :return: `RangeCaster` instance responsible for the conversion
    If a string is passed to *pyrange*, a new `Range` subclass is created
    with such name and will be available as the `~RangeCaster.range` attribute
    of the returned `RangeCaster` object.
    The function queries the database on *conn_or_curs* to inspect the
    *pgrange* type and raises `~psycopg2.ProgrammingError` if the type is not
    found. If querying the database is not advisable, use directly the
    `RangeCaster` class and register the adapter and typecasters using the
    provided functions.
    """
    caster = RangeCaster._from_db(pgrange, pyrange, conn_or_curs)
    # Passing None to _register means "register globally". An explicit
    # conditional replaces the fragile ``not globally and conn_or_curs or
    # None`` idiom, which would silently fall back to global registration
    # if conn_or_curs happened to be falsy.
    caster._register(None if globally else conn_or_curs)
    return caster
class RangeAdapter(object):
    """`ISQLQuote` adapter for `Range` subclasses.
    This is an abstract class: concrete classes must set a `name` class
    attribute or override `getquoted()`.
    """
    name = None
    def __init__(self, adapted):
        # The Range instance being adapted to SQL.
        self.adapted = adapted
    def __conform__(self, proto):
        # BUG FIX: compare the requested protocol, not the nonexistent
        # ``self._proto`` attribute (which raised AttributeError whenever
        # the adaptation microprotocol invoked __conform__ directly).
        if proto is ISQLQuote:
            return self
    def prepare(self, conn):
        # Keep the connection: adapting the bounds may need it for quoting.
        self._conn = conn
    def getquoted(self):
        """Return the SQL representation of the adapted range as bytes."""
        if self.name is None:
            raise NotImplementedError(
                'RangeAdapter must be subclassed overriding its name '
                'or the getquoted() method')
        r = self.adapted
        if r.isempty:
            return b"'empty'::" + self.name.encode('utf8')
        # Adapt each bound with the regular adaptation machinery; unbound
        # ends are rendered as NULL.
        if r.lower is not None:
            a = adapt(r.lower)
            if hasattr(a, 'prepare'):
                a.prepare(self._conn)
            lower = a.getquoted()
        else:
            lower = b'NULL'
        if r.upper is not None:
            a = adapt(r.upper)
            if hasattr(a, 'prepare'):
                a.prepare(self._conn)
            upper = a.getquoted()
        else:
            upper = b'NULL'
        return self.name.encode('utf8') + b'(' + lower + b', ' + upper \
            + b", '" + r._bounds.encode('utf8') + b"')"
class RangeCaster(object):
    """Helper class to convert between `Range` and PostgreSQL range types.

    Objects of this class are usually created by `register_range()`. Manual
    creation could be useful if querying the database is not advisable: in
    this case the oids must be provided.
    """
    def __init__(self, pgrange, pyrange, oid, subtype_oid, array_oid=None):
        self.subtype_oid = subtype_oid
        self._create_ranges(pgrange, pyrange)

        name = self.adapter.name or self.adapter.__class__.__name__

        # Typecaster converting database output (range literals) to *pyrange*.
        self.typecaster = new_type((oid,), name, self.parse)

        if array_oid is not None:
            self.array_typecaster = new_array_type(
                (array_oid,), name + "ARRAY", self.typecaster)
        else:
            self.array_typecaster = None

    def _create_ranges(self, pgrange, pyrange):
        """Create Range and RangeAdapter classes if needed."""
        # if got a string create a new RangeAdapter concrete type (with a name)
        # else take it as an adapter. Passing an adapter should be considered
        # an implementation detail and is not documented. It is currently used
        # for the numeric ranges.
        self.adapter = None
        if isinstance(pgrange, basestring):
            self.adapter = type(pgrange, (RangeAdapter,), {})
            self.adapter.name = pgrange
        else:
            try:
                if issubclass(pgrange, RangeAdapter) \
                        and pgrange is not RangeAdapter:
                    self.adapter = pgrange
            except TypeError:
                # pgrange was not a class at all.
                pass

        if self.adapter is None:
            raise TypeError(
                'pgrange must be a string or a RangeAdapter strict subclass')

        # Same dance for the Python-side range class: a string creates a new
        # Range subclass; an existing strict Range subclass is used directly.
        self.range = None
        try:
            if isinstance(pyrange, basestring):
                self.range = type(pyrange, (Range,), {})
            if issubclass(pyrange, Range) and pyrange is not Range:
                self.range = pyrange
        except TypeError:
            pass

        if self.range is None:
            raise TypeError(
                'pyrange must be a type or a Range strict subclass')

    @classmethod
    def _from_db(self, name, pyrange, conn_or_curs):
        """Return a `RangeCaster` instance for the type *pgrange*.

        Raise `ProgrammingError` if the type is not found.
        """
        from psycopg2.extensions import STATUS_IN_TRANSACTION
        from psycopg2.extras import _solve_conn_curs
        conn, curs = _solve_conn_curs(conn_or_curs)

        # Range types were introduced in PostgreSQL 9.2.
        if conn.server_version < 90200:
            raise ProgrammingError("range types not available in version %s"
                % conn.server_version)

        # Store the transaction status of the connection to revert it after use
        conn_status = conn.status

        # Use the correct schema
        if '.' in name:
            schema, tname = name.split('.', 1)
        else:
            tname = name
            schema = 'public'

        # get the type oid and attributes
        try:
            curs.execute("""\
select rngtypid, rngsubtype,
    (select typarray from pg_type where oid = rngtypid)
from pg_range r
join pg_type t on t.oid = rngtypid
join pg_namespace ns on ns.oid = typnamespace
where typname = %s and ns.nspname = %s;
""", (tname, schema))

        except ProgrammingError:
            if not conn.autocommit:
                conn.rollback()
            raise
        else:
            rec = curs.fetchone()

            # revert the status of the connection as before the command
            if (conn_status != STATUS_IN_TRANSACTION
            and not conn.autocommit):
                conn.rollback()

        if not rec:
            raise ProgrammingError(
                "PostgreSQL type '%s' not found" % name)

        type, subtype, array = rec

        return RangeCaster(name, pyrange,
            oid=type, subtype_oid=subtype, array_oid=array)

    # Matches a non-empty range literal: bound flag, optional quoted or
    # unquoted lower bound, comma, optional upper bound, closing flag.
    _re_range = re.compile(r"""
        ( \(|\[ )                   # lower bound flag
        (?:                         # lower bound:
          " ( (?: [^"] | "")* ) "   #   - a quoted string
          | ( [^",]+ )              #   - or an unquoted string
        )?                          #   - or empty (not captured)
        ,
        (?:                         # upper bound:
          " ( (?: [^"] | "")* ) "   #   - a quoted string
          | ( [^"\)\]]+ )           #   - or an unquoted string
        )?                          #   - or empty (not captured)
        ( \)|\] )                   # upper bound flag
        """, re.VERBOSE)

    # Undoes the doubling of quotes/backslashes inside quoted bounds.
    _re_undouble = re.compile(r'(["\\])\1')

    def parse(self, s, cur=None):
        """Convert the PostgreSQL literal *s* into a `range` instance.

        If *cur* is provided, cast each bound through the subtype oid.
        """
        if s is None:
            return None

        if s == 'empty':
            return self.range(empty=True)

        m = self._re_range.match(s)
        if m is None:
            raise InterfaceError("failed to parse range: '%s'" % s)

        # Quoted match (group 3/5) wins over the unquoted one (group 2/4);
        # only quoted bounds need the doubled-character unescaping.
        lower = m.group(3)
        if lower is None:
            lower = m.group(2)
            if lower is not None:
                lower = self._re_undouble.sub(r"\1", lower)

        upper = m.group(5)
        if upper is None:
            upper = m.group(4)
            if upper is not None:
                upper = self._re_undouble.sub(r"\1", upper)

        if cur is not None:
            lower = cur.cast(self.subtype_oid, lower)
            upper = cur.cast(self.subtype_oid, upper)

        bounds = m.group(1) + m.group(6)

        return self.range(lower, upper, bounds)

    def _register(self, scope=None):
        """Register typecasters and the adapter, globally or on *scope*."""
        register_type(self.typecaster, scope)
        if self.array_typecaster is not None:
            register_type(self.array_typecaster, scope)

        register_adapter(self.range, self.adapter)
class NumericRange(Range):
    """A `Range` suitable to pass Python numeric types to a PostgreSQL range.

    The PostgreSQL types :sql:`int4range`, :sql:`int8range` and
    :sql:`numrange` are all casted into `!NumericRange` instances.
    """
class DateRange(Range):
    """Range subclass representing :sql:`daterange` values."""
class DateTimeRange(Range):
    """Range subclass representing :sql:`tsrange` values."""
class DateTimeTZRange(Range):
    """Range subclass representing :sql:`tstzrange` values."""
# Special adaptation for NumericRange. Allows to pass number range regardless
# of whether they are ints, floats and what size of ints are, which are
# pointless in Python world. On the way back, no numeric range is casted to
# NumericRange, but only to their subclasses
class NumberRangeAdapter(RangeAdapter):
    """Adapt a range if the subtype doesn't need quotes."""
    def getquoted(self):
        rng = self.adapted
        if rng.isempty:
            return b"'empty'"

        # Bounds are plain numbers, so their adapted form never needs quoting
        # or escaping and prepare() can be skipped; an infinite bound is
        # rendered as the empty string.
        if rng.lower_inf:
            lower = ''
        else:
            lower = adapt(rng.lower).getquoted().decode('ascii')

        if rng.upper_inf:
            upper = ''
        else:
            upper = adapt(rng.upper).getquoted().decode('ascii')

        literal = "'%s%s,%s%s'" % (rng._bounds[0], lower, upper, rng._bounds[1])
        return literal.encode('ascii')
# TODO: probably won't work with infs, nans and other tricky cases.
register_adapter(NumericRange, NumberRangeAdapter)

# Register globally typecasters and adapters for builtin range types.

# note: the adapter is registered more than once, but this is harmless.
# The oids below are the fixed pg_type oids of the builtin range types
# (type oid, subtype oid, array oid), so no database query is needed.

int4range_caster = RangeCaster(NumberRangeAdapter, NumericRange,
    oid=3904, subtype_oid=23, array_oid=3905)
int4range_caster._register()

int8range_caster = RangeCaster(NumberRangeAdapter, NumericRange,
    oid=3926, subtype_oid=20, array_oid=3927)
int8range_caster._register()

numrange_caster = RangeCaster(NumberRangeAdapter, NumericRange,
    oid=3906, subtype_oid=1700, array_oid=3907)
numrange_caster._register()

daterange_caster = RangeCaster('daterange', DateRange,
    oid=3912, subtype_oid=1082, array_oid=3913)
daterange_caster._register()

tsrange_caster = RangeCaster('tsrange', DateTimeRange,
    oid=3908, subtype_oid=1114, array_oid=3909)
tsrange_caster._register()

tstzrange_caster = RangeCaster('tstzrange', DateTimeTZRange,
    oid=3910, subtype_oid=1184, array_oid=3911)
tstzrange_caster._register()
|
nwokeo/supysonic
|
venv/lib/python2.7/site-packages/psycopg2/_range.py
|
Python
|
agpl-3.0
| 17,270
|
# URI 1070: read an integer and print the first six odd numbers >= n,
# one per line.
n = int(input())
first_odd = n if n % 2 else n + 1
for value in range(first_odd, first_odd + 12, 2):
    print(value)
|
henrywm/URI
|
src/beginner/1070.py
|
Python
|
apache-2.0
| 89
|
# coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""End-to-end test for bandit training under structured linear environments."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import os
from absl import app
from absl import flags
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
from tf_agents.bandits.agents import lin_ucb_agent
from tf_agents.bandits.agents import linear_thompson_sampling_agent as lin_ts_agent
from tf_agents.bandits.agents import neural_epsilon_greedy_agent as eps_greedy_agent
from tf_agents.bandits.agents import utils
from tf_agents.bandits.agents.examples.v2 import trainer
from tf_agents.bandits.environments import environment_utilities
from tf_agents.bandits.environments import stationary_stochastic_py_environment as sspe
from tf_agents.bandits.metrics import tf_metrics as tf_bandit_metrics
from tf_agents.environments import tf_py_environment
from tf_agents.networks import q_network
flags.DEFINE_string('root_dir', os.getenv('TEST_UNDECLARED_OUTPUTS_DIR'),
                    'Root directory for writing logs/summaries/checkpoints.')
# Bug fix: the help string omitted `epsGreedy`, which the enum accepts.
flags.DEFINE_enum(
    'agent', 'LinUCB', ['LinUCB', 'LinTS', 'epsGreedy'],
    'Which agent to use. Possible values are `LinUCB`, `LinTS` and '
    '`epsGreedy`.')
FLAGS = flags.FLAGS

# Environment / training hyperparameters.
BATCH_SIZE = 8
CONTEXT_DIM = 100
NUM_ACTIONS = 100
REWARD_NOISE_VARIANCE = 0.0001
TRAINING_LOOPS = 2000
STEPS_PER_LOOP = 4
AGENT_ALPHA = 0.1  # Exploration strength for the linear agents.

# Epsilon-greedy agent configuration.
NN_LEARNING_RATE = 1e-2
REWARD_NETWORK_LAYER_PARAMS = (50, 50, 50)
EPSILON = 0.1
def main(unused_argv):
  """Trains the flag-selected bandit agent on a structured linear environment."""
  tf.compat.v1.enable_v2_behavior()  # The trainer only runs with V2 enabled.

  with tf.device('/CPU:0'):  # due to b/128333994
    # Per-arm reward functions drawn from a structured linear model.
    action_reward_fns = (
        environment_utilities.structured_linear_reward_fn_generator(
            CONTEXT_DIM, NUM_ACTIONS, REWARD_NOISE_VARIANCE))

    env = sspe.StationaryStochasticPyEnvironment(
        functools.partial(
            environment_utilities.context_sampling_fn,
            batch_size=BATCH_SIZE,
            context_dim=CONTEXT_DIM),
        action_reward_fns,
        batch_size=BATCH_SIZE)
    environment = tf_py_environment.TFPyEnvironment(env)

    # Oracles consumed only by the regret / suboptimal-arms metrics below.
    optimal_reward_fn = functools.partial(
        environment_utilities.tf_compute_optimal_reward,
        per_action_reward_fns=action_reward_fns)
    optimal_action_fn = functools.partial(
        environment_utilities.tf_compute_optimal_action,
        per_action_reward_fns=action_reward_fns)

    # Build the agent selected by the --agent flag.
    if FLAGS.agent == 'LinUCB':
      agent = lin_ucb_agent.LinearUCBAgent(
          time_step_spec=environment.time_step_spec(),
          action_spec=environment.action_spec(),
          alpha=AGENT_ALPHA,
          dtype=tf.float32)
    elif FLAGS.agent == 'epsGreedy':
      # Laplacian smoothing regularizes predictions over neighboring
      # (ordinal) actions.
      laplacian_matrix = utils.build_laplacian_over_ordinal_integer_actions(
          environment.action_spec())
      network = q_network.QNetwork(
          input_tensor_spec=environment.time_step_spec().observation,
          action_spec=environment.action_spec(),
          fc_layer_params=REWARD_NETWORK_LAYER_PARAMS)
      agent = eps_greedy_agent.NeuralEpsilonGreedyAgent(
          time_step_spec=environment.time_step_spec(),
          action_spec=environment.action_spec(),
          reward_network=network,
          optimizer=tf.compat.v1.train.AdamOptimizer(
              learning_rate=NN_LEARNING_RATE),
          epsilon=EPSILON,
          laplacian_matrix=laplacian_matrix,
          laplacian_smoothing_weight=0.01)
    elif FLAGS.agent == 'LinTS':
      agent = lin_ts_agent.LinearThompsonSamplingAgent(
          time_step_spec=environment.time_step_spec(),
          action_spec=environment.action_spec(),
          alpha=AGENT_ALPHA,
          dtype=tf.float32)

    regret_metric = tf_bandit_metrics.RegretMetric(optimal_reward_fn)
    suboptimal_arms_metric = tf_bandit_metrics.SuboptimalArmsMetric(
        optimal_action_fn)

    trainer.train(
        root_dir=FLAGS.root_dir,
        agent=agent,
        environment=environment,
        training_loops=TRAINING_LOOPS,
        steps_per_loop=STEPS_PER_LOOP,
        additional_metrics=[regret_metric, suboptimal_arms_metric])
if __name__ == '__main__':
  # absl parses the command-line flags before invoking main().
  app.run(main)
|
tensorflow/agents
|
tf_agents/bandits/agents/examples/v2/train_eval_structured_linear.py
|
Python
|
apache-2.0
| 4,783
|
#!/usr/bin/env python3
## Copyright (C) 2009 David Baddeley <d.baddeley@auckland.ac.nz>
## Copyright (C) 2020 Mick Phillips <mick.phillips@gmail.com>
##
## This file is part of Microscope.
##
## Microscope is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## Microscope is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Microscope. If not, see <http://www.gnu.org/licenses/>.
## The implementation of dllFunc is based on the implementation in
## PYME, hence copyright to David Baddeley.
"""pvcam library wrapper.
This module exposes pvcam C library functions in python.
.. todo::
Support frame metadata. The following functions are still not implemented::
/*****************************************************************************/
/*****************************************************************************/
/* */
/* Frame metadata functions */
/* */
/*****************************************************************************/
/*****************************************************************************/
/**
Decodes all the raw frame buffer metadata into a friendly structure.
@param pDstFrame A pre-allocated helper structure that will be filled with
information from the given raw buffer.
@param pSrcBuf A raw frame buffer as retrieved from PVCAM
@param srcBufSize The size of the raw frame buffer
@return #PV_FAIL in case of failure.
*/
rs_bool PV_DECL pl_md_frame_decode (md_frame* pDstFrame, void* pSrcBuf, uns32 srcBufSize);
/**
Optional function that recomposes a multi-ROI frame into a displayable image buffer.
Every ROI will be copied into its appropriate location in the provided buffer.
Please note that the function will subtract the Implied ROI position from each ROI
position which essentially moves the entire Implied ROI to a [0, 0] position.
Use the Offset arguments to shift all ROIs back to desired positions if needed.
If you use the Implied ROI position for offset arguments the frame will be recomposed
as it appears on the full frame.
The caller is responsible for black-filling the input buffer. Usually this function
is called during live/preview mode where the destination buffer is re-used. If the
ROIs do move during acquisition it is essential to black-fill the destination buffer
before calling this function. This is not needed if the ROIs do not move.
If the ROIs move during live mode it is also recommended to use the offset arguments
and recompose the ROI to a full frame - with moving ROIs the implied ROI may change
with each frame and this may cause undesired ROI "twitching" in the displayable image.
@param pDstBuf An output buffer, the buffer must be at least the size of the implied
ROI that is calculated during the frame decoding process. The buffer
must be of type uns16. If offset is set the buffer must be large
enough to allow the entire implied ROI to be shifted.
@param offX Offset in the destination buffer, in pixels. If 0 the Implied
ROI will be shifted to position 0 in the target buffer.
Use (ImpliedRoi.s1 / ImplierRoi.sbin) as offset value to
disable the shift and keep the ROIs in their absolute positions.
@param offY Offset in the destination buffer, in pixels. If 0 the Implied
ROI will be shifted to position 0 in the target buffer.
Use (ImpliedRoi.p1 / ImplierRoi.pbin) as offset value to
disable the shift and keep the ROIs in their absolute positions.
@param dstWidth Width, in pixels of the destination image buffer. The buffer
must be large enough to hold the entire Implied ROI, including
the offsets (if used).
@param dstHeight Height, in pixels of the destination image buffer.
@param pSrcFrame A helper structure, previously decoded using the frame
decoding function.
@return #PV_FAIL in case of failure.
*/
rs_bool PV_DECL pl_md_frame_recompose (void* pDstBuf, uns16 offX, uns16 offY,
uns16 dstWidth, uns16 dstHeight,
md_frame* pSrcFrame);
/**
This method creates an empty md_frame structure for known number of ROIs.
Use this method to prepare and pre-allocate one structure before starting
    continuous acquisition. Once a callback arrives, fill the structure with
pl_md_frame_decode() and display the metadata.
Release the structure when not needed.
@param pFrame a pointer to frame helper structure address where the structure
will be allocated.
@param roiCount Number of ROIs the structure should be prepared for.
@return #PV_FAIL in case of failure.
*/
rs_bool PV_DECL pl_md_create_frame_struct_cont (md_frame** pFrame, uns16 roiCount);
/**
This method creates an empty md_frame structure from an existing buffer.
Use this method when loading buffers from disk or when performance is not
critical. Do not forget to release the structure when not needed.
    For continuous acquisition where the number of ROIs is known it is recommended
to use the other provided method to avoid frequent memory allocation.
@param pFrame A pointer address where the newly created structure will be stored.
@param pSrcBuf A raw frame data pointer as returned from the camera
@param srcBufSize Size of the raw frame data buffer
@return #PV_FAIL in case of failure
*/
rs_bool PV_DECL pl_md_create_frame_struct (md_frame** pFrame, void* pSrcBuf,
uns32 srcBufSize);
/**
Releases the md_frame struct
@param pFrame a pointer to the previously allocated structure
*/
rs_bool PV_DECL pl_md_release_frame_struct (md_frame* pFrame);
/**
Reads all the extended metadata from the given ext. metadata buffer.
@param pOutput A pre-allocated structure that will be filled with metadata
@param pExtMdPtr A pointer to the ext. MD buffer, this can be obtained from
the md_frame and md_frame_roi structures.
@param extMdSize Size of the ext. MD buffer, also retrievable from the helper
structures.
@return #PV_FAIL in case the metadata cannot be decoded.
*/
rs_bool PV_DECL pl_md_read_extended (md_ext_item_collection* pOutput, void* pExtMdPtr,
uns32 extMdSize);
"""
import ctypes
import logging
import os
import platform
import time
import weakref
import numpy as np
import Pyro4
import microscope
import microscope.abc
_logger = logging.getLogger(__name__)
# Readout transform mapping - {CHIP_NAME: {port: transform}}
READOUT_TRANSFORMS = {"Evolve-5": {0: (0, 0, 0), 1: (1, 0, 0)}}
# === Data types ===
# Base typedefs, from pvcam SDK master.h
# typedef unsigned short rs_bool;
rs_bool = ctypes.c_ushort
# typedef signed char int8;
int8 = ctypes.c_byte
# typedef unsigned char uns8;
uns8 = ctypes.c_ubyte
# typedef short int16;
int16 = ctypes.c_short
# typedef unsigned short uns16;
uns16 = ctypes.c_ushort
# typedef int int32;
int32 = ctypes.c_int32
# typedef unsigned int uns32;
uns32 = ctypes.c_uint32
# typedef float flt32;
flt32 = ctypes.c_float
# typedef double flt64;
flt64 = ctypes.c_double
# typedef unsigned long long ulong64;
ulong64 = ctypes.c_ulonglong
# typedef signed long long long64;
long64 = ctypes.c_longlong
# enums
enumtype = ctypes.c_int32
# defines, typedefs and enums parsed from pvcam.h .
MAX_CAM = 16
CAM_NAME_LEN = 32
PARAM_NAME_LEN = 32
ERROR_MSG_LEN = 255
CCD_NAME_LEN = 17
MAX_ALPHA_SER_NUM_LEN = 32
MAX_PP_NAME_LEN = 32
MAX_SYSTEM_NAME_LEN = 32
MAX_VENDOR_NAME_LEN = 32
MAX_PRODUCT_NAME_LEN = 32
MAX_CAM_PART_NUM_LEN = 32
MAX_GAIN_NAME_LEN = 32
OPEN_EXCLUSIVE = 0
NORMAL_COOL = 0
CRYO_COOL = 1
MPP_UNKNOWN = 0
MPP_ALWAYS_OFF = 1
MPP_ALWAYS_ON = 2
MPP_SELECTABLE = 3
SHTR_FAULT = 0
SHTR_OPENING = 1
SHTR_OPEN = 2
SHTR_CLOSING = 3
SHTR_CLOSED = 4
SHTR_UNKNOWN = 5
PMODE_NORMAL = 0
PMODE_FT = 1
PMODE_MPP = 2
PMODE_FT_MPP = 3
PMODE_ALT_NORMAL = 4
PMODE_ALT_FT = 5
PMODE_ALT_MPP = 6
PMODE_ALT_FT_MPP = 7
COLOR_NONE = 0
COLOR_RESERVED = 1
COLOR_RGGB = 2
COLOR_GRBG = 3
COLOR_GBRG = 4
COLOR_BGGR = 5
ATTR_CURRENT = 0
ATTR_COUNT = 1
ATTR_TYPE = 2
ATTR_MIN = 3
ATTR_MAX = 4
ATTR_DEFAULT = 5
ATTR_INCREMENT = 6
ATTR_ACCESS = 7
ATTR_AVAIL = 8
ACC_READ_ONLY = 1
ACC_READ_WRITE = 2
ACC_EXIST_CHECK_ONLY = 3
ACC_WRITE_ONLY = 4
IO_TYPE_TTL = 0
IO_TYPE_DAC = 1
IO_DIR_INPUT = 0
IO_DIR_OUTPUT = 1
IO_DIR_INPUT_OUTPUT = 2
READOUT_PORT_0 = 0
READOUT_PORT_1 = 1
CLEAR_NEVER = 0
CLEAR_PRE_EXPOSURE = 1
CLEAR_PRE_SEQUENCE = 2
CLEAR_POST_SEQUENCE = 3
CLEAR_PRE_POST_SEQUENCE = 4
CLEAR_PRE_EXPOSURE_POST_SEQ = 5
MAX_CLEAR_MODE = 6
OPEN_NEVER = 0
OPEN_PRE_EXPOSURE = 1
OPEN_PRE_SEQUENCE = 2
OPEN_PRE_TRIGGER = 3
OPEN_NO_CHANGE = 4
# Exposure (trigger) modes, from the PL_EXPOSURE_MODES enum in pvcam.h.
TIMED_MODE = 0
STROBED_MODE = 1
BULB_MODE = 2
TRIGGER_FIRST_MODE = 3
FLASH_MODE = 4
VARIABLE_TIMED_MODE = 5
INT_STROBE_MODE = 6
MAX_EXPOSE_MODE = 7
# Extended camera trigger modes. The definition in pvcam.h is relative to
# MAX_EXPOSE_MODE:
#   EXT_TRIG_INTERNAL    = (MAX_EXPOSE_MODE + 0)
#   EXT_TRIG_TRIG_FIRST  = (MAX_EXPOSE_MODE + 1)
#   EXT_TRIG_EDGE_RISING = (MAX_EXPOSE_MODE + 2)
# (A garbled version of this comment had previously leaked into the module as
# bogus names `Extended`, `camera`, `The` and `definition`, shifting the
# EXT_TRIG_* values to 12/13/14.)
EXT_TRIG_INTERNAL = MAX_EXPOSE_MODE + 0
EXT_TRIG_TRIG_FIRST = MAX_EXPOSE_MODE + 1
EXT_TRIG_EDGE_RISING = MAX_EXPOSE_MODE + 2
EXPOSE_OUT_FIRST_ROW = 0
EXPOSE_OUT_ALL_ROWS = 1
EXPOSE_OUT_ANY_ROW = 2
MAX_EXPOSE_OUT_MODE = 3
FAN_SPEED_HIGH = 0
FAN_SPEED_MEDIUM = 1
FAN_SPEED_LOW = 2
FAN_SPEED_OFF = 3
PL_TRIGTAB_SIGNAL_EXPOSE_OUT = 0
PP_FEATURE_RING_FUNCTION = 0
PP_FEATURE_BIAS = 1
PP_FEATURE_BERT = 2
PP_FEATURE_QUANT_VIEW = 3
PP_FEATURE_BLACK_LOCK = 4
PP_FEATURE_TOP_LOCK = 5
PP_FEATURE_VARI_BIT = 6
PP_FEATURE_RESERVED = 7
PP_FEATURE_DESPECKLE_BRIGHT_HIGH = 8
PP_FEATURE_DESPECKLE_DARK_LOW = 9
PP_FEATURE_DEFECTIVE_PIXEL_CORRECTION = 10
PP_FEATURE_DYNAMIC_DARK_FRAME_CORRECTION = 11
PP_FEATURE_HIGH_DYNAMIC_RANGE = 12
PP_FEATURE_DESPECKLE_BRIGHT_LOW = 13
PP_FEATURE_DENOISING = 14
PP_FEATURE_DESPECKLE_DARK_HIGH = 15
PP_FEATURE_ENHANCED_DYNAMIC_RANGE = 16
PP_FEATURE_MAX = 17
PP_MAX_PARAMETERS_PER_FEATURE = 10
PP_PARAMETER_RF_FUNCTION = 0
PP_FEATURE_BIAS_ENABLED = 1
PP_FEATURE_BIAS_LEVEL = 2
PP_FEATURE_BERT_ENABLED = 3
PP_FEATURE_BERT_THRESHOLD = 4
PP_FEATURE_QUANT_VIEW_ENABLED = 5
PP_FEATURE_QUANT_VIEW_E = 6
PP_FEATURE_BLACK_LOCK_ENABLED = 7
PP_FEATURE_BLACK_LOCK_BLACK_CLIP = 8
PP_FEATURE_TOP_LOCK_ENABLED = 9
PP_FEATURE_TOP_LOCK_WHITE_CLIP = 10
PP_FEATURE_VARI_BIT_ENABLED = 11
PP_FEATURE_VARI_BIT_BIT_DEPTH = 12
PP_FEATURE_DESPECKLE_BRIGHT_HIGH_ENABLED = 13
PP_FEATURE_DESPECKLE_BRIGHT_HIGH_THRESHOLD = 14
PP_FEATURE_DESPECKLE_BRIGHT_HIGH_MIN_ADU_AFFECTED = 15
PP_FEATURE_DESPECKLE_DARK_LOW_ENABLED = 16
PP_FEATURE_DESPECKLE_DARK_LOW_THRESHOLD = 17
PP_FEATURE_DESPECKLE_DARK_LOW_MAX_ADU_AFFECTED = 18
PP_FEATURE_DEFECTIVE_PIXEL_CORRECTION_ENABLED = 19
PP_FEATURE_DYNAMIC_DARK_FRAME_CORRECTION_ENABLED = 20
PP_FEATURE_HIGH_DYNAMIC_RANGE_ENABLED = 21
PP_FEATURE_DESPECKLE_BRIGHT_LOW_ENABLED = 22
PP_FEATURE_DESPECKLE_BRIGHT_LOW_THRESHOLD = 23
PP_FEATURE_DESPECKLE_BRIGHT_LOW_MAX_ADU_AFFECTED = 24
PP_FEATURE_DENOISING_ENABLED = 25
PP_FEATURE_DENOISING_NO_OF_ITERATIONS = 26
PP_FEATURE_DENOISING_GAIN = 27
PP_FEATURE_DENOISING_OFFSET = 28
PP_FEATURE_DENOISING_LAMBDA = 29
PP_FEATURE_DESPECKLE_DARK_HIGH_ENABLED = 30
PP_FEATURE_DESPECKLE_DARK_HIGH_THRESHOLD = 31
PP_FEATURE_DESPECKLE_DARK_HIGH_MIN_ADU_AFFECTED = 32
PP_FEATURE_ENHANCED_DYNAMIC_RANGE_ENABLED = 33
PP_PARAMETER_ID_MAX = 34
SMTMODE_ARBITRARY_ALL = 0
SMTMODE_MAX = 1
READOUT_NOT_ACTIVE = 0
EXPOSURE_IN_PROGRESS = 1
READOUT_IN_PROGRESS = 2
READOUT_COMPLETE = 3
FRAME_AVAILABLE = 3
READOUT_FAILED = 4
ACQUISITION_IN_PROGRESS = 5
MAX_CAMERA_STATUS = 6
CCS_NO_CHANGE = 0
CCS_HALT = 1
CCS_HALT_CLOSE_SHTR = 2
CCS_CLEAR = 3
CCS_CLEAR_CLOSE_SHTR = 4
CCS_OPEN_SHTR = 5
CCS_CLEAR_OPEN_SHTR = 6
NO_FRAME_IRQS = 0
BEGIN_FRAME_IRQS = 1
END_FRAME_IRQS = 2
BEGIN_END_FRAME_IRQS = 3
CIRC_NONE = 0
CIRC_OVERWRITE = 1
CIRC_NO_OVERWRITE = 2
EXP_RES_ONE_MILLISEC = 0
EXP_RES_ONE_MICROSEC = 1
EXP_RES_ONE_SEC = 2
SCR_PRE_OPEN_SHTR = 0
SCR_POST_OPEN_SHTR = 1
SCR_PRE_FLASH = 2
SCR_POST_FLASH = 3
SCR_PRE_INTEGRATE = 4
SCR_POST_INTEGRATE = 5
SCR_PRE_READOUT = 6
SCR_POST_READOUT = 7
SCR_PRE_CLOSE_SHTR = 8
SCR_POST_CLOSE_SHTR = 9
PL_CALLBACK_BOF = 0
PL_CALLBACK_EOF = 1
PL_CALLBACK_CHECK_CAMS = 2
PL_CALLBACK_CAM_REMOVED = 3
PL_CALLBACK_CAM_RESUMED = 4
PL_CALLBACK_MAX = 5
PL_MD_FRAME_FLAG_ROI_TS_SUPPORTED = 1
PL_MD_FRAME_FLAG_UNUSED_2 = 2
PL_MD_FRAME_FLAG_UNUSED_3 = 4
PL_MD_FRAME_FLAG_UNUSED_4 = 16
PL_MD_FRAME_FLAG_UNUSED_5 = 32
PL_MD_FRAME_FLAG_UNUSED_6 = 64
PL_MD_FRAME_FLAG_UNUSED_7 = 128
PL_MD_ROI_FLAG_INVALID = 1
PL_MD_ROI_FLAG_UNUSED_2 = 2
PL_MD_ROI_FLAG_UNUSED_3 = 4
PL_MD_ROI_FLAG_UNUSED_4 = 16
PL_MD_ROI_FLAG_UNUSED_5 = 32
PL_MD_ROI_FLAG_UNUSED_6 = 64
PL_MD_ROI_FLAG_UNUSED_7 = 128
PL_MD_FRAME_SIGNATURE = 5328208
PL_MD_EXT_TAGS_MAX_SUPPORTED = 255
PL_MD_EXT_TAG_MAX = 0
TYPE_INT16 = 1
TYPE_INT32 = 2
TYPE_FLT64 = 4
TYPE_UNS8 = 5
TYPE_UNS16 = 6
TYPE_UNS32 = 7
TYPE_UNS64 = 8
TYPE_ENUM = 9
TYPE_BOOLEAN = 11
TYPE_INT8 = 12
TYPE_CHAR_PTR = 13
TYPE_VOID_PTR = 14
TYPE_VOID_PTR_PTR = 15
TYPE_INT64 = 16
TYPE_SMART_STREAM_TYPE = 17
TYPE_SMART_STREAM_TYPE_PTR = 18
TYPE_FLT32 = 19
CLASS0 = 0
CLASS2 = 2
CLASS3 = 3
PARAM_DD_INFO_LENGTH = 16777217
PARAM_DD_VERSION = 100663298
PARAM_DD_RETRIES = 100663299
PARAM_DD_TIMEOUT = 100663300
PARAM_DD_INFO = 218103813
PARAM_ADC_OFFSET = 16908483
PARAM_CHIP_NAME = 218235009
PARAM_SYSTEM_NAME = 218235010
PARAM_VENDOR_NAME = 218235011
PARAM_PRODUCT_NAME = 218235012
PARAM_CAMERA_PART_NUMBER = 218235013
PARAM_COOLING_MODE = 151126230
PARAM_PREAMP_DELAY = 100794870
PARAM_COLOR_MODE = 151126520
PARAM_MPP_CAPABLE = 151126240
PARAM_PREAMP_OFF_CONTROL = 117572091
PARAM_PREMASK = 100794421
PARAM_PRESCAN = 100794423
PARAM_POSTMASK = 100794422
PARAM_POSTSCAN = 100794424
PARAM_PIX_PAR_DIST = 100794868
PARAM_PIX_PAR_SIZE = 100794431
PARAM_PIX_SER_DIST = 100794869
PARAM_PIX_SER_SIZE = 100794430
PARAM_SUMMING_WELL = 184680953
PARAM_FWELL_CAPACITY = 117572090
PARAM_PAR_SIZE = 100794425
PARAM_SER_SIZE = 100794426
PARAM_ACCUM_CAPABLE = 184680986
PARAM_FLASH_DWNLD_CAPABLE = 184680987
PARAM_READOUT_TIME = 67240115
PARAM_CLEAR_CYCLES = 100794465
PARAM_CLEAR_MODE = 151126539
PARAM_FRAME_CAPABLE = 184680957
PARAM_PMODE = 151126540
PARAM_TEMP = 16908813
PARAM_TEMP_SETPOINT = 16908814
PARAM_CAM_FW_VERSION = 100794900
PARAM_HEAD_SER_NUM_ALPHA = 218235413
PARAM_PCI_FW_VERSION = 100794902
PARAM_FAN_SPEED_SETPOINT = 151126726
PARAM_EXPOSURE_MODE = 151126551
PARAM_EXPOSE_OUT_MODE = 151126576
PARAM_BIT_DEPTH = 16908799
PARAM_GAIN_INDEX = 16908800
PARAM_SPDTAB_INDEX = 16908801
PARAM_GAIN_NAME = 218235394
PARAM_READOUT_PORT = 151126263
PARAM_PIX_TIME = 100794884
PARAM_SHTR_CLOSE_DELAY = 100794887
PARAM_SHTR_OPEN_DELAY = 100794888
PARAM_SHTR_OPEN_MODE = 151126537
PARAM_SHTR_STATUS = 151126538
PARAM_IO_ADDR = 100794895
PARAM_IO_TYPE = 151126544
PARAM_IO_DIRECTION = 151126545
PARAM_IO_STATE = 67240466
PARAM_IO_BITDEPTH = 100794899
PARAM_GAIN_MULT_FACTOR = 100794905
PARAM_GAIN_MULT_ENABLE = 184680989
PARAM_PP_FEAT_NAME = 218235422
PARAM_PP_INDEX = 16908831
PARAM_ACTUAL_GAIN = 100794912
PARAM_PP_PARAM_INDEX = 16908833
PARAM_PP_PARAM_NAME = 218235426
PARAM_PP_PARAM = 117572131
PARAM_READ_NOISE = 100794916
PARAM_PP_FEAT_ID = 100794917
PARAM_PP_PARAM_ID = 100794918
PARAM_SMART_STREAM_MODE_ENABLED = 184681148
PARAM_SMART_STREAM_MODE = 100795069
PARAM_SMART_STREAM_EXP_PARAMS = 235012798
PARAM_SMART_STREAM_DLY_PARAMS = 235012799
PARAM_EXP_TIME = 100859905
PARAM_EXP_RES = 151191554
PARAM_EXP_RES_INDEX = 100859908
PARAM_EXPOSURE_TIME = 134414344
PARAM_BOF_EOF_ENABLE = 151191557
PARAM_BOF_EOF_COUNT = 117637126
PARAM_BOF_EOF_CLR = 184745991
PARAM_CIRC_BUFFER = 184746283
PARAM_FRAME_BUFFER_SIZE = 134414636
PARAM_BINNING_SER = 151191717
PARAM_BINNING_PAR = 151191718
PARAM_METADATA_ENABLED = 184746152
PARAM_ROI_COUNT = 100860073
PARAM_CENTROIDS_ENABLED = 184746154
PARAM_CENTROIDS_RADIUS = 100860075
PARAM_CENTROIDS_COUNT = 100860076
PARAM_TRIGTAB_SIGNAL = 151191732
PARAM_LAST_MUXED_SIGNAL = 84082869
# === C structures ===
# GUID for #FRAME_INFO structure.
class PVCAM_FRAME_INFO_GUID(ctypes.Structure):
    """128-bit GUID layout used to identify FRAME_INFO structures."""
    _fields_ = [
        ("f1", uns32),
        ("f2", uns16),
        ("f3", uns16),
        ("f4", uns8 * 8),
    ]
# Structure used to uniquely identify frames in the camera.
class FRAME_INFO(ctypes.Structure):
    """Frame identity and timing info as filled in by the PVCAM library."""
    _fields_ = [
        ("FrameInfoGUID", PVCAM_FRAME_INFO_GUID),
        ("hCam", int16),       # camera handle the frame came from
        ("FrameNr", int32),
        ("TimeStamp", long64),
        ("ReadoutTime", int32),
        ("TimeStampBOF", long64),  # beginning-of-frame timestamp
    ]
class smart_stream_type(ctypes.Structure):
    """Mirror of the PVCAM ``smart_stream_type`` struct."""
    _fields_ = [
        ("entries", uns16),
        # NOTE(review): pvcam.h declares this member as ``uns32* params`` (a
        # pointer); confirm whether mapping it as a plain uns32 is intentional.
        ("params", uns32),
    ]
class rgn_type(ctypes.Structure):
    """A readout region (ROI): serial (s) and parallel (p) extents + binning."""
    _fields_ = [
        ("s1", uns16),    # first serial (x) pixel
        ("s2", uns16),    # last serial (x) pixel
        ("sbin", uns16),  # serial binning factor
        ("p1", uns16),    # first parallel (y) pixel
        ("p2", uns16),    # last parallel (y) pixel
        ("pbin", uns16),  # parallel binning factor
    ]
class io_struct(ctypes.Structure):
    # Fields are assigned after the class statement so the struct can contain
    # a pointer to its own type (it forms a singly-linked list).
    pass


io_struct._fields_ = [
    ("io_port", uns16),
    ("io_type", uns32),
    ("state", flt64),
    ("next", ctypes.POINTER(io_struct)),  # next I/O operation in the list
]
class io_list(ctypes.Structure):
    """Linked lists of I/O operations keyed by exposure-sequence phase."""
    _fields_ = [
        ("pre_open", ctypes.POINTER(io_struct)),
        ("post_open", ctypes.POINTER(io_struct)),
        ("pre_flash", ctypes.POINTER(io_struct)),
        ("post_flash", ctypes.POINTER(io_struct)),
        ("pre_integrate", ctypes.POINTER(io_struct)),
        ("post_integrate", ctypes.POINTER(io_struct)),
        ("pre_readout", ctypes.POINTER(io_struct)),
        ("post_readout", ctypes.POINTER(io_struct)),
        ("pre_close", ctypes.POINTER(io_struct)),
        ("post_close", ctypes.POINTER(io_struct)),
    ]
class active_camera_type(ctypes.Structure):
    """Per-camera state block mirroring PVCAM's ``active_camera_type``."""
    _fields_ = [
        ("shutter_close_delay", uns16),
        ("shutter_open_delay", uns16),
        ("rows", uns16),
        ("cols", uns16),
        ("prescan", uns16),
        ("postscan", uns16),
        ("premask", uns16),
        ("postmask", uns16),
        ("preflash", uns16),
        ("clear_count", uns16),
        ("preamp_delay", uns16),
        ("mpp_selectable", rs_bool),
        ("frame_selectable", rs_bool),
        ("do_clear", uns16),
        ("open_shutter", uns16),
        ("mpp_mode", rs_bool),
        ("frame_transfer", rs_bool),
        ("alt_mode", rs_bool),
        ("exp_res", uns32),
        ("io_hdr", ctypes.POINTER(io_list)),  # head of the I/O script lists
    ]
class md_frame_header(ctypes.Structure):
    """Frame-level metadata header embedded in metadata-enabled frames."""
    _fields_ = [
        ("signature", uns32),  # expected to be PL_MD_FRAME_SIGNATURE
        ("version", uns8),
        ("frameNr", uns32),
        ("roiCount", uns16),
        ("timestampBOF", uns32),
        ("timestampEOF", uns32),
        ("timestampResNs", uns32),
        ("exposureTime", uns32),
        ("exposureTimeResN", uns32),
        ("roiTimestampResN", uns32),
        ("bitDepth", uns8),
        ("colorMask", uns8),
        ("flags", uns8),
        ("extendedMdSize", uns16),
        ("_reserved", uns8 * 8),
    ]
class md_frame_roi_header(ctypes.Structure):
    """Per-ROI metadata header within a metadata-enabled frame."""
    _fields_ = [
        ("roiNr", uns16),
        ("timestampBOR", uns32),  # beginning-of-readout timestamp
        ("timestampEOR", uns32),  # end-of-readout timestamp
        ("roi", rgn_type),
        ("flags", uns8),
        ("extendedMdSize", uns16),
        ("_reserved", uns8 * 7),
    ]
class md_ext_item_info(ctypes.Structure):
    """Static description (tag, size, name) of one extended-metadata item."""
    _fields_ = [
        ("tag", uns16),
        ("size", uns16),
        ("name", ctypes.c_char_p),
    ]
class md_ext_item(ctypes.Structure):
    """One extended-metadata entry: its description plus a value pointer."""
    _fields_ = [
        ("tagInfo", ctypes.POINTER(md_ext_item_info)),  #
        ("value", ctypes.c_void_p),
    ]
class md_ext_item_collection(ctypes.Structure):
    """Fixed-capacity collection of decoded extended-metadata items."""
    _fields_ = [
        ("list", md_ext_item * PL_MD_EXT_TAGS_MAX_SUPPORTED),
        ("map", ctypes.POINTER(md_ext_item) * PL_MD_EXT_TAGS_MAX_SUPPORTED),
        ("count", uns16),  # number of valid entries in `list`
    ]
class md_frame_roi(ctypes.Structure):
    """Decoded view of one ROI inside a metadata frame (header + pixel data)."""
    _fields_ = [
        ("header", ctypes.POINTER(md_frame_roi_header)),
        ("data", ctypes.c_void_p),       # pointer into the raw frame buffer
        ("dataSize", uns32),
        ("extMdData", ctypes.c_void_p),  # extended metadata blob, if any
        ("extMdDataSize", uns16),
    ]
class md_frame(ctypes.Structure):
    """Decoded view of a whole metadata frame, filled by pl_md_frame_decode."""
    _fields_ = [
        ("header", ctypes.POINTER(md_frame_header)),
        ("extMdData", ctypes.c_void_p),
        ("extMdDataSize", uns16),
        ("impliedRoi", rgn_type),  # bounding region covering all ROIs
        ("roiArray", ctypes.POINTER(md_frame_roi)),
        ("roiCapacity", uns16),    # allocated size of roiArray
        ("roiCount", uns16),       # number of ROIs actually present
    ]
# Load the PVCAM shared library: a stdcall WinDLL on Windows (matching the
# DLL's bitness to the interpreter's), a plain CDLL elsewhere.
if os.name in ("nt", "ce"):
    if platform.architecture()[0] == "32bit":
        _lib = ctypes.WinDLL("pvcam32")
    else:
        _lib = ctypes.WinDLL("pvcam64")
else:
    _lib = ctypes.CDLL("pvcam.so")

### Functions ###
STRING = ctypes.c_char_p  # alias used throughout the signature tables below
# classes so that we do some magic and automatically add byrefs etc ... can classify outputs
# (Nicked from PYME's Ixon wrapper.)
class _meta:
    """Marker base class for argument-classification wrappers (see OUTPUT)."""
    pass
class OUTPUT(_meta):
    """Tags a ctypes type as an output parameter of a DLL call.

    ``get_var`` returns a ``(value, reference)`` pair: *reference* is what is
    passed to the C function, and *value* is read back after the call.
    """

    def __init__(self, val):
        self.type = val
        self.val = ctypes.POINTER(val)

    def get_var(self, buf_len=0):
        wants_buffer = buf_len > 0 and self.type in (STRING, ctypes.c_void_p)
        if wants_buffer:
            # String/void* outputs need caller-allocated storage.
            var = ctypes.create_string_buffer(buf_len)
            return var, ctypes.cast(ctypes.pointer(var), self.val)
        var = self.type()
        return var, ctypes.byref(var)
class _OUTSTRING(OUTPUT):
    """OUTPUT specialised for C strings: always hands back a string buffer."""

    def __init__(self):
        self.val = STRING

    def get_var(self, buf_len):
        buf = ctypes.create_string_buffer(buf_len)
        return buf, buf


# Shared singleton used in the signature tables below.
OUTSTRING = _OUTSTRING()
def stripMeta(val):
    """Strip meta info from OUTPUT and OUTSTRING instances."""
    return val.val if isinstance(val, _meta) else val
# Function type for callbacks.
# NOTE(review): this prototype declares a void*-returning callback with no
# arguments; confirm it matches the callback signature expected by the
# pl_cam_register_callback mode actually used.
CALLBACK = ctypes.CFUNCTYPE(ctypes.c_void_p)
class dllFunction:
    """Expose a DLL function to python.

    (Again, largely nicked from PYME.)"""

    def __init__(self, name, args=[], argnames=[], buf_len=-1, lib=_lib):
        # NOTE(review): the mutable default args are never mutated here, so
        # sharing them across calls is harmless, if unidiomatic.
        self.f = getattr(lib, name)
        self.f.restype = rs_bool  # PVCAM functions return rs_bool success
        self.f.argtypes = [stripMeta(a) for a in args]
        self.fargs = args
        self.fargnames = argnames
        self.name = name
        # Per-slot masks: True for input parameters, False for outputs.
        self.inp = [not isinstance(a, OUTPUT) for a in args]
        self.in_args = [a for a in args if not isinstance(a, OUTPUT)]
        self.out_args = [a for a in args if isinstance(a, OUTPUT)]
        self.buf_len = buf_len

        # Synthesise a docstring listing the C-level signature.
        docstring = name + "\n\nArguments:\n===========\n"
        for i in range(len(args)):
            an = ""
            if i < len(argnames):
                an = argnames[i]
            docstring += "\t%s\t%s\n" % (args[i], an)

        self.f.__doc__ = docstring

    def __call__(self, *args, **kwargs):
        """Call the wrapped C function; return its output parameter(s).

        Raises microscope.DeviceError if the C function reports failure.
        """
        ars = []
        i = 0
        ret = []

        # pl_get_param buffer length depends on the parameter being fetched, so
        # use kwargs to pass buffer length.
        if "buf_len" in kwargs:
            bs = kwargs["buf_len"]
        elif self.name == "pl_get_enum_param":
            # last argument is buffer length
            bs = args[-1]
        elif self.buf_len >= 0:
            bs = self.buf_len
        else:
            bs = 256
        # May have been passed a ctype; if so, fetch its value.
        if isinstance(bs, ctypes._SimpleCData):
            bs = bs.value

        # Interleave caller-supplied inputs with freshly-created outputs,
        # preserving the C function's positional order.
        for j in range(len(self.inp)):
            if self.inp[j]:  # an input
                if self.f.argtypes[j] is CALLBACK and not isinstance(
                    args[i], CALLBACK
                ):
                    ars.append(CALLBACK(args[i]))
                else:
                    ars.append(args[i])
                i += 1
            else:  # an output
                r, ar = self.fargs[j].get_var(bs)
                ars.append(ar)
                ret.append(r)
            # print r, r._type_
        # print (self.name, ars)
        res = self.f(*ars)
        # print res

        if res == False:
            # Translate the PVCAM error into a microscope exception.
            err_code = _lib.pl_error_code()
            err_msg = ctypes.create_string_buffer(ERROR_MSG_LEN)
            _lib.pl_error_message(err_code, err_msg)

            raise microscope.DeviceError(
                "pvcam error %d: %s" % (err_code, err_msg.value)
            )

        if len(ret) == 0:
            return None
        if len(ret) == 1:
            return ret[0]
        else:
            return ret
def _status():
    """Fetch the PVCAM DLL status.

    Returns a tuple of (error code, error message bytes) describing the
    library's last error state.
    """
    err_code = _lib.pl_error_code()
    err_msg = ctypes.create_string_buffer(ERROR_MSG_LEN)
    _lib.pl_error_message(err_code, err_msg)
    # Bug fix: the code and message were previously fetched and then
    # silently discarded, making this function a no-op for its callers.
    return err_code, err_msg.value
def dllFunc(name, args=(), argnames=(), buf_len=0):
    """Register a function using dllFunction.

    :param name: DLL symbol name, e.g. ``"pl_cam_open"``.
    :param args: sequence of argument types; OUTPUT/OUTSTRING instances
        mark output parameters.
    :param argnames: argument names, used only to build the docstring.
    :param buf_len: default buffer length for string/void outputs.

    The wrapper is bound at module level with the leading ``pl`` stripped,
    so ``"pl_cam_open"`` becomes ``_cam_open``.
    """
    # Immutable tuple defaults replace the original mutable list defaults
    # (a shared-mutable-default hazard); dllFunction only iterates/indexes
    # its args, so tuples are a drop-in replacement.
    f = dllFunction(name, args, argnames, buf_len=buf_len)
    globals()[name[2:]] = f
"""DLL function imports."""
# Each dllFunc call binds a module-level wrapper named after the DLL
# symbol with the leading "pl" stripped (e.g. _pvcam_init).
# Class 0 functions - library
dllFunc("pl_pvcam_get_ver", [OUTPUT(uns16)], ["version"])
dllFunc("pl_pvcam_init")
dllFunc("pl_pvcam_uninit")
# Class 0 functions - camera
dllFunc("pl_cam_close", [int16], ["hcam"])
dllFunc(
    "pl_cam_get_name",
    [int16, OUTSTRING],
    ["can_num", "cam_name"],
    buf_len=CAM_NAME_LEN,
)
dllFunc("pl_cam_get_total", [OUTPUT(int16),], ["total_cams",])
dllFunc(
    "pl_cam_open",
    [STRING, OUTPUT(int16), int16],
    ["cam_name", "hcam", "o_mode"],
)
dllFunc(
    "pl_cam_register_callback",
    [int16, int32, CALLBACK],
    ["hcam", "event", "Callback"],
)
dllFunc(
    "pl_cam_register_callback_ex",
    [int16, int32, CALLBACK, ctypes.c_void_p],
    ["hcam", "event", "Callback", "Context"],
)
dllFunc(
    "pl_cam_register_callback_ex2",
    [int16, int32, CALLBACK],
    ["hcam", "event", "Callback"],
)
dllFunc(
    "pl_cam_register_callback_ex3",
    [int16, int32, CALLBACK, ctypes.c_void_p],
    ["hcam", "event", "Callback", "Context"],
)
dllFunc(
    "pl_cam_deregister_callback", [int16, ctypes.c_void_p], ["hcam", "event"]
)
# Class 1 functions - error handling. Handled in dllFunction.
# Class 2 functions - configuration/setup.
dllFunc(
    "pl_get_param",
    [int16, uns32, int16, OUTPUT(ctypes.c_void_p)],
    ["hcam", "param_id", "param_attrib", "param_value"],
)
dllFunc(
    "pl_set_param",
    [int16, uns32, ctypes.c_void_p],
    ["hcam", "param_id", "param_value"],
)
dllFunc(
    "pl_get_enum_param",
    [int16, uns32, uns32, OUTPUT(int32), OUTSTRING, uns32],
    ["hcam", "param_id", "index", "value", "desc", "length"],
)
dllFunc(
    "pl_enum_str_length",
    [int16, uns32, uns32, OUTPUT(uns32)],
    ["hcam", "param_id", "index", "length"],
)
dllFunc("pl_pp_reset", [int16,], ["hcam"])
dllFunc(
    "pl_create_smart_stream_struct",
    [OUTPUT(smart_stream_type), uns16],
    ["pSmtStruct", "entries"],
)
dllFunc(
    "pl_release_smart_stream_struct",
    [ctypes.POINTER(smart_stream_type),],
    ["pSmtStruct",],
)
dllFunc(
    "pl_create_frame_info_struct", [OUTPUT(FRAME_INFO),], ["pNewFrameInfo"]
)
dllFunc(
    "pl_release_frame_info_struct",
    [ctypes.POINTER(FRAME_INFO),],
    ["pFrameInfoToDel",],
)
dllFunc("pl_exp_abort", [int16, int16], ["hcam", "cam_state"])
dllFunc(
    "pl_exp_setup_seq",
    [
        int16,
        uns16,
        uns16,
        ctypes.POINTER(rgn_type),
        int16,
        uns32,
        OUTPUT(uns32),
    ],
    [
        "hcam",
        "exp_total",
        "rgn_total",
        "rgn_array",
        "exp_mode",
        "exposure_time",
        "exp_bytes",
    ],
)
dllFunc("pl_exp_start_seq", [int16, ctypes.c_void_p], ["hcam", "pixel_stream"])
dllFunc(
    "pl_exp_setup_cont",
    [
        int16,
        uns16,
        ctypes.POINTER(rgn_type),
        int16,
        uns32,
        OUTPUT(uns32),
        int16,
    ],
    [
        "hcam",
        "rgn_total",
        "rgn_array",
        "exp_mode",
        "exposure_time",
        "exp_bytes",
        "buffer_mode",
    ],
)
dllFunc(
    "pl_exp_start_cont",
    [int16, ctypes.c_void_p, uns32],
    ["hcam", "pixel_stream", "size"],
)
dllFunc(
    "pl_exp_check_status",
    [int16, OUTPUT(int16), OUTPUT(uns32)],
    ["hcam", "status", "bytes_arrived"],
)
dllFunc(
    "pl_exp_check_cont_status",
    [int16, OUTPUT(int16), OUTPUT(uns32), OUTPUT(uns32)],
    ["hcam", "status", "bytes_arrived", "buffer_cnt"],
)
dllFunc(
    "pl_exp_check_cont_status_ex",
    [
        int16,
        OUTPUT(int16),
        OUTPUT(uns32),
        OUTPUT(uns32),
        ctypes.POINTER(FRAME_INFO),
    ],
    ["hcam", "status", "byte_cnt", "buffer_cnt", "pFrameInfo"],
)
dllFunc(
    "pl_exp_get_latest_frame",
    [int16, OUTPUT(ctypes.c_void_p)],
    ["hcam", "frame"],
)
dllFunc(
    "pl_exp_get_latest_frame_ex",
    [int16, OUTPUT(ctypes.c_void_p), ctypes.POINTER(FRAME_INFO)],
    ["hcam", "frame", "pFrameInfo"],
)
dllFunc(
    "pl_exp_get_oldest_frame",
    [int16, OUTPUT(ctypes.c_void_p)],
    ["hcam", "frame"],
)
dllFunc(
    "pl_exp_get_oldest_frame_ex",
    [int16, OUTPUT(ctypes.c_void_p), ctypes.POINTER(FRAME_INFO)],
    ["hcam", "frame", "pFrameInfo"],
)
dllFunc("pl_exp_unlock_oldest_frame", [int16], ["hcam"])
dllFunc("pl_exp_stop_cont", [int16, int16], ["hcam", "cam_state"])
# NOTE(review): pl_exp_abort is registered a second time here with the
# same signature as above; the second registration simply overwrites the
# first binding and is harmless but redundant.
dllFunc("pl_exp_abort", [int16, int16], ["hcam", "cam_state"])
dllFunc(
    "pl_exp_finish_seq", [int16, ctypes.c_void_p], ["hcam", "pixel_stream"]
)
# Map ATTR_ enums to the return type for that ATTR.
# None means the return type varies per parameter and is resolved at
# runtime by querying ATTR_TYPE (see PVParam._query).
_attr_map = {
    ATTR_ACCESS: uns16,
    ATTR_AVAIL: rs_bool,
    ATTR_COUNT: uns32,
    ATTR_CURRENT: None,
    ATTR_DEFAULT: None,
    ATTR_INCREMENT: None,
    ATTR_MAX: None,
    ATTR_MIN: None,
    ATTR_TYPE: uns16,
}
# Map TYPE enums to their type.
_typemap = {
    TYPE_INT16: int16,
    TYPE_INT32: int32,
    TYPE_FLT64: flt64,
    TYPE_UNS8: uns8,
    TYPE_UNS16: uns16,
    TYPE_UNS32: uns32,
    TYPE_UNS64: ulong64,
    TYPE_ENUM: int32,  # from SDK documentation
    TYPE_BOOLEAN: rs_bool,
    TYPE_INT8: int8,
    TYPE_CHAR_PTR: ctypes.c_char_p,
    TYPE_VOID_PTR: ctypes.c_void_p,
    TYPE_VOID_PTR_PTR: ctypes.POINTER(ctypes.c_void_p),
    TYPE_INT64: long64,
    TYPE_SMART_STREAM_TYPE: smart_stream_type,
    TYPE_SMART_STREAM_TYPE_PTR: ctypes.POINTER(smart_stream_type),
    TYPE_FLT32: flt32,
}
# Map TYPE enums to the appropriate setting dtype.
# None marks types that can not be exposed as microscope settings.
_dtypemap = {
    TYPE_INT16: "int",
    TYPE_INT32: "int",
    TYPE_FLT64: "float",
    TYPE_UNS8: "int",
    TYPE_UNS16: "int",
    TYPE_UNS32: "int",
    TYPE_UNS64: "int",
    TYPE_ENUM: "enum",
    TYPE_BOOLEAN: "bool",
    TYPE_INT8: "int",
    TYPE_CHAR_PTR: "str",
    TYPE_VOID_PTR: None,
    TYPE_VOID_PTR_PTR: None,
    TYPE_INT64: "int",
    TYPE_SMART_STREAM_TYPE: None,
    TYPE_SMART_STREAM_TYPE_PTR: None,
    TYPE_FLT32: "float",
}
# Mapping of param ids to maximum string lengths.
# PARAM_DD_INFO is a variable length string, and its length can be found by
# querying PARAM_DD_INFO_LEN. However, querying PARAM_DD_INFO frequently causes
# a general protection fault in the DLL, regardless of buffer length.
_length_map = {
    PARAM_DD_INFO: PARAM_DD_INFO_LENGTH,
    PARAM_CHIP_NAME: CCD_NAME_LEN,
    PARAM_SYSTEM_NAME: MAX_SYSTEM_NAME_LEN,
    PARAM_VENDOR_NAME: MAX_VENDOR_NAME_LEN,
    PARAM_PRODUCT_NAME: MAX_PRODUCT_NAME_LEN,
    PARAM_CAMERA_PART_NUMBER: MAX_CAM_PART_NUM_LEN,
    PARAM_GAIN_NAME: MAX_GAIN_NAME_LEN,
    PARAM_HEAD_SER_NUM_ALPHA: MAX_ALPHA_SER_NUM_LEN,
    PARAM_PP_FEAT_NAME: MAX_PP_NAME_LEN,
    PARAM_PP_PARAM_NAME: MAX_PP_NAME_LEN,
}
# map PARAM enums to the parameter name
_param_to_name = {
    globals()[param]: param
    for param in globals()
    if (param.startswith("PARAM_") and param != "PARAM_NAME_LEN")
}
def get_param_type(param_id):
    """Return parameter type code (for C/DLL) for param_id."""
    # The PVCAM type code is carried in bits 24-31 of the parameter id.
    type_code = (param_id >> 24) & 0xFF
    return _typemap[type_code]
def get_param_dtype(param_id):
    """Return parameter dtype (for microscope settings) for param_id."""
    # The PVCAM type code is carried in bits 24-31 of the parameter id.
    type_code = (param_id >> 24) & 0xFF
    return _dtypemap[type_code]
# Map status codes to strings. Used for logging in soft_trigger().
STATUS_STRINGS = {
    READOUT_NOT_ACTIVE: "READOUT_NOT_ACTIVE",
    EXPOSURE_IN_PROGRESS: "EXPOSURE_IN_PROGRESS",
    READOUT_IN_PROGRESS: "READOUT_IN_PROGRESS",
    READOUT_COMPLETE: "READOUT_COMPLETE",
    READOUT_FAILED: "READOUT_FAILED",
    FRAME_AVAILABLE: "FRAME_AVAILABLE",
}
# === Python classes ===
# Trigger modes.
class TriggerMode:
    """A microscope trigger mode using PVCAM PMODES.

    Pairs a human-readable label with the PVCAM exposure-mode constant.
    """
    def __init__(self, label, pv_mode):
        self.label = label
        self.pv_mode = pv_mode
    def __repr__(self):
        return f"<{type(self).__name__}: '{self.label}'>"
# Enumerate trigger types.
(
    TRIG_SOFT,
    TRIG_TIMED,
    TRIG_VARIABLE,
    TRIG_FIRST,
    TRIG_STROBED,
    TRIG_BULB,
) = range(6)
# Trigger mode definitions.
TRIGGER_MODES = {
    TRIG_SOFT: TriggerMode("software", TIMED_MODE),
    TRIG_TIMED: TriggerMode("timed", TIMED_MODE),
    TRIG_VARIABLE: TriggerMode("variable timed", VARIABLE_TIMED_MODE),
    TRIG_FIRST: TriggerMode("trig. first", TRIGGER_FIRST_MODE),
    TRIG_STROBED: TriggerMode("strobed", STROBED_MODE),
    TRIG_BULB: TriggerMode("bulb", BULB_MODE),
}
# Mapping between our trigger constants and the microscope API's
# (TriggerType, TriggerMode) pairs. Note that TRIG_TIMED and
# TRIG_VARIABLE have no entry here, so they are not reachable through
# set_trigger().
PV_MODE_TO_TRIGGER = {
    TRIG_SOFT: (microscope.TriggerType.SOFTWARE, microscope.TriggerMode.ONCE),
    TRIG_FIRST: (
        microscope.TriggerType.RISING_EDGE,
        microscope.TriggerMode.ONCE,
    ),
    TRIG_STROBED: (
        microscope.TriggerType.RISING_EDGE,
        microscope.TriggerMode.STROBE,
    ),
    TRIG_BULB: (
        microscope.TriggerType.RISING_EDGE,
        microscope.TriggerMode.BULB,
    ),
}
TRIGGER_TO_PV_MODE = {v: k for k, v in PV_MODE_TO_TRIGGER.items()}
class PVParam:
    """A wrapper around PVCAM parameters.

    Exposes a single PVCAM parameter (identified by its PARAM_* id) as a
    python object with pythonic accessors; values are cached while the
    camera is acquiring, since many parameters can not be queried then.
    """
    @staticmethod
    def factory(camera, param_id):
        """Create a PVParam or appropriate subclass"""
        # A mapping of pv parameters types to python types.
        # None means unsupported.
        # Parameters omitted from the mapping will default to PVParam.
        __types__ = {
            TYPE_SMART_STREAM_TYPE: None,
            TYPE_SMART_STREAM_TYPE_PTR: None,
            TYPE_VOID_PTR: None,
            TYPE_VOID_PTR_PTR: None,
            TYPE_ENUM: PVEnumParam,
            TYPE_CHAR_PTR: PVStringParam,
        }
        # Determine the appropriate type from its id.
        pvtype = __types__.get(param_id >> 24 & 255, PVParam)
        if pvtype is None:
            raise microscope.LibraryLoadError(
                "Parameter %s not supported" % _param_to_name[param_id]
            )
        return pvtype(camera, param_id)
    def __init__(self, camera, param_id):
        # Use a weakref back to the camera to avoid circular dependency.
        self.cam = weakref.proxy(camera)
        self.param_id = param_id
        self.name = _param_to_name[param_id]
        self._pvtype = param_id >> 24 & 255
        if self.name == "PARAM_READOUT_TIME":
            # Bugged. Here is what the SDK says: "The parameter type is
            # incorrectly defined. The actual type is TYPE_UNS32."
            self._pvtype = TYPE_UNS32
        self.dtype = _dtypemap[self._pvtype]
        self._ctype = _typemap[self._pvtype]
        # Cache of attribute queries, keyed on (self, ATTR_*); consulted
        # while the camera is acquiring.
        self.__cache = {}
    def set_value(self, new_value):
        """Set a parameter value.
        Subclasses should do whatever processing they need on new_value,
        then call super().set_value(new_value)"""
        try:
            ref = ctypes.byref(new_value)
        except TypeError:
            # Need to convert python type to ctype first.
            ref = ctypes.byref(self._ctype(new_value))
        _set_param(self.cam.handle, self.param_id, ref)
        # Read back the value to update cache.
        self._query(force_query=True)
    def _query(self, what=ATTR_CURRENT, force_query=False):
        """Query the DLL for an attribute for this parameter.
        This returns pythonic types, not ctypes."""
        err = None
        key = (self, what)  # key for cache
        if self.cam._acquiring and not force_query:
            return self.__cache.get(key, None)
        if what == ATTR_AVAIL:
            return self.available
        elif not self.available:
            raise microscope.DeviceError(
                "Parameter %s is not available" % self.name
            )
        rtype = _attr_map[what]  # return type
        if not rtype:
            # Variable return type: resolve it at runtime via ATTR_TYPE.
            rtype = _get_param(self.cam.handle, self.param_id, ATTR_TYPE)
        if rtype.value == TYPE_CHAR_PTR:
            buf_len = _length_map[self.param_id]
            if not buf_len:
                raise microscope.DeviceError(
                    "pvcam: parameter %s not supported in python." % self.name
                )
            try:
                result = _get_param(
                    self.cam.handle, self.param_id, what, buf_len=buf_len
                )
            except Exception as e:
                err = e
            else:
                result = result.value
        else:
            try:
                result = _get_param(self.cam.handle, self.param_id, what)
            except Exception as e:
                err = e
            else:
                result = ctypes.POINTER(self._ctype)(result).contents.value
        # Test on err.args prevents indexing into empty tuple.
        if err and err.args and err.args[0].startswith("pvcam error 49"):
            _logger.warn(
                "Parameter %s not available due to camera state.", self.name
            )
            result = None
        elif err:
            # Bug fix: this previously re-raised "e", which is unbound
            # here - Python 3 deletes the except-clause target when the
            # clause exits - so hitting this branch raised a NameError
            # instead of the stored exception.
            raise err
        else:
            self.__cache[key] = result
        return result
    @property
    def access(self):
        """Return parameter access attribute."""
        return int(
            _get_param(self.cam.handle, self.param_id, ATTR_ACCESS).value
        )
    @property
    def available(self):
        """Return whether or not parameter is available on hardware."""
        return bool(
            _get_param(self.cam.handle, self.param_id, ATTR_AVAIL).value
        )
    @property
    def count(self):
        """Return count of parameter enum entries."""
        return int(
            _get_param(self.cam.handle, self.param_id, ATTR_COUNT).value
        )
    @property
    def values(self):
        """Get parameter min and max values.
        Subclasses for strings and enum override this."""
        return (self._query(ATTR_MIN), self._query(ATTR_MAX))
    @property
    def current(self):
        """Return the current (or cached) parameter value.
        Subclasses should override this for more complex data types."""
        return self._query()
class PVEnumParam(PVParam):
    """PVParam subclass for enums"""
    def set_value(self, new_value):
        """Set an enum parameter value.

        Accepts the enum value itself, its description string, or a
        (value, description) tuple.
        """
        values, descriptions = list(zip(*self.values.items()))
        # Bug fix: test for str *before* the generic iterable check.
        # Python 3 strings have __iter__, so the original ordering sent a
        # description string down the tuple branch and new_value[1]
        # silently extracted a single character instead of the whole
        # description (the str branch was dead code).
        if isinstance(new_value, str):
            desc = str(new_value)
        elif hasattr(new_value, "__iter__"):
            desc = str(new_value[1])
        else:
            desc = None
        # If we have a description, rely on that, as this avoids any confusion
        # of index and value.
        if desc and desc in descriptions:
            new_index = descriptions.index(desc)
            new_value = values[new_index]
        elif desc:
            raise Exception(
                "Could not find description '%s' for enum %s."
                % (desc, self.name)
            )
        super().set_value(new_value)
    @property
    def current(self):
        """Return the current (or cached) enum parameter value."""
        # c_void_p(0) is None, so replace with 0
        return int(self._query() or 0)
    @property
    def values(self):
        """Get allowable enum values as a {value: description} dict."""
        values = {}
        for i in range(self.count):
            length = _enum_str_length(self.cam.handle, self.param_id, i)
            value, desc = _get_enum_param(
                self.cam.handle, self.param_id, i, length
            )
            values[value.value] = desc.value.decode()
        return values
class PVStringParam(PVParam):
    """PVParam subclass for string-valued PVCAM parameters."""
    def set_value(self, new_value):
        """Encode new_value to bytes if necessary, then set it."""
        if hasattr(new_value, "encode"):
            new_value = new_value.encode()
        super().set_value(new_value)
    @property
    def current(self):
        """Return the current (or cached) parameter value, decoded to str."""
        return self._query().decode()
    @property
    def values(self):
        """Return the maximum permissible string length for this parameter."""
        return _length_map[self.param_id] or 0
class PVCamera(
    microscope.abc.FloatingDeviceMixin, microscope.abc.Camera,
):
    """Implements the CameraDevice interface for the pvcam library."""
    # Keep track of open cameras.
    open_cameras = []
    def __init__(self, index=0, **kwargs):
        super().__init__(index=index, **kwargs)
        # Camera name in DLL.
        self._pv_name = None
        # Camera handle.
        self.handle = None
        # Sensor shape.
        self.shape = (None, None)
        # Region of interest.
        self.roi = (None, None, None, None)
        # Binning setting.
        self.binning = microscope.Binning(1, 1)
        # Default trigger type (one of the TRIG_* constants).
        self._trigger = TRIG_STROBED
        self.exposure_time = 0.001  # in seconds
        # Cycle time
        self.cycle_time = self.exposure_time
        # Data buffer.
        self._buffer = None
        # This devices PVCAM parameters.
        self._params = {}
        # Circular buffer length.
        self._circ_buffer_length = 10
        # Add common settings.
        self.add_setting(
            "exposure time",
            "float",
            lambda: self.exposure_time,
            self.set_exposure_time,
            lambda: (1e-6, 1),
        )
        self.add_setting(
            "trigger mode",
            "enum",
            lambda: self._trigger,
            lambda value: setattr(self, "_trigger", value),
            {k: v.label for k, v in TRIGGER_MODES.items()},
        )
        self.add_setting(
            "circular buffer length",
            "int",
            lambda: self._circ_buffer_length,
            lambda value: setattr(self, "_circ_buffer_length", value),
            (2, 100),
        )
        self.initialize()
    @property
    def _region(self):
        """Return a rgn_type for current roi and binning settings."""
        # rgn_type uses inclusive pixel coordinates, hence the -1.
        return rgn_type(
            self.roi.left,
            self.roi.left + self.roi.width - 1,
            self.binning.h,
            self.roi.top,
            self.roi.top + self.roi.height - 1,
            self.binning.v,
        )
    """Private methods, called here and within super classes."""
    def _fetch_data(self):
        """Fetch data - for use in fetch_loop."""
        # Not used: images fetched using callback.
        return None
    def _do_enable(self):
        """Enable the camera hardware and make ready to respond to triggers.
        Return True if successful, False if not."""
        # Set exposure time resolution on camera and determine t_exp, the
        # integer value used to set exposure time on the hardware later.
        if self.exposure_time < 1e-3:
            self._params[PARAM_EXP_RES].set_value(EXP_RES_ONE_MICROSEC)
            t_exp = int(self.exposure_time * 1e6)
        else:
            self._params[PARAM_EXP_RES].set_value(EXP_RES_ONE_MILLISEC)
            t_exp = int(self.exposure_time * 1e3)
        # Determine the data type of the buffer
        # Kinetix has an 8 bit mode, may need more options for colour
        # cameras.
        buffer_dtype = "uint16"
        if self._params[PARAM_BIT_DEPTH].current == 8:
            buffer_dtype = "uint8"
        # Configure camera, allocate buffer, and register callback.
        if self._trigger == TRIG_SOFT:
            # Software triggering for single frames.
            # Set up callback.
            self._using_callback = True
            def cb():
                """Soft trigger mode end-of-frame callback."""
                timestamp = time.time()
                frame = self._buffer.copy()
                _logger.debug("Fetched single frame.")
                _exp_finish_seq(self.handle, CCS_CLEAR)
                self._put(frame, timestamp)
                return
            # Need to keep a reference to the callback.
            self._eof_callback = CALLBACK(cb)
            _cam_register_callback(
                self.handle, PL_CALLBACK_EOF, self._eof_callback
            )
            # NOTE(review): nbytes is unused here; also unlike the cont
            # branch below, .value is not taken from the result.
            nbytes = _exp_setup_seq(
                self.handle,
                1,
                1,  # cam, num exposures, num regions
                self._region,
                TRIGGER_MODES[self._trigger].pv_mode,
                t_exp,
            )
            buffer_shape = (
                self.roi.height // self.binning.v,
                self.roi.width // self.binning.h,
            )
            self._buffer = np.require(
                np.zeros(buffer_shape, dtype=buffer_dtype),
                requirements=["C_CONTIGUOUS", "ALIGNED", "OWNDATA"],
            )
        else:
            # Use a circular buffer.
            self._using_callback = True
            # Determine the data type of the frame
            frame_type = uns16
            if buffer_dtype == "uint8":
                frame_type = uns8
            def cb():
                """Circular buffer mode end-of-frame callback."""
                timestamp = time.time()
                frame_p = ctypes.cast(
                    _exp_get_latest_frame(self.handle),
                    ctypes.POINTER(frame_type),
                )
                # NOTE(review): roi[2], roi[3] are (width, height) and are
                # not divided by the binning factors, unlike buffer_shape
                # below - confirm against hardware with binning > 1.
                frame = np.ctypeslib.as_array(
                    frame_p, (self.roi[2], self.roi[3])
                ).copy()
                _logger.debug("Fetched frame from circular buffer.")
                self._put(frame, timestamp)
                return
            # Need to keep a reference to the callback.
            self._eof_callback = CALLBACK(cb)
            _cam_register_callback(
                self.handle, PL_CALLBACK_EOF, self._eof_callback
            )
            buffer_shape = (
                self._circ_buffer_length,
                self.roi.height // self.binning.v,
                self.roi.width // self.binning.h,
            )
            self._buffer = np.require(
                np.zeros(buffer_shape, dtype=buffer_dtype),
                requirements=["C_CONTIGUOUS", "ALIGNED", "OWNDATA"],
            )
            nbytes = _exp_setup_cont(
                self.handle,
                1,
                self._region,
                TRIGGER_MODES[self._trigger].pv_mode,
                t_exp,
                CIRC_OVERWRITE,
            ).value
        # Read back exposure time.
        t_readback = self._params[PARAM_EXPOSURE_TIME].current
        t_resolution = self._params[PARAM_EXP_RES].current
        multipliers = {
            EXP_RES_ONE_SEC: 1.0,
            EXP_RES_ONE_MILLISEC: 1e-3,
            EXP_RES_ONE_MICROSEC: 1e-6,
        }
        if isinstance(t_resolution, tuple):
            self.exposure_time = t_readback * multipliers[t_resolution[0]]
        else:
            self.exposure_time = t_readback * multipliers[t_resolution]
        # Update cycle time. Exposure time in seconds; readout time in microseconds.
        self.cycle_time = (
            self.exposure_time
            + 1e-6 * self._params[PARAM_READOUT_TIME].current
        )
        # Set up exposure time for VARIABLE_TIMED_MODE, as according to documentation.
        # It doesn't seem to work.
        if self._trigger == TRIG_VARIABLE:
            self._params[PARAM_EXP_TIME].set_value(t_exp)
        # If using real triggering, start triggered acquisition.
        # (Software triggering will start acquisition in soft_trigger().)
        if self._trigger != TRIG_SOFT:
            _exp_start_cont(
                self.handle,
                self._buffer.ctypes.data_as(ctypes.c_void_p),
                self._buffer.nbytes,
            )
        # Done.
        self._acquiring = True
        return self._acquiring
    def _do_disable(self):
        """Disable the hardware for a short period of inactivity."""
        self.abort()
        _cam_deregister_callback(self.handle, PL_CALLBACK_EOF)
    def _do_shutdown(self) -> None:
        """Disable the hardware for a prolonged period of inactivity."""
        self.abort()
        _cam_close(self.handle)
        PVCamera.open_cameras.remove(self.handle)
        # Uninitialise the DLL once the last camera is closed.
        if not PVCamera.open_cameras:
            _logger.info("No more open cameras - calling _pvcam_uninit.")
            _pvcam_uninit()
    """Private shape-related methods. These methods do not need to account
    for camera orientation or transforms due to readout mode, as that
    is handled in the parent class."""
    def _get_sensor_shape(self):
        """Return the sensor shape (width, height)."""
        return self.shape
    def _get_binning(self):
        """Return the current binning (horizontal, vertical)."""
        return self.binning
    @microscope.abc.keep_acquiring
    def _set_binning(self, binning):
        """Set binning to (h, v)."""
        # The keep_acquiring decorator will cause recreation of buffers.
        self.binning = microscope.Binning(binning.h, binning.v)
    def _get_roi(self):
        """Return the current ROI (left, top, width, height)."""
        return self.roi
    @microscope.abc.keep_acquiring
    def _set_roi(self, roi):
        """Set the ROI to (left, top, width, height)."""
        right = roi.left + roi.width
        bottom = roi.top + roi.height
        if (right, bottom) > self.shape:
            raise ValueError("ROI exceeds sensor area.")
        self.roi = roi
    """Public methods, callable from client."""
    def get_id(self):
        """Get hardware's unique identifier."""
        return self._params[PARAM_HEAD_SER_NUM_ALPHA].current
    def abort(self):
        """Abort acquisition.
        This should put the camera into a state in which settings can
        be modified."""
        if self._trigger == TRIG_SOFT:
            _exp_finish_seq(self.handle, CCS_CLEAR)
        else:
            _exp_stop_cont(self.handle, CCS_CLEAR)
        _exp_abort(self.handle, CCS_HALT)
        self._acquiring = False
    def initialize(self):
        """Initialise the camera."""
        # Init the DLL if necessary.
        if not PVCamera.open_cameras:
            # NOTE(review): bare except silently swallows any init error,
            # including typos/NameErrors - consider narrowing it.
            try:
                _pvcam_init()
            except:
                pass
        # If no cameras detected, need to deinit DLL so it can be reinited to update count.
        if _cam_get_total().value == 0:
            _pvcam_uninit()
            raise microscope.InitialiseError("No cameras detected.")
        # Connect to the camera.
        _logger.info("DLL version: %s", _pvcam_get_ver().value)
        self._pv_name = _cam_get_name(self._index).value
        _logger.info("Initializing %s", self._pv_name)
        self.handle = _cam_open(self._pv_name, OPEN_EXCLUSIVE)
        PVCamera.open_cameras.append(self.handle)
        # Set up event callbacks. Tried to use the resume callback to reinit camera
        # after power loss, but any attempt to close/reopen the camera or deinit the
        # DLL throws a Windows Error 0xE06D7363.
        def _cb(event):
            _logger.info("Received %s event.", event)
            if event == "removed":
                _logger.critical("Can not re-init hardware. Exiting.")
                exit(-1)
            return
        # Keep references so the CALLBACK wrappers are not collected.
        self._cbs = {
            "check": CALLBACK(lambda: _cb("check")),
            "resumed": CALLBACK(lambda: _cb("resumed")),
            "removed": CALLBACK(lambda: _cb("removed")),
        }
        _cam_register_callback(
            self.handle, PL_CALLBACK_CHECK_CAMS, self._cbs["check"]
        )
        _cam_register_callback(
            self.handle, PL_CALLBACK_CAM_REMOVED, self._cbs["removed"]
        )
        _cam_register_callback(
            self.handle, PL_CALLBACK_CAM_RESUMED, self._cbs["resumed"]
        )
        # Repopulate _params.
        self._params = {}
        for (param_id, name) in _param_to_name.items():
            try:
                p = PVParam.factory(self, param_id)
            except:
                _logger.warn("Skipping unsupported parameter %s.", name)
                continue
            if not p.dtype or not p.available:
                continue
            self._params[param_id] = p
            # Strip the "PARAM_" prefix.
            # NOTE(review): this local appears unused below (add_setting
            # uses p.name) - confirm before removing.
            name = name[6:]
            try:
                p.current
            except KeyError:
                # Raise these here, as the message is a tuple, not a str.
                raise
            except Exception as err:
                # Test on err.args prevents indexing into empty tuple.
                if err.args and not err.args[0].startswith("pvcam error 49"):
                    _logger.warn(
                        "Skipping parameter %s: not supported" " in python.",
                        p.name,
                    )
                    continue
            self.add_setting(
                p.name,
                p.dtype,
                lambda p=p: p.current,
                p.set_value
                if p.access in [ACC_READ_WRITE, ACC_WRITE_ONLY]
                else None,
                lambda p=p: p.values,
            )
        if PARAM_GAIN_MULT_FACTOR in self._params:
            self.add_setting(
                "gain",
                self._params[PARAM_GAIN_MULT_FACTOR].dtype,
                lambda: self._params[PARAM_GAIN_MULT_FACTOR].current,
                self._params[PARAM_GAIN_MULT_FACTOR].set_value,
                self._params[PARAM_GAIN_MULT_FACTOR].values,
            )
        if PARAM_PMODE in self._params:
            self.add_setting(
                "frame transfer mode",
                self._params[PARAM_PMODE].dtype,
                lambda: self._params[PARAM_PMODE].current,
                self._params[PARAM_PMODE].set_value,
                self._params[PARAM_PMODE].values,
            )
        self.shape = (
            self._params[PARAM_PAR_SIZE].current,
            self._params[PARAM_SER_SIZE].current,
        )
        self.roi = microscope.ROI(0, 0, self.shape[0], self.shape[1])
        # Populate readout modes by iterating over readout ports and speed
        # table entries.
        ro_ports = self._params[PARAM_READOUT_PORT].values
        self._readout_modes = []
        self._readout_mode_parameters = []
        for i, port in ro_ports.items():
            self._params[PARAM_READOUT_PORT].set_value(i)
            ro_speeds = self._params[PARAM_SPDTAB_INDEX].values
            for j in range(ro_speeds[0], ro_speeds[1] + 1):
                self._params[PARAM_SPDTAB_INDEX].set_value(j)
                bit_depth = self._params[PARAM_BIT_DEPTH].current
                # PARAM_PIX_TIME is the pixel time in ns; derive a
                # human-readable frequency with an SI prefix.
                freq = 1e9 / self._params[PARAM_PIX_TIME].current
                if freq > 1e6:
                    freq *= 1e-6
                    prefix = "M"
                elif freq > 1e3:
                    freq *= 1e-3
                    prefix = "k"
                else:
                    prefix = "Hz"
                mode_str = "%s, %s-bit, %s %sHz" % (
                    port,
                    bit_depth,
                    freq,
                    prefix,
                )
                self._readout_modes.append(mode_str)
                self._readout_mode_parameters.append(
                    {"port": i, "spdtab_index": j}
                )
        # Set to default mode.
        self.set_readout_mode(0)
        self._params[PARAM_CLEAR_MODE].set_value(CLEAR_PRE_EXPOSURE_POST_SEQ)
    @microscope.abc.keep_acquiring
    def set_readout_mode(self, index):
        """Set the readout mode and transform."""
        params = self._readout_mode_parameters[index]
        self._params[PARAM_READOUT_PORT].set_value(params["port"])
        self._params[PARAM_SPDTAB_INDEX].set_value(params["spdtab_index"])
        self._readout_mode = index
        # Update transforms, if available.
        chip = self._params[PARAM_CHIP_NAME].current
        new_readout_transform = None
        readout_map = READOUT_TRANSFORMS.get(chip, None)
        if readout_map:
            new_readout_transform = readout_map.get(params["port"], None)
        if new_readout_transform:
            self._set_readout_transform(new_readout_transform)
    @microscope.abc.keep_acquiring
    def set_exposure_time(self, value):
        """Set the exposure time to value."""
        self.exposure_time = value
    def get_exposure_time(self):
        """Return the current exposure time.
        Just return self.exposure_time, which is updated with the real
        value during _do_enable."""
        return self.exposure_time
    def get_cycle_time(self):
        """Return the cycle time.
        Just return self.cycle_time, which is updated with the real
        value during _do_enable."""
        return self.cycle_time
    @Pyro4.oneway
    def soft_trigger(self):
        """Expose software triggering to a client.
        Deprecated, use trigger().
        Trigger an exposure in TRIG_SOFT mode.
        Log some debugging stats in other trigger modes."""
        if self._trigger == TRIG_SOFT:
            _logger.debug("Received soft trigger ...")
            _exp_start_seq(
                self.handle, self._buffer.ctypes.data_as(ctypes.c_void_p)
            )
        else:
            cstatus, cbytes, cframes = _exp_check_cont_status(self.handle)
            status, bytes = _exp_check_status(self.handle)
            _logger.debug(
                "Status checks\n"
                "check_cont: %s \t bytes: %d\tframes: %d\n"
                "check_status: %s \t bytes: %d\t",
                STATUS_STRINGS[cstatus.value],
                cbytes.value,
                cframes.value,
                STATUS_STRINGS[status.value],
                bytes.value,
            )
        return
    @property
    def trigger_mode(self) -> microscope.TriggerMode:
        return PV_MODE_TO_TRIGGER[self._trigger][1]
    @property
    def trigger_type(self) -> microscope.TriggerType:
        return PV_MODE_TO_TRIGGER[self._trigger][0]
    def set_trigger(
        self, ttype: microscope.TriggerType, tmode: microscope.TriggerMode
    ) -> None:
        """Select the PVCAM trigger mode matching (ttype, tmode)."""
        try:
            self._trigger = TRIGGER_TO_PV_MODE[(ttype, tmode)]
        except KeyError:
            raise microscope.UnsupportedFeatureError(
                "no PVCam mode for %s and %s" % (ttype, tmode)
            )
    def _do_trigger(self) -> None:
        # Start a single-frame sequence into the pre-allocated buffer.
        _exp_start_seq(
            self.handle, self._buffer.ctypes.data_as(ctypes.c_void_p)
        )
|
carandraug/microscope
|
microscope/cameras/pvcam.py
|
Python
|
gpl-3.0
| 59,214
|
import time
from flask import request
from mock import patch
from tests import BaseTestCase
from redash import models, settings
from redash.authentication import (api_key_load_user_from_request,
get_login_url, hmac_load_user_from_request,
sign)
from redash.authentication.google_oauth import (create_and_login_user,
verify_profile)
class TestApiKeyAuthentication(BaseTestCase):
    #
    # This is a bad way to write these tests, but the way Flask works doesn't make it easy to write them properly...
    #
    def setUp(self):
        super(TestApiKeyAuthentication, self).setUp()
        self.api_key = '10'
        self.query = self.factory.create_query(api_key=self.api_key)
        models.db.session.flush()
        self.query_url = '/{}/api/queries/{}'.format(self.factory.org.slug, self.query.id)
        self.queries_url = '/{}/api/queries'.format(self.factory.org.slug)
    def test_no_api_key(self):
        with self.app.test_client() as client:
            client.get(self.query_url)
            self.assertIsNone(api_key_load_user_from_request(request))
    def test_wrong_api_key(self):
        with self.app.test_client() as client:
            client.get(self.query_url, query_string={'api_key': 'whatever'})
            self.assertIsNone(api_key_load_user_from_request(request))
    def test_correct_api_key(self):
        with self.app.test_client() as client:
            client.get(self.query_url, query_string={'api_key': self.api_key})
            self.assertIsNotNone(api_key_load_user_from_request(request))
    def test_no_query_id(self):
        with self.app.test_client() as client:
            client.get(self.queries_url, query_string={'api_key': self.api_key})
            self.assertIsNone(api_key_load_user_from_request(request))
    def test_user_api_key(self):
        user = self.factory.create_user(api_key="user_key")
        models.db.session.flush()
        with self.app.test_client() as client:
            client.get(self.queries_url, query_string={'api_key': user.api_key})
            self.assertEqual(user.id, api_key_load_user_from_request(request).id)
    def test_api_key_header(self):
        auth_header = {'Authorization': "Key {}".format(self.api_key)}
        with self.app.test_client() as client:
            client.get(self.query_url, headers=auth_header)
            self.assertIsNotNone(api_key_load_user_from_request(request))
    def test_api_key_header_with_wrong_key(self):
        with self.app.test_client() as client:
            client.get(self.query_url, headers={'Authorization': "Key oops"})
            self.assertIsNone(api_key_load_user_from_request(request))
    def test_api_key_for_wrong_org(self):
        other_user = self.factory.create_admin(org=self.factory.create_org())
        auth_header = {'Authorization': "Key {}".format(other_user.api_key)}
        with self.app.test_client() as client:
            response = client.get(self.query_url, headers=auth_header)
            self.assertEqual(404, response.status_code)
class TestHMACAuthentication(BaseTestCase):
    #
    # This is a bad way to write these tests, but the way Flask works doesn't make it easy to write them properly...
    #
    def setUp(self):
        super(TestHMACAuthentication, self).setUp()
        self.api_key = '10'
        self.query = self.factory.create_query(api_key=self.api_key)
        models.db.session.flush()
        self.path = '/{}/api/queries/{}'.format(self.query.org.slug, self.query.id)
        self.expires = time.time() + 1800
    def signature(self, expires):
        # Sign the request path with the query's own API key.
        return sign(self.query.api_key, self.path, expires)
    def test_no_signature(self):
        with self.app.test_client() as client:
            client.get(self.path)
            self.assertIsNone(hmac_load_user_from_request(request))
    def test_wrong_signature(self):
        params = {'signature': 'whatever', 'expires': self.expires}
        with self.app.test_client() as client:
            client.get(self.path, query_string=params)
            self.assertIsNone(hmac_load_user_from_request(request))
    def test_correct_signature(self):
        params = {'signature': self.signature(self.expires), 'expires': self.expires}
        with self.app.test_client() as client:
            client.get(self.path, query_string=params)
            self.assertIsNotNone(hmac_load_user_from_request(request))
    def test_no_query_id(self):
        with self.app.test_client() as client:
            client.get('/{}/api/queries'.format(self.query.org.slug), query_string={'api_key': self.api_key})
            self.assertIsNone(hmac_load_user_from_request(request))
    def test_user_api_key(self):
        user = self.factory.create_user(api_key="user_key")
        path = '/api/queries/'
        models.db.session.flush()
        signature = sign(user.api_key, path, self.expires)
        params = {'signature': signature, 'expires': self.expires, 'user_id': user.id}
        with self.app.test_client() as client:
            client.get(path, query_string=params)
            self.assertEqual(user.id, hmac_load_user_from_request(request).id)
class TestCreateAndLoginUser(BaseTestCase):
    def test_logins_valid_user(self):
        """An existing user with a matching e-mail is simply logged in."""
        existing = self.factory.create_user(email='test@example.com')
        with patch('redash.authentication.google_oauth.login_user') as login_mock:
            create_and_login_user(self.factory.org, existing.name, existing.email)
            login_mock.assert_called_once_with(existing, remember=True)

    def test_creates_vaild_new_user(self):
        """An unknown e-mail results in a freshly created, logged-in user."""
        new_email = 'test@example.com'
        new_name = 'Test User'
        with patch('redash.authentication.google_oauth.login_user') as login_mock:
            create_and_login_user(self.factory.org, new_name, new_email)
            self.assertTrue(login_mock.called)
            created = models.User.query.filter(models.User.email == new_email).one()
            self.assertEqual(created.email, new_email)

    def test_updates_user_name(self):
        """Logging in with a new display name still matches on e-mail."""
        existing = self.factory.create_user(email='test@example.com')
        with patch('redash.authentication.google_oauth.login_user') as login_mock:
            create_and_login_user(self.factory.org, "New Name", existing.email)
            login_mock.assert_called_once_with(existing, remember=True)
class TestVerifyProfile(BaseTestCase):
    def test_no_domain_allowed_for_org(self):
        candidate = dict(email='arik@example.com')
        self.assertFalse(verify_profile(self.factory.org, candidate))

    def test_domain_not_in_org_domains_list(self):
        candidate = dict(email='arik@example.com')
        self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = ['example.org']
        self.assertFalse(verify_profile(self.factory.org, candidate))

    def test_domain_in_org_domains_list(self):
        candidate = dict(email='arik@example.com')
        # Single-entry list containing the domain.
        self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = ['example.com']
        self.assertTrue(verify_profile(self.factory.org, candidate))
        # The domain may appear anywhere in a longer list.
        self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = ['example.org', 'example.com']
        self.assertTrue(verify_profile(self.factory.org, candidate))

    def test_org_in_public_mode_accepts_any_domain(self):
        candidate = dict(email='arik@example.com')
        self.factory.org.settings[models.Organization.SETTING_IS_PUBLIC] = True
        self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = []
        self.assertTrue(verify_profile(self.factory.org, candidate))

    def test_user_not_in_domain_but_account_exists(self):
        candidate = dict(email='arik@example.com')
        self.factory.create_user(email='arik@example.com')
        self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = ['example.org']
        self.assertTrue(verify_profile(self.factory.org, candidate))
class TestGetLoginUrl(BaseTestCase):
    def test_when_multi_org_enabled_and_org_exists(self):
        slug = self.factory.org.slug
        with self.app.test_request_context('/{}/'.format(slug)):
            self.assertEqual('/{}/login'.format(slug), get_login_url(next=None))

    def test_when_multi_org_enabled_and_org_doesnt_exist(self):
        missing = '/{}_notexists/'.format(self.factory.org.slug)
        with self.app.test_request_context(missing):
            self.assertEqual('/', get_login_url(next=None))
|
EverlyWell/redash
|
tests/test_authentication.py
|
Python
|
bsd-2-clause
| 8,273
|
from io import BytesIO
import logging
logger = logging.getLogger(__name__)
class CorruptDataError(Exception):
    """Raised by LZWDecoder.feed() when a code refers past the next free table slot."""
    pass
class LZWDecoder:
def __init__(self, fp):
self.fp = fp
self.buff = 0
self.bpos = 8
self.nbits = 9
self.table = None
self.prevbuf = None
return
def readbits(self, bits):
v = 0
while 1:
# the number of remaining bits we can get from the current buffer.
r = 8-self.bpos
if bits <= r:
# |-----8-bits-----|
# |-bpos-|-bits-| |
# | |----r----|
v = (v << bits) | ((self.buff >> (r-bits)) & ((1 << bits)-1))
self.bpos += bits
break
else:
# |-----8-bits-----|
# |-bpos-|---bits----...
# | |----r----|
v = (v << r) | (self.buff & ((1 << r)-1))
bits -= r
x = self.fp.read(1)
if not x:
raise EOFError
self.buff = ord(x)
self.bpos = 0
return v
def feed(self, code):
x = b''
if code == 256:
self.table = [bytes((c,)) for c in range(256)] # 0-255
self.table.append(None) # 256
self.table.append(None) # 257
self.prevbuf = b''
self.nbits = 9
elif code == 257:
pass
elif not self.prevbuf:
x = self.prevbuf = self.table[code]
else:
if code < len(self.table):
x = self.table[code]
self.table.append(self.prevbuf+x[:1])
elif code == len(self.table):
self.table.append(self.prevbuf+self.prevbuf[:1])
x = self.table[code]
else:
raise CorruptDataError
table_length = len(self.table)
if table_length == 511:
self.nbits = 10
elif table_length == 1023:
self.nbits = 11
elif table_length == 2047:
self.nbits = 12
self.prevbuf = x
return x
def run(self):
while 1:
try:
code = self.readbits(self.nbits)
except EOFError:
break
try:
x = self.feed(code)
except CorruptDataError:
# just ignore corrupt data and stop yielding there
break
yield x
logger.debug('nbits=%d, code=%d, output=%r, table=%r'
% (self.nbits, code, x, self.table[258:]))
return
def lzwdecode(data):
    """Decompress an LZW-encoded byte string and return the raw bytes."""
    decoder = LZWDecoder(BytesIO(data))
    return b''.join(decoder.run())
|
goulu/pdfminer
|
pdfminer/lzw.py
|
Python
|
mit
| 2,810
|
#!/usr/bin/python3
import cherrypy, json, csv, re, time, datetime, uuid, os, sys,string, subprocess, arrow
import logging
from collections import OrderedDict
from database import *
# TODO: Remove this from model & keep in view..
from jinja2 import Environment, PackageLoader
# TODO: Security check fid variable
# TODO: Add list of 'registered devices' to config
# TODO: Swap apikeys if new marked is created with the same one..
class Model:
# Create a database object for us to use
def __init__(self):
dbstruct = self.database_structure()
self.db = Database(cherrypy.config['dbfile'], dbstruct, ignore='locals')
self.dbfields = self.db.keys
self.kwargs = {}
# Return a nicely formated dict of the db fields
def grab_dbfields(self):
return self.dbfields
    # Construct 'raw' posted data structure where fid=UniqueDirectoryName
    def database_structure(self):
        """Return the schema handed to Database().

        An OrderedDict of table name -> list of (column, SQL type) pairs.
        The trailing 'locals' entry is NOT a table (Database() is created
        with ignore='locals'); it documents the per-request scratch keys.
        """
        dbstruct = OrderedDict([
            # Markers/sensors shown on the map.
            ('nodes', [
                ('nid', 'INTEGER PRIMARY KEY'),
                ('apikey', 'TEXT unique'),
                ('created', 'INTEGER'),
                ('createdhuman', 'DATETIME DEFAULT CURRENT_TIMESTAMP'),
                ('updated', 'INTEGER'),
                ('title', 'TEXT'),
                ('csvfile','TEXT'),
                ('description', 'TEXT'),
                ('datatype','TEXT'),
                ('lat','REAL'),
                ('lon','REAL'),
                ('fuzzylatlon', 'TEXT'),
                ('tags','TEXT'),
                ('createdby','INTEGER'),
                ('submissiondata','JSON'),
                ('latest','JSON'),
                ('visible','INTEGER'),
            ]),
            # A place to store csvs
            ('csvs', [
                ('cid', 'INTEGER PRIMARY KEY'),
                ('nid', 'INTEGER'),
                ('created', 'INTEGER'),
                ('header', 'TEXT'),
                ('timestamp', 'INTEGER'),
                ('csv', 'TEXT')
            ]),
            # A place to store annotations
            ('annotations', [
                ('aid', 'INTEGER PRIMARY KEY'),
                ('nid', 'INTEGER'),
                ('uid', 'INTEGER'),
                ('timestamp', 'INTEGER'),
                ('text', 'TEXT'),
                ('created', 'INTEGER')
            ]),
            # This isn't created in the database, its just used for internal var storage
            # TODO: Pos get rid of this as its setup in the controller
            ('locals',{
                'info':{},
                'path':[],
                'body':'',
                'filestosave':[],
                'submitted':{},
                'errors':{},
                'success':{},
                'altresponse':''
            })
        ])
        return dbstruct
def grab_opensessions(self):
return len(cherrypy.config['session'])
# Parse POSTED data and determine what we need to do with it
def parse_submission(self, data):
# Grab the submitted data and convert it to JSON with all commas escaped to ,
jsondump = json.dumps(data['submitted'])
data['info']['submissiondata'] = jsondump.replace(',', ',')
# Check what/who submitted this data, parse it, then fill in our data structure
plugins=(AnnotationSubmission, UploadformSubmission, SpecGatewaySubmission, CitizenSenseKitSubmission)
for plugin in plugins:
obj = plugin()
parsedoutput = obj.checksubmission(self, data)
if parsedoutput is not False: break
# Save the number of open sessions
data['nsessions'] = self.grab_opensessions()
# Prepare for json response
if parsedoutput:
# TODO: Should get rid of this and just return the response from the
# modules... they should decide to pop or not!
data.pop('filestosave', None)
data.pop('submitted', None)
# Check if we return a custom repsponse
if data['altresponse'] is not '':
return data['altresponse']
# If not, the check for errors
elif len(parsedoutput['errors']) <= 0:
# All looks OK
# TODO: SHould move this to the modules...
data['success']['code'] = 'OK'
data['success']['msg'] = 'A new node has been created'
else:
# The submission hasn't been recognised
data.pop('filestosave', None)
data.pop('info', None)
data.pop("success", None)
data['errors']['form'] = 'Post structure not recognised'
return json.dumps(data)
# CREATE A NEW ANNOTATION
def create_annotation(self, nid, uid, timestamp, text):
# And construct the ordered dict ready for the database
created = int(time.time()) # ALTER TABLE annotations ADD COLUMN created INT;
newannotation = OrderedDict([
('fieldnames',['nid', 'uid', 'timestamp', 'text', 'created']),
('values',[[nid, uid, timestamp, text, created]])
])
aid = self.db.create('annotations', newannotation)
response = {}
if aid == None or aid == False:
response['code'] = 'KO'
response['msg'] = 'Could not create annotation. DB Error: {0}'.format(self.db.msg)
else:
response['code'] = 'OK'
response['msg'] = 'Annotation #{0} has been saved'.format(aid)
response['timestamp'] = timestamp;
response['aid'] = aid;
return response
# UPDATE AN EXISTING ANNOTATION
def update_annotation(self, aid, data):
resp = {'code':'KO', 'msg':'', 'aid':aid}
# Lets check we have valid variables
try:
text = data['annotation']
user = data['username']
passwd = data['password']
sessionid = data['sessionid']
except:
resp['msg'] = 'Error: Can\'t find one of:\n annotation OR username OR password OR sessionid'
return resp
# Then check if we have permission to update
user = self.validuser(user, passwd, sessionid)
if user:
uid = user['uid']
resp['sessionid'] = user['sessionid']
else:
resp['msg'] = 'This username/password combination has not been recognised. Or you may have been automatically logged out. Please try again.'
resp['sessionid'] = ''
return resp
# And check if this specific user can update this specific annotation
anno = self.view_annotation(aid)
if anno['uid'] is not user['uid'] and user['permissions'] is not 'admin':
resp['msg'] = 'Sorry. this user cannot update this annotation.'
return resp
# Then finaly make the database call
toupdate = {'text':text}
dbresp = self.db.update('annotations', 'aid', aid, toupdate)
if dbresp:
resp['code'] = 'OK'
resp['msg'] = 'Success! The annotation has been updated.'
else:
resp['code'] = 'KO'
resp['msg'] = 'Database Error: '.format(self.db.msg)
return resp
# DELETE AN ANNOTATION
def delete_annotation(self, aid, data):
resp = {'code':'KO','msg':''}
user = self.validuser(data['username'], data['password'], data['sessionid'] )
# Check if this user can delete this node
if user == False:
resp['msg'] = 'User not recognised. Try refreshing the page.'
candelete = False
elif user['permissions'] is 'admin':
candelete = True
# Check if the user owns the marker
else:
searchfor = {'uid':user['uid'], 'aid':aid}
intable = 'annotations'
returnfields = ['aid']
anno = self.db.searchfor(intable, returnfields, searchfor)
if anno is not None:
candelete = True
else:
response['msg'] = 'This annotation was created by another user. You do not have permission to delete it.'
return response
# Ok can we now delete please?
if candelete:
db = self.db.dbquery('DELETE FROM annotations WHERE aid={}'.format(int(aid)) )
if db is not False:
resp['code'] = 'OK'
resp['msg'] = 'Deleted annotation'
else:
resp['msg'] = 'Database Error:\n {}'.format(self.db.msg)
# Update the database
return resp
# View a single annotation
def view_annotation(self, aid):
fields = ['aid', 'uid']
searchfor = {'aid':aid}
intable = 'annotations'
returnfields = ['aid', 'uid']
row = self.db.searchfor(intable, returnfields, searchfor)
resp = {}
if row:
# TODO: This shoudl be moved into database.py
resp['aid'] = row[0]
resp['uid'] = row[1]
else:
resp = row
return resp
# CREATE A NEW NODE
def create_node(self, data):
# As we are creating a single node lets create seperate field and value lists
fieldlist = []
valuelist = []
for key in data['info']:
fieldlist.append(key)
valuelist.append(data['info'][key])
# And construct the ordered dict ready for the database
newnode = OrderedDict([
('fieldnames',fieldlist),
('values',[valuelist])
])
nid = self.db.create('nodes', newnode)
logging.info('Attempted to create new marker: {}'.format(self.db.msg))
if nid == None or False: data['errors']['dbcreatenode'] = 'Database could not create node'
else: data['info']['nid'] = nid
return data
# RETURN A LIST OF ALL NODES WITH TITLE AND GPS
def view_all(self, qry=''):
fields = ['nid', 'lat', 'lon', 'title', 'visible', 'datatype', 'latest', 'created', 'updated']
qry = ' WHERE visible=1 {}'.format(qry)
jsondisplay = self.db.readasjson('nodes', fields, [], qry)
if jsondisplay:
return jsondisplay
else:
return '{}'
# VIEW AN INDIVIDUAL NODE
# TODO: This is inefficiant - no need to convert in/out of json...
def view_node(self, nid):
fields = ['datatype', 'apikey', 'title', 'description', 'lat', 'lon',
'createdhuman', 'updated',
'latest', 'nid', 'createdby']
jsonstr = self.db.readasjson('nodes', fields, [int(nid)])
if jsonstr:
data = json.loads(jsonstr)
node = data[0]
# Now bring back some actual data!
searchfor = {'nid':nid}
intable = 'csvs'
returnfields = ['created', 'csv','header']
sql = 'ORDER BY created DESC LIMIT 5'
rows = self.db.searchfor(intable, returnfields, searchfor, sql, 'many')
node['data'] = json.dumps(rows)
return json.dumps(node)
else:
return '{}'
    # VIEW AN INDIVIDUAL as an HTML table
    def view_node_html(self, nid):
        """Render one node's data as an HTML table + graph via data.html.

        Pagination/timezone tweaks come from self.kwargs ('count', 'from',
        'timeadj'), which the controller is expected to have populated.
        """
        # TODO: Move view code out of model
        ENV = Environment(loader=PackageLoader('controllers', 'templates'))
        template = ENV.get_template('data.html')
        # Setup some base variables
        # NOTE(review): ENV is rebuilt here but never used again -- duplicate.
        ENV = Environment(loader=PackageLoader('controllers', 'templates'))
        fields = ['datatype', 'apikey', 'title', 'description', 'lat', 'lon',
                  'createdhuman', 'updated', 'latest', 'nid', 'createdby']
        timeadj = int(self.kwargs['timeadj']) if 'timeadj' in self.kwargs else 0
        timeadjcalc = (timeadj*60)*60 # Timestamp adjustment for local time
        count = int(self.kwargs['count']) if 'count' in self.kwargs else 3000
        if count > 12000: count = 12000
        countfrom = int(self.kwargs['from']) if 'from' in self.kwargs else 0
        if countfrom < 0: countfrom = 0
        # Now make the query
        #try:
        jsonstr = self.db.readasjson('nodes', fields, [int(nid)])
        if jsonstr:
            data = json.loads(jsonstr)
            node = data[0]
            graph = []
            # Speck data display: fixed column set, two graphed channels.
            if node['datatype'] == 'speck':
                keyarr = ['timestamp', 'raw', 'concentration', 'humidity']
                graph = {'humidity':'humidity','concentration':'particles'}
            # Frackbox display: columns come from the stored csv header.
            elif node['datatype'] == 'frackbox':
                graph = {' NOppb':'NOppb', ' O3ppb':'O3ppb',' NO2ppb':'NO2ppb',' PIDppm':'PIDppm'}
                keyarr = node['latest']['csvheader'].split(',')
            # Observation: no graph, title is decorated instead.
            else:
                node['title'] = 'Observation:<br /> {}'.format(node['title'])
                graph = {}
                keyarr = []
                if 'name' in node['latest']:
                    node['title'] = '{} [{}]'.format(node['title'], node['latest']['name'])
            header = '<h2>{}: Created {}</h2><p>{}</p><hr />'.format(node['title'], node['createdhuman'], node['description'])
            # Now bring back some actual data!
            searchfor = {'nid':nid}
            intable = 'csvs'
            returnfields = ['timestamp', 'csv']
            sql = 'ORDER BY timestamp DESC LIMIT {}, {}'.format(countfrom, count)
            rows = self.db.searchfor(intable, returnfields, searchfor, sql, 'many')
            # And grab a list of annotations: TODO: Think about limits i.e. sql = 'ORDER BY timestamp DESC LIMIT {}, {}'.format(countfrom, count)
            sql = 'ORDER BY timestamp DESC '
            annotations = self.db.searchfor('annotations', ['aid','timestamp','text', 'nid'], {'nid':nid}, sql, 'many')
            # Re-shape each annotation row, appending a human-readable date.
            i=0
            try:
                for ano in annotations:
                    timestamp = int(ano[1]+timeadjcalc)
                    mydate = datetime.datetime.fromtimestamp(timestamp).strftime('%d %b %Y %H:%M:%S ({}GMT)'.format(timeadj))
                    annotations[i] = (ano[0], ano[1], ano[2], mydate, ano[3])
                    i+=1
                annotationsjson = json.dumps(annotations)
            except:
                # Best-effort: fall back to an empty list on any bad row.
                annotationsjson = "[]"
            # And prep vars used to format the output
            table = '<table class="whitetable"><tr><th>'
            table += '</th><th>'.join(keyarr)+'</th></tr>\n\n\n'
            starttime = ''
            rowdatetime = ''
            # Make a record of the position of the keys
            graphpos = {}
            for item in graph:
                key = item
                mapname = graph[item].replace(' ', '')
                for position, findkey in enumerate(keyarr):
                    if findkey == key:
                        graphpos[key] = {'position':position,'color':"'#000'", 'data':[], 'name':"'{}'".format(mapname)}
            i = 0
            # Now loop through the data and generate data and json. The array is reversed to enable to graph to display
            for row in reversed(rows):
                vals = row[1].split(',')
                # Create a timestamp
                timestamp = int(vals[0]) #+timeadjcalc
                # NOTE(review): `time` here shadows the time module for the
                # rest of this method -- rename if the module is ever needed.
                time = arrow.get(timestamp)
                local = time.to('US/Central')
                rowdatetime = local.format('YYYY-MM-DD HH:mm:ss')
                vals[0] = rowdatetime
                if i == 0: starttime = rowdatetime
                # Prep the js
                for key in graph:
                    n = graphpos[key]['position']
                    val = vals[n]
                    # NOTE(review): `is not ''` is an identity comparison --
                    # should be `!= ''`.
                    if val.replace(' ', '') is not '':
                        graphpos[key]['data'].append( {'x':timestamp, 'y':val} )
                # Prep the HTML
                line = '<tr><td>'
                line += '</td><td>'.join(vals)
                line += '</td></tr>'
                table += line
                i += 1
            # Now prep the final output
            data = []
            for item in graphpos: data.append(graphpos[item])
            jsondata = json.dumps(data)
            jsdata = jsondata.replace('"', '') # Javscript formated data
            jsdata = jsdata.replace(' ', '')
            table += '</table>'
            # Pagination links for the previous/next page of rows.
            prevcount = countfrom-count
            nextlink = '<a class="prevnext" href="/api/viewhtml/{}/?count={}&from={}&timeadj={}">Next»</a>'.format(nid, count, countfrom+count, timeadj)
            if prevcount <= 0:
                prevcount=0
            prevlink = ''
            if countfrom > 0:
                prevlink = '<a class="prevnext" href="/api/viewhtml/{}/?count={}&from={}&timeadj={}">«Previous</a>'.format(nid, count, prevcount, timeadj)
            header += '<strong>{}</strong> | <strong>View</strong> {} Points <strong> From:</strong> {} <strong>To:</strong> {} | <strong>{}</strong>'.format(prevlink, count, rowdatetime, starttime, nextlink)
            templatevars = {'nid':nid,'table':table, 'header':header, 'jsdata':jsdata, 'timeadj':timeadj, 'annotationsjson':annotationsjson, 'annotations':annotations}
            return template.render(var=templatevars)
        else:
            return 'No data'
        #except Exception as e:
        #    return 'error: '+str(e)
# UPDATE SPECIFIED FIELDS OF A NODE
def update_node(self, nid, fieldsnvalues):
update = self.db.update('nodes', 'nid', int(nid), fieldsnvalues)
if update:return True
else: return False
# DELETE A NODE
def delete_node(self, response, nid, user, password):
candelete = False
# This username/password combination is valid
user = self.validuser(user, password)
if user == False:
response['errors']['failed'] = 'This username/password combination has not been recognised'
return response
# Check if this user can delete this node
if user['permissions'] is 'admin':
candelete = True
# Check if the user owns the marker
else:
searchfor = {'createdby':user['uid'], 'nid':nid}
intable = 'nodes'
returnfields = ['nid', 'createdby']
node = self.db.searchfor(intable, returnfields, searchfor)
if node is not None:
candelete = True
else:
response['errors']['failed'] = 'This marker was created by another user. You do not have permission to delete it.'
return response
# Ok can we now delete please?
if candelete:
# Update the database
table = 'nodes'
idname = 'nid'
idval= nid
fieldnvalues = {'visible':0}
if self.db.update(table, idname, idval, fieldnvalues):
response['success']['completed'] = 'Marker has been deleted'
else:
response['errors']['failed'] = 'Error: Failed to delete marker'
return response
    # MANAGE USER SESSIONS
    # TODO: Tidy up!! Very messy at the moment
    # TODO: Cleanup old session ids
    def validuser(self, uname='', pwd='', sid=''):
        """Authenticate by session id OR username/password.

        Returns {'uid','username','sessionid','permissions','msg'} on
        success, False otherwise.  Also expires idle sessions as a side
        effect.  Any unexpected exception is swallowed and returns False.
        """
        msg = "\n==== validuser() ======="
        loggedin = False
        try:
            # First check if there are old session id's that need to be deleted
            timeout = 60*60 # How many seconds to keep people logged in
            currentime = int(time.time())
            todelete = []
            for sessiondel in cherrypy.config['session']:
                loggedinuser = cherrypy.config['session'][sessiondel]['username']
                lastused = cherrypy.config['session'][sessiondel]['lastused']
                sesslen = currentime - lastused
                msg += "\n{} last logged in {} secs ago".format(loggedinuser, sesslen)
                if sesslen >= timeout:
                    msg += "\nWhich is more than {} secs ago so it has been deleted".format(timeout)
                    todelete.append(sessiondel)
            # Delete any old sessions
            with cherrypy.config['sessionlock']:
                for i in todelete:
                    del cherrypy.config['session'][i]
            # Check if a session id has been set and no password or username
            # NOTE(review): `pwd.strip != ''` is missing its call parentheses,
            # so it compares the bound method to '' and is ALWAYS True; the
            # comment suggests `pwd.strip() == ''` was intended -- confirm
            # before fixing, as clients may depend on the current behaviour.
            if sid.strip() != '' and uname.strip() == '' and pwd.strip != '':
                msg += "\nA session id has been sent to us with no pass or username: \n"+sid
                # We have a sesion id so check if its available
                if sid in cherrypy.config['session']:
                    msg += "\nAnd it exists in one of the saved sessions so lets keep logged in"
                    sessionid = sid
                    uid = cherrypy.config['session'][sid]['uid']
                    username = cherrypy.config['session'][sid]['username']
                    permissions = cherrypy.config['session'][sid]['permissions']
                    # NOTE(review): this refreshed timestamp is never written
                    # back to the session store, so idle timeout still counts
                    # from the original lastused value.
                    lastused = int(time.time())
                    loggedin = True
                else:
                    msg += "\nThis session ID has not been recognised!: \n "
            # Ok, no sessionid, so lets check if a username/password has been set
            elif len(uname.strip()) > 0 and len(pwd.strip()) > 0:
                msg += "\nNo session id But recieves password + username"
                if uname not in cherrypy.config['CONFIG']['users']:
                    msg += "\nUsername not recognised: "+uname
                else:
                    msg += "\nUsername has been recognised:"+uname
                    uid = cherrypy.config['CONFIG']['users'][uname]['uid']
                    username = uname
                    password = cherrypy.config['CONFIG']['users'][uname]['password']
                    permissions = cherrypy.config['CONFIG']['users'][uname]['permissions']
                    if pwd != password:
                        msg += "\nPassword not recognised: "+pwd
                    else:
                        msg += "\nPassword has been recognised:"+pwd
                        # All looks good so set the sessionid
                        sessionid = str(uuid.uuid1())+str(len(cherrypy.config['session']))
                        timestamp = int(time.time())
                        with cherrypy.config['sessionlock']:
                            cherrypy.config['session'][sessionid] = {}
                            cherrypy.config['session'][sessionid]['uid'] = uid
                            cherrypy.config['session'][sessionid]['username'] = username
                            cherrypy.config['session'][sessionid]['lastused'] = timestamp
                            cherrypy.config['session'][sessionid]['permissions'] = permissions
                        msg += "\nCreated a new session:\n uid:{} \n username:{} \n pass: {} \n id: {}".format(uid, uname, password, sessionid)
                        loggedin = True
            else:
                msg += '\n No session id, No username, No password'
            msg += '\n'+str(len(cherrypy.config['session']))+' sessions in list'
            logging.debug('Sessions Info:{}'.format(msg))
            logging.debug(cherrypy.config['session'] )
            if loggedin == True:
                return {'uid':uid, 'username':username, 'sessionid':sessionid, 'permissions':permissions, 'msg':msg}
            else:
                return False
        except Exception as e:
            msg += '\nError with user validation: '+str(e)
            # NOTE(review): format() has no '{}' placeholder, so msg is
            # dropped from this log line.
            logging.error("Error Exception in model.py: ".format(msg))
            return False
# MODEL UTILITIES
# Compare two lists and see if their contents match
def match_keys(self, expected, provided):
# List of field names we are expecting and specify
for key in provided:
if key not in expected:
return False
else:
state = True
if len(expected) is not len(provided):
state = False
return state
#==================================================================#
#==============PLUGINS TO HANDLE MULTIPLE TYPES OF DATA SUBMISSION==================#
#===================================================================================#
#========= THE ANNOTATION FORM ============================================#
class AnnotationSubmission:
    """Submission plugin: saves an annotation posted against a chart."""

    # [REQUIRED METHOD] Check if submission is recognised, if it is, return structured data
    def checksubmission(self, model, data):
        """Return False if the POST isn't an annotation; otherwise process it.

        On recognition, always sets data['altresponse'] to a JSON status
        payload and returns data.
        """
        self.data = data
        self.model = model
        response = {}
        response['msg'] = ''
        response['code'] = 'OK'
        # List of field names we are expecting
        expected = ['timecode','annotation', 'chartid', 'username', 'password', 'sessionid', 'update'] # username, password, sessionid
        submitted = data['submitted'].keys()
        if self.model.match_keys(expected, submitted) == False:
            return False
        # Now check if this submission has been made by a valid user
        username = data['submitted']['username']
        password = data['submitted']['password']
        sessionid = data['submitted']['sessionid']
        # Check if we are logged in and save the session id if we are
        self.user = self.model.validuser(username, password, sessionid)
        if self.user is False:
            response['code'] = 'KO'
            response['msg'] = 'This username/password combination has not been recognised. Or you may have been automatically logged out. Please try again.'
            data['sessionid'] = ''
        else:
            # uid is only defined on this branch; the save below is guarded
            # by response['code'] != 'KO', so it is never read undefined.
            uid = self.user['uid']
            response['sessionid'] = self.user['sessionid']
        # OK lets validate the data
        try:
            nid = int(data['submitted']['chartid'].replace('chart',''))
            timecode = int(data['submitted']['timecode'])
        except Exception as e:
            response['code'] = 'KO'
            response['msg'] += 'Invalid ChartID or timecode'
        annotation = data['submitted']['annotation'].strip()
        if annotation == '':
            response['code'] = 'KO'
            response['msg'] += 'Please fill in the annotation field'
        # Now attempt to save to the database
        if response['code'] != 'KO':
            dbresp = self.model.create_annotation(nid, uid, timecode, annotation)
            if dbresp['code'] == 'KO':
                response['code'] = 'KO'
                response['msg'] = 'DB Error: '+dbresp['msg']
            else:
                response['msg'] = dbresp['msg']
                response['aid'] = dbresp['aid']
                response['timestamp'] = dbresp['timestamp']
        data['altresponse'] = json.dumps(response)
        cherrypy.response.headers['Content-Type']= 'text/html'
        return data
#========= THE MAIN UPLOAD FORM ============================================#
class UploadformSubmission:
    """Submission plugin for the browser upload form.

    Validates the poster, normalises each field via the matching
    ``format_<field>`` method, then creates a map node.
    """

    # [REQUIRED METHOD] Check if submission is recognised, if it is, return structured data
    def checksubmission(self, model, data):
        self.data = data
        self.model = model
        # List of field names we are expecting
        expected = ['gpstype','title', 'description', 'gps', 'apikey', 'file', 'datatype', 'username', 'password', 'sessionid']
        # Defaults for the row we may insert into `nodes`.
        self.tosubmit = {
            'title':'',
            'description':'',
            'apikey':str(uuid.uuid1()),
            'fuzzylatlon':'',
            'created':int(time.time()),
            'updated':int(time.time()),
            'createdby':None,
            'datatype':'',
            'submissiondata':'{}',
            'visible':1
        }
        submitted = data['submitted'].keys()
        if self.model.match_keys(expected, submitted) == False:
            return False
        # Now check if this submission has been made by a valid user
        username = data['submitted']['username']
        password = data['submitted']['password']
        sessionid = data['submitted']['sessionid']
        # Check if we are logged in and save the session id if we are
        self.user = self.model.validuser(username, password, sessionid)
        if self.user is False:
            data['errors']['user'] = 'This username/password combination has not been recognised. Or you may have been automatically logged out.'
            data['sessionid'] = ''
            return data
        self.tosubmit['createdby'] = self.user['uid']
        data['username'] = self.user['username']
        data['sessionid'] = self.user['sessionid']
        # Now format each of the variables and save in 'data'
        # (dispatches to the format_<fieldname> methods below)
        for key in expected:
            var = getattr(self, "format_"+key)()
        self.tosubmit['submissiondata'] = json.dumps(data['submitted'])
        self.data['info'] = self.tosubmit
        # if there are no errors, create a new node
        if len(self.data['errors']) <=0:
            newdata = self.model.create_node(self.data)
        # Return the data
        return self.data

    def format_gps(self):
        # "lat,lon" string -> float pair; silently skipped when unparsable.
        latlon = self.data['submitted']['gps'].split(',')
        try:
            self.tosubmit['lat'] = float(latlon[0])
            self.tosubmit['lon'] = float(latlon[1])
        except:
            return

    def format_gpstype(self):
        # No normalisation needed for this field.
        return

    def format_sessionid(self):
        # Session id already handled in checksubmission().
        return

    def format_username(self):
        # Set the created by field
        # self.data['info']['createdby'] = session.userid
        # if cherrypy.config['users']['f'][0])
        return

    def format_password(self):
        # Password already consumed by validuser().
        return

    def format_file(self):
        # Check we have a file in the correct format
        #filename = self.data['submitted']['file']
        # Check we can save files of this type
        #if cherrypy.config['filemanager'].fileisoneof(filename, 'csv' ) is False:
        #    return False
        # Now save the filename
        #self.data['info']['csvfile'] = filename
        # And save the file
        # TODO: Add save chunks here....
        return

    def format_title(self):
        # Title is mandatory.
        title = self.data['submitted']['title'].strip()
        if title != '':
            self.tosubmit['title'] = self.data['submitted']['title']
        else:
            self.data['errors']['title'] = 'Title needs to be filled in'

    def format_description(self):
        # Description is only mandatory for plain observations.
        datatype = self.data['submitted']['datatype']
        description = self.data['submitted']['description']
        if datatype == 'observation' and len(description) < 1:
            self.data['errors']['description'] = 'Please fill in a description for this Observation'
        self.tosubmit['description'] = description

    def format_datatype(self):
        self.tosubmit['datatype'] = self.data['submitted']['datatype']

    def format_apikey(self):
        """Adopt the submitted device key; on a clash, rotate the old node's
        key if this user may edit it, otherwise record an error."""
        # Set some basic vars
        apikey = self.data['submitted']['apikey']
        if apikey.strip() != '':self.tosubmit['apikey'] = apikey
        # Does this API key already exist?
        datatype = self.data['submitted']['datatype']
        searchfor = {'apikey':apikey}
        intable = 'nodes'
        returnfields = ['nid', 'createdby', 'datatype']
        row = self.model.db.searchfor(intable, returnfields, searchfor)
        # This key doesn't exist so go ahead and use it
        if row is None:
            if apikey.strip() != '':self.tosubmit['apikey'] = apikey
        else:
            # Check if the current user can edit the node
            if self.user['uid'] == row[1] or self.user['permissions'] == 'admin':
                logging.debug("SWAPPING KEYS")
                # The key does exist, so lets replace the old value with a new one
                newkey = str(uuid.uuid1())
                # UPDATE NODE WHERE
                table = 'nodes'
                idname = 'nid'
                idval = row[0]
                fieldnvalues = {'apikey':newkey}
                self.model.db.update(table, idname, idval, fieldnvalues)
            else:
                device = self.data['submitted']['datatype']
                self.data['errors']['DeviceNameClash'] = 'Someone has already created a marker with this Device name'
                self.data['errors']['action'] = 'Please contact citizensense if you would like to create a new marker using this Device Name.'
        return
#================== SPEC GATEWAY APPLICATION ================================#
# Test a fack Speck Gateway post with the following curl command in the terminal:
# curl -H "Content-Type:application/json" http://localhost:8787/api/bodytrack/jupload?dev_nickname=Speck -d @etc/speck_data.json
# curl -i -u f:f http://localhost:8787/api/bodytrack/upload -d dev_nickname=test -d channel_names='["a","b"]' -d data='[[1332754616,1,10], [1332754617,-1,20]]'
#============================================================================#
class SpecGatewaySubmission:
    """Submission plugin for Speck Gateway / bodytrack-style sensor uploads."""

    # Check if submission is recognised, if it is, return structured data
    # TODO: This is looking a bit messy so tidy up & move try/except to individual calls
    def checksubmission(self, model, data):
        """Return False if not a Speck/bodytrack post; otherwise ingest the
        rows (file log + csvs table + node 'latest' summary) and set
        data['altresponse'] to a JSON status payload."""
        # List of field names we are expecting. Reject if they don't match
        expected = ['dev_nickname']
        expected2 = ["dev_nickname", "data", "channel_names"]
        submitted = data['submitted'].keys()
        # Check if we recognise this post
        msg =''
        if model.match_keys(expected, submitted) == True: # We got a Speck Gatweway
            try:
                if data['body'] == '{}':
                    data['altresponse'] = '{"result":"OK"}'
                    return data
                allspeckdata = json.loads(data['body'])
                speckdata = allspeckdata['data']
                speckchannels = allspeckdata['channel_names']
            except Exception as e:
                speckdata = []
                msg = ' But data could be invalid from Speck Gateway: '+str(e)
        elif model.match_keys(expected2, submitted) == True: # General body track post
            try:
                speckdata = json.loads(data['submitted']['data'])
                speckchannels = json.loads(data['submitted']['channel_names'])
            except Exception as e:
                logging.error('BAD speckdata JSON: '+str(e))
                # NOTE(review): format() has no '{}' placeholder here, so the
                # dumped payload is dropped from this log line.
                logging.error('Speckgateway checksubmission model.py. Exception:'.format(json.dumps(data)) )
                msg = 'But data could be invalid'
                speckdata = []
        else: # Not recognised so reject
            return False
        # If we have empy JSON all is ok but there is nothing to do
        if len(speckdata) == 0:
            data['altresponse'] = '{"result":"OK", "msg":"We have connected.'+msg+'"}'
            return data
        # Now lets see if we have a node to upload this data to
        apikey = data['submitted']['dev_nickname'] # The device name/id
        searchfor = {'apikey':apikey, 'visible':1}
        intable = 'nodes'
        returnfields = ['nid', 'createdby', 'datatype']
        node = model.db.searchfor(intable, returnfields, searchfor)
        logging.debug(model.db.msg)
        logging.debug('Searched for: '+apikey)
        logging.debug(node)
        if node is None:
            data['altresponse'] = '{"result":"KO", "message":"No marker to upload to"}'
            return data
        else:
            nid = node[0]
        # We have a marker so lets read the data
        # TODO: Create a log so we can track uploads
        # Create a csv header
        headerlist = ['timestamp']+speckchannels
        csvheader = ','.join(headerlist)
        csvheader = '#MARKER_ID:{}\n{}'.format(nid,csvheader)
        # Create the csv value strings
        csvstrlist = []
        csvvaluelist = []
        created = int(time.time())
        for x in speckdata:
            csvline = ','.join(map(str, x))
            csvstrlist.append(csvline)
            timestamp = x[0]
            #logging.debug('{}: {}'.format(timestamp, csvline))
            csvvaluelist.append([nid, timestamp, created, csvheader, csvline])
        csvstring = '\n'.join(csvstrlist)
        # Now try and save the data to file
        try:
            directory = 'data/csvs/'+str(nid)
            # Check we have a folder
            cherrypy.config['datalogger'].createDir(directory)
            # Now save the file
            myfile = '{0}/{1}.csv'.format(directory, nid)
            cherrypy.config['datalogger'].log(myfile, csvheader, csvstring)
        except Exception as e:
            data['altresponse'] = '{"result":"KO","message":"Unable to save speck data to file"}'
            return data
        # And create a 'latest' summary for the display in a nice key:value json string
        try:
            latest = OrderedDict()
            values = speckdata[-1]
            # NOTE(review): debug prints left in the request path -- consider
            # removing or demoting to logging.debug.
            print('THE SPECK DATA')
            print(json.dumps( speckdata ))
            i = 0
            for key in headerlist:
                #print(key)
                # Normalise the Speck channel names for display.
                if key == 'raw_particles' : key = 'raw'
                if key == 'particle_concentration' : key = 'concentration'
                latest[key] = values[i]
                i += 1
            lateststr = json.dumps(latest)
        except:
            data['altresponse'] = '{"result":"KO","message":"Unable to create summary"}'
            return data
        # Now save the latest data
        model.db.update('nodes', 'nid', nid, {'latest':lateststr, 'updated':int(time.time())})
        # And save a copy of the csv in the database (should seperate into seperate fields...)
        newcsvs = OrderedDict([
            ('fieldnames',['nid', 'timestamp', 'created', 'header', 'csv']),
            ('values', csvvaluelist)
        ])
        resp = model.db.create('csvs', newcsvs)
        logging.debug(newcsvs)
        data['altresponse'] = '{"result":"OK","message":"Upload successful!","payload":{"successful_records":"1","failed_records":"0"}}'
        return data
#================== Citizen Sense Kit submission ============================#
class CitizenSenseKitSubmission:
    """Receives data POSTed by a Citizen Sense Kit (Raspberry Pi based logger)."""
    # Check if submission is recognised, if it is, return structured data
    def checksubmission(self, model, data):
        """Validate a CSK POST and, if recognised, persist its readings.

        Returns ``False`` when the POST keys do not match a CSK submission
        (so other handlers can try it); otherwise returns ``data`` with
        ``data['altresponse']`` set to the JSON reply for the kit.

        :param model: data-access layer exposing ``match_keys`` and ``db``
        :param data: request context; ``data['submitted']`` holds POST fields
        """
        # Check if we recognise this post
        expected = ["serial", "name", "jsonkeys", "jsonvalues", "MAC"]
        submitted = data['submitted'].keys()
        msg =''
        if model.match_keys(expected, submitted) is not True: # Keys don't match: not a CSK submission
            return False
        # Check if this is a known MAC address
        if data['submitted']['MAC'] not in cherrypy.config['CONFIG']['MACS']:
            data['altresponse'] = '{"success":"KO", "errors":[{"MAC":"MAC not recognised"}]}'
            return data
        # Save the name to use later
        name = data['submitted']['name'] # The kit name
        # Check if there is a node to save this data to
        apikey = data['submitted']['serial'] # The raspberry pi serial number
        searchfor = {'apikey':apikey, 'visible':1}
        intable = 'nodes'
        returnfields = ['nid', 'createdby', 'datatype']
        node = model.db.searchfor(intable, returnfields, searchfor)
        if node is None:
            data['altresponse'] = '{"success":"KO", "errors":[{"serial":"No marker to submit to. Either no serial or not visible"}]}'
            return data
        else:
            nid = node[0]
        # OK lets parse the response
        try:
            # Create container for 'latest' data for display in a nice 'key:value' display
            latest = OrderedDict()
            # Strip JSON punctuation so the header is a plain comma-separated list.
            latest['csvheader'] = data['submitted']['jsonkeys'].replace('[', '')
            latest['csvheader'] = latest['csvheader'].replace(']', '')
            latest['csvheader'] = latest['csvheader'].replace('"', '')
            latest['name'] = data['submitted']['name']
            keys = json.loads(data['submitted']['jsonkeys'])
            for key in keys:
                latest[key] = ''
            newvalues = []
            # Fomat the data ready to save
            rows = json.loads(data['submitted']['jsonvalues'] )
            created = int(time.time())
            newvalues = []
            for row in rows:
                i = 0
                values = row.split(',')
                csvtimecode = values[0]
                newvalues.append([nid, created, row, csvtimecode])
                # Keep the newest non-empty value per channel for the summary.
                for key in keys:
                    if values[i] != '': latest[key] = values[i]
                    i += 1
            lateststr = json.dumps(latest)
        except Exception as e:
            logging.error('Failed to read POSTED json: '+str(e))
            # NOTE(review): this template contains literal JSON braces, so
            # str.format() will raise here instead of substituting {0} --
            # confirm; the braces probably need escaping or %-formatting.
            data['altresponse'] = '{"success":"KO", "errors":[{"json":"Posted values are not in a recognised json format: {0}"}]}'.format(str(e))
            return data
        logging.debug('===========Attempt to save CSK=======')
        logging.debug(json.dumps(latest))
        # Now update the node and save the 'latest' data
        success = model.db.update('nodes', 'nid', nid, {'latest':lateststr, 'updated':int(time.time())})
        if success is not True:
            logging.error('Failed: To save \'latest\' data in node')
            data['altresponse'] = '{"success":"KO", "errors":[{"database":"Unable to update node "}]}'
            return data
        else:
            logging.debug('Sucess: Saved latest in node:')
        # And now create a new csv record
        newcsvs = OrderedDict([
            ('fieldnames',['nid', 'created', 'csv', 'timestamp']),
            ('values', newvalues)
        ])
        resp = model.db.create('csvs', newcsvs)
        # NOTE(review): db.create apparently returns None on success and an
        # error value on failure -- verify against the db layer.
        if resp is not None:
            logging.error('Failed: To create new csvDBrecord')
            data['altresponse'] = '{"success":"KO", "errors":[{"database":"Unable to create new csv records in database"}]}'
            return data
        else:
            logging.debug('Sucess: Created new csvDBrecord')
        # Now try and save the data to file
        csvheader = ','.join(keys)
        csvvalues = '\n'.join(rows)
        csvheader = '#MARKER_ID:{}\n{}'.format(nid,csvheader)
        try:
            directory = 'data/csvs/'+str(nid)
            # Check we have a folder
            cherrypy.config['datalogger'].createDir(directory)
            # Now save the file
            myfile = '{}/{}.csv'.format(directory, nid)
            cherrypy.config['datalogger'].log(myfile, csvheader, csvvalues)
            logging.debug('Save to file: Success')
        except Exception as e:
            # File write failed, but the database copy was saved above.
            logging.error('Couldn\'t save data to file')
            logging.error(data)
            data['altresponse'] = '{"success":[{"OK":"Data saved to database but not file"}], "errors":[]}'
            return data
        # All done we have complete sucess
        logging.debug("Sucess, we have save new data to DB and file for marker: "+str(nid))
        data['altresponse'] = '{"success":"Saved submitted data", "errors":[]}'
        return data
|
citizensense/pasture
|
model.py
|
Python
|
gpl-2.0
| 43,115
|
#!/usr/bin/env python
from setuptools import setup, os
def read(fname):
    """Return the contents of *fname*, resolved relative to this file.

    Uses a context manager so the file handle is closed promptly
    (the original left the handle open until garbage collection).
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as handle:
        return handle.read()
setup(
name='PyBabel-hbs',
version='0.4.0',
description='PyBabel handlebars gettext strings extractor',
author='Anton Bykov aka Tigra San',
author_email='tigrawap@gmail.com',
long_description=read('README.rst'),
packages=['pybabel_hbs'],
url="https://github.com/sbrieuc/pybabel-hbs",
install_requires=[
'babel',
'pexpect-u'
],
include_package_data=True,
entry_points = """
[babel.extractors]
hbs = pybabel_hbs.extractor:extract_hbs
""",
)
|
sbrieuc/pybabel-hbs
|
setup.py
|
Python
|
mit
| 674
|
"""
ORCA grid specific methods
"""
from collections import defaultdict
import numpy as np
def north_fold(longitudes, latitudes):
    """Northern hemisphere tri-polar fold

    Grid points on the ORCA north fold appear twice with identical
    (longitude, latitude) coordinates; each duplicated index is paired
    with its partner.

    :param longitudes: 1D array representing longitudes on north fold
    :param latitudes: 1D array representing latitudes on north fold
    :returns: bijective dict mapping each folded index to its twin
    """
    # Match indices to coordinates
    coordinates = defaultdict(list)
    for ikey, key in enumerate(zip(longitudes, latitudes)):
        coordinates[key].append(ikey)
    # Create bijective map between north fold indices.
    # dict.values() replaces the Python 2-only itervalues(), so the
    # function now works on both Python 2 and Python 3.
    result = {}
    for indices in coordinates.values():
        if len(indices) == 2:
            j1, j2 = indices
            result[j1] = j2
            result[j2] = j1
    return result
def remove_halo(field):
    """Removes extra row and columns used by NEMO models

    :param field: 2D array dimensioned (x, y)
    :returns: the field with first/last rows and the last column dropped,
        or an empty array when there are two rows or fewer
    """
    array = field if isinstance(field, np.ndarray) else np.asarray(field)
    if len(array) <= 2:
        # Nothing left once the halo rows are stripped.
        return np.array([])
    return array[1:-1, :-1]
|
met-office-ocean/obsoper
|
obsoper/orca.py
|
Python
|
bsd-3-clause
| 1,075
|
''' Golix: A python library for Golix object manipulation.
Golix: a python library for Golix object manipulation. Create and
read encrypted Golix file objects.
Notes to self
-----
BUILD WITH:
python setup.py sdist
python setup.py bdist_wheel
DISTRIBUTE WITH:
twine upload dist/<version>*
'''
import sys
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
# Directory containing this setup.py; used for path-relative resources.
here = path.abspath(path.dirname(__file__))

long_description = '''Golix is a python library for Golix object manipulation.
Create, read, validate, and otherwise manipulate cryptographic Golix
objects without worrying about the bits and bytes.'''

# # If we're installing, don't bother building the long_description
# # Ewwww, this is dirty.
# if sys.argv[1] == 'sdist':
#     with open('README.md', 'r') as f:
#         s_readme = f.read()
#     # Get the long description from the README file
#     import pypandoc
#     long_description = pypandoc.convert(s_readme, 'rst', format='md')
#     with open('README.rst', 'w') as f:
#         f.write(long_description)

setup(
    name='golix',
    # Versions should comply with PEP440. For a discussion on single-sourcing
    # the version across setup.py and the project code, see
    # https://packaging.python.org/en/latest/single_source_version.html
    version='0.1.6',
    description='A python library for Golix object manipulation.',
    long_description=long_description,
    # The project's main homepage.
    url='https://github.com/Muterra/py_golix',
    # Author details
    author='Muterra, Inc',
    author_email='badg@muterra.io',
    # Choose your license
    license='LGPL',
    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        # How mature is this project? Common values are
        #   3 - Alpha
        #   4 - Beta
        #   5 - Production/Stable
        'Development Status :: 3 - Alpha',
        # Indicate who your project is intended for
        'Intended Audience :: Developers',
        'Topic :: Software Development',
        'Topic :: Utilities',
        # Pick your license as you wish (should match "license" above)
        'License :: OSI Approved :: GNU Lesser General Public License v2 or ' +
        'later (LGPLv2+)',
        # Specify the Python versions you support here. In particular, ensure
        # that you indicate whether you support Python 2, Python 3 or both.
        'Programming Language :: Python :: 3.5',
    ],
    # What does your project relate to?
    keywords='golix, encryption, security, privacy, private, identity, ' +
             'sharing',
    # You can just specify the packages manually here if your project is
    # simple. Or you can use find_packages().
    packages=find_packages(exclude=['contrib', 'docs', 'tests']),
    # Alternatively, if you want to distribute just a my_module.py, uncomment
    # this:
    #   py_modules=["my_module"],
    # List run-time dependencies here. These will be installed by pip when
    # your project is installed. For an analysis of "install_requires" vs pip's
    # requirements files see:
    # https://packaging.python.org/en/latest/requirements.html
    install_requires=[],
    # List additional groups of dependencies here (e.g. development
    # dependencies). You can install these using the following syntax,
    # for example:
    # $ pip install -e .[dev,test]
    # The 'full' extra pulls in the crypto backends needed for real use.
    extras_require={
        'full': ['donna25519>=0.1.1',
                 'cryptography>=1.6',
                 'smartyparse>=0.1.3']
    },
    # If there are data files included in your packages that need to be
    # installed, specify them here. If using Python 2.6 or less, then these
    # have to be included in MANIFEST.in as well.
    package_data={
    },
)
|
Muterra/py_muse
|
setup.py
|
Python
|
lgpl-2.1
| 3,836
|
from subprocess import call
from os import path
import hitchpostgres
import hitchselenium
import hitchpython
import hitchserve
import hitchredis
import hitchtest
import hitchsmtp
# Get directory above this file
PROJECT_DIRECTORY = path.abspath(path.join(path.dirname(__file__), '..'))
class ExecutionEngine(hitchtest.ExecutionEngine):
    """Engine for orchestating and interacting with the app."""

    def set_up(self):
        """Ensure virtualenv present, then run all services."""
        python_package = hitchpython.PythonPackage(
            python_version=self.settings['python_version']
        )
        python_package.build()
        # Install project requirements into the hitch-managed virtualenv.
        call([
            python_package.pip, "install", "-r",
            path.join(PROJECT_DIRECTORY, "requirements/local.txt")
        ])
        postgres_package = hitchpostgres.PostgresPackage()
        postgres_package.build()
        redis_package = hitchredis.RedisPackage()
        redis_package.build()
        # Bundle of services started/stopped together for each test run.
        self.services = hitchserve.ServiceBundle(
            project_directory=PROJECT_DIRECTORY,
            startup_timeout=float(self.settings["startup_timeout"]),
            shutdown_timeout=float(self.settings["shutdown_timeout"]),
        )
        postgres_user = hitchpostgres.PostgresUser("shorl", "password")
        self.services['Postgres'] = hitchpostgres.PostgresService(
            postgres_package=postgres_package,
            users=[postgres_user, ],
            databases=[hitchpostgres.PostgresDatabase("shorl", postgres_user), ]
        )
        # Local SMTP sink so outgoing mail can be asserted on.
        self.services['HitchSMTP'] = hitchsmtp.HitchSMTPService(port=1025)
        self.services['Django'] = hitchpython.DjangoService(
            python=python_package.python,
            port=8000,
            settings="config.settings.local",
            needs=[self.services['Postgres'], ],
            env_vars=self.settings['environment_variables'],
        )
        self.services['Redis'] = hitchredis.RedisService(
            redis_package=redis_package,
            port=16379,
        )
        self.services['Firefox'] = hitchselenium.SeleniumService(
            xvfb=self.settings.get("xvfb", False),
            no_libfaketime=True,
        )
        # import hitchcron
        # self.services['Cron'] = hitchcron.CronService(
        #     run=self.services['Django'].manage("trigger").command,
        #     every=1,
        #     needs=[ self.services['Django'], ],
        # )
        self.services.startup(interactive=False)
        # Docs : https://hitchtest.readthedocs.org/en/latest/plugins/hitchselenium.html
        self.driver = self.services['Firefox'].driver
        self.webapp = hitchselenium.SeleniumStepLibrary(
            selenium_webdriver=self.driver,
            wait_for_timeout=5,
        )
        # Add selenium steps
        self.click = self.webapp.click
        self.wait_to_appear = self.webapp.wait_to_appear
        self.wait_to_contain = self.webapp.wait_to_contain
        self.wait_for_any_to_contain = self.webapp.wait_for_any_to_contain
        self.click_and_dont_wait_for_page_load = self.webapp.click_and_dont_wait_for_page_load
        # Configure selenium driver
        # NOTE(review): selenium's set_window_size signature is
        # (width, height) but 'height' is passed first here -- confirm
        # whether the settings values are deliberately transposed.
        self.driver.set_window_size(self.settings['window_size']['height'], self.settings['window_size']['width'])
        self.driver.set_window_position(0, 0)
        self.driver.implicitly_wait(2.0)
        self.driver.accept_next_alert = True

    def pause(self, message=None):
        """Stop. IPython time."""
        if hasattr(self, 'services'):
            self.services.start_interactive_mode()
        self.ipython(message)
        if hasattr(self, 'services'):
            self.services.stop_interactive_mode()

    def load_website(self):
        """Navigate to website in Firefox."""
        self.driver.get(self.services['Django'].url())

    def fill_form(self, **kwargs):
        """Fill in a form with id=value."""
        for element, text in kwargs.items():
            self.driver.find_element_by_id(element).send_keys(text)

    def confirm_emails_sent(self, number):
        """Count number of emails sent by app."""
        assert len(self.services['HitchSMTP'].logs.json()) == int(number)

    def click_on_link_in_last_email(self, which=1):
        """Click on the nth link in the last email sent."""
        self.driver.get(
            self.services['HitchSMTP'].logs.json()[-1]['links'][which - 1]
        )

    def wait_for_email(self, containing=None):
        """Wait for, and return email."""
        self.services['HitchSMTP'].logs.out.tail.until_json(
            lambda email: containing in email['payload'] or containing in email['subject'],
            timeout=25,
            lines_back=1,
        )

    def time_travel(self, days=""):
        """Make all services think that time has skipped forward."""
        self.services.time_travel(days=int(days))

    def on_failure(self):
        """Stop and IPython."""
        if not self.settings['quiet']:
            if self.settings.get("pause_on_failure", False):
                self.pause(message=self.stacktrace.to_template())

    def on_success(self):
        """Pause on success if enabled."""
        if self.settings.get("pause_on_success", False):
            self.pause(message="SUCCESS")

    def tear_down(self):
        """Shut down services required to run your test."""
        if hasattr(self, 'services'):
            self.services.shutdown()
|
aaronmyatt/mv_shorl
|
tests/engine.py
|
Python
|
bsd-3-clause
| 5,403
|
#Copyright 2018 Tim Wentlau.
#Distributed under the MIT License. See LICENSE in root of project.
import os
from kervi.hal.one_wire import OneWire
BASE_DIR = '/sys/bus/w1/devices/'
class OneWireDeviceDriver(OneWire):
    """1-Wire device driver backed by the Linux w1 sysfs interface."""

    def __init__(self, address):
        """:param address: the device's 1-wire bus address (sysfs directory name)"""
        OneWire.__init__(self, address)
        # Make sure the kernel 1-wire GPIO and thermal-sensor modules are loaded.
        os.system('modprobe w1-gpio')
        os.system('modprobe w1-therm')

    def read_raw(self):
        """Return the raw text lines of the device's w1_slave sysfs file.

        The context manager guarantees the handle is closed even if
        readlines() raises, fixing the open/readlines/close leak on error.
        """
        with open(BASE_DIR + self.address + "/w1_slave") as tempfile:
            return tempfile.readlines()
|
kervi/kervi
|
ukervi/ukervi/platforms/upython/one_wire.py
|
Python
|
mit
| 534
|
# coding: UTF-8
"""
Copyright (c) 2009 Marian Tietz
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
"""
import gtk
import glib
import gobject
import dbus
import logging
from gettext import gettext as _
import traceback
from .. import config
from ..com import parse_from, sushi
from .. import gui
from ..helper.dcc import s_incoming
# Gtk builder widget tree for the DCC dialog; populated lazily by setup()
# and reset to None when the dialog is dismissed.
widgets = None
# Column indices of the transfer list store shown in the dialog.
(COL_STATUS,
 COL_ID,
 COL_SERVER,
 COL_PARTNER,
 COL_FILE,
 COL_SIZE,
 COL_PROGRESS,
 COL_SPEED) = range(8)
class DCCWatcher(object):
    """Polls maki (sushi) once a second and mirrors DCC sends in the list store."""

    def __init__(self):
        # Refresh the transfer list every 1000 ms.
        self.timer_id = glib.timeout_add(1000, self._refresh_sends)
        self._init_cache()

    def stop(self):
        """ stop watching for dcc sends periodically """
        gobject.source_remove(self.timer_id)

    def refresh(self):
        """ manually refresh the list by calling this method """
        self._refresh_sends()

    def _init_cache(self):
        # IDs seen on the previous poll, and a map of transfer id -> store row.
        self.last_sends = []
        self.row_id_map = {}

    def _refresh_sends(self):
        """Fetch current sends from maki; add, update and remove rows to match."""
        sends = sushi.dcc_sends()
        # `is None` replaces `== None` -- same result, correct idiom.
        if (sends is None and not sushi.connected) or len(sends) == 0:
            return
        view = widgets.get_object("transferView")
        store = view.get_model()
        # all ids
        act_sends = sends[0]
        to_remove = set(self.last_sends) - set(act_sends)
        to_update = set(act_sends) - to_remove
        for i in range(len(sends[0])):
            id, server, sender, filename, size, progress, speed, status = \
                [sends[n][i] for n in range(len(sends))]
            if id in to_update:
                # `in` replaces dict.has_key(), which was removed in Python 3.
                if id in self.row_id_map:
                    # update existing entry
                    iter = self.row_id_map[id].iter
                    store.set(iter,
                        COL_STATUS, status,
                        COL_SIZE, size,
                        COL_PROGRESS, get_progress(progress, size),
                        COL_SPEED, speed)
                else:
                    # add new entry
                    if not config.get_bool("dcc", "show_ident_in_dialog"):
                        sender = parse_from(sender)[0]
                    iter = store.append(row = (
                        status, id, server,
                        sender, filename,
                        size, get_progress(progress, size),
                        speed))
                    # Keep a live row reference so later updates avoid a search.
                    self.row_id_map[id] = store[store.get_path(iter)]
        for id in to_remove:
            if id in self.row_id_map:
                store.remove(self.row_id_map[id].iter)
        self.last_sends = act_sends
        return True
def get_progress(p, s):
    """Return progress *p* of total size *s* as a truncated integer percentage."""
    fraction = float(p) / s
    return int(fraction * 100)
def cancel_transfer(transferID, watcher):
    """Ask maki to remove the given DCC send, then redraw the watcher's list."""
    sushi.dcc_send_remove(transferID)
    watcher.refresh()
def get_selected_transfer_id():
    """Return the dbus.UInt64 id of the transfer selected in the view.

    Returns None when no row is selected or the cell cannot be converted.
    """
    view = widgets.get_object("transferView")
    store = view.get_model()
    cursor = view.get_cursor()
    try:
        id = dbus.UInt64(store[cursor[0]][COL_ID])
    except Exception:
        # `except Exception` replaces the bare `except:` so that
        # KeyboardInterrupt/SystemExit are no longer swallowed here.
        return None
    else:
        return id
def dialog_response_cb(dialog, id, watcher):
    """Handle the DCC dialog's "response" signal.

    Response id 333 means "remove the selected transfer" (after a
    confirmation dialog); any other response closes the dialog, stops
    the watcher and drops the cached widget tree.
    """
    if id == 333: # FIXME: replace this with a meaningful ID
        # FIXME:: or connect to the button directly
        # remove was clicked
        def ask_are_you_sure():
            # ask if the user is sure about removing the transfer
            def dialog_reponse_cb(dialog, id, transferID):
                if id == gtk.RESPONSE_YES:
                    # yes, remove it!
                    cancel_transfer(transferID, watcher)
                dialog.destroy()
            transferID = get_selected_transfer_id()
            if None == transferID:
                # Nothing selected: tell the user instead of failing silently.
                gui.mgmt.show_error_dialog(
                    title=_("No transfer selected!"),
                    message=_("You must select a transfer to remove it."))
            else:
                d = gui.builder.question_dialog(
                    title = _("Remove file transfer?"),
                    message = _("Are you sure you want to remove the "
                        "file transfer %(id)d?" % {
                        "id": transferID }))
                d.connect("response", dialog_reponse_cb, transferID)
                d.show()
        ask_are_you_sure()
    else:
        # Dialog dismissed: stop polling and drop the widget tree so that
        # setup() rebuilds it the next time the dialog is opened.
        global widgets
        watcher.stop()
        dialog.destroy()
        widgets = None
def run():
    """Show the DCC dialog and start watching; no-op if it is already visible."""
    dialog = widgets.get_object("DCCDialog")
    if dialog.get_property("visible"):
        return
    watcher = DCCWatcher()
    main_window = gui.mgmt.widgets.get_object("main_window")
    dialog.set_transient_for(main_window)
    # The watcher is stopped by dialog_response_cb when the dialog closes.
    dialog.connect("response", dialog_response_cb, watcher)
    dialog.show()
def setup():
    """Load the DCC dialog widget tree once and wire up its cell renderer."""
    global widgets
    if widgets != None:
        return
    widgets = gui.builder.load_dialog("dcc")
    # add direction icon column
    def type_symbol_render_cb(column, renderer, model, iter):
        # Show a down arrow for incoming transfers, an up arrow for outgoing.
        status = model.get(iter, COL_STATUS)
        if status:
            if status[0] & s_incoming:
                # incoming
                renderer.set_property("stock-id", gtk.STOCK_GO_DOWN)
            else:
                # outgoing
                renderer.set_property("stock-id", gtk.STOCK_GO_UP)
    widgets.get_object("statusColumn").set_cell_data_func(
        widgets.get_object("statusRenderer"),
        type_symbol_render_cb)
|
sushi-irc/tekka
|
tekka/dialogs/dcc.py
|
Python
|
bsd-2-clause
| 5,531
|
# -*- coding: utf-8 -*-
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
# © 2015 Grupo ESOC Ingeniería de Servicios, S.L.U.
# © 2015 Antiun Ingenieria S.L. - Antonio Espinosa
"""These tests try to mimic the behavior of the UI form.
The form operates in onchange mode, with its limitations.
"""
from odoo.tests.common import TransactionCase
class OnChangeCase(TransactionCase):
    """Base case: partners are handled in onchange mode, as the UI form does."""
    # Subclasses toggle this to exercise company vs. individual behaviour.
    is_company = False

    def setUp(self):
        super(OnChangeCase, self).setUp()
        # Pin the name order so the assertions below are deterministic.
        self.env['ir.config_parameter'].set_param(
            'partner_names_order', 'last_first_comma')

    def new_partner(self):
        """Create an empty partner. Ensure it is (or not) a company."""
        new = self.env["res.partner"].new()
        new.is_company = self.is_company
        return new
class PartnerCompanyCase(OnChangeCase):
    """Onchange behaviour for partners flagged as companies."""
    is_company = True

    def tearDown(self):
        """Companies never have ``firstname`` nor ``lastname2``."""
        super(PartnerCompanyCase, self).tearDown()
        self.assertEqual(self.partner.firstname, False)
        self.assertEqual(self.partner.lastname2, False)

    def set_name(self, value):
        """Assign ``name`` and fire the same onchange the web form would."""
        self.partner.name = value
        # It triggers onchange
        self.partner._onchange_name()
        # Ensure it's properly set
        self.assertEqual(self.partner.name, value)

    def test_create_from_form(self):
        """A user creates a company from the form."""
        name = u"Sôme company"
        with self.env.do_in_onchange():
            # User presses ``new``
            self.partner = self.new_partner()
            # User changes fields
            self.set_name(name)
            # For a company the whole name lands in ``lastname``.
            self.assertEqual(self.partner.lastname, name)

    def test_empty_name_and_subnames(self):
        """If the user empties ``name``, subnames must be ``False``.

        Otherwise, the ``required`` attr will not work as expected.
        """
        with self.env.do_in_onchange():
            # User presses ``new``
            self.partner = self.new_partner()
            # User changes fields
            self.set_name(u"Fóo")
            self.set_name(u"")
            self.assertEqual(self.partner.lastname, False)
class PartnerContactCase(OnChangeCase):
    """Onchange behaviour for individual (non-company) partners.

    Covers every combination of firstname/lastname/lastname2 and checks the
    composed ``name`` under the ``last_first_comma`` order set in setUp().
    """

    def set_field(self, field, value):
        """Set one field and fire the onchanges the web form would trigger."""
        # Changes the field
        setattr(self.partner, field, value)
        if field in ("firstname", "lastname", "lastname2"):
            # Trigger onchanges
            self.partner._onchange_subnames()
            self.partner._onchange_name()
        # Check it's set OK
        self.assertEqual(getattr(self.partner, field), value)

    def test_create_from_form_empty(self):
        """A user creates a contact from the form.

        All subfields must be false, or the ``required`` attr will not work as
        expected.
        """
        with self.env.do_in_onchange():
            # User presses ``new``
            self.partner = self.new_partner()
            # Odoo tries to compute the name
            self.partner._compute_name()
            # This is then triggered
            self.partner._onchange_name()
            # Subnames must start as False to make the UI work fine
            self.assertEqual(self.partner.firstname, False)
            self.assertEqual(self.partner.lastname, False)
            self.assertEqual(self.partner.lastname2, False)
            # ``name`` cannot be False, or upstream Odoo will fail
            self.assertEqual(self.partner.name, u"")

    def test_create_from_form_only_firstname(self):
        """A user creates a contact with only the firstname from the form."""
        firstname = u"Fïrst"
        with self.env.do_in_onchange():
            # User presses ``new``
            self.partner = self.new_partner()
            # User changes fields
            self.set_field("firstname", firstname)
            self.assertEqual(self.partner.lastname, False)
            self.assertEqual(self.partner.lastname2, False)
            self.assertEqual(self.partner.name, firstname)

    def test_create_from_form_only_lastname(self):
        """A user creates a contact with only the lastname from the form."""
        lastname = u"Läst"
        with self.env.do_in_onchange():
            # User presses ``new``
            self.partner = self.new_partner()
            # User changes fields
            self.set_field("lastname", lastname)
            self.assertEqual(self.partner.firstname, False)
            self.assertEqual(self.partner.lastname2, False)
            self.assertEqual(self.partner.name, lastname)

    def test_create_from_form_only_lastname2(self):
        """A user creates a contact with only the lastname2 from the form."""
        lastname2 = u"Läst2"
        with self.env.do_in_onchange():
            # User presses ``new``
            self.partner = self.new_partner()
            # User changes fields
            self.set_field("lastname2", lastname2)
            self.assertEqual(self.partner.firstname, False)
            self.assertEqual(self.partner.lastname, False)
            self.assertEqual(self.partner.name, lastname2)

    def test_create_from_without_firstname(self):
        """A user creates a contact without firstname from the form."""
        lastname = u"Läst"
        lastname2 = u"Läst2"
        with self.env.do_in_onchange():
            # User presses ``new``
            self.partner = self.new_partner()
            # User changes fields
            self.set_field("lastname", lastname)
            self.set_field("lastname2", lastname2)
            self.assertEqual(self.partner.firstname, False)
            # No firstname: the two last names join with a space, no comma.
            self.assertEqual(
                self.partner.name,
                u"%s %s" % (lastname, lastname2))

    def test_create_from_without_lastname(self):
        """A user creates a contact without lastname from the form."""
        firstname = u"Fïrst"
        lastname2 = u"Läst2"
        with self.env.do_in_onchange():
            # User presses ``new``
            self.partner = self.new_partner()
            # User changes fields
            self.set_field("firstname", firstname)
            self.set_field("lastname2", lastname2)
            self.assertEqual(self.partner.lastname, False)
            self.assertEqual(
                self.partner.name,
                u"%s, %s" % (lastname2, firstname))

    def test_create_from_without_lastname2(self):
        """A user creates a contact without lastname2 from the form."""
        firstname = u"Fïrst"
        lastname = u"Läst"
        with self.env.do_in_onchange():
            # User presses ``new``
            self.partner = self.new_partner()
            # User changes fields
            self.set_field("firstname", firstname)
            self.set_field("lastname", lastname)
            self.assertEqual(self.partner.lastname2, False)
            self.assertEqual(
                self.partner.name,
                u"%s, %s" % (lastname, firstname))

    def test_create_from_form_all(self):
        """A user creates a contact with all names from the form."""
        firstname = u"Fïrst"
        lastname = u"Läst"
        lastname2 = u"Läst2"
        with self.env.do_in_onchange():
            # User presses ``new``
            self.partner = self.new_partner()
            # User changes fields
            self.set_field("firstname", firstname)
            self.set_field("lastname", lastname)
            self.set_field("lastname2", lastname2)
            self.assertEqual(
                self.partner.name,
                u"%s %s, %s" % (lastname, lastname2, firstname))
|
sergiocorato/partner-contact
|
partner_second_lastname/tests/test_onchange.py
|
Python
|
agpl-3.0
| 7,604
|
import numpy as np
from scipy.io.wavfile import read,write
def speedup(soundarray, factor):
    """Return *soundarray* resampled by *factor*, dropping samples.

    :param soundarray: 1D numpy array of audio samples
    :param factor: playback speed multiplier (>1 shortens the signal)
    """
    # Round the fractional sample positions, keep only in-range ones, and
    # convert to int exactly once (the original cast to int twice).
    indices = np.round(np.arange(0, len(soundarray), factor))
    indices = indices[indices < len(soundarray)].astype(int)
    return soundarray[indices]
def stretch(soundarray, factor, window_size, h):
    """Time-stretch *soundarray* by 1/factor with a simple phase vocoder.

    :param soundarray: 1D array of audio samples
    :param factor: stretch factor (<1 lengthens, >1 shortens)
    :param window_size: analysis window length in samples
    :param h: hop size in samples
    :returns: int16 array rescaled to a 12-bit amplitude range
    """
    phase = np.zeros(window_size)
    hanning_window = np.hanning(window_size)
    # NOTE(review): len(soundarray)/factor is float division on Python 3 and
    # np.zeros() rejects floats; this module targets Python 2 (see the
    # `print` statement below) -- confirm before porting.
    result = np.zeros(len(soundarray)/factor + window_size)
    for i in np.arange(0, len(soundarray)-window_size-h, h*factor):
        # Two overlapping windows one hop apart.
        a1 = soundarray[i:i+window_size]
        a2 = soundarray[i+h:i+window_size+h]
        s1 = np.fft.fft(hanning_window*a1)
        s2 = np.fft.fft(hanning_window*a2)
        # NOTE(review): operator precedence makes this (x % 2) * pi rather
        # than x % (2*pi); the canonical phase vocoder wraps modulo 2*pi --
        # confirm whether `% (2*np.pi)` was intended before changing it.
        phase = (phase+np.angle(s2/s1))%2*np.pi
        a2_rephased = np.fft.ifft(np.abs(s2)*np.exp(1j*phase))
        i2 = int(i/factor)
        result[i2:i2+window_size] += hanning_window*a2_rephased
    # Rescale to a 12-bit range and cast to 16-bit PCM.
    result= ((2**(16-4)) * result/result.max())
    return result.astype('int16')
def pitchshift(soundarray, x, window_size=2**13, h=2**11):
    """Shift the pitch of *soundarray* by *x* semitones, keeping its length.

    :param soundarray: 1D array of audio samples
    :param x: pitch shift in semitones (positive = up)
    :param window_size: phase-vocoder analysis window (default 2**13 = 8192)
    :param h: hop size; default fixed from the typo ``2*11`` (= 22 samples,
        a nonsensically small hop) to the intended ``2**11`` (= 2048),
        pairing correctly with the 2**13 window. Callers passing ``h``
        explicitly are unaffected.
    """
    factor = 2**(x/12.0)
    # Stretch time by 1/factor, then resample by factor: net effect is a
    # pitch shift with unchanged duration.
    stretched_sound = stretch(soundarray, 1.0/factor, window_size, h)
    return speedup(stretched_sound[window_size:], factor)
# Demo: load a WAV file, shift it up two semitones and print the samples.
# (Python 2 `print` statement -- this script is not Python 3 compatible.)
rate,data=read('rap.wav')
y=data[:]
print pitchshift(y, 2)
|
robertsiegel/AudioFFT
|
shiftpitch.py
|
Python
|
mit
| 1,248
|
'''
for use to generate flatland_simulator_api documentation with doxygen
'''
# -*- coding: utf-8 -*-
#
# Flatland documentation build configuration file, created by
# sphinx-quickstart on Tue Aug 15 13:55:12 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'Flatland'
copyright = u'2017, Avidbots Corp.'
author = u'Avidbots Corp.'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u''
# The full version, including alpha/beta/rc tags.
release = u''

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
# NOTE(review): these are alabaster's sidebar templates while html_theme is
# sphinx_rtd_theme above -- confirm the sidebar config is still wanted.
html_sidebars = {
    '**': [
        'about.html',
        'navigation.html',
        'relations.html',  # needs 'show_related': True theme option to display
        'searchbox.html',
        'donate.html',
    ]
}

# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'Flatlanddoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
# NOTE(review): this tuple has only 4 elements; the comment above (and
# Sphinx's latex_documents) expects a 5th documentclass entry such as
# 'manual' -- confirm whether it was dropped by mistake.
latex_documents = [
    (master_doc, 'Flatland.tex', u'Flatland Documentation',
     u'Avidbots Corp.'),
]

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'flatland', u'Flatland Documentation',
     [author], 1)
]

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'Flatland', u'Flatland Documentation',
     author, 'Flatland', 'One line description of project.',
     'Miscellaneous'),
]

# Run doxygen from the repository root at build/import time and publish its
# HTML output alongside the Sphinx build.
import subprocess
subprocess.call('cd ../../ ; doxygen Doxyfile', shell=True)
html_extra_path = ['html/']
|
avidbots/flatland
|
docs/doxygen/conf.py
|
Python
|
bsd-3-clause
| 5,327
|
#!/usr/bin/python2.7
# pyrcron: a database-backed cron runner that executes scheduled commands on
# remote hosts over SSH (see the pyrcron class below).
import re, ast, random, argparse, os
from time import time, strftime, sleep
from datetime import datetime
from cStringIO import StringIO
from Queue import Queue
from threading import Thread
# Prefer simplejson when available; fall back to the stdlib json module.
try:
    import simplejson as json
except:
    import json
#local libraries
from lib.helper import sqlQuery, thisSystem, cleanAndLower, hashString, encrypt, decrypt, toUnicode
# non-standard library: paramiko is required for all SSH work; bail out of
# the whole program immediately if it cannot be imported.
_PARAMIKO = False
try:
    import paramiko
    _PARAMIKO = True
except Exception, err:
    print("[ERROR] Unable to import paramiko: %s" % err)
    exit(-1)
###
# action = 0, attempt to run cron on system in the order they are provided, first success, return
# action = 1, shuffle list of available systems, first success, return
# action = 2, run cron on all configured systems at the same time
# action = 3, if previous iteration of cron is running, clobber and start new; useful for long running
#        crons
###
class pyrcron:
    """Database-backed distributed cron runner.

    Crons, hosts, and SSH credentials live in MySQL (tables created by
    _create); run() matches schedules against the current time and executes
    the matching commands on remote hosts over SSH via a worker-thread pool.
    """
    def __init__(self, test=False):
        """Initialize runtime defaults.

        test -- when True, SQL statements and SSH commands are printed
                instead of being executed (dry run).
        """
        self.dt = datetime.now()  # single timestamp used for schedule checks and cache staleness
        self.db = "pyrcron"  # database name passed to every sqlQuery call
        self.cache_file = "/tmp/pyrcron.cache"  # encrypted cache of auth/host/cron data
        self.cache_file_temp = "%s.tmp" % self.cache_file  # plaintext scratch file for (de|en)cryption
        self.cache_stale_mins = 5  # cache validity window in minutes
        self.cache_file_key = "8a10465a559947c58983516a6ce179b6bd52e4df7b39aef75d43c7e27892450b"  # NOTE(review): hard-coded cache encryption key committed to source
        self.max_threads = 4  # number of cron worker threads spawned by run()
        self.auth = {}  # userid (str) -> [username, password]; filled by _getInfo
        self.hosts = {}  # hostid (str) -> [host, port, local_port]; filled by _getInfo
        self.test = test
        if self.test:
            print("[INFO] Test only")
def add(self, add_type=None, add_data={}):
if not self._create():
print("[ERROR] Unable to create pyrcron tables")
return False
if not add_type:
return {"error": "no add type provided"}
if add_type == "auth":
auth_res = self._addAuth(add_data)
if not auth_res:
return {"error": auth_res}
if add_type == "host":
host_res = self._addHost(add_data)
if not host_res:
return {"error": host_res}
if add_type == "cron":
cron_res = self._addCron(add_data)
if not cron_res:
return {"error": cron_res}
return True
    def _addAuth(self, auth_data):
        """Insert credential rows into pyrcron_auth.

        auth_data -- dict (or list of dicts) of {user: password}; the
                     password may also be a private key string per connect().
        Returns True on success, False on any parse or SQL failure.
        """
        if not self._create():
            print("[ERROR] Unable to create pyrcron tables")
            return False
        if len(auth_data) == 0:
            return False
        if not isinstance(auth_data, list):
            auth_data = [auth_data]
        sql = "INSERT INTO pyrcron_auth (user, password, checksum) VALUES (%s, %s, %s)"
        sql_vals = []
        try:
            for k in auth_data:
                # Each entry is a single-pair dict: key is the username,
                # value the secret; the checksum de-duplicates credentials
                # via the (user, checksum) UNIQUE KEY.
                sql_vals.append((toUnicode(k.keys()[0]), toUnicode(k.values()[0]), toUnicode(hashString(k.values()[0], "sha256"))))
        except Exception, err:
            print("[ERROR] Unable to parse auth_data: %s" % err)
            return False
        try:
            if self.test:
                print("[SQL] %s, %s" %(sql, sql_vals))
            else:
                sqlQuery(sql, db=self.db, _params=sql_vals, bulk=True, commit=True, _raise=True)
        except Exception, err:
            print("[ERROR] Unable to add pyrcron auth data: %s" % err)
            return False
        return True
    def _addHost(self, host_data):
        """Insert host rows into pyrcron_hosts.

        host_data -- dict (or list of dicts) with keys "host" (required),
                     "port" and "local_port" (both optional, default 22).
        Returns True if at least one row was inserted, False otherwise.
        """
        if not self._create():
            print("[ERROR] Unable to create pyrcron tables")
            return False
        if len(host_data) == 0:
            return False
        if not isinstance(host_data, list):
            host_data = [host_data]
        _host_data = []
        for i, hd in enumerate(host_data):
            try:
                host = hd["host"]
            except:
                # Entries without a host are skipped, not fatal.
                print("[WARN] No host provided")
                continue
            try:
                port = int(hd["port"])
            except:
                port = 22
            try:
                lport = int(hd["local_port"])
            except:
                lport = 22
            _host_data.append((toUnicode(host), toUnicode(port), toUnicode(lport)))
        if len(_host_data) == 0:
            print("[WARN] No host data parsed")
            return False
        sql = "INSERT INTO pyrcron_hosts (host, port, local_port) VALUES (%s, %s, %s)"
        sql_vals = _host_data
        try:
            if self.test:
                print("[SQL] %s, %s" %(sql, sql_vals))
            else:
                sqlQuery(sql, db=self.db, _params=sql_vals, bulk=True, commit=True, _raise=True)
        except Exception, err:
            print("[ERROR] Unable to add pyrcron host data: %s" % err)
            return False
        return True
def _addCron(self, cron_data):
if not self._create():
print("[ERROR] Unable to create pyrcron tables")
return False
if len(cron_data) == 0:
return False
if not isinstance(cron_data, list):
cron_data = [cron_data]
_cron_data = []
for cd in cron_data:
try:
cron_name = cleanAndLower(cd["name"])
except:
print("[WARN] Unable to parse cron name: %s" % err)
continue
try:
cron_path = cleanAndLower(cd["path"])
except Exception, err:
print("[WARN] Unable to parse cron path: %s" % err)
continue
try:
action = int(cd["action"])
except:
action = 0
try:
enabled = int(cd["enabled"])
except:
enabled = 0
try:
cmin = self._sanitizeInterval(str(cd["min"]))
except:
try:
if re.search("/", str(cd["min"])):
continue
except:
cmin = "*"
try:
chour = self._sanitizeInterval(str(cd["hour"]))
except:
try:
if re.search("/", str(cd["hour"])):
continue
except:
chour = "*"
try:
cdom = self._sanitizeInterval(str(cd["dom"]))
except:
try:
if re.search("/", str(cd["dom"])):
continue
except:
cdom = "*"
try:
cmon = self._sanitizeInterval(str(cd["mon"]))
except:
try:
if re.search("/", str(cd["mon"])):
continue
except:
cmon = "*"
try:
cdow = self._sanitizeInterval(str(cd["dow"]))
except:
try:
if re.search("/", str(cd["dow"])):
continue
except:
cdow = "*"
dt = datetime.strptime("1970-01-01T00:00:00", "%Y-%m-%dT%H:%M:%S")
try:
users = cd["users"]
for i, h in enumerate(cd["hosts"]):
_h = self.pyrcronQuery("hosts", {"host": h})
try:
_h = int(_h[0][0])
if not _h:
continue
except:
continue
userid = self.pyrcronQuery("auth", {"user": users[i]})
try:
userid = int(userid[0][0])
if not userid:
continue
except:
continue
_cron_data.append((toUnicode(cron_name), toUnicode(cron_path), toUnicode(cmin), toUnicode(chour), toUnicode(cdom), toUnicode(cmon), toUnicode(cdow), toUnicode(enabled), toUnicode(_h), toUnicode(userid), toUnicode(action), dt))
except Exception, err:
print("[WARN] Unable to parse user and host data for cron: %s" % err)
continue
if len(_cron_data) == 0:
print("[WARN] No cron data to add")
return False
sql = "INSERT IGNORE INTO pyrcron_crons (cron_name, cron_path, cron_min, cron_hour, cron_dom, cron_mon, cron_dow, enabled, host, user, action, last_run) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
sql_vals = _cron_data
try:
if self.test:
print("[SQL] %s, %s" %(sql, sql_vals))
else:
sqlQuery(sql, db=self.db, _params=sql_vals, bulk=True, commit=True, _raise=True)
except Exception, err:
print("[ERROR] Unable to add pyrcron cron data: %s" % err)
return False
return True
def delete(self, delete_type=None, delete_data={}, clean=False):
if not delete_type:
print("[WARN] No delete type provided")
return False
delete_data = self._sanitizeFilterFields(delete_data)
if len(delete_data) == 0:
print("[WARN] No delete data provided")
if delete_type == "cron":
_crons = self.pyrcronQuery("crons", delete_data, ["id", "host", "user"])
_ids = {}
_hosts = {}
_users = {}
_delete = {}
for c in _crons:
_ids[c[0]] = 1
try:
_hosts[c[1]]
except:
_hosts[c[1]] = 1
if clean:
host_crons = self.pyrcronQuery("crons", {"host": c[1]})
try:
if len(host_crons) == 1:
try:
_delete["host"].append({"id": c[1]})
except:
_delete.update({"host": [{"id": c[1]}]})
except:
pass
try:
_users[c[2]]
except:
_users[c[2]] = 1
if clean:
user_crons = self.pyrcronQuery("crons", {"user": c[2]})
try:
if len(user_crons) == 1:
try:
_delete["auth"].appende({"id": c[2]})
except:
_delete.update({"auth": [{"id": c[2]}]})
except:
pass
if len(_ids) > 0:
ids = ["'%s'" % i for i in _ids.keys()]
#print("[DEBUG] Attempting to delete crons: %s" % _ids.keys())
sql = "DELETE FROM pyrcron_crons WHERE id IN (%s)" % ',' . join(ids)
try:
if self.test:
print("[SQL] %s" % sql)
else:
sqlQuery(sql, db=self.db, commit=True)
try:
for dl in _delete["host"]:
self.delete("host", dl, clean=clean)
except:
pass
try:
for dl in _delete["auth"]:
self.delete("auth", dl, clean=clean)
except:
pass
except Exception, err:
print("[ERROR] Unable to delete from pyrcron crons table: %s" % err)
return False
else:
print("[INFO] No matching crons for delete")
return False
elif delete_type == "host":
_hosts = self.pyrcronQuery("hosts", delete_data)
_ids = {h[0]: 1 for h in _hosts}
if len(_ids) > 0:
ids = ["'%s'" % i for i in _ids.keys()]
#print("[DEBUG] Attempting to delete hosts: %s" % _ids.keys())
sql = "DELETE FROM pyrcron_hosts WHERE id IN (%s)" % ',' . join(ids)
try:
if self.test:
print("[SQL] %s" % sql)
else:
sqlQuery(sql, db=self.db, commit=True)
except Exception, err:
print("[ERROR] Unable to delete from pyrcron hosts table: %s" % err)
return False
else:
print("[INFO] No matching hosts for delete")
return False
elif delete_type == "auth":
_users = self.pyrcronQuery("auth", delete_data)
_ids = {u[0]: 1 for u in _users}
if len(_ids) > 0:
ids = ["'%s'" % i for i in _ids.keys()]
#print("[DEBUG] Attempting to delete users: %s" % _ids.keys())
sql = "DELETE FROM pyrcron_auth WHERE id IN (%s)" % ',' . join(ids)
try:
if self.test:
print("[SQL] %s" % sql)
else:
sqlQuery(sql, db=self.db, commit=True)
except Exception, err:
print("[ERROR] Unable to delete from pyrcron hosts table: %s" % err)
return False
else:
print("[INFO] No matching hosts for delete")
return False
else:
print("[WARN] Invalid type provided to delete: %s" % repr(delete_type))
return False
return True
    ###
    #
    # TODO: Still rudimentary; still needs some sanity checks for what the user is providing
    ###
    def update(self, update_type=None, select_data={}, update_data={}):
        """Update crons, hosts, or auth rows matched by select_data.

        update_type -- "cron", "host", or "auth".
        select_data -- filter dict used to find the rows to update.
        update_data -- column -> new value mapping applied to every match.
        Returns True on success, False otherwise.
        NOTE(review): update_data keys are interpolated straight into the SET
        clause; callers must not pass untrusted key names.
        """
        if not update_type:
            print("[WARN] No updated type provided")
            return False
        if len(select_data) == 0:
            print("[WARN] No select data provided")
            return False
        if len(update_data) == 0:
            print("[WARN] No update data provided")
            return False
        if update_type == "cron":
            cron_ids = self.pyrcronQuery("crons", select_data)
            _ids = {c[0]: 1 for c in cron_ids}
            ids = ["'%s'" % i for i in _ids.keys()]
            sql = "UPDATE pyrcron_crons SET " + ',' . join(toUnicode(k) + "=%(" + k + ")s" for k in update_data.keys()) + " WHERE id IN (%s)" % ',' . join(ids)
            try:
                if self.test:
                    print("[SQL] %s, %s" %(sql, update_data))
                else:
                    sqlQuery(sql, db=self.db, _params=update_data, commit=True, _raise=True)
            except Exception, err:
                print("[ERROR] Unable to update pyrcron crons: %s" % err)
                return False
        elif update_type == "host":
            host_ids = self.pyrcronQuery("hosts", select_data)
            _ids = {h[0]: 1 for h in host_ids}
            ids = ["'%s'" % i for i in _ids.keys()]
            sql = "UPDATE pyrcron_hosts SET " + ',' . join(toUnicode(k) + "=%(" + k + ")s" for k in update_data.keys()) + " WHERE id IN (%s)" % ',' . join(ids)
            try:
                if self.test:
                    print("[SQL] %s, %s" %(sql, update_data))
                else:
                    sqlQuery(sql, db=self.db, _params=update_data, commit=True, _raise=True)
            except Exception, err:
                print("[ERROR] Unable to update pyrcron hosts: %s" % err)
                return False
        elif update_type == "auth":
            user_ids = self.pyrcronQuery("auth", select_data)
            _ids = {u[0]: 1 for u in user_ids}
            ids = ["'%s'" % i for i in _ids.keys()]
            sql = "UPDATE pyrcron_auth SET " + ',' . join(toUnicode(k) + "=%(" + k + ")s" for k in update_data.keys()) + " WHERE id IN (%s)" % ',' . join(ids)
            try:
                if self.test:
                    print("[SQL] %s, %s" %(sql, update_data))
                else:
                    sqlQuery(sql, db=self.db, _params=update_data, commit=True, _raise=True)
            except Exception, err:
                print("[ERROR] Unable to update pyrcron auth: %s" % err)
                return False
        else:
            print("[ERROR] Invalid update type provided: %s" % repr(update_type))
            return False
        return True
    def pyrcronQuery(self, _table, filter_fields, return_values=["id"]):
        """SELECT return_values from pyrcron_<_table> matching filter_fields.

        _table        -- table suffix ("crons", "hosts", "auth").
        filter_fields -- column -> value equality filters (ANDed together).
        return_values -- columns to select; defaults to the id column.
        Returns the sqlQuery result rows, True in test mode, or False on
        error / empty filter.
        NOTE(review): column names from filter_fields/return_values are
        concatenated into the SQL text; only values are parameterized.
        """
        if len(filter_fields) == 0:
            print("[WARN] No filter fields provided")
            return False
        # In test mode the query is only printed, so True stands in for rows.
        pyrcron_info = True
        sql = "SELECT " + ',' .join(return_values) + " FROM pyrcron_{} WHERE ".format(_table) + ' AND ' .join(k + "=%(" + k + ")s" for (k, v) in filter_fields.iteritems())
        try:
            if self.test:
                print("[SQL] %s, %s" %(sql, filter_fields))
            else:
                pyrcron_info = sqlQuery(sql, db=self.db, _params=filter_fields, multi=True, _raise=True)
        except Exception, err:
            print("[ERROR] Unable to get pyrcron info: %s" % err)
            return False
        return pyrcron_info
def _sanitizeInterval(self, interval):
if re.search("/", interval):
if re.search("\*(\s+)?/(\s+)?[1-9]+", interval):
interval = cleanAndLower(interval, doLower=False)
interval = re.sub("\s+", "", interval)
else:
print("[ERROR] Invalid interval format (%s)" % repr(interval))
raise Exception("Invalid interval format, must be */[0-9]+")
return interval
def _sanitizeFilterFields(self, filter_fields):
try:
ix = filter_fields.values().index("*")
if ix >= 0:
k = filter_fields.keys()[ix]
try:
del filter_fields[k]
except Exception, err:
print("[ERROR] Unable to sanitize filter fields (%s): %s" %(repr(filter_fields), err))
return {}
return self._sanitizeFilterFields(filter_fields)
except:
pass
return filter_fields
    def testHost(self, userid, hostid="all"):
        """Run a harmless `ls -l` on one or all hosts with the given user.

        userid -- auth table id of the credentials to use.
        hostid -- hosts table id, or "all" for every known host.
        Returns {"success": {host: duration}, "failure": {host: error}}, or
        {"error": ...} for an unknown userid/hostid.
        """
        # Refresh self.auth / self.hosts from the database (bypass cache);
        # the returned rows themselves are not needed here.
        run_results = self._getInfo(ignore_cache=True)
        cmd = "ls -l"
        try:
            self.auth[str(userid)]
        except:
            return {"error": "invalid userid"}
        if hostid == "all":
            hostid = self.hosts.keys()
        else:
            try:
                self.hosts[str(hostid)]
                hostid = [hostid]
            except:
                return {"error": "invalid hostid"}
        test_results = {"success": {}, "failure": {}}
        for h in hostid:
            #print("[DEBUG] testHost, (host, user, cmd): (%s, %s, %s)" %(h, userid, cmd))
            _h = self._runCron(cmd, h, userid, 0)
            try:
                # _runCron returns {host: [ok, duration, errors]}.
                if _h[str(h)][0]:
                    test_results["success"].update({self.hosts[str(h)][0]: _h[str(h)][1]})
                else:
                    test_results["failure"].update({self.hosts[str(h)][0]: _h[str(h)][2]})
            except Exception, err:
                test_results["failure"].update({self.hosts[str(h)][0]: str(err)})
        return test_results
    def show(self, show_type="all"):
        """Return configured data for display.

        show_type -- "auth" (id -> username), "hosts" (id -> hostname),
                     "crons" (cron id -> row with host/user ids resolved to
                     names), or anything else for the raw _getInfo rows.
        """
        run_results = self._getInfo(ignore_cache=True)
        if show_type == "auth":
            # Only usernames -- never expose stored passwords.
            run_results = {ak: av[0] for ak, av in self.auth.iteritems()}
        elif show_type == "hosts":
            run_results = {hk: hv[0] for hk, hv in self.hosts.iteritems()}
        elif show_type == "crons":
            _run_results = {}
            for rr in run_results:
                # Replace host/user id lists with human-readable names.
                t_hosts = [self.hosts[str(r)][0] for r in rr[8]]
                t_users = [self.auth[str(r)][0] for r in rr[9]]
                _run_results[rr[0]] = list(rr[1:11])
                _run_results[rr[0]][7] = t_hosts
                _run_results[rr[0]][8] = t_users
            run_results = _run_results
        return run_results
    def run(self, cron_id=None):
        """Run all crons due now, or force a single cron by id.

        cron_id -- optional cron row id; when it matches, that cron is queued
                   regardless of its schedule or enabled flag.
        Work is fanned out to self.max_threads daemon worker threads via a
        Queue; returns True once the queue drains, False on setup failure.
        """
        if not self._create():
            print("[ERROR] Unable to create pyrcron tables")
            return False
        if not _PARAMIKO:
            print("[ERROR] Paramiko library missing or not found. Exiting")
            return False
        # Split the single reference timestamp into cron-comparable fields.
        current_time = self.dt.strftime("%w_%m_%d_%H_%M").split("_")
        _now_dow = current_time[0]
        _now_mon = current_time[1]
        _now_dom = current_time[2]
        _now_hour = current_time[3]
        _now_min = current_time[4]
        run_results = self._getInfo()
        if not run_results:
            return False
        # run_results row layout (see _getInfo):
        # 0=id, 1=cron_name, 2=cron_path, 3=cron_min, 4=cron_hour, 5=cron_dom,
        # 6=cron_mon, 7=cron_dow, 8=[host ids], 9=[user ids], 10=action, 11=enabled
        self.cronQ = Queue()
        #print("[DEBUG] cronQ created")
        for i in range(self.max_threads):
            try:
                cronWorker = Thread(target=self._cronThreadWrapper, args=(self.cronQ,))
                cronWorker.setDaemon(True)
                cronWorker.start()
                #print("[DEBUG] cronWorker thread %d created" % i)
            except Exception, err:
                print("[WARN] Unable to create cronWorker thread: %s" % err)
        cronsToRun = False
        for rr in run_results:
            try:
                # Manual run: queue the matching cron and skip schedule checks.
                if int(rr[0]) == int(cron_id):
                    #print("[DEBUG] Manually adding to cronQ: %s, %s, %s, %s, %s" %(rr[1], rr[2], rr[8], rr[9], rr[10]))
                    self.cronQ.put([rr[1], rr[2], rr[8], rr[9], rr[10]])
                    cronsToRun = True
                    continue
            except:
                pass
            if rr[11] == 0:
                # Disabled cron.
                continue
            # Every schedule field must match the current time to queue the cron.
            if self._intervalCheck(rr[7], _now_dow):
                if self._intervalCheck(rr[6], _now_mon):
                    if self._intervalCheck(rr[5], _now_dom):
                        if self._intervalCheck(rr[4], _now_hour):
                            if self._intervalCheck(rr[3], _now_min):
                                #print("[DEBUG] Adding to cronQ: %s, %s, %s, %s, %s" %(rr[1], rr[2], rr[8], rr[9], rr[10]))
                                self.cronQ.put([rr[1], rr[2], rr[8], rr[9], rr[10]])
                                cronsToRun = True
        # Block until all queued crons have been processed by the workers.
        self.cronQ.join()
        if cronsToRun:
            print("[INFO] All crons completed")
        return True
    def _cronThreadWrapper(self, cq):
        """Worker-thread loop: pull cron jobs off the queue and execute them.

        cq -- Queue of [cron_name, cron_path, host_ids, user_ids, action].
        Runs forever (threads are daemonized); records each run's outcome
        back into pyrcron_crons.
        """
        while True:
            c = cq.get()
            cron_name = c[0]
            #print("[DEBUG] From cronQ: %s" % c)
            results = self._runCron(c[1], c[2], c[3], c[4])
            for rk, rv in results.iteritems():
                # rv is [ok, duration, errors] keyed by host id.
                _now = self.dt.strftime("%Y-%m-%d %H:%M:%S")
                if rv[0]:
                    print("[INFO] cron (%s) successfully run on host (%s)" %(cron_name, self.hosts[str(rk)][0]))
                else:
                    print("[INFO] cron (%s) failed to run on host (%s)" %(cron_name, self.hosts[str(rk)][0]))
                cron_success_sql = "UPDATE pyrcron_crons SET last_run=%s, duration=%s, errors=%s WHERE cron_name=%s AND host=%s"
                cron_success_vals = (_now, rv[1], rv[2], cron_name, rk)
                try:
                    if self.test:
                        print("[SQL] %s, %s" %(cron_success_sql, cron_success_vals))
                    else:
                        sqlQuery(cron_success_sql, db=self.db, _params=cron_success_vals, commit=True)
                except Exception, err:
                    print("[WARN] unable to update last run: %s" % err)
            cq.task_done()
    def _getInfo(self, ignore_cache=False):
        """Load cron/host/auth data, populating self.auth and self.hosts.

        ignore_cache -- when False, an encrypted cache file younger than
                        self.cache_stale_mins is used instead of the database,
                        and a fresh cache is written after a database load.
        Returns a list of tuples shaped for run() (see the layout comment
        there), or False on database failure.
        """
        run_results = False
        if not ignore_cache:
            if os.path.exists(self.cache_file):
                mtime = datetime.fromtimestamp(os.path.getctime(self.cache_file))
                if (self.dt - mtime).total_seconds() < (self.cache_stale_mins * 60):
                    #print("[DEBUG] Loading pyrcron cache file: %s" % self.cache_file)
                    try:
                        # Decrypt to a temp plaintext file, load it, then
                        # remove the plaintext again as soon as possible.
                        with open(self.cache_file, "rb") as cyphertext, open(self.cache_file_temp, "wb") as plaintext:
                            decrypt(cyphertext, plaintext, self.cache_file_key)
                        try:
                            with open(self.cache_file_temp, "r") as cf:
                                data = json.load(cf)
                                self.auth = data["auth"]
                                self.hosts = data["host"]
                                run_results = data["run_results"]
                        except Exception, err:
                            print("[WARN] Unable to load cache file at '%s': %s" %(self.cache_file_temp, err))
                        try:
                            os.remove(self.cache_file_temp)
                        except Exception, err:
                            print("[ERROR] Unable to remove plaintext cache file (%s): %s" %(self.cache_file_temp, err))
                        if run_results:
                            return run_results
                    except Exception, err:
                        print("[WARN] Unable to decrypt local cache file (%s): %s" %(self.cache_file, err))
        sql = "SELECT pyrcron_crons.id, cron_name, cron_path, cron_min, cron_hour, cron_dom, cron_mon, cron_dow, action, pyrcron_crons.host, pyrcron_hosts.host, pyrcron_hosts.port, pyrcron_hosts.local_port, pyrcron_crons.user, pyrcron_auth.user, pyrcron_auth.password, enabled FROM pyrcron_crons, pyrcron_hosts, pyrcron_auth WHERE pyrcron_crons.user=pyrcron_auth.id AND pyrcron_crons.host=pyrcron_hosts.id ORDER BY cron_name"
        try:
            cron_data = sqlQuery(sql, db=self.db, multi=True)
        except Exception, err:
            print("[ERROR] Unable to retrieve pyrcron data: %s" % err)
            return False
        # Column layout of cron_data rows:
        # 0=cron id, 1=cron_name, 2=cron_path, 3=cron_min, 4=cron_hour,
        # 5=cron_dom, 6=cron_mon, 7=cron_dow, 8=action, 9=host id, 10=host,
        # 11=port, 12=local_port, 13=user id, 14=user, 15=password, 16=enabled
        crons = {}
        for cd in cron_data:
            # Cache credentials and host details keyed by their table ids.
            try:
                self.auth[str(cd[13])]
            except:
                self.auth[str(cd[13])] = [cd[14], cd[15]]
            try:
                self.hosts[str(cd[9])]
            except:
                self.hosts[str(cd[9])] = [cd[10], cd[11], cd[12]]
            try:
                #if action is 2, then all crons should be run at the same time
                if cd[8] == 2:
                    raise Exception("action is 2")
                # Same cron name seen again: fold the extra host/user into the
                # existing entry so _runCron can fail over between them.
                crons[cd[1]][7].append(cd[9])
                crons[cd[1]][8].append(cd[13])
            except:
                # First occurrence (or action==2, which gets a per-row key so
                # each host runs independently).
                _t = []
                _t = [cd[0]] + list(cd[2:9])
                _t.insert(7, [cd[9]])
                _t.insert(8, [cd[13]])
                _t.append(cd[16])
                _key = cd[1]
                if cd[8] == 2:
                    _key = "%s:%s" %(cd[1], cd[0])
                crons.update({_key: _t})
        run_results = []
        for ck, cv in crons.iteritems():
            _t = cv
            try:
                # Strip the ":<id>" suffix used to keep action==2 rows apart.
                _key = ck.split(":")[0]
            except:
                _key = ck
            _t.insert(1, _key)
            run_results.append(tuple(_t))
        if not ignore_cache:
            try:
                # Write plaintext JSON, encrypt it into the cache file, then
                # remove the plaintext.
                with open(self.cache_file_temp, "w") as cf:
                    json.dump({"auth": self.auth, "host": self.hosts, "run_results": run_results}, cf)
                try:
                    with open(self.cache_file_temp, "rb") as plaintext, open(self.cache_file, "wb") as cyphertext:
                        encrypt(plaintext, cyphertext, self.cache_file_key)
                except Exception, err:
                    print("[WARN] Unable to encrypt local cache file (%s): %s" %(self.cache_file_temp, err))
                try:
                    os.remove(self.cache_file_temp)
                except Exception, err:
                    print("[ERROR] Unable to remove plaintext cache file (%s): %s" %(self.cache_file_temp, err))
            except Exception, err:
                print("[WARN] Unable to write cache file '%s': %s" %(self.cache_file, err))
        return run_results
def _intervalCheck(self, cron_val, now_val):
#print("[_intervalCheck] %s, %s" %(cron_val, now_val))
if cron_val == "*":
return True
elif re.search("\*/[0-9]+", cron_val):
try:
_cron_val = int(cron_val.split("/")[1])
except Exception, err:
print("[ERROR] Unable to cast cron interval (%s) to int: %s" %(repr(cron_val), err))
return False
if _cron_val <= 0:
print("[WARN] Cron attempted interval is <= 0: %s" % repr(cron_val))
return False
if int(now_val) % _cron_val == 0:
return True
elif int(cron_val) == int(now_val):
return True
return False
    def _runCron(self, cron_path, hosts, users, action):
        """Execute cron_path on one or more hosts according to action.

        cron_path -- command to execute remotely.
        hosts     -- host id or list of host ids.
        users     -- user id or list of user ids, parallel to hosts.
        action    -- 0: first success wins, in order; 1: same but shuffled;
                     2: run on every host; 3: kill a still-running previous
                     instance before starting.
        Returns {host id: [ok, duration, errors]} (a single entry unless
        action == 2).
        """
        if not isinstance(hosts, list):
            hosts = [hosts]
        if not isinstance(users, list):
            users = [users]
        runOnAll = False
        if action == 1:
            #random shuffle hosts
            random.shuffle(hosts)
        elif action == 2:
            runOnAll = True
        success = False
        duration = None
        errors = None
        results = {}
        for i, host in enumerate(hosts):
            _host, _port, _user, _passwd = self._parseConnectionDetails(host, users[i])
            if not (_host or _port or _user or _passwd or cron_path):
                continue
            try:
                # NOTE(review): self.disabled is never assigned anywhere in
                # this class, so this lookup always raises and falls into the
                # except below -- the host-disable check is effectively inert.
                self.disabled[thisSystem(_host)]
                #print("[DEBUG] Systems' (%s) host (%s) is disabled; will not run cron (%s) on it" %(_host, thisSystem(_host), cron_path))
                continue
            except:
                pass
            # Lock file prevents overlapping runs of the same (cron, host,
            # user, action) combination.
            checksum = hashString("%s,%s,%s,%s" %(cron_path, _host, _user, action), "sha256")
            checksum_file = "/tmp/%s.pyrcron" % checksum
            if os.path.exists(checksum_file):
                if int(action) == 3:
                    print("[INFO] Previous cron for %s@%s:%s is still running (%s); attempting to kill" %(_user, _host, cron_path, checksum_file))
                    if not self._checkRemoteProcesses(_host, _port, _user, _passwd, cron_path, kill=True):
                        errors = "Unable to kill previous running cron"
                        return {host :[False, duration, errors]}
                else:
                    print("[INFO] Previous cron for %s@%s:%s is still running: %s" %(_user, _host, cron_path, checksum_file))
                    return {host :[False, duration, errors]}
            with open(checksum_file, "w") as cfile:
                cfile.write(self.dt.strftime("%Y-%m-%dT%H:%M:%S"))
            #print("[DEBUG] pyrcron file created, %s" % checksum_file)
            try:
                conn_res, errors = self.connect(_host, _port, _user, _passwd, cron_path)
                if not conn_res or errors:
                    print("[WARN] Unable to run cron (%s) on host (%s): %s" %(cron_path, _host, errors))
                else:
                    success = host
                    duration = conn_res
            except Exception, err:
                print("[ERROR] Unable to run cron (%s) on host (%s): %s" %(cron_path, _host, err))
                errors = str(err)
            try:
                # Always release the lock file, success or not.
                os.remove(checksum_file)
            except Exception, err:
                print("[WARN] Unable to remove file (%s): %s" %(checksum_file, err))
            if success:
                if not runOnAll:
                    return {host: [True, duration, errors]}
                else:
                    results.update({host: [True, duration, errors]})
            else:
                if not runOnAll:
                    return {host: [False, duration, errors]}
                else:
                    results.update({host: [False, duration, errors]})
        return results
    def _checkRemoteProcesses(self, _host, _port, _user, _passwd, proc_name, kill=False):
        """Look for (and optionally kill) remote processes matching proc_name.

        Runs `ps -eaf` on the host over SSH and scans the output with a
        case-insensitive match; with kill=True, issues `kill -9` for every
        matching pid. Returns True on success (or nothing to do), False if
        the process list could not be fetched or a kill failed.
        """
        cmd = "ps -eaf"
        # connect(..., getstdout=True) returns the command's stdout in the
        # second slot on success.
        conn_res, std_out = self.connect(_host, _port, _user, _passwd, cmd, getstdout=True)
        if not conn_res or not std_out:
            print("[WARN] Unable to find running processes on host: %s" % _host)
            return False
        proc_rgx = re.compile(re.escape(proc_name), re.IGNORECASE)
        ps_list = std_out.split("\n")
        ps_ids = []
        for ps in ps_list:
            if proc_rgx.search(ps):
                # Second whitespace-separated field of `ps -eaf` is the pid.
                _ps = re.split("\s+", ps)
                if not _ps[1] in ps_ids:
                    ps_ids.append(_ps[1])
        if len(ps_ids) > 0:
            #print("[DEBUG] Found process ids (%s) for process (%s) on host (%s)" %(',' . join(ps_ids), proc_name, _host))
            if kill:
                for psi in ps_ids:
                    cmd = "kill -9 %d" % int(psi)
                    conn_res, errors = self.connect(_host, _port, _user, _passwd, cmd)
                    if not conn_res or errors:
                        print("[WARN] Unable to kill process (%s) with id (%s) on host (%s): %s" %(proc_name, psi, _host, errors))
                        return False
                    #print("[DEBUG] Successfully killed process (%s) with id (%s) on host (%s)" %(proc_name, psi, _host))
        return True
    def _parseConnectionDetails(self, hostid, userid):
        """Resolve host/user ids to concrete SSH connection parameters.

        hostid -- key into self.hosts ([host, port, local_port]).
        userid -- key into self.auth ([user, password]).
        Returns (_host, _port, _user, _passwd); any field that cannot be
        resolved stays None.
        """
        _host = None
        _port = None
        _user = None
        _passwd = None
        if hostid:
            host = str(hostid)
            try:
                _host = self.hosts[host][0]
                _port = self.hosts[host][1]
                _local_port = self.hosts[host][2]
                if _local_port:
                    # When the target host is this very system, prefer the
                    # locally-reachable port.
                    if thisSystem(_host) == thisSystem():
                        _port = _local_port
            except Exception, err:
                print("[ERROR] Unable to parse connection host details for (%s): %s" %(host, err))
            try:
                _user = self.auth[str(userid)][0]
                _passwd = self.auth[str(userid)][1]
            except Exception, err:
                print("[ERROR] Unable to parse connection authentication details for (%s): %s" %(host, err))
        else:
            print("[WARN] Unable to parse connection details; no host provided")
        return _host, _port, _user, _passwd
def connect(self, host, port, user, passwd, cmd, getstdout=False):
if not (host or port or user or passwd or cmd):
return False
if self.test:
return 0.5
start_time = time()
#ssh into system, execute cmd
conn_res = False
errors = None
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
if re.search("^(-)+?BEGIN RSA PRIVATE KEY(-)+?", passwd, re.IGNORECASE):
try:
_passwd = StringIO(passwd)
except Exception, err:
print("[ERROR] Unable to read in certificate: %s" % err)
#return False, "Unable to read in certificate: %s" % err
raise Exception("Unable to read in certificate: %s" % err)
_passwd.seek(0)
pk = paramiko.RSAKey.from_private_key(_passwd)
#print("[DEBUG] Attempting ssh connection using private key from string")
ssh.connect(hostname=host, port=int(port), username=user, pkey=pk)
elif os.path.isfile(passwd):
pk = paramiko.RSAKey.from_private_key_file(passwd)
#print("[DEBUG] Attempting ssh connection using private key from file")
ssh.connect(hostname=host, port=int(port), username=user, pkey=pk)
else:
#print("[DEBUG] Attempting ssh connection using password")
ssh.connect(hostname=host, port=int(port), username=user, password=passwd)
try:
#print("[DEBUG] Attempting connection: %s@%s:%s:%s" %(user, host, port, cmd))
stdin, stdout, stderr = ssh.exec_command(cmd)
status = stdout.channel.recv_exit_status()
_stderr = stderr.read()
if not _stderr == "":
raise Exception(str(_stderr))
if status == 0:
if getstdout:
errors = stdout.read()
conn_res = True
#print("[DEBUG] connection status: %s, connection results: %s" %(status, conn_res))
except Exception, err:
print("[ERROR] Unable to run command (%s) on host (%s): %s" %(cmd, host, err))
errors = "Unable to run command (%s) on host (%s): %s" %(cmd, host, err)
except Exception, err:
print("[ERROR] Unable to connect to %s@%s:%s to run %s: %s" %(user, host, port, cmd))
errors = "Unable to connect to %s@%s:%s to run %s: %s" %(user, host, port, cmd, err)
try:
ssh.close()
except Exception, err:
print("[WARN] Unable to close connection: %s" % err)
end_time = time() - start_time
if conn_res:
conn_res = end_time
return conn_res, errors
    def _create(self):
        """Create the pyrcron_auth/pyrcron_hosts/pyrcron_crons tables if missing.

        Returns True when all tables exist (or were created), None when a
        CREATE fails, and False when existence cannot even be determined.
        """
        check_sql = "SHOW TABLES LIKE 'pyrcron_auth'"
        try:
            table_exists = sqlQuery(check_sql, db=self.db)
            if not table_exists:
                try:
                    sql = "CREATE TABLE IF NOT EXISTS pyrcron_auth (id INT AUTO_INCREMENT, user VARCHAR(255) NOT NULL, password TEXT NOT NULL, checksum VARCHAR(64), PRIMARY KEY(id), UNIQUE KEY(user, checksum)) DEFAULT CHARACTER SET='utf8' DEFAULT COLLATE 'utf8_general_ci'"
                    sqlQuery(sql, db=self.db, commit=True, _raise=True)
                except Exception, err:
                    print("[ERROR] Unable to create pyrcron_auth table: %s" % err)
                    return None
        except Exception, err:
            print("[ERROR] Unable to determine if pyrcron_auth table exists: %s" % err)
            return False
        check_sql = "SHOW TABLES LIKE 'pyrcron_hosts'"
        try:
            table_exists = sqlQuery(check_sql, db=self.db)
            if not table_exists:
                try:
                    sql = "CREATE TABLE IF NOT EXISTS pyrcron_hosts (id INT AUTO_INCREMENT, host VARCHAR(255) NOT NULL, port INT NOT NULL DEFAULT 22, local_port INT NOT NULL DEFAULT 22, PRIMARY KEY(id), UNIQUE KEY(host, port, local_port)) DEFAULT CHARACTER SET='utf8' DEFAULT COLLATE 'utf8_general_ci'"
                    sqlQuery(sql, db=self.db, commit=True, _raise=True)
                except Exception, err:
                    print("[ERROR] Unable to create pyrcron_hosts table: %s" % err)
                    return None
        except Exception, err:
            print("[ERROR] Unable to determine if pyrcron_hosts table exists: %s" % err)
            return False
        # pyrcron_crons references the two tables above via foreign keys, so
        # it is created last.
        check_sql = "SHOW TABLES LIKE 'pyrcron_crons'"
        try:
            table_exists = sqlQuery(check_sql, db=self.db)
            if not table_exists:
                try:
                    sql = "CREATE TABLE IF NOT EXISTS pyrcron_crons (id INT AUTO_INCREMENT, cron_name VARCHAR(255) NOT NULL, cron_path VARCHAR(255) NOT NULL, cron_min VARCHAR(255) NOT NULL DEFAULT '*', cron_hour VARCHAR(255) NOT NULL DEFAULT '*', cron_dom VARCHAR(255) NOT NULL DEFAULT '*', cron_mon VARCHAR(255) NOT NULL DEFAULT '*', cron_dow VARCHAR(255) NOT NULL DEFAULT '*', enabled TINYINT DEFAULT 1 NOT NULL, host INT NOT NULL, user INT NOT NULL, action TINYINT NOT NULL DEFAULT 0, last_run TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, duration DOUBLE(8, 2) NULL, errors VARCHAR(255) NULL, PRIMARY KEY(id), FOREIGN KEY (host) REFERENCES pyrcron_hosts(id), FOREIGN KEY (user) REFERENCES pyrcron_auth(id), UNIQUE KEY(cron_name, cron_path, host, user)) DEFAULT CHARACTER SET='utf8' DEFAULT COLLATE 'utf8_general_ci'"
                    sqlQuery(sql, db=self.db, commit=True, _raise=True)
                except Exception, err:
                    print("[ERROR] Unable to create pyrcron_crons table: %s" % err)
                    return None
        except Exception, err:
            print("[ERROR] Unable to determine if pyrcron_crons table exists: %s" % err)
            return False
        return True
def main():
    """Command-line entry point: parse arguments and dispatch to pyrcron."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--create", action="store_true", default=False, help="create database table if necessary")
    parser.add_argument("-t", "--test", action="store_true", default=False, help="test only; do not write to SQL")
    parser.add_argument("-n", "--host", default=None, help="provide a host id to test connection to; 'all' will test all")
    parser.add_argument("-u", "--auth", default=None, help="used in combination with --host to provide the user id to connect to --host with")
    parser.add_argument("-s", "--show", default=None, help="print to screen items of interest (crons, hosts, auth);'all' will print all entries")
    parser.add_argument("-r", "--run", action="store_true", default=False, help="run crons")
    parser.add_argument("-i", "--cronid", default=None, help="used in combination with --run to manually run a cron; find id via --show crons")
    parser.add_argument("-a", "--add_auth", default=None, help="add new auth entry: [{'user': <user>, 'password': <password>}]")
    parser.add_argument("-o", "--add_host", default=None, help="add new host entry: [{'host': <host>, 'user': <user>, ('port': <port>, 'local_port': <local_port>)}]")
    parser.add_argument("-x", "--add_cron", default=None, help="add new cron entry: [{'name': <cron_name>, 'path': <cron_path>, 'hosts': <[hosts]>, 'users': <[users]>, ('enabled': <0|1>, 'action': <0|1>, 'min': <min>, 'hour': <hour>', 'dom': <dom>, 'mon': <mon>, 'dow': <dow>)}]")
    args = parser.parse_args()
    r = pyrcron(args.test)
    if args.create:
        print r._create()
    elif args.show:
        try:
            if not args.show.lower() in ["auth", "hosts", "crons", "all"]:
                print("[ERROR] Invalid --show argument")
                exit(-1)
        except:
            print("[ERROR] Invalid --show argument")
            exit(-1)
        print json.dumps(r.show(args.show), indent=4)
    elif args.run:
        r.run(cron_id=args.cronid)
        # Give daemon worker threads a moment to flush their output.
        sleep(1)
    elif args.add_auth:
        print r.add("auth", args.add_auth)
    elif args.add_host:
        print r.add("host", args.add_host)
    elif args.add_cron:
        print r.add("cron", args.add_cron)
    elif args.host:
        # Connection test: requires both --host and --auth ids as integers.
        if not args.auth:
            print("[ERROR] Must provide --auth hostid")
            exit(-1)
        if not re.search("^[0-9]+$", str(args.auth)):
            print("[ERROR] --auth userid must be an integer")
            exit(-1)
        if not re.search("^[0-9]+$", str(args.host)):
            print("[ERROR] --host hostid must be an integer")
            exit(-1)
        print r.testHost(args.auth, args.host)
if __name__ == "__main__":
    main()
|
jontaimanglo/pyrcron
|
pyrcron.py
|
Python
|
gpl-2.0
| 35,301
|
# Django settings for pyjsDemo project.
import os
STATIC = str(os.path.join(os.path.dirname(__file__), 'media').replace('\\','/'))
# NOTE(review): DEBUG=True must not be used in production deployments.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    # ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
DATABASE_ENGINE = 'mysql'           # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'todo'             # Or path to database file if using sqlite3.
DATABASE_USER = 'todo'             # Not used with sqlite3.
DATABASE_PASSWORD = ''         # Not used with sqlite3.
DATABASE_HOST = ''             # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = ''             # Set to empty string for default. Not used with sqlite3.
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
# NOTE(review): this secret key is committed to source control; rotate it and
# load it from the environment for any real deployment.
SECRET_KEY = '#*jv)6zbb15!9z8oru*3irida-24@_5+ib$k6$-&k&oy84ww87'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
# 'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
ROOT_URLCONF = 'djangotasks.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
#'django.contrib.auth',
#'django.contrib.contenttypes',
#'django.contrib.sessions',
#'django.contrib.sites',
'djangotasks.todo',
)
|
Hasimir/pyjs
|
examples/misc/djangotasks/settings.py
|
Python
|
apache-2.0
| 2,910
|
# Absolute path to the directory holding the project's HTML templates.
# NOTE(review): hard-coded to a developer's home directory -- this will
# only work on that machine; consider deriving it from __file__.
TEMPLATE_DIR = '/Users/marcoslh/Documents/waku/templates'
# URL prefix for static assets; empty string means none is configured yet.
STATIC_URL = ''
|
marcoslhc/waku
|
settings.py
|
Python
|
mit
| 74
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""OAuth 2.0 Provider."""
from __future__ import absolute_import
import os
from functools import wraps
from flask import Blueprint, current_app, request, render_template, jsonify, \
abort, redirect
from flask_oauthlib.contrib.oauth2 import bind_cache_grant, bind_sqlalchemy
from flask.ext.login import login_required
from flask.ext.breadcrumbs import register_breadcrumb
from oauthlib.oauth2.rfc6749.errors import OAuth2Error
from werkzeug.urls import url_encode
from invenio.ext.sqlalchemy import db
from invenio.ext.login import login_user
from invenio.base.i18n import _
from invenio.base.globals import cfg
from ..provider import oauth2
from ..models import Client, OAuthUserProxy
from ..registry import scopes as scopes_registry
# Flask blueprint grouping every OAuth2 provider endpoint under /oauth,
# with the module's bundled static files and templates.
blueprint = Blueprint(
    'oauth2server',
    __name__,
    url_prefix='/oauth',
    static_folder="../static",
    template_folder="../templates",
)
@blueprint.before_app_first_request
def setup_app():
    """Setup OAuth2 provider.

    Runs once, before the first request: binds the provider to the app,
    wires SQLAlchemy-backed client storage, configures the grant-token
    cache, and (debug/testing only) allows insecure transport.
    """
    # Initialize OAuth2 provider
    oauth2.init_app(current_app)
    # Configures the OAuth2 provider to use the SQLALchemy models for getters
    # and setters for user, client and tokens.
    bind_sqlalchemy(oauth2, db.session, client=Client)
    # Flask-OAuthlib does not support CACHE_REDIS_URL, so translate it into
    # a ready-made Redis client stored under OAUTH2_CACHE_REDIS_HOST.
    if cfg['OAUTH2_CACHE_TYPE'] == 'redis' and \
            cfg.get('CACHE_REDIS_URL'):
        from redis import from_url as redis_from_url
        cfg.setdefault(
            'OAUTH2_CACHE_REDIS_HOST',
            redis_from_url(cfg['CACHE_REDIS_URL'])
        )
    # Configures an OAuth2Provider instance to use configured caching system
    # to get and set the grant token.
    bind_cache_grant(current_app, oauth2, OAuthUserProxy.get_current_user)
    # Disables oauthlib's secure transport detection in debug mode.
    if current_app.debug or current_app.testing:
        os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'
@oauth2.after_request
def login_oauth2_user(valid, oauth):
    """Log in a user after having been verified.

    :param valid: whether token verification succeeded.
    :param oauth: oauthlib request object carrying the token's owner.
    :return: the ``(valid, oauth)`` pair, unchanged.
    """
    if valid:
        # Attach the token owner to the current session via Flask-Login.
        login_user(oauth.user.id)
    return valid, oauth
def error_handler(f):
    """Decorator converting uncaught OAuth2 errors into redirects.

    FatalClientError instances are already handled by Flask-OAuthlib,
    since those must not be redirected back to the client (see
    http://tools.ietf.org/html/rfc6749#section-4.2.2.1).
    """
    @wraps(f)
    def inner(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except OAuth2Error as exc:
            # Prefer the error's own redirect URI; otherwise fall back to
            # the provider-wide error page.
            target = getattr(exc, 'redirect_uri', oauth2.error_uri)
            return redirect(exc.in_uri(target))
    return inner
def urlreencode(f):
    """Decorator that re-encodes the query string before calling ``f``.

    oauthlib's URL decoding is very strict and very often chokes on
    common user mistakes like not encoding colons, hence let Flask decode
    the request args and reencode them.
    """
    @wraps(f)
    def inner(*args, **kwargs):
        if request.args:
            # Rebuild the URL from Flask's (laxer) parse of the query.
            request.url = "?".join((request.base_url, url_encode(request.args)))
        return f(*args, **kwargs)
    return inner
#
# Views
#
@blueprint.route('/authorize', methods=['GET', 'POST'])
@register_breadcrumb(blueprint, '.', _('Authorize application'))
@login_required
@error_handler
@urlreencode
@oauth2.authorize_handler
def authorize(*args, **kwargs):
    """View for rendering authorization request.

    GET renders the consent page for the requesting client; POST returns
    the user's decision as a boolean which ``authorize_handler`` converts
    into the actual OAuth2 response.
    """
    if request.method == 'GET':
        # kwargs are populated by @oauth2.authorize_handler from the
        # validated authorization request.
        client = Client.query.filter_by(
            client_id=kwargs.get('client_id')
        ).first()
        if not client:
            abort(404)
        ctx = dict(
            client=client,
            oauth_request=kwargs.get('request'),
            # Resolve requested scope names into their registry entries.
            scopes=map(lambda x: scopes_registry[x], kwargs.get('scopes', []))
        )
        return render_template('oauth2server/authorize.html', **ctx)
    confirm = request.form.get('confirm', 'no')
    return confirm == 'yes'
@blueprint.route('/token', methods=['POST', ])
@oauth2.token_handler
def access_token():
    """Token view handles exchange/refresh access tokens.

    The actual token issuing is done entirely by ``@oauth2.token_handler``.
    """
    # Return None or a dictionary. Dictionary will be merged with token
    # returned to the client requesting the access token.
    # Response is in application/json
    return None
@blueprint.route('/errors/')
def errors():
    """Error view in case of invalid oauth requests."""
    from oauthlib.oauth2.rfc6749.errors import raise_from_error
    try:
        # Recreate the exception matching the ?error= query parameter so
        # the template can display its details.
        raise_from_error(request.values.get('error'), params=dict())
    except OAuth2Error as exc:
        return render_template('oauth2server/errors.html', error=exc)
    # Unknown or missing error code: render the page without details.
    return render_template('oauth2server/errors.html', error=None)
@blueprint.route('/ping/', methods=['GET', 'POST'])
@oauth2.require_oauth()
def ping():
    """Test to verify that you have been authenticated.

    Reachable with any valid access token (no particular scope required).
    """
    return jsonify(dict(ping="pong"))
@blueprint.route('/info/')
@oauth2.require_oauth('test:scope')
def info():
    """Test to verify that you have been authenticated.

    Requires the ``test:scope`` scope and is only exposed on testing or
    debug deployments; otherwise responds 404.
    """
    if not (current_app.testing or current_app.debug):
        abort(404)
    return jsonify(dict(
        user=request.oauth.user.id,
        client=request.oauth.client.client_id,
        scopes=list(request.oauth.scopes)
    ))
@blueprint.route('/invalid/')
@oauth2.require_oauth('invalid_scope')
def invalid():
    """Test to verify that you have been authenticated.

    Only exposed on testing or debug deployments; otherwise responds 404.
    """
    if not (current_app.testing or current_app.debug):
        abort(404)
    # Not reachable: the 'invalid_scope' requirement always rejects first.
    return jsonify(dict(ding="dong"))
|
egabancho/invenio
|
invenio/modules/oauth2server/views/server.py
|
Python
|
gpl-2.0
| 6,368
|
# -*- coding:utf-8 -*-
from __future__ import absolute_import, unicode_literals
import datetime
import decimal
from json import (
dump as default_dump,
dumps as default_dumps,
load,
loads,
JSONDecoder,
JSONEncoder as DefaultJSONEncoder,
)
# Public API: mirror the stdlib json module's surface.
__all__ = ('dump', 'dumps', 'load', 'loads', 'JSONDecoder', 'JSONEncoder')
class JSONEncoder(DefaultJSONEncoder):
    """
    JSONEncoder subclass that knows how to encode date/time and decimal types.
    """
    def default(self, o):
        # Date/times follow the "Date Time String Format" of the ECMA-262
        # specification: milliseconds precision, trailing 'Z' for UTC.
        if isinstance(o, datetime.datetime):
            text = o.isoformat()
            if o.microsecond:
                # Truncate microseconds (6 digits) down to milliseconds (3).
                text = text[:23] + text[26:]
            return text[:-6] + 'Z' if text.endswith('+00:00') else text
        if isinstance(o, datetime.date):
            return o.isoformat()
        if isinstance(o, datetime.time):
            text = o.isoformat()
            if o.microsecond:
                text = text[:12] + text[15:]
            return text[:-6] + 'Z' if text.endswith('+00:00') else text
        if isinstance(o, decimal.Decimal):
            return self.default_decimal(o)
        # Duck-typed support for any object exposing ``to_decimal()``.
        try:
            converted = o.to_decimal()
        except AttributeError:
            return super(JSONEncoder, self).default(o)
        return self.default_decimal(converted)
    def default_decimal(self, o):
        # Emit the narrowest JSON number: int when the value has no
        # significant fractional part, float otherwise.
        text = str(o)
        if '.' not in text:
            return int(text)
        integral, fractional = text.split('.', 1)
        if fractional.rstrip('0'):
            return float(text)
        return int(integral)
def dump(*args, **kwargs):
    """Serialize to a JSON stream, defaulting to the extended encoder."""
    if 'cls' not in kwargs:
        kwargs['cls'] = JSONEncoder
    return default_dump(*args, **kwargs)
def dumps(*args, **kwargs):
    """Serialize to a JSON string, defaulting to the extended encoder."""
    if 'cls' not in kwargs:
        kwargs['cls'] = JSONEncoder
    return default_dumps(*args, **kwargs)
|
samuelmaudo/yepes
|
yepes/utils/json.py
|
Python
|
bsd-3-clause
| 1,864
|
# -*- coding: utf-8 -*-
"""Tests for course home page date summary blocks."""
from datetime import datetime, timedelta
import ddt
import waffle
from django.contrib.messages.middleware import MessageMiddleware
from django.urls import reverse
from django.test import RequestFactory
from freezegun import freeze_time
from mock import patch
from nose.plugins.attrib import attr
from pytz import utc
from course_modes.models import CourseMode
from course_modes.tests.factories import CourseModeFactory
from courseware.courses import get_course_date_blocks
from courseware.date_summary import (
CertificateAvailableDate,
CourseEndDate,
CourseStartDate,
TodaysDate,
VerificationDeadlineDate,
VerifiedUpgradeDeadlineDate
)
from courseware.models import (
CourseDynamicUpgradeDeadlineConfiguration,
DynamicUpgradeDeadlineConfiguration,
OrgDynamicUpgradeDeadlineConfiguration
)
from lms.djangoapps.commerce.models import CommerceConfiguration
from lms.djangoapps.verify_student.models import VerificationDeadline
from lms.djangoapps.verify_student.tests.factories import SoftwareSecurePhotoVerificationFactory
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.schedules.signals import CREATE_SCHEDULE_WAFFLE_FLAG
from openedx.core.djangoapps.self_paced.models import SelfPacedConfiguration
from openedx.core.djangoapps.site_configuration.tests.factories import SiteFactory
from openedx.core.djangoapps.user_api.preferences.api import set_user_preference
from openedx.core.djangoapps.waffle_utils.testutils import override_waffle_flag
from openedx.features.course_experience import UNIFIED_COURSE_TAB_FLAG, UPGRADE_DEADLINE_MESSAGE, CourseHomeMessages
from student.tests.factories import TEST_PASSWORD, CourseEnrollmentFactory, UserFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
@attr(shard=1)
@ddt.ddt
class CourseDateSummaryTest(SharedModuleStoreTestCase):
    """Tests for course date summary blocks.

    ``create_course_run``/``create_user`` are module-level helpers defined
    elsewhere in this file; judging by the assertions below, the default
    course run starts in the future with a verification deadline 14 days
    from "now" -- confirm against the helper definitions.
    """
    def setUp(self):
        super(CourseDateSummaryTest, self).setUp()
        # Date summary blocks only render when course home improvements
        # are enabled.
        SelfPacedConfiguration.objects.create(enable_course_home_improvements=True)
    def test_course_info_feature_flag(self):
        """With improvements disabled, the info page has no date summary."""
        SelfPacedConfiguration(enable_course_home_improvements=False).save()
        course = create_course_run()
        user = create_user()
        CourseEnrollmentFactory(course_id=course.id, user=user, mode=CourseMode.VERIFIED)
        self.client.login(username=user.username, password=TEST_PASSWORD)
        url = reverse('info', args=(course.id,))
        response = self.client.get(url)
        self.assertNotIn('date-summary', response.content)
    def test_course_home_logged_out(self):
        """The course home page is reachable without authentication."""
        course = create_course_run()
        url = reverse('openedx.course_experience.course_home', args=(course.id,))
        response = self.client.get(url)
        self.assertEqual(200, response.status_code)
    # Tests for which blocks are enabled
    def assert_block_types(self, course, user, expected_blocks):
        """Assert that the enabled block types for this course are as expected."""
        blocks = get_course_date_blocks(course, user)
        self.assertEqual(len(blocks), len(expected_blocks))
        self.assertEqual(set(type(b) for b in blocks), set(expected_blocks))
    # Each ddt tuple is (course_kwargs, user_kwargs, expected block types).
    @ddt.data(
        # Verified enrollment with no photo-verification before course start
        ({}, {}, (CourseEndDate, CourseStartDate, TodaysDate, VerificationDeadlineDate)),
        # Verified enrollment with `approved` photo-verification after course end
        ({'days_till_start': -10,
          'days_till_end': -5,
          'days_till_upgrade_deadline': -6,
          'days_till_verification_deadline': -5,
          },
         {'verification_status': 'approved'},
         (TodaysDate, CourseEndDate)),
        # Verified enrollment with `expired` photo-verification during course run
        ({'days_till_start': -10},
         {'verification_status': 'expired'},
         (TodaysDate, CourseEndDate, VerificationDeadlineDate)),
        # Verified enrollment with `approved` photo-verification during course run
        ({'days_till_start': -10, },
         {'verification_status': 'approved'},
         (TodaysDate, CourseEndDate)),
        # Verified enrollment with *NO* course end date
        ({'days_till_end': None},
         {},
         (CourseStartDate, TodaysDate, VerificationDeadlineDate)),
        # Verified enrollment with no photo-verification during course run
        ({'days_till_start': -1},
         {},
         (TodaysDate, CourseEndDate, VerificationDeadlineDate)),
        # Verification approved
        ({'days_till_start': -10,
          'days_till_upgrade_deadline': -1,
          'days_till_verification_deadline': 1,
          },
         {'verification_status': 'approved'},
         (TodaysDate, CourseEndDate)),
        # After upgrade deadline
        ({'days_till_start': -10,
          'days_till_upgrade_deadline': -1},
         {},
         (TodaysDate, CourseEndDate, VerificationDeadlineDate)),
        # After verification deadline
        ({'days_till_start': -10,
          'days_till_upgrade_deadline': -2,
          'days_till_verification_deadline': -1},
         {},
         (TodaysDate, CourseEndDate, VerificationDeadlineDate)),
    )
    @ddt.unpack
    def test_enabled_block_types(self, course_kwargs, user_kwargs, expected_blocks):
        course = create_course_run(**course_kwargs)
        user = create_user(**user_kwargs)
        CourseEnrollmentFactory(course_id=course.id, user=user, mode=CourseMode.VERIFIED)
        self.assert_block_types(course, user, expected_blocks)
    @ddt.data(
        # Course not started
        ({}, (CourseStartDate, TodaysDate, CourseEndDate)),
        # Course active
        ({'days_till_start': -1}, (TodaysDate, CourseEndDate)),
        # Course ended
        ({'days_till_start': -10, 'days_till_end': -5},
         (TodaysDate, CourseEndDate)),
    )
    @ddt.unpack
    def test_enabled_block_types_without_enrollment(self, course_kwargs, expected_blocks):
        course = create_course_run(**course_kwargs)
        user = create_user()
        self.assert_block_types(course, user, expected_blocks)
    def test_enabled_block_types_with_non_upgradeable_course_run(self):
        """Without a verified mode there is nothing to upgrade or verify."""
        course = create_course_run(days_till_start=-10, days_till_verification_deadline=None)
        user = create_user()
        CourseMode.objects.get(course_id=course.id, mode_slug=CourseMode.VERIFIED).delete()
        CourseEnrollmentFactory(course_id=course.id, user=user, mode=CourseMode.AUDIT)
        self.assert_block_types(course, user, (TodaysDate, CourseEndDate))
    def test_todays_date_block(self):
        """
        Helper function to test that today's date block renders correctly
        and displays the correct time, accounting for daylight savings
        """
        with freeze_time('2015-01-02'):
            course = create_course_run()
            user = create_user()
            block = TodaysDate(course, user)
            self.assertTrue(block.is_enabled)
            self.assertEqual(block.date, datetime.now(utc))
            self.assertEqual(block.title, 'current_datetime')
    @ddt.data(
        'info',
        'openedx.course_experience.course_home',
    )
    @override_waffle_flag(UNIFIED_COURSE_TAB_FLAG, active=True)
    def test_todays_date_no_timezone(self, url_name):
        """Without a user timezone preference, data-timezone renders "None"."""
        with freeze_time('2015-01-02'):
            course = create_course_run()
            user = create_user()
            self.client.login(username=user.username, password=TEST_PASSWORD)
            html_elements = [
                '<h3 class="hd hd-6 handouts-header">Important Course Dates</h3>',
                '<div class="date-summary-container">',
                '<div class="date-summary date-summary-todays-date">',
                '<span class="hd hd-6 heading localized-datetime"',
                'data-datetime="2015-01-02 00:00:00+00:00"',
                'data-string="Today is {date}"',
                'data-timezone="None"'
            ]
            url = reverse(url_name, args=(course.id,))
            response = self.client.get(url, follow=True)
            for html in html_elements:
                self.assertContains(response, html)
    @ddt.data(
        'info',
        'openedx.course_experience.course_home',
    )
    @override_waffle_flag(UNIFIED_COURSE_TAB_FLAG, active=True)
    def test_todays_date_timezone(self, url_name):
        """A user timezone preference is forwarded in data-timezone."""
        with freeze_time('2015-01-02'):
            course = create_course_run()
            user = create_user()
            self.client.login(username=user.username, password=TEST_PASSWORD)
            set_user_preference(user, 'time_zone', 'America/Los_Angeles')
            url = reverse(url_name, args=(course.id,))
            response = self.client.get(url, follow=True)
            html_elements = [
                '<h3 class="hd hd-6 handouts-header">Important Course Dates</h3>',
                '<div class="date-summary-container">',
                '<div class="date-summary date-summary-todays-date">',
                '<span class="hd hd-6 heading localized-datetime"',
                'data-datetime="2015-01-02 00:00:00+00:00"',
                'data-string="Today is {date}"',
                'data-timezone="America/Los_Angeles"'
            ]
            for html in html_elements:
                self.assertContains(response, html)
    ## Tests Course Start Date
    def test_course_start_date(self):
        course = create_course_run()
        user = create_user()
        block = CourseStartDate(course, user)
        self.assertEqual(block.date, course.start)
    @ddt.data(
        'info',
        'openedx.course_experience.course_home',
    )
    @override_waffle_flag(UNIFIED_COURSE_TAB_FLAG, active=True)
    def test_start_date_render(self, url_name):
        with freeze_time('2015-01-02'):
            course = create_course_run()
            user = create_user()
            self.client.login(username=user.username, password=TEST_PASSWORD)
            url = reverse(url_name, args=(course.id,))
            response = self.client.get(url, follow=True)
            html_elements = [
                'data-string="in 1 day - {date}"',
                'data-datetime="2015-01-03 00:00:00+00:00"'
            ]
            for html in html_elements:
                self.assertContains(response, html)
    @ddt.data(
        'info',
        'openedx.course_experience.course_home',
    )
    @override_waffle_flag(UNIFIED_COURSE_TAB_FLAG, active=True)
    def test_start_date_render_time_zone(self, url_name):
        with freeze_time('2015-01-02'):
            course = create_course_run()
            user = create_user()
            self.client.login(username=user.username, password=TEST_PASSWORD)
            set_user_preference(user, 'time_zone', 'America/Los_Angeles')
            url = reverse(url_name, args=(course.id,))
            response = self.client.get(url, follow=True)
            html_elements = [
                'data-string="in 1 day - {date}"',
                'data-datetime="2015-01-03 00:00:00+00:00"',
                'data-timezone="America/Los_Angeles"'
            ]
            for html in html_elements:
                self.assertContains(response, html)
    ## Tests Course End Date Block
    def test_course_end_date_for_certificate_eligible_mode(self):
        course = create_course_run(days_till_start=-1)
        user = create_user()
        CourseEnrollmentFactory(course_id=course.id, user=user, mode=CourseMode.VERIFIED)
        block = CourseEndDate(course, user)
        self.assertEqual(
            block.description,
            'To earn a certificate, you must complete all requirements before this date.'
        )
    def test_course_end_date_for_non_certificate_eligible_mode(self):
        course = create_course_run(days_till_start=-1)
        user = create_user()
        CourseEnrollmentFactory(course_id=course.id, user=user, mode=CourseMode.AUDIT)
        block = CourseEndDate(course, user)
        self.assertEqual(
            block.description,
            'After this date, course content will be archived.'
        )
        self.assertEqual(block.title, 'Course End')
    def test_course_end_date_after_course(self):
        course = create_course_run(days_till_start=-2, days_till_end=-1)
        user = create_user()
        CourseEnrollmentFactory(course_id=course.id, user=user, mode=CourseMode.VERIFIED)
        block = CourseEndDate(course, user)
        self.assertEqual(
            block.description,
            'This course is archived, which means you can review course content but it is no longer active.'
        )
        self.assertEqual(block.title, 'Course End')
    def test_ecommerce_checkout_redirect(self):
        """Verify the block link redirects to ecommerce checkout if it's enabled."""
        sku = 'TESTSKU'
        configuration = CommerceConfiguration.objects.create(checkout_on_ecommerce_service=True)
        course = create_course_run()
        user = create_user()
        course_mode = CourseMode.objects.get(course_id=course.id, mode_slug=CourseMode.VERIFIED)
        course_mode.sku = sku
        course_mode.save()
        CourseEnrollmentFactory(course_id=course.id, user=user, mode=CourseMode.VERIFIED)
        block = VerifiedUpgradeDeadlineDate(course, user)
        self.assertEqual(block.link, '{}?sku={}'.format(configuration.basket_checkout_page, sku))
    ## CertificateAvailableDate
    @waffle.testutils.override_switch('certificates.auto_certificate_generation', True)
    def test_no_certificate_available_date(self):
        course = create_course_run(days_till_start=-1)
        user = create_user()
        CourseEnrollmentFactory(course_id=course.id, user=user, mode=CourseMode.AUDIT)
        block = CertificateAvailableDate(course, user)
        self.assertEqual(block.date, None)
        self.assertFalse(block.is_enabled)
    ## CertificateAvailableDate
    @waffle.testutils.override_switch('certificates.auto_certificate_generation', True)
    def test_no_certificate_available_date_for_self_paced(self):
        # Self-paced runs never surface the block, even with a date set.
        course = create_self_paced_course_run()
        verified_user = create_user()
        CourseEnrollmentFactory(course_id=course.id, user=verified_user, mode=CourseMode.VERIFIED)
        course.certificate_available_date = datetime.now(utc) + timedelta(days=7)
        course.save()
        block = CertificateAvailableDate(course, verified_user)
        self.assertNotEqual(block.date, None)
        self.assertFalse(block.is_enabled)
    def test_no_certificate_available_date_for_audit_course(self):
        """
        Tests that Certificate Available Date is not visible in the course "Important Course Dates" section
        if the course only has audit mode.
        """
        course = create_course_run()
        audit_user = create_user()
        # Enroll learner in the audit mode and verify the course only has 1 mode (audit)
        CourseEnrollmentFactory(course_id=course.id, user=audit_user, mode=CourseMode.AUDIT)
        CourseMode.objects.get(course_id=course.id, mode_slug=CourseMode.VERIFIED).delete()
        all_course_modes = CourseMode.modes_for_course(course.id)
        self.assertEqual(len(all_course_modes), 1)
        self.assertEqual(all_course_modes[0].slug, CourseMode.AUDIT)
        course.certificate_available_date = datetime.now(utc) + timedelta(days=7)
        course.save()
        # Verify Certificate Available Date is not enabled for learner.
        block = CertificateAvailableDate(course, audit_user)
        self.assertFalse(block.is_enabled)
        self.assertNotEqual(block.date, None)
    @waffle.testutils.override_switch('certificates.auto_certificate_generation', True)
    def test_certificate_available_date_defined(self):
        course = create_course_run()
        audit_user = create_user()
        CourseEnrollmentFactory(course_id=course.id, user=audit_user, mode=CourseMode.AUDIT)
        verified_user = create_user()
        CourseEnrollmentFactory(course_id=course.id, user=verified_user, mode=CourseMode.VERIFIED)
        course.certificate_available_date = datetime.now(utc) + timedelta(days=7)
        enable_course_certificates(course)
        # NOTE(review): result unused -- presumably a leftover; the loop
        # below constructs the blocks that are actually asserted on.
        CertificateAvailableDate(course, audit_user)
        for block in (CertificateAvailableDate(course, audit_user), CertificateAvailableDate(course, verified_user)):
            self.assertIsNotNone(course.certificate_available_date)
            self.assertEqual(block.date, course.certificate_available_date)
            self.assertTrue(block.is_enabled)
    ## VerificationDeadlineDate
    def test_no_verification_deadline(self):
        course = create_course_run(days_till_start=-1, days_till_verification_deadline=None)
        user = create_user()
        CourseEnrollmentFactory(course_id=course.id, user=user, mode=CourseMode.VERIFIED)
        block = VerificationDeadlineDate(course, user)
        self.assertFalse(block.is_enabled)
    def test_no_verified_enrollment(self):
        course = create_course_run(days_till_start=-1)
        user = create_user()
        CourseEnrollmentFactory(course_id=course.id, user=user, mode=CourseMode.AUDIT)
        block = VerificationDeadlineDate(course, user)
        self.assertFalse(block.is_enabled)
    def test_verification_deadline_date_upcoming(self):
        with freeze_time('2015-01-02'):
            course = create_course_run(days_till_start=-1)
            user = create_user()
            CourseEnrollmentFactory(course_id=course.id, user=user, mode=CourseMode.VERIFIED)
            block = VerificationDeadlineDate(course, user)
            self.assertEqual(block.css_class, 'verification-deadline-upcoming')
            self.assertEqual(block.title, 'Verification Deadline')
            self.assertEqual(block.date, datetime.now(utc) + timedelta(days=14))
            self.assertEqual(
                block.description,
                'You must successfully complete verification before this date to qualify for a Verified Certificate.'
            )
            self.assertEqual(block.link_text, 'Verify My Identity')
            self.assertEqual(block.link, reverse('verify_student_verify_now', args=(course.id,)))
    def test_verification_deadline_date_retry(self):
        with freeze_time('2015-01-02'):
            course = create_course_run(days_till_start=-1)
            user = create_user(verification_status='denied')
            CourseEnrollmentFactory(course_id=course.id, user=user, mode=CourseMode.VERIFIED)
            block = VerificationDeadlineDate(course, user)
            self.assertEqual(block.css_class, 'verification-deadline-retry')
            self.assertEqual(block.title, 'Verification Deadline')
            self.assertEqual(block.date, datetime.now(utc) + timedelta(days=14))
            self.assertEqual(
                block.description,
                'You must successfully complete verification before this date to qualify for a Verified Certificate.'
            )
            self.assertEqual(block.link_text, 'Retry Verification')
            self.assertEqual(block.link, reverse('verify_student_reverify'))
    def test_verification_deadline_date_denied(self):
        with freeze_time('2015-01-02'):
            course = create_course_run(days_till_start=-10, days_till_verification_deadline=-1)
            user = create_user(verification_status='denied')
            CourseEnrollmentFactory(course_id=course.id, user=user, mode=CourseMode.VERIFIED)
            block = VerificationDeadlineDate(course, user)
            self.assertEqual(block.css_class, 'verification-deadline-passed')
            self.assertEqual(block.title, 'Missed Verification Deadline')
            self.assertEqual(block.date, datetime.now(utc) + timedelta(days=-1))
            self.assertEqual(
                block.description,
                "Unfortunately you missed this course's deadline for a successful verification."
            )
            self.assertEqual(block.link_text, 'Learn More')
            self.assertEqual(block.link, '')
    @ddt.data(
        (-1, '1 day ago - {date}'),
        (1, 'in 1 day - {date}')
    )
    @ddt.unpack
    def test_render_date_string_past(self, delta, expected_date_string):
        with freeze_time('2015-01-02'):
            course = create_course_run(days_till_start=-10, days_till_verification_deadline=delta)
            user = create_user(verification_status='denied')
            CourseEnrollmentFactory(course_id=course.id, user=user, mode=CourseMode.VERIFIED)
            block = VerificationDeadlineDate(course, user)
            self.assertEqual(block.relative_datestring, expected_date_string)
@attr(shard=1)
@ddt.ddt
class TestDateAlerts(SharedModuleStoreTestCase):
    """
    Unit tests for date alerts.

    Time is frozen at 2017-07-01 09:00 UTC while the course run is created;
    the end-date rows below imply the run ends 2017-07-15 09:00, and the
    certificate availability date is set 21 days after the start.
    """
    def setUp(self):
        super(TestDateAlerts, self).setUp()
        with freeze_time('2017-07-01 09:00:00'):
            self.course = create_course_run(days_till_start=0)
            self.course.certificate_available_date = self.course.start + timedelta(days=21)
            enable_course_certificates(self.course)
            self.enrollment = CourseEnrollmentFactory(course_id=self.course.id, mode=CourseMode.AUDIT)
            self.request = RequestFactory().request()
            self.request.session = {}
            self.request.user = self.enrollment.user
            # The messages framework needs middleware-initialized storage on
            # the request before alerts can be registered.
            MessageMiddleware().process_request(self.request)

    def _check_alerts(self, block_class, current_time, expected_message_html):
        """
        Register alerts for ``block_class`` at ``current_time`` and assert
        that exactly the expected message (or no message at all) was queued.
        """
        with freeze_time(current_time):
            block = block_class(self.course, self.request.user)
            block.register_alerts(self.request, self.course)
            messages = list(CourseHomeMessages.user_messages(self.request))
            if expected_message_html:
                self.assertEqual(len(messages), 1)
                self.assertIn(expected_message_html, messages[0].message_html)
            else:
                self.assertEqual(len(messages), 0)

    @ddt.data(
        ['2017-01-01 09:00:00', u'in 6 months on <span class="date localized-datetime" data-format="shortDate"'],
        ['2017-06-17 09:00:00', u'in 2 weeks on <span class="date localized-datetime" data-format="shortDate"'],
        ['2017-06-30 10:00:00', u'in 1 day at <span class="date localized-datetime" data-format="shortTime"'],
        ['2017-07-01 08:00:00', u'in 1 hour at <span class="date localized-datetime" data-format="shortTime"'],
        ['2017-07-01 08:55:00', u'in 5 minutes at <span class="date localized-datetime" data-format="shortTime"'],
        ['2017-07-01 09:00:00', None],
        ['2017-08-01 09:00:00', None],
    )
    @ddt.unpack
    def test_start_date_alert(self, current_time, expected_message_html):
        """
        Verify that course start date alerts are registered.
        """
        self._check_alerts(CourseStartDate, current_time, expected_message_html)

    @ddt.data(
        ['2017-06-30 09:00:00', None],
        ['2017-07-01 09:00:00', u'in 2 weeks on <span class="date localized-datetime" data-format="shortDate"'],
        ['2017-07-14 10:00:00', u'in 1 day at <span class="date localized-datetime" data-format="shortTime"'],
        ['2017-07-15 08:00:00', u'in 1 hour at <span class="date localized-datetime" data-format="shortTime"'],
        ['2017-07-15 08:55:00', u'in 5 minutes at <span class="date localized-datetime" data-format="shortTime"'],
        ['2017-07-15 09:00:00', None],
        ['2017-08-15 09:00:00', None],
    )
    @ddt.unpack
    def test_end_date_alert(self, current_time, expected_message_html):
        """
        Verify that course end date alerts are registered.
        """
        self._check_alerts(CourseEndDate, current_time, expected_message_html)

    # NOTE(review): these literals were syntactically invalid in the original
    # (unescaped apostrophe inside a single-quoted string); re-quoted with
    # double quotes. The apostrophe may originally have been the typographic
    # U+2019 character -- confirm against the upgrade-message template if
    # these assertions fail.
    @ddt.data(
        ['2017-06-20 09:00:00', None],
        ['2017-06-21 09:00:00', u"Don't forget, you have 2 weeks left to upgrade to a Verified Certificate."],
        ['2017-07-04 10:00:00', u"Don't forget, you have 1 day left to upgrade to a Verified Certificate."],
        ['2017-07-05 08:00:00', u"Don't forget, you have 1 hour left to upgrade to a Verified Certificate."],
        ['2017-07-05 08:55:00', u"Don't forget, you have 5 minutes left to upgrade to a Verified Certificate."],
        ['2017-07-05 09:00:00', None],
        ['2017-08-05 09:00:00', None],
    )
    @ddt.unpack
    @override_waffle_flag(UPGRADE_DEADLINE_MESSAGE, active=True)
    def test_verified_upgrade_deadline_alert(self, current_time, expected_message_html):
        """
        Verify the verified upgrade deadline alerts.
        """
        self._check_alerts(VerifiedUpgradeDeadlineDate, current_time, expected_message_html)

    @ddt.data(
        ['2017-07-15 08:00:00', None],
        ['2017-07-15 09:00:00', u'If you have earned a certificate, you will be able to access it 1 week from now.'],
        ['2017-07-21 09:00:00', u'If you have earned a certificate, you will be able to access it 1 day from now.'],
        ['2017-07-22 08:00:00', u'If you have earned a certificate, you will be able to access it 1 hour from now.'],
        ['2017-07-22 09:00:00', None],
        ['2017-07-23 09:00:00', None],
    )
    @ddt.unpack
    @waffle.testutils.override_switch('certificates.auto_certificate_generation', True)
    def test_certificate_availability_alert(self, current_time, expected_message_html):
        """
        Verify the certificate availability date alerts.
        """
        self._check_alerts(CertificateAvailableDate, current_time, expected_message_html)
@ddt.ddt
@attr(shard=1)
class TestScheduleOverrides(SharedModuleStoreTestCase):
    """Tests for VerifiedUpgradeDeadlineDate when Schedules drive the upgrade deadline."""
    def setUp(self):
        super(TestScheduleOverrides, self).setUp()
        # Schedule-creation signals resolve the current site; stub it out.
        patcher = patch('openedx.core.djangoapps.schedules.signals.get_current_site')
        mock_get_current_site = patcher.start()
        self.addCleanup(patcher.stop)
        mock_get_current_site.return_value = SiteFactory.create()
    @override_waffle_flag(CREATE_SCHEDULE_WAFFLE_FLAG, True)
    def test_date_with_self_paced_with_enrollment_before_course_start(self):
        """ Enrolling before a course begins should result in the upgrade deadline being set relative to the
        course start date. """
        global_config = DynamicUpgradeDeadlineConfiguration.objects.create(enabled=True)
        course = create_self_paced_course_run(days_till_start=3)
        overview = CourseOverview.get_from_id(course.id)
        expected = overview.start + timedelta(days=global_config.deadline_days)
        enrollment = CourseEnrollmentFactory(course_id=course.id, mode=CourseMode.AUDIT)
        block = VerifiedUpgradeDeadlineDate(course, enrollment.user)
        self.assertEqual(block.date, expected)
        self._check_text(block)
    def _check_text(self, upgrade_date_summary):
        # Shared assertions on the user-facing copy of the upgrade block.
        self.assertEqual(upgrade_date_summary.title, 'Upgrade to Verified Certificate')
        self.assertEqual(
            upgrade_date_summary.description,
            'Don\'t miss the opportunity to highlight your new knowledge and skills by earning a verified'
            ' certificate.'
        )
        self.assertEqual(upgrade_date_summary.relative_datestring, 'by {date}')
    @override_waffle_flag(CREATE_SCHEDULE_WAFFLE_FLAG, True)
    def test_date_with_self_paced_with_enrollment_after_course_start(self):
        """ Enrolling after a course begins should result in the upgrade deadline being set relative to the
        enrollment date.
        Additionally, OrgDynamicUpgradeDeadlineConfiguration should override the number of days until the deadline,
        and CourseDynamicUpgradeDeadlineConfiguration should override the org-level override.
        """
        global_config = DynamicUpgradeDeadlineConfiguration.objects.create(enabled=True)
        course = create_self_paced_course_run(days_till_start=-1, org_id='TestOrg')
        enrollment = CourseEnrollmentFactory(course_id=course.id, mode=CourseMode.AUDIT)
        block = VerifiedUpgradeDeadlineDate(course, enrollment.user)
        expected = enrollment.created + timedelta(days=global_config.deadline_days)
        self.assertEqual(block.date, expected)
        # Orgs should be able to override the deadline
        org_config = OrgDynamicUpgradeDeadlineConfiguration.objects.create(
            enabled=True, org_id=course.org, deadline_days=4
        )
        enrollment = CourseEnrollmentFactory(course_id=course.id, mode=CourseMode.AUDIT)
        block = VerifiedUpgradeDeadlineDate(course, enrollment.user)
        expected = enrollment.created + timedelta(days=org_config.deadline_days)
        self.assertEqual(block.date, expected)
        # Courses should be able to override the deadline (and the org-level override)
        course_config = CourseDynamicUpgradeDeadlineConfiguration.objects.create(
            enabled=True, course_id=course.id, deadline_days=3
        )
        enrollment = CourseEnrollmentFactory(course_id=course.id, mode=CourseMode.AUDIT)
        block = VerifiedUpgradeDeadlineDate(course, enrollment.user)
        expected = enrollment.created + timedelta(days=course_config.deadline_days)
        self.assertEqual(block.date, expected)
    @override_waffle_flag(CREATE_SCHEDULE_WAFFLE_FLAG, True)
    def test_date_with_self_paced_without_dynamic_upgrade_deadline(self):
        """ Disabling the dynamic upgrade deadline functionality should result in the verified mode's
        expiration date being returned. """
        DynamicUpgradeDeadlineConfiguration.objects.create(enabled=False)
        course = create_self_paced_course_run()
        expected = CourseMode.objects.get(course_id=course.id, mode_slug=CourseMode.VERIFIED).expiration_datetime
        enrollment = CourseEnrollmentFactory(course_id=course.id, mode=CourseMode.AUDIT)
        block = VerifiedUpgradeDeadlineDate(course, enrollment.user)
        self.assertEqual(block.date, expected)
    @override_waffle_flag(CREATE_SCHEDULE_WAFFLE_FLAG, True)
    def test_date_with_existing_schedule(self):
        """ If a schedule is created while deadlines are disabled, they shouldn't magically appear once the feature is
        turned on. """
        course = create_self_paced_course_run(days_till_start=-1)
        DynamicUpgradeDeadlineConfiguration.objects.create(enabled=False)
        course_config = CourseDynamicUpgradeDeadlineConfiguration.objects.create(enabled=False, course_id=course.id)
        enrollment = CourseEnrollmentFactory(course_id=course.id, mode=CourseMode.AUDIT)
        # The enrollment has a schedule, but the upgrade deadline should be None
        self.assertIsNone(enrollment.schedule.upgrade_deadline)
        block = VerifiedUpgradeDeadlineDate(course, enrollment.user)
        expected = CourseMode.objects.get(course_id=course.id, mode_slug=CourseMode.VERIFIED).expiration_datetime
        self.assertEqual(block.date, expected)
        # Now if we turn on the feature for this course, this existing enrollment should be unaffected
        course_config.enabled = True
        course_config.save()
        block = VerifiedUpgradeDeadlineDate(course, enrollment.user)
        self.assertEqual(block.date, expected)
    @ddt.data(
        # (enroll before configs, org enabled, org opt-out, course enabled, course opt-out, expected dynamic deadline)
        (False, False, False, False, False, True),
        (False, False, False, False, True, True),
        (False, False, False, True, False, True),
        (False, False, False, True, True, False),
        (False, False, True, False, False, True),
        (False, False, True, False, True, True),
        (False, False, True, True, False, True),
        (False, False, True, True, True, False),
        (False, True, False, False, False, True),
        (False, True, False, False, True, True),
        (False, True, False, True, False, True),
        (False, True, False, True, True, False), # course-level overrides org-level
        (False, True, True, False, False, False),
        (False, True, True, False, True, False),
        (False, True, True, True, False, True), # course-level overrides org-level
        (False, True, True, True, True, False),
        (True, False, False, False, False, True),
        (True, False, False, False, True, True),
        (True, False, False, True, False, True),
        (True, False, False, True, True, False),
        (True, False, True, False, False, True),
        (True, False, True, False, True, True),
        (True, False, True, True, False, True),
        (True, False, True, True, True, False),
        (True, True, False, False, False, True),
        (True, True, False, False, True, True),
        (True, True, False, True, False, True),
        (True, True, False, True, True, False), # course-level overrides org-level
        (True, True, True, False, False, False),
        (True, True, True, False, True, False),
        (True, True, True, True, False, True), # course-level overrides org-level
        (True, True, True, True, True, False),
    )
    @ddt.unpack
    @override_waffle_flag(CREATE_SCHEDULE_WAFFLE_FLAG, True)
    def test_date_with_org_and_course_config_overrides(self, enroll_first, org_config_enabled, org_config_opt_out,
                                                       course_config_enabled, course_config_opt_out,
                                                       expected_dynamic_deadline):
        """ Runs through every combination of org-level plus course-level DynamicUpgradeDeadlineConfiguration enabled
        and opt-out states to verify that course-level overrides the org-level config. """
        course = create_self_paced_course_run(days_till_start=-1, org_id='TestOrg')
        DynamicUpgradeDeadlineConfiguration.objects.create(enabled=True)
        if enroll_first:
            enrollment = CourseEnrollmentFactory(course_id=course.id, mode=CourseMode.AUDIT, course__self_paced=True)
        OrgDynamicUpgradeDeadlineConfiguration.objects.create(
            enabled=org_config_enabled, opt_out=org_config_opt_out, org_id=course.id.org
        )
        CourseDynamicUpgradeDeadlineConfiguration.objects.create(
            enabled=course_config_enabled, opt_out=course_config_opt_out, course_id=course.id
        )
        if not enroll_first:
            enrollment = CourseEnrollmentFactory(course_id=course.id, mode=CourseMode.AUDIT, course__self_paced=True)
        # The enrollment has a schedule, and the upgrade_deadline is set when expected_dynamic_deadline is True
        if not enroll_first:
            self.assertEqual(enrollment.schedule.upgrade_deadline is not None, expected_dynamic_deadline)
        # The CourseEnrollment.upgrade_deadline property method is checking the configs
        self.assertEqual(enrollment.dynamic_upgrade_deadline is not None, expected_dynamic_deadline)
def create_user(verification_status=None):
    """Create a new User instance.

    Arguments:
        verification_status (str): If not None, a SoftwareSecurePhotoVerification
            record with this status is created for the user.
    """
    new_user = UserFactory()
    if verification_status is None:
        return new_user
    SoftwareSecurePhotoVerificationFactory.create(user=new_user, status=verification_status)
    return new_user
def create_course_run(
    days_till_start=1, days_till_end=14, days_till_upgrade_deadline=4, days_till_verification_deadline=14,
):
    """Create a course run with audit and verified CourseMode objects.

    All date arguments are day offsets from now.

    Arguments:
        days_till_start (int): Days until the course starts.
        days_till_end (int): Days until the course ends; None leaves end unset.
        days_till_upgrade_deadline (int): Days until the verified mode expires.
        days_till_verification_deadline (int): Days until the verification
            deadline; None skips creating a VerificationDeadline.
    """
    now = datetime.now(utc)
    course = CourseFactory.create(start=now + timedelta(days=days_till_start))
    if days_till_end is None:
        course.end = None
    else:
        course.end = now + timedelta(days=days_till_end)
    CourseModeFactory(course_id=course.id, mode_slug=CourseMode.AUDIT)
    CourseModeFactory(
        course_id=course.id,
        mode_slug=CourseMode.VERIFIED,
        expiration_datetime=now + timedelta(days=days_till_upgrade_deadline),
    )
    if days_till_verification_deadline is not None:
        VerificationDeadline.objects.create(
            course_key=course.id,
            deadline=now + timedelta(days=days_till_verification_deadline),
        )
    return course
def create_self_paced_course_run(days_till_start=1, org_id=None):
    """Create a self-paced course run plus its audit and verified modes.

    Arguments:
        days_till_start (int): Days from now until the course starts.
        org_id (string): Org to assign the course to; defaults to 'TestedX'.
    """
    now = datetime.now(utc)
    course = CourseFactory.create(
        start=now + timedelta(days=days_till_start),
        self_paced=True,
        org=org_id if org_id else 'TestedX',
    )
    CourseModeFactory(course_id=course.id, mode_slug=CourseMode.AUDIT)
    # Verified mode expires far enough out that tests control the deadline
    # via the dynamic-deadline configuration instead.
    CourseModeFactory(
        course_id=course.id,
        mode_slug=CourseMode.VERIFIED,
        expiration_datetime=now + timedelta(days=100),
    )
    return course
def enable_course_certificates(course):
    """
    Enable course certificate configuration by attaching one active
    certificate definition and saving the course.
    """
    certificate = {
        u'course_title': u'Test',
        u'name': u'',
        u'is_active': True,
    }
    course.certificates = {u'certificates': [certificate]}
    course.save()
|
Stanford-Online/edx-platform
|
lms/djangoapps/courseware/tests/test_date_summary.py
|
Python
|
agpl-3.0
| 39,275
|
import grpc
import json
class ConfigException(RuntimeError):
    """Runtime error for configuration problems (raised by callers of this module)."""
    pass
class RpcException(Exception):
    """Base exception carrying a gRPC status `code` and `details` message.

    A list/dict message is serialized to JSON; any other non-string,
    non-None message is stringified. Subclasses set `code` and a default
    `details`, which is only overridden when a message is given.
    """

    code = None
    details = None

    def __init__(self, message=None, *args, **kwargs):
        # Only derive `details` from an explicit message. Previously a
        # None message fell through str() and became the string "None",
        # clobbering the subclass's default `details`.
        if message is not None:
            if isinstance(message, (list, dict)):
                message = json.dumps(
                    message, default=str, ensure_ascii=True)
            elif not isinstance(message, str):
                message = str(message)
            self.details = message
        super().__init__(message, *args, **kwargs)
class NotFoundException(RpcException):
    """RpcException preset with the gRPC NOT_FOUND status code."""
    code = grpc.StatusCode.NOT_FOUND
    details = 'Not Found'
class BadRequestException(RpcException):
    """RpcException preset with the gRPC INVALID_ARGUMENT status code."""
    code = grpc.StatusCode.INVALID_ARGUMENT
    details = 'Invalid Argument'
|
yandy/sea
|
sea/exceptions.py
|
Python
|
mit
| 754
|
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 29 10:34:00 2012
Author: Josef Perktold
"""
from statsmodels.compat import lrange
from itertools import combinations, zip_longest
from numpy.testing import assert_
def test_zip_longest():
    """Check that zip_longest transposes ragged rows, padding short ones with None."""
    rows = [
        ["a0", "b0", "c0", "d0"],
        ["a1", "b1", "c1"],
        ["a2", "b2", "c2", "d2"],
        ["a3", "b3", "c3", "d3"],
        ["a4", "b4"],
    ]
    expected = [
        ("a0", "a1", "a2", "a3", "a4"),
        ("b0", "b1", "b2", "b3", "b4"),
        ("c0", "c1", "c2", "c3", None),
        ("d0", None, "d2", "d3", None),
    ]
    assert_(
        list(zip_longest(*rows)) == expected,
        "%r not equal %r" % (zip_longest(*rows), expected),
    )
def test_combinations():
    """Check itertools.combinations on a string and on a small range.

    Uses the builtin range() directly instead of the py2-compat lrange()
    shim — combinations() only needs an iterable, so the list wrapper
    added nothing.
    """
    actual = list(combinations("ABCD", 2))
    desired = [
        ("A", "B"),
        ("A", "C"),
        ("A", "D"),
        ("B", "C"),
        ("B", "D"),
        ("C", "D"),
    ]
    assert_(actual == desired, "%r not equal %r" % (actual, desired))
    actual = list(combinations(range(4), 3))
    desired = [(0, 1, 2), (0, 1, 3), (0, 2, 3), (1, 2, 3)]
    assert_(actual == desired, "%r not equal %r" % (actual, desired))
|
bashtage/statsmodels
|
statsmodels/compat/tests/test_itercompat.py
|
Python
|
bsd-3-clause
| 1,185
|
# -*- coding: utf-8 -*-
import MySQLdb
import functools
import time
import datetime
import urllib
import urllib2
import Logger
class DataAccess:
    """Static helpers for reading/writing project and investor data in MySQL.

    Python 2 code (print statements, `except X, e` syntax).
    WARNING: connection credentials/hosts are hard-coded, and every SQL
    statement is built via string concatenation/%-formatting — vulnerable
    to SQL injection. Use parameterized queries and external config.
    """
    ###################
    @staticmethod
    def GetDb():
        # Connection to the "irweb" host; credentials hard-coded.
        conn = None
        conn = MySQLdb.connect(host="irweb", port=3306,user="root", passwd="root",db="mydb",charset="utf8")
        #conn = MySQLdb.connect(host="localhost", port=4000,user="root", passwd="root",db="root",charset="utf8")
        return conn
    @staticmethod
    def GetDb_project():
        # Connection to the local project database; credentials hard-coded.
        conn = None
        #conn = MySQLdb.connect(host="127.0.0.1", port=3306,user="root", passwd="dashboard54321",db="mydb",charset="utf8")
        #conn = MySQLdb.connect(host="localhost", port=3306,user="root", passwd="root",db="mydb",charset="utf8")
        conn = MySQLdb.connect(host="localhost", port=3306,user="root", passwd="root",db="mydb",charset="utf8")
        return conn
    @staticmethod
    def get_investorlist(uid):
        # Return up to 1000 investor rows with userid > uid, or None on DB error.
        # WARNING: uid is concatenated into SQL — injection risk.
        conn = None
        cursor=None
        try:
            conn = DataAccess.GetDb_project()
            cursor = conn.cursor()
            cursor.execute("set NAMES utf8 ")
            sql = "SELECT i.userid,u.name,u.`company` ,i.fundid FROM `mydb`.`investor_info` as i inner join user as u on i.userid=u.userid where i.userid >"+str(uid)+" ORDER BY i.`userid` LIMIT 1000;"
            print sql
            cursor.execute(sql)
            results = cursor.fetchall()
            return results
        except MySQLdb.Error, e:
            #logger.info("get url mysqldb error!")
            print "add into cluster mysqldb error! --%d: %s" % ( e.args[0], e.args[1] )
            return None
        finally:
            if(cursor!=None):
                cursor.close()
            if(conn!= None):
                conn.close()
    @staticmethod
    def get_project_attributes(projectid):
        # Return all attribute rows for a project, or None on DB error.
        # WARNING: projectid is concatenated into SQL — injection risk.
        conn = None
        cursor=None
        try:
            conn = DataAccess.GetDb_project()
            cursor = conn.cursor()
            cursor.execute("set NAMES utf8 ")
            sql = "SELECT projectid,attributeid,value,id FROM `mydb`.`project_category_backend` where projectid= "+str(projectid)+" ORDER BY `id` "
            cursor.execute(sql)
            results = cursor.fetchall()
            return results
        except MySQLdb.Error, e:
            #logger.info("get url mysqldb error!")
            print "add into cluster mysqldb error! --%d: %s" % ( e.args[0], e.args[1] )
            return None
        finally:
            if(cursor!=None):
                cursor.close()
            if(conn!= None):
                conn.close()
    @staticmethod
    def update_team_info__ (projectid,teaminfo_by_editor):
        # Overwrite team_tab.teaminfo for a project. Errors are printed and
        # swallowed. WARNING: values are %-formatted into SQL — injection risk.
        cursor = None
        conn = None
        try:
            conn = DataAccess.GetDb()
            cursor = conn.cursor()
            cursor.execute("set NAMES utf8")
            sql ="update team_tab set teaminfo='%s' where projectid=%d;" % (teaminfo_by_editor,projectid)
            print sql#[0:32]
            cursor.execute(sql)
            conn.commit()
            #print 'add ok',projectid
        except MySQLdb.Error, e:
            print 'insert team error:', e.args[0], e.args[1]
        finally:
            if(cursor!=None):
                cursor.close()
            if(conn!= None):
                conn.close()
    @staticmethod
    def add_team_categoryinfo(projectid,categorystr,matchedstr,teaminfo_by_editor):
        # Insert or update category info for a project's team row.
        # NOTE(review): DataAccess.get_team is not defined in this file chunk —
        # confirm it exists elsewhere. WARNING: SQL injection risk as above.
        cursor = None
        conn = None
        try:
            conn = DataAccess.GetDb()
            cursor = conn.cursor()
            cursor.execute("set NAMES utf8")
            existed=DataAccess.get_team(projectid)
            if(existed==0):
                sql ="insert into team_tab(categorystr,catematchedstr,projectid,teaminfo) values ('%s','%s',%d,'%s');" % (categorystr,matchedstr,projectid,teaminfo_by_editor)
                #sql ="update team_tab set categorystr='%s',catematchedstr='%s' where projectid=%d;" % (categorystr,matchedstr,projectid)
            else:
                sql ="update team_tab set categorystr='%s',catematchedstr='%s' where projectid=%d;" % (categorystr,matchedstr,projectid)
            print sql#[0:32]
            cursor.execute(sql)
            conn.commit()
            #print 'add ok',projectid
        except MySQLdb.Error, e:
            print 'insert team error:', e.args[0], e.args[1]
        finally:
            if(cursor!=None):
                cursor.close()
            if(conn!= None):
                conn.close()
|
zhaochl/python-utils
|
python_jobs_tpl/data_util_demo.py
|
Python
|
apache-2.0
| 4,507
|
from functools import wraps
import time
import traceback
import inspect
import threading
import types
class LoggerException(Exception):
    """Raised when a logging decorator needs ``self._logger`` but none is set."""
    pass
class ReturnTypeMismatchError(Exception):
    """Raised by the return-type-checking decorators on a type mismatch."""
    pass
def _raiseReturnTypeMismatchError(expctedType, actualType):
    """Raise a ReturnTypeMismatchError describing expected vs. actual type."""
    detail = "Type mismatch: Expected is %s but got %s" % (expctedType, actualType)
    raise ReturnTypeMismatchError(detail)
def returns(returnType):
    """Decorator factory: require the wrapped callable's result to be an
    instance of `returnType` (subclasses accepted), else raise
    ReturnTypeMismatchError."""
    def realDecorator(f):
        @wraps(f)
        def wrapperMethod(*args, **kwds):
            outcome = f(*args, **kwds)
            if isinstance(outcome, returnType):
                return outcome
            _raiseReturnTypeMismatchError(returnType, type(outcome))
        return wrapperMethod
    return realDecorator
def returnsNone(f):
    """Decorator: require that f always returns None (a pure command)."""
    return returns(type(None))(f)
def returnsForExample(exampleInstance):
    """Decorator factory: require the result's type to be exactly the type
    of `exampleInstance` (strict equality — unlike `returns`, subclasses
    are rejected)."""
    def realDecorator(f):
        @wraps(f)
        def wrapperMethod(*args, **kwds):
            outcome = f(*args, **kwds)
            if type(outcome) != type(exampleInstance):
                _raiseReturnTypeMismatchError(type(exampleInstance), type(outcome))
            return outcome
        return wrapperMethod
    return realDecorator
def suppressException(resultInCaseOfFailure=None):
    """Method-decorator factory: swallow any exception, log it through
    ``self._logger.error`` and print the traceback, then return
    `resultInCaseOfFailure` instead of propagating."""
    def decorate(f):
        @wraps(f)
        def wrapper(self, *args, **kwds):
            try:
                return f(self, *args, **kwds)
            except Exception as err:
                self._logger.error(str(err))
                traceback.print_exc()
                return resultInCaseOfFailure
        return wrapper
    return decorate
def _logEnterAndExit(loggerMethod, enterMessage, exitMessage,
f, self, *args, **kwds):
loggerMethod(enterMessage)
res= f(self, *args, **kwds)
loggerMethod(exitMessage)
return res
def logEnterAndExit(enterMessage, exitMessage, level='notice'):
    """Method-decorator factory: log `enterMessage` before and
    `exitMessage` after the call, via the ``self._logger`` method named by
    `level`. Raises LoggerException when ``self._logger`` is None."""
    def wrapperFunc(f):
        @wraps(f)
        def wrapper(self, *args, **kwds):
            if self._logger is None:
                raise LoggerException(
                    "Logger unavailable for message '%s' '%s'" %
                    (enterMessage, exitMessage))
            logMethod = getattr(self._logger, level)
            return _logEnterAndExit(
                logMethod, enterMessage, exitMessage, f, self, *args, **kwds)
        return wrapper
    return wrapperFunc
def logTime(f):
    """Method decorator: report the call's wall-clock duration via
    ``self._logger.notice``, whether or not the method raises."""
    @wraps(f)
    def wrappedMethod(self, *args, **kwds):
        startedAt = time.time()
        try:
            return f(self, *args, **kwds)
        finally:
            elapsed = time.time() - startedAt
            self._logger.notice("Method '%s' took %.3f sec" % (
                f.__name__, elapsed))
    return wrappedMethod
def cacheResult(f):
    """Method decorator: memoize results per instance, keyed by the
    positional arguments (which must be hashable). The cache is stored in
    an instance attribute named '<fname>_cached_result'."""
    @wraps(f)
    def wrapper(self, *args):
        cacheName = f.__name__ + "_cached_result"
        cache = self.__dict__.setdefault(cacheName, {})
        if args not in cache:
            cache[args] = f(self, *args)
        return cache[args]
    return wrapper
def override(f):
    """Marker decorator documenting that f overrides a base-class method.

    Purely declarative: returns f unchanged and performs no checking.
    """
    return f
def logFailureAndContinue(func):
    """Method decorator: on any exception, print the traceback, log an
    error via ``self._logger``, and return None instead of raising."""
    @wraps(func)
    def wrappedMethod(self, *args, **kwds):
        try:
            return func(self, *args, **kwds)
        except Exception as err:
            traceback.print_exc()
            self._logger.error("'%s' failed: %s" % (
                func.__name__, str(err)))
    return wrappedMethod
def _synchronizedWith(lock):
def decorator(func):
@wraps(func)
def synchedFunc(*args, **kwds):
with lock:
return func(*args, **kwds)
return synchedFunc
return decorator
def _synchronizedWithAttr(lockName):
def decorator(method):
@wraps(method)
def synchronizedMethod(self, *args, **kwds):
lock= self.__dict__[lockName]
with lock:
return method(self, *args, **kwds)
return synchronizedMethod
return decorator
def synchronized(item):
    """Synchronization decorator with two modes.

    - synchronized('attrName'): method decorator; serializes calls under
      the lock stored in instance attribute 'attrName'.
    - @synchronized on a class: wraps every function defined on the class
      (including __init__) so all calls share one class-wide RLock, also
      exposed on instances as self.__lock__.
    """
    if isinstance(item, str):
        return _synchronizedWithAttr(item)
    elif inspect.isclass(item):
        syncClass= item
        lock= threading.RLock()
        origInit= syncClass.__init__
        def __init__(self, *args, **kwds):
            self.__lock__= lock
            origInit(self, *args, **kwds)
        syncClass.__init__= __init__
        # NOTE(review): this iterates syncClass.__dict__ while setattr()
        # replaces entries (safe only because the key set stays unchanged),
        # and the freshly assigned __init__ above is itself wrapped again by
        # the loop — harmless only because RLock is reentrant. Confirm
        # this double-wrapping is intended.
        for key in syncClass.__dict__:
            val= syncClass.__dict__[key]
            if isinstance(val, types.FunctionType):
                decorator= _synchronizedWith(lock)
                setattr(syncClass, key, decorator(val))
        return syncClass
    else:
        # Guard against unsupported arguments; note assert is stripped
        # under `python -O`.
        assert False, "Unsupported item type: %s is of type %s" % (
            str(item), type(item))
|
lbusoni/pi_gcs
|
pi_gcs/decorator.py
|
Python
|
mit
| 4,972
|
# Copyright (c) 2013 Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.scheduler import fakes
from climate.filters import host_reservation
from climate import tests
class ClimateSchedulerTestCase(tests.TestCase):
    """Smoke test for the ClimateFilter nova scheduler filter."""
    def test_climate_scheduler(self):
        # A plain host should pass the filter when the request only carries
        # unrelated scheduler hints.
        f = host_reservation.ClimateFilter()
        host = fakes.FakeHostState('host1', 'node1', {})
        filter_properties = {"scheduler_hints": {"foo": "bar"}}
        self.assertTrue(f.host_passes(host, filter_properties))
|
scroiset/climate-nova
|
climate/tests/test_climate_scheduler.py
|
Python
|
apache-2.0
| 1,034
|
"""
Imports Cell datastructures for merging and dealing with partial information.
"""
__version__ = "0.1"
from cell import Cell
from bools import *
from numeric import *
from sets import *
from lists import *
from strings import *
from posets import *
from dicts import *
# special cells
from colors import *
from spatial import *
from lazy import *
import networkx as nx
import networkx as nx
class LexicaTaxonomyCell(PartialOrderedCell):
    """ A type system for lexica """
    def __init__(self, initial_value=None):
        """
        Initializes the shared DAG of lexical types (entity/thing/event,
        shapes, media) used as this cell's partial-order domain, then
        merges `initial_value` if given.
        """
        genre_dag = None
        if not self.has_domain(): # only initialize once
            genre_dag = nx.DiGraph()
            # represents IS-A relationships (edge: parent -> child)
            genre_dag.add_edge("entity", "thing")
            genre_dag.add_edge("entity", "event")
            genre_dag.add_edge("event", "action")
            genre_dag.add_edge("thing", "kindle")
            genre_dag.add_edge("thing", "shape")
            genre_dag.add_edge("shape", "shape_with_tail")
            genre_dag.add_edge("shape", "shape_without_tail")
            genre_dag.add_edge("event", "media")
            genre_dag.add_edge("media", "tv_show")
            genre_dag.add_edge("media", "movie")
        # Always called; passes None when the domain already exists.
        PartialOrderedCell.__init__(self, genre_dag)
        if initial_value:
            self.merge(initial_value)
|
EventTeam/beliefs
|
src/beliefs/cells/__init__.py
|
Python
|
gpl-2.0
| 1,491
|
# -*- coding: utf-8 -*-
import errno
import logging
import os
from atomicwrites import atomic_write
from .base import normalize_meta_value
from ._rust import RustStorage
from .. import native
from ..utils import checkdir, expand_path
logger = logging.getLogger(__name__)
class FilesystemStorage(RustStorage):
    """Vdir storage backend: one item per file in a local directory.

    Item handling is delegated to the Rust native library; get_meta and
    set_meta read/write plain metadata files (e.g. color, displayname)
    directly from Python.
    """
    storage_name = 'filesystem'
    _repr_attributes = ('path',)
    def __init__(self, path, fileext, encoding='utf-8', post_hook=None,
                 **kwargs):
        # `path` must already exist (create=False); `fileext` is the item
        # file extension; `post_hook` is an optional command run after
        # writes (empty string disables it on the native side).
        super(FilesystemStorage, self).__init__(**kwargs)
        checkdir(expand_path(path), create=False)
        self.path = path
        self.encoding = encoding
        self.fileext = fileext
        self.post_hook = post_hook
        # ffi.gc ties the native storage's lifetime to this object.
        self._native_storage = native.ffi.gc(
            native.lib.vdirsyncer_init_filesystem(
                path.encode('utf-8'),
                fileext.encode('utf-8'),
                (post_hook or "").encode('utf-8')
            ),
            native.lib.vdirsyncer_storage_free
        )
    @classmethod
    def create_collection(cls, collection, **kwargs):
        # Return kwargs suitable for constructing a storage for
        # `collection`, creating the (sub)directory if needed.
        kwargs = dict(kwargs)
        path = kwargs['path']
        if collection is not None:
            path = os.path.join(path, collection)
        checkdir(expand_path(path), create=True)
        kwargs['path'] = path
        kwargs['collection'] = collection
        return kwargs
    def get_meta(self, key):
        # Read the metadata file named `key`; a missing file means "unset"
        # and yields the empty string.
        fpath = os.path.join(self.path, key)
        try:
            with open(fpath, 'rb') as f:
                return normalize_meta_value(f.read().decode(self.encoding))
        except IOError as e:
            if e.errno == errno.ENOENT:
                return u''
            else:
                raise
    def set_meta(self, key, value):
        # Write the metadata file atomically (temp file + rename).
        value = normalize_meta_value(value)
        fpath = os.path.join(self.path, key)
        with atomic_write(fpath, mode='wb', overwrite=True) as f:
            f.write(value.encode(self.encoding))
|
untitaker/vdirsyncer
|
vdirsyncer/storage/filesystem.py
|
Python
|
mit
| 1,959
|
import string
import httplib, sys
import myparser
import re
import time
class search_google_labs:
    """Query the (defunct) Google Sets service on labs.google.com.

    Python 2 code (httplib, print-era style).
    """
    def __init__(self,list):
        # NOTE(review): parameter `list` shadows the builtin within __init__.
        self.results=""
        self.totalresults=""
        self.server="labs.google.com"
        self.hostname="labs.google.com"
        self.userAgent="(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
        id=0
        self.set=""
        # Build the query string q1=..&q2=.. from the seed terms.
        for x in list:
            id+=1
            if id==1:
                self.set=self.set+"q"+str(id)+"="+str(x)
            else:
                self.set=self.set+"&q"+str(id)+"="+str(x)
    def do_search(self):
        # Issue one GET to /sets and append the raw HTML to totalresults.
        h = httplib.HTTP(self.server)
        h.putrequest('GET', "/sets?hl=en&"+self.set)
        h.putheader('Host', self.hostname)
        h.putheader('User-agent', self.userAgent)
        h.endheaders()
        returncode, returnmsg, headers = h.getreply()
        self.results = h.getfile().read()
        self.totalresults+= self.results
    def get_set(self):
        # Parse accumulated HTML into a result set.
        # NOTE(review): this passes the builtin `list` type (module scope) to
        # myparser.parser, not the seed terms — confirm intended.
        rawres=myparser.parser(self.totalresults,list)
        return rawres.set()
    def process(self):
        self.do_search()
|
WesternCyber/Hacking-Tools
|
the-harvester/theHarvester/discovery/googlesets.py
|
Python
|
gpl-3.0
| 956
|
from multiprocessing import Pool
import os, time, random
def long_time_task(name):
    # Worker body: sleep 0-3 s at random and print pid plus elapsed time
    # (Python 2 print-statement syntax).
    print 'Run task %s (%s)...' % (name, os.getpid())
    start = time.time()
    time.sleep(random.random() * 3)
    end = time.time()
    print 'Task %s runs %0.2f seconds.' % (name, (end - start))
if __name__ == '__main__':
    # Demo of multiprocessing.Pool: submit 5 async tasks to a default-sized
    # pool, then close() and join() to wait for completion.
    print 'Parent process %s.' % os.getpid()
    p = Pool()
    for i in range(5):
        p.apply_async(long_time_task, args=(i,))
    print 'Waiting for all subprocesses done...'
    p.close()
    p.join()
    print 'All subprocesses done.'
# English gist of the author's trailing commentary (Chinese, kept verbatim
# below): join() waits for all workers and must be preceded by close(),
# after which no new tasks may be added. Tasks 0-3 start immediately while
# task 4 waits, because the default pool size equals the CPU count (4 on
# the author's machine) — a deliberate Pool limit, not an OS one; use
# Pool(5) to run five at once.
"""
代码解读:
对Pool对象调用join()方法会等待所有子进程执行完毕,调用join()之前必须先调用close(),调用close()之后就不能继续添加新的Process了。
请注意输出的结果,task 0,1,2,3是立刻执行的,而task 4要等待前面某个task完成后才执行,这是因为Pool的默认大小在我的电脑上是4,因此,最多同时执行4个进程。这是Pool有意设计的限制,并不是操作系统的限制。如果改成:
p = Pool(5)
"""
|
Jayin/practice_on_py
|
Process&Thread/PoolTest.py
|
Python
|
mit
| 1,094
|
from .tree import *
from .node import *
from . import query
|
mbodenhamer/syn
|
syn/tree/b/__init__.py
|
Python
|
mit
| 60
|
__author__ = 'ehonlia'
from rdflib import Namespace
# RDF namespace constants shared by the semantic adapter.
SSN = Namespace('http://purl.oclc.org/NET/ssnx/ssn#')
DUL = Namespace('http://www.loa-cnr.it/ontologies/DUL.owl#')
GEO = Namespace('http://www.w3.org/2003/01/geo/wgs84_pos#')
SAO = Namespace('http://purl.oclc.org/NET/sao/')
CT = Namespace('http://www.insight-centre.org/citytraffic#')
PROV = Namespace('http://purl.org/NET/provenance.owl#')
TL = Namespace('http://purl.org/NET/c4dm/timeline.owl#')
UCUM = Namespace('http://purl.oclc.org/NET/muo/ucum/')
# Document field names used when storing/reading records.
ID = '_id'
METADATA = 'metadata'
|
EricssonResearch/iot-framework-engine
|
semantic-adapter/lib/constants.py
|
Python
|
apache-2.0
| 542
|
"""
test_case_3_simple_mk_msh.py is a simulation example for EMUstack.
Copyright (C) 2015 Bjorn Sturmberg, Kokou Dossou, Felix Lawrence
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
"""
Test simulation of a relatively simple structure;
a dilute InP nanowire array.
Tests the creation of new .mail file via Gmsh.
"""
import time
import datetime
import numpy as np
import sys
from multiprocessing import Pool
sys.path.append("../backend/")
import objects
import materials
import plotting
from stack import *
import testing
from numpy.testing import assert_allclose as assert_ac
from numpy.testing import assert_equal
def setup_module(module):
    """Build and solve the test stack once, storing it as module.stack_list."""
    ################ Light parameters #####################
    # Set up light objects
    wavelengths = np.array([700])
    light_list = [objects.Light(wl, max_order_PWs = 2, theta = 0.0, phi = 0.0) for wl in wavelengths]
    light = light_list[0]
    #period must be consistent throughout simulation!!!
    period = 500
    NW_diameter = 120
    num_BMs = 40
    NW_array = objects.NanoStruct('2D_array', period, NW_diameter, height_nm = 2330,
        inclusion_a = materials.InP, background = materials.Air,
        loss = True, make_mesh_now = True, force_mesh = True,
        lc_bkg = 0.07, lc2= 1.5, lc3= 2.0)
    superstrate = objects.ThinFilm(period = period, height_nm = 'semi_inf',
        material = materials.Air, loss = False)
    substrate = objects.ThinFilm(period = period, height_nm = 'semi_inf',
        material = materials.SiO2, loss = False)
    ################ Evaluate each layer individually ##############
    sim_superstrate = superstrate.calc_modes(light)
    sim_NW_array = NW_array.calc_modes(light, num_BMs = num_BMs)
    sim_substrate = substrate.calc_modes(light)
    stack = Stack((sim_substrate, sim_NW_array, sim_superstrate))
    stack.calc_scat(pol = 'TE')
    module.stack_list = [stack]
    # NOTE(review): bare `stack_list` below is not a local name — it only
    # resolves if a module-level `stack_list` exists; presumably
    # `module.stack_list` was intended. Confirm.
    plotting.t_r_a_write_files(stack_list, wavelengths)
    # # SAVE DATA AS REFERENCE
    # # Only run this after changing what is simulated - this
    # # generates a new set of reference answers to check against
    # # in the future
    # testing.save_reference_data("case_3", stack_list)
def results_match_reference(filename):
    """Assert that `filename` in the cwd matches its stored ref/case_3 copy."""
    reference = np.loadtxt("ref/case_3/" + filename)
    result = np.loadtxt(filename)
    # Loose tolerances (rtol=1e-3, atol=1e-1): reference data spans platforms.
    np.testing.assert_allclose(result, reference, 1e-3, 1e-1, filename)
def test_txt_results():
    """Yield one reference comparison per generated output file (nose-style)."""
    for fname in (
        "Absorptance_stack0001.txt",
        "Lay_Absorb_0_stack0001.txt",
        "Lay_Trans_0_stack0001.txt",
        "Reflectance_stack0001.txt",
        "Transmittance_stack0001.txt",
    ):
        yield results_match_reference, fname
def test_stack_list_matches_saved(casefile_name = 'case_3'):
    """Compare the freshly computed stack_list against pickled reference data."""
    # Very loose tolerances: reference results vary across platforms/toolchains.
    rtol = 1e-1
    atol = 1e-0
    ref = np.load("ref/%s.npz" % casefile_name, allow_pickle=True, encoding='latin1')
    yield assert_equal, len(stack_list), len(ref['stack_list'])
    for stack, rstack in zip(stack_list, ref['stack_list']):
        yield assert_equal, len(stack.layers), len(rstack['layers'])
        lbl_s = "wl = %f, " % stack.layers[0].light.wl_nm
        for i, (lay, rlay) in enumerate(zip(stack.layers, rstack['layers'])):
            lbl_l = lbl_s + "lay %i, " % i
            # Per-layer scattering matrices and propagation constants.
            yield assert_ac, lay.R12, rlay['R12'], rtol, atol, lbl_l + 'R12'
            yield assert_ac, lay.T12, rlay['T12'], rtol, atol, lbl_l + 'T12'
            yield assert_ac, lay.R21, rlay['R21'], rtol, atol, lbl_l + 'R21'
            yield assert_ac, lay.T21, rlay['T21'], rtol, atol, lbl_l + 'T21'
            yield assert_ac, lay.k_z, rlay['k_z'], rtol, atol, lbl_l + 'k_z'
            #TODO: yield assert_ac, lay.sol1, rlay['sol1']
        yield assert_ac, stack.R_net, rstack['R_net'], rtol, atol, lbl_s + 'R_net'
        yield assert_ac, stack.T_net, rstack['T_net'], rtol, atol, lbl_s + 'T_net'
|
gevero/EMUstack
|
tests/test_case_3_simple_mk_msh.py
|
Python
|
gpl-3.0
| 4,511
|
from . import purchase_order
from . import res_partner
|
OCA/purchase-workflow
|
purchase_partner_incoterm/models/__init__.py
|
Python
|
agpl-3.0
| 55
|
# Copyright (c) 2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Multiple DB API backend support.
A DB backend module should implement a method named 'get_backend' which
takes no arguments. The method can return any object that implements DB
API methods.
"""
import functools
import logging
import threading
import time
from climate.openstack.common.db import exception
from climate.openstack.common.gettextutils import _LE
from climate.openstack.common import importutils
LOG = logging.getLogger(__name__)
def safe_for_db_retry(f):
    """Mark *f* as eligible for automatic DB-reconnect retries.

    DBAPI inspects this marker and, when the ``use_db_reconnect`` config
    option is enabled, wraps the function in wrap_db_retry.
    """
    setattr(f, 'enable_retry', True)
    return f
class wrap_db_retry(object):
    """Decorator that retries db.api methods when DBConnectionError is raised.

    If ``use_db_reconnect`` is enabled in config, this decorator is applied
    to every db.api function marked with @safe_for_db_retry.  It catches
    DBConnectionError() and re-invokes the wrapped function in a loop until
    the call succeeds or the maximum retry count is exhausted, at which
    point a DBError is raised instead.
    """
    def __init__(self, retry_interval, max_retries, inc_retry_interval,
                 max_retry_interval):
        super(wrap_db_retry, self).__init__()
        self.retry_interval = retry_interval
        self.max_retries = max_retries
        self.inc_retry_interval = inc_retry_interval
        self.max_retry_interval = max_retry_interval

    def __call__(self, f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            delay = self.retry_interval
            attempts_left = self.max_retries
            while True:
                try:
                    return f(*args, **kwargs)
                except exception.DBConnectionError as e:
                    if attempts_left == 0:
                        LOG.exception(_LE('DB exceeded retry limit.'))
                        raise exception.DBError(e)
                    # max_retries == -1 means retry forever.
                    if attempts_left != -1:
                        attempts_left -= 1
                    LOG.exception(_LE('DB connection error.'))
                    # NOTE(vsergeyev): We are using patched time module, so
                    #                  this effectively yields the execution
                    #                  context to another green thread.
                    time.sleep(delay)
                    if self.inc_retry_interval:
                        # Exponential backoff, capped at max_retry_interval.
                        delay = min(delay * 2, self.max_retry_interval)
        return wrapper
class DBAPI(object):
    """Dispatch DB API calls to a dynamically-loaded backend module.

    Attribute access on a DBAPI instance is forwarded to the backend object
    returned by the backend module's ``get_backend()``; callables may be
    transparently wrapped for retry-on-disconnect (see __getattr__).
    """
    def __init__(self, backend_name, backend_mapping=None, lazy=False,
                 **kwargs):
        """Initialize the chosen DB API backend.
        :param backend_name: name of the backend to load
        :type backend_name: str
        :param backend_mapping: backend name -> module/class to load mapping
        :type backend_mapping: dict
        :param lazy: load the DB backend lazily on the first DB API method call
        :type lazy: bool
        Keyword arguments:
        :keyword use_db_reconnect: retry DB transactions on disconnect or not
        :type use_db_reconnect: bool
        :keyword retry_interval: seconds between transaction retries
        :type retry_interval: int
        :keyword inc_retry_interval: increase retry interval or not
        :type inc_retry_interval: bool
        :keyword max_retry_interval: max interval value between retries
        :type max_retry_interval: int
        :keyword max_retries: max number of retries before an error is raised
        :type max_retries: int
        """
        self._backend = None
        self._backend_name = backend_name
        self._backend_mapping = backend_mapping or {}
        # Guards the one-time backend import in _load_backend().
        self._lock = threading.Lock()
        if not lazy:
            self._load_backend()
        # Retry configuration consumed by wrap_db_retry in __getattr__.
        self.use_db_reconnect = kwargs.get('use_db_reconnect', False)
        self.retry_interval = kwargs.get('retry_interval', 1)
        self.inc_retry_interval = kwargs.get('inc_retry_interval', True)
        self.max_retry_interval = kwargs.get('max_retry_interval', 10)
        self.max_retries = kwargs.get('max_retries', 20)
    def _load_backend(self):
        # Double-checked under the lock: concurrent callers import only once.
        with self._lock:
            if not self._backend:
                # Import the untranslated name if we don't have a mapping
                backend_path = self._backend_mapping.get(self._backend_name,
                                                         self._backend_name)
                backend_mod = importutils.import_module(backend_path)
                self._backend = backend_mod.get_backend()
    def __getattr__(self, key):
        # Only invoked for names not found on the DBAPI instance itself, so
        # every unknown attribute is resolved against the backend object.
        if not self._backend:
            self._load_backend()
        attr = getattr(self._backend, key)
        if not hasattr(attr, '__call__'):
            return attr
        # NOTE(vsergeyev): If `use_db_reconnect` option is set to True, retry
        #                  DB API methods, decorated with @safe_for_db_retry
        #                  on disconnect.
        if self.use_db_reconnect and hasattr(attr, 'enable_retry'):
            attr = wrap_db_retry(
                retry_interval=self.retry_interval,
                max_retries=self.max_retries,
                inc_retry_interval=self.inc_retry_interval,
                max_retry_interval=self.max_retry_interval)(attr)
        return attr
|
frossigneux/blazar
|
climate/openstack/common/db/api.py
|
Python
|
apache-2.0
| 5,989
|
""" Provides Access to the GitHub API """
from __future__ import absolute_import
from __future__ import print_function, unicode_literals
from datetime import datetime, timedelta, time
import logging
import os
import socket
import backoff
from github import Github
from github.PullRequest import PullRequest
from github.Commit import Commit
from github.GitCommit import GitCommit
from github.GithubException import UnknownObjectException, GithubException
from github.InputGitAuthor import InputGitAuthor
from pytz import timezone
import six
from validators import url as url_validator
from .exception import InvalidUrlException
from .utils import envvar_get_int
from .git_repo import LocalGitAPI
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)
# Message templates posted as PR comments; {extra_text} is appended verbatim.
PR_PREFIX = '**EdX Release Notice**: '
PR_ON_STAGE_BASE_MESSAGE = PR_PREFIX + 'This PR has been deployed to the staging environment '
PR_ON_STAGE_DATE_MESSAGE = 'in preparation for a release to production on {date:%A, %B %d, %Y}. {extra_text}'
PR_ON_PROD_MESSAGE = PR_PREFIX + 'This PR has been deployed to the production environment. {extra_text}'
PR_RELEASE_CANCELED_MESSAGE = PR_PREFIX + 'This PR has been rolled back from the production environment. {extra_text}'
PR_BROKE_VAGRANT_DEVSTACK_MESSAGE = PR_PREFIX + 'This PR may have broken Vagrant Devstack CI. {extra_text}'
# Fallbacks used when the authenticated GitHub user has no public name/email.
DEFAULT_TAG_USERNAME = 'no_user'
DEFAULT_TAG_EMAIL_ADDRESS = 'no.public.email@edx.org'
# Day-of-week constants (datetime.weekday(): Monday == 0).
_MONDAY = 0
_FRIDAY = 4
_NORMAL_RELEASE_WEEKDAYS = tuple(range(_MONDAY, _FRIDAY + 1))
# Releases cut off at 10:00 US/Eastern; see default_expected_release_date.
RELEASE_TZ = timezone('US/Eastern')
RELEASE_CUTOFF = time(10, tzinfo=RELEASE_TZ)
# Defaults for the polling of a PR's tests (overridable via env vars).
MAX_PR_TEST_TRIES_DEFAULT = 5
PR_TEST_INITIAL_WAIT_INTERVAL_DEFAULT = 10
PR_TEST_POLL_INTERVAL_DEFAULT = 10
class NoValidCommitsError(Exception):
    """Raised when no commit with a valid (passing) combined status exists."""
class InvalidPullRequestError(Exception):
    """Raised when a referenced pull request could not be found."""
class PullRequestCreationError(Exception):
    """Raised when a pull request could not be created."""
class GitTagMismatchError(Exception):
    """Raised when an existing tag points at a different SHA than requested."""
def extract_message_summary(message, max_length=50):
    """
    Return the first line of a commit message, truncated with an ellipsis.

    Arguments:
        message (str): Full commit message; only the first line is used.
        max_length (int): Maximum number of characters kept before the
            ellipsis is appended.

    Returns:
        str: The first line unchanged when it fits in ``max_length``
        characters, otherwise its first ``max_length`` characters
        followed by '...'.
    """
    title = message.split('\n')[0] or ''
    # Bug fix: was ``<``, which appended '...' to a title of exactly
    # max_length characters even though nothing was actually truncated.
    if len(title) <= max_length:
        return title
    else:
        return title[0:max_length] + '...'
def default_expected_release_date(at_time=None, release_days=_NORMAL_RELEASE_WEEKDAYS):
    """
    Return the next expected release datetime relative to ``at_time``
    (defaults to now in the release timezone).

    The result falls on the nearest allowed weekday — today only if
    ``at_time`` is before the daily release cutoff — at the cutoff time.
    """
    moment = datetime.now(RELEASE_TZ) if at_time is None else at_time
    candidate = moment.date()
    # Past today's cutoff: the earliest possible release day is tomorrow.
    if moment.timetz() >= RELEASE_CUTOFF:
        candidate += timedelta(days=1)
    # Walk forward to the next permitted release weekday.
    while candidate.weekday() not in release_days:
        candidate += timedelta(days=1)
    return datetime.combine(candidate, RELEASE_CUTOFF)
def rc_branch_name_for_date(date):
    """
    Return the conventional release-candidate branch name, 'rc/YYYY-MM-DD'.
    """
    return 'rc/' + date.isoformat()
def _backoff_handler(details):
    """
    Log one line describing a backoff event (used as an on_backoff hook
    for the backoff library's retry decorators).
    """
    LOGGER.info(
        'Trying again in {wait:0.1f} seconds after {tries} tries calling {target}'.format(**details)
    )
def _constant_with_initial_wait(initial_wait=0, interval=1):
"""
Generator with initial wait (after the first request) built-in.
The first request is made immediately.
The second request is made after "initial_wait" seconds.
All remaining requests made after "interval" seconds.
Useful for polling processes expected to not have results for a substantial interval from process start.
Arguments:
initial_wait: Number of seconds to wait between the first and second requests.
interval: Constant value in seconds to yield after second request.
"""
yield initial_wait
while True:
yield interval
class GitHubAPI(object):
    """
    Manages requests to the GitHub api for a given org/repo
    """
    def __init__(self, org, repo, token):
        """
        Creates a new API access object.
        Arguments:
            org (string): Github org to access
            repo (string): Github repo to access
            token (string): Github API access token
        """
        self.github_connection = Github(token)
        self.github_repo = self.github_connection.get_repo('{org}/{repo}'.format(org=org, repo=repo))
        self.github_org = self.github_connection.get_organization(org)
        self.org = org
        self.repo = repo
    def clone(self, branch=None, reference_repo=None):
        """
        Clone this Github repo as a LocalGitAPI instance.
        """
        # Clones over SSH; the environment must have push/pull credentials.
        clone_url = self.github_repo.ssh_url
        return LocalGitAPI.clone(clone_url, branch, reference_repo)
    def user(self):
        """
        Calls GitHub's '/user' endpoint.
        See
        https://developer.github.com/v3/users/#get-the-authenticated-user
        Returns:
            github.NamedUser.NamedUser: Information about the current user.
        Raises:
            RequestFailed: If the response fails validation.
        """
        return self.github_connection.get_user()
    def get_head_commit_from_pull_request(self, pr_number):
        """
        Given a PR number, return the HEAD commit hash.
        Arguments:
            pr_number (int): Number of PR to check.
        Returns:
            Commit SHA of the PR HEAD.
        Raises:
            github.GithubException.GithubException: If the response fails.
            github.GithubException.UnknownObjectException: If the branch does not exist
        """
        return self.get_pull_request(pr_number).head.sha
    def get_diff_url(self, organization, repository, base_sha, head_sha):
        """
        Given the organization and repository, generate a github URL that will compare the provided SHAs.
        Arguments:
            organization (str): An organization name as it will appear in github
            repository (str): The organization's repository name
            base_sha (str): The base commit's SHA
            head_sha (str): Compare the base SHA with this commit
        Returns:
            A string containing the URL
        Raises:
            InvalidUrlException: If the basic validator does not believe this to be a valid URL
        """
        calculated_url = 'https://github.com/{}/{}/compare/{}...{}'.format(
            organization, repository, base_sha, head_sha
        )
        if not url_validator(calculated_url):
            raise InvalidUrlException(calculated_url)
        return calculated_url
    def get_head_commit_from_branch_name(self, branch_name):
        """
        Given a branch name, return the HEAD commit hash.
        Arguments:
            branch_name (str): Name of branch from which to extract HEAD commit hash.
        Returns:
            Commit SHA of the branch HEAD.
        Raises:
            github.GithubException.GithubException: If the response fails.
            github.GithubException.UnknownObjectException: If the branch does not exist
        """
        # get_commits_by_branch lists commits starting from the branch tip.
        return self.get_commits_by_branch(branch_name)[0].sha
    def get_merge_commit_from_pull_request(self, pr_number):
        """
        Given a pull request number, return the PR's merge commit hash.
        Arguments:
            pr_number (int): Number of PR to check.
        Returns:
            Commit SHA of the merge commit which merged the PR into the base branch.
        Raises:
            github.GithubException.GithubException: If the response fails.
            github.GithubException.UnknownObjectException: If the PR does not exist.
        """
        return self.get_pull_request(pr_number).merge_commit_sha
    def get_commit_combined_statuses(self, commit):
        """
        Calls GitHub's '<commit>/statuses' endpoint for a given commit. See
        https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
        Arguments:
            commit: One of:
                - string (interprets as git SHA and fetches commit)
                - GitCommit (uses the accompanying git SHA and fetches commit)
                - Commit (directly gets the combined status)
        Returns:
            github.CommitCombinedStatus.CommitCombinedStatus
        Raises:
            RequestFailed: If the response fails validation.
        """
        # Normalize the three accepted input types down to a Commit object.
        if isinstance(commit, six.string_types):
            commit = self.github_repo.get_commit(commit)
        elif isinstance(commit, GitCommit):
            commit = self.github_repo.get_commit(commit.sha)
        elif not isinstance(commit, Commit):
            raise UnknownObjectException(500, 'commit is neither a valid sha nor github.Commit.Commit object.')
        return commit.get_combined_status()
    def _is_commit_successful(self, sha):
        """
        Returns whether the passed commit has passed all its tests.
        Ensures there is at least one status update so that
        commits whose tests haven't started yet are not valid.
        Arguments:
            sha (str): The SHA of which to get the status.
        Returns:
            tuple(bool, dict):
                bool: True when the combined state equals 'success', False otherwise
                dict: Key/values of ci_context:ci_url
        """
        commit_status = self.get_commit_combined_statuses(sha)
        # Determine if the commit has passed all checks
        if len(commit_status.statuses) < 1 or commit_status.state is None:
            return (False, {})
        return (
            commit_status.state.lower() == 'success',
            {cs.context: cs.target_url for cs in commit_status.statuses}
        )
    def check_combined_status_commit(self, commit_sha):
        """
        Given a commit SHA, query the current combined status of the commit's tests.
        Arguments:
            commit_sha (str): Commit SHA to check.
        Returns:
            tuple(bool, dict):
                bool: True if all tests have passed successfully, False otherwise
                dict: Key/values of ci_context:ci_url
        Raises:
            github.GithubException.GithubException: If the response fails.
            github.GithubException.UnknownObjectException: If the SHA does not exist
        """
        return self._is_commit_successful(commit_sha)
    def check_combined_status_pull_request(self, pr_number):
        """
        Given a PR number, query the current combined status of the PR's tests.
        Arguments:
            pr_number (int): Number of PR to check.
        Returns:
            tuple(bool, dict):
                bool: True if all tests have passed successfully, False otherwise
                dict: Key/values of ci_context:ci_url
        Raises:
            github.GithubException.GithubException: If the response fails.
            github.GithubException.UnknownObjectException: If the PR does not exist
        """
        return self._is_commit_successful(
            self.get_head_commit_from_pull_request(pr_number)
        )
    # Outer decorator: retry on socket timeouts with exponential backoff.
    @backoff.on_exception(
        backoff.expo,
        socket.timeout,
        max_tries=5
    )
    # Inner decorator: poll until the combined state is terminal
    # ('success'/'failure') or the try limit is hit; intervals are
    # env-var configurable.
    @backoff.on_predicate(
        _constant_with_initial_wait,
        lambda x: x not in ('success', 'failure'),
        max_tries=envvar_get_int("MAX_PR_TEST_POLL_TRIES", MAX_PR_TEST_TRIES_DEFAULT),
        initial_wait=envvar_get_int("PR_TEST_INITIAL_WAIT_INTERVAL", PR_TEST_INITIAL_WAIT_INTERVAL_DEFAULT),
        interval=envvar_get_int("PR_TEST_POLL_INTERVAL", PR_TEST_POLL_INTERVAL_DEFAULT),
        jitter=None,
        on_backoff=_backoff_handler
    )
    def _poll_commit(self, sha):
        """
        Poll whether the passed commit has passed all its tests.
        Ensures there is at least one status update so that
        commits whose tests haven't started yet are not valid.
        Arguments:
            sha (str): The SHA of which to get the status.
        Returns:
            str: the lower-cased combined state, or 'not_started' when no
            status exists yet (both non-terminal values trigger a re-poll).
        """
        commit_status = self.get_commit_combined_statuses(sha)
        # Ensure that at least one status update exists to guard against commits whose tests haven't started yet.
        if len(commit_status.statuses) < 1 or commit_status.state is None:
            return 'not_started'
        return commit_status.state.lower()
    def poll_pull_request_test_status(self, pr_number):
        """
        Given a PR number, poll the combined status of the PR's tests.
        Arguments:
            pr_number (int): Number of PR to check.
        Returns:
            True if all tests have passed successfully, else False.
        Raises:
            github.GithubException.GithubException: If the response fails.
            github.GithubException.UnknownObjectException: If the branch does not exist
        """
        commit_sha = self.get_head_commit_from_pull_request(pr_number)
        return self.poll_for_commit_successful(commit_sha)
    def poll_for_commit_successful(self, sha):
        """
        Poll whether the passed commit has passed all its tests.
        Arguments:
            sha (str): The SHA of which to get the status.
        Returns:
            True when the commit's combined state equals 'success', else False.
        """
        return self._poll_commit(sha) == 'success'
    def is_branch_base_of_pull_request(self, pr_number, branch_name):
        """
        Check if the PR is against the specified branch,
        i.e. if the base of the PR is the specified branch.
        Arguments:
            pr_number (int): Number of PR to check.
            branch_name (str): Name of branch to check.
        Returns:
            True if PR is opened against the branch, else False.
        Raises:
            github.GithubException.GithubException: If the response fails.
            github.GithubException.UnknownObjectException: If the branch does not exist
        """
        pull_request = self.get_pull_request(pr_number)
        # PR base labels have the form 'org:branch'.
        repo_branch_name = '{}:{}'.format(self.org, branch_name)
        return pull_request.base.label == repo_branch_name
    def get_commits_by_branch(self, branch):
        """
        Calls GitHub's 'commits' endpoint for master.
        See
        https://developer.github.com/v3/repos/commits/#list-commits-on-a-repository
        Arguments:
            branch (str): branch to search for commits.
        Returns:
            github.PaginatedList.PaginatedList: of github.GitCommit.GitCommit
        Raises:
            github.GithubException: If the response fails validation.
        """
        branch = self.github_repo.get_branch(branch)
        return self.github_repo.get_commits(branch.commit.sha)
    def delete_branch(self, branch_name):
        """
        Call GitHub's delete ref (branch) API
        Args:
            branch_name (str): The name of the branch to delete
        Raises:
            github.GithubException.GithubException: If the response fails.
            github.GithubException.UnknownObjectException: If the branch does not exist
        """
        ref = self.github_repo.get_git_ref(
            ref='heads/{ref}'.format(ref=branch_name)
        )
        ref.delete()
    def create_branch(self, branch_name, sha):
        """
        Calls GitHub's create ref (branch) API
        Arguments:
            branch_name (str): The name of the branch to create
            sha (str): The commit to base the branch off of
        Returns:
            github.GitRef.GitRef
        Raises:
            github.GithubException.GithubException: If the branch isn't created/already exists.
            github.GithubException.UnknownObjectException: if the branch can not be fetched after creation
        """
        return self.github_repo.create_git_ref(
            ref='refs/heads/{}'.format(branch_name),
            sha=sha
        )
    def create_pull_request(
            self,
            head,
            base='release',
            title='',
            body=''):
        """
        Creates a new pull request from a branch
        Arguments:
            head (str): The name of the branch to create the PR from
            base (str): The Branch the PR will be merged in to
            title (str): Title of the pull request
            body (str): Text body of the pull request
        Returns:
            github.PullRequest.PullRequest
        Raises:
            PullRequestCreationError: If the PR could not be created.
        """
        try:
            return self.github_repo.create_pull(
                title=title,
                body=body,
                head=head,
                base=base
            )
        except GithubException as exc:
            # PR could not be created.
            raise PullRequestCreationError(str(exc.data))
    def get_pull_request(self, pr_number):
        """
        Given a PR number, return the PR object.
        Arguments:
            pr_number (int): Number of PR to get.
        Returns:
            github.PullRequest.PullRequest
        Raises:
            github.GithubException.GithubException: If the response fails.
            github.GithubException.UnknownObjectException: If the PR ID does not exist
        """
        return self.github_repo.get_pull(pr_number)
    def merge_pull_request(self, pr_number):
        """
        Given a PR number, merge the pull request (if possible).
        Arguments:
            pr_number (int): Number of PR to merge.
        Raises:
            github.GithubException.GithubException: If the PR merge fails.
            github.GithubException.UnknownObjectException: If the PR ID does not exist.
        """
        pull_request = self.get_pull_request(pr_number)
        pull_request.merge()
    def create_tag(
            self,
            sha,
            tag_name,
            message='',
            tag_type='commit'):
        """
        Creates a tag associated with the sha provided
        Arguments:
            sha (str): The commit referenced by the newly created tag
            tag_name (str): The name of the tag
            message (str): The optional description of the tag
            tag_type (str): The type of the tag. Could be 'tree' or 'blob'. Default is 'commit'.
        Returns:
            github.GitTag.GitTag
        Raises:
            GitTagMismatchError: If the tag exists but points at another SHA.
            github.GithubException.GithubException: For other API failures.
        """
        tag_user = self.user()
        tagger = InputGitAuthor(
            name=tag_user.name or DEFAULT_TAG_USERNAME,
            # GitHub users without a public email address will use a default address.
            email=tag_user.email or DEFAULT_TAG_EMAIL_ADDRESS,
            # NOTE(review): naive local time is stamped with a 'Z' (UTC)
            # suffix here — confirm this is intended.
            date=datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ')
        )
        created_tag = self.github_repo.create_git_tag(
            tag=tag_name,
            message=message,
            object=sha,
            type=tag_type,
            tagger=tagger
        )
        try:
            # We need to create a reference based on the tag
            self.github_repo.create_git_ref(ref='refs/tags/{}'.format(tag_name), sha=sha)
        except GithubException as exc:
            # Upon trying to create a tag with a tag name that already exists,
            # an "Unprocessable Entity" error with a status code of 422 is returned
            # with a message of 'Reference already exists'.
            # https://developer.github.com/v3/#client-errors
            if exc.status != 422:
                raise
            # Tag is already created. Verify it's on the correct hash.
            existing_tag = self.github_repo.get_git_ref('tags/{}'.format(tag_name))
            if existing_tag.object.sha != sha:
                # The tag is already created and pointed to a different SHA than requested.
                raise GitTagMismatchError(
                    "Tag '{}' exists but points to SHA {} instead of requested SHA {}.".format(
                        tag_name, existing_tag.object.sha, sha
                    )
                )
        return created_tag
    def have_branches_diverged(self, base_branch, compare_branch):
        """
        Checks to see if all the commits that are in the compare_branch are already in the base_branch.
        Arguments:
            base_branch (str): Branch to use as a base when comparing.
            compare_branch (str): Branch to compare against base to see if it contains commits the base does not.
        Returns:
            bool: False if all commits in the compare_branch are already in the base_branch.
                  True if the compare_branch contains commits which the base_branch does not.
        Raises:
            github.GithubException.GithubException: If the call fails.
            github.GithubException.UnknownObjectException: If either branch does not exist.
        """
        return self.github_repo.compare(
            base='refs/heads/{}'.format(base_branch),
            head='refs/heads/{}'.format(compare_branch)
        ).status == 'diverged'
    def most_recent_good_commit(self, branch):
        """
        Returns the most recent commit on master that has passed the tests
        Arguments:
            branch (str): branch name to check for valid commits
        Returns:
            github.GitCommit.GitCommit
        Raises:
            NoValidCommitsError: When no commit is found
        """
        # NOTE(review): this returns the *first* successful commit in the
        # listing — "most recent" assumes get_commits_by_branch yields
        # newest-first (GitHub API default ordering); confirm.
        commits = self.get_commits_by_branch(branch)
        result = None
        for commit in commits:
            if self._is_commit_successful(commit.sha)[0]:
                result = commit
                return result
        # no result
        raise NoValidCommitsError()
    def get_pr_range(self, start_sha, end_sha):
        """
        Given a start SHA and an end SHA, returns a list of PRs between the two,
        excluding the start SHA and including the end SHA.
        This has been done in the past by parsing PR numbers out of merge commit
        messages. However, merge commits are becoming less common on GitHub with
        the advent of new PR merge strategies (i.e., squash merge, rebase merge).
        If you merge a PR using either squash or rebase merging, there will be no
        merge commit corresponding to your PR on master. The merge commit message
        parsing approach will subsequently fail to locate your PR.
        The GitHub Search API helps us address this by allowing us to search issues
        by SHA. Note that the GitHub Search API has custom rate limit rules (30 RPM).
        For more, see https://developer.github.com/v3/search.
        Arguments:
            start_sha (str): SHA from which to begin the PR search, exclusive.
            end_sha (str): SHA at which to conclude the PR search, inclusive.
        Returns:
            list: of github.PullRequest.PullRequest
        """
        # The Search API limits search queries to 256 characters. Untrimmed SHA1s
        # are 40 characters long. To avoid exceeding the rate and search query size
        # limits, we can batch SHAs in our searches. Reserving 56 characters for
        # qualifiers (i.e., type, base, user, repo) leaves us with 200 characters.
        # As with all other terms in the query, the batched SHAs need to be separated
        # by a character of whitespace. A batch size of 18 10-character SHAs requires
        # 17 characters of whitespace, for a total of 18*10 + 17 = 197 characters.
        # We'd need to search for >540 commits in a minute to exceed the rate limit.
        sha_length = int(os.environ.get('SHA_LENGTH', 10))
        batch_size = int(os.environ.get('BATCH_SIZE', 18))
        def batch(batchable):
            """
            Utility to facilitate batched iteration over a list.
            Arguments:
                batchable (list): The list to break into batches.
            Yields:
                list
            """
            length = len(batchable)
            for index in range(0, length, batch_size):
                yield batchable[index:index + batch_size]
        comparison = self.github_repo.compare(start_sha, end_sha)
        shas = [commit.sha[:sha_length] for commit in comparison.commits]
        issues = []
        for sha_batch in batch(shas):
            # For more about searching issues,
            # see https://help.github.com/articles/searching-issues.
            issues += self.github_connection.search_issues(
                ' '.join(sha_batch),
                type='pr',
                base='master',
                user=self.org,
                repo=self.repo,
            )
        pulls = {}
        for issue in issues:
            # Merge commits link back to the same PR as the actual commits merged
            # by that PR. We want to avoid listing the PR twice in this situation,
            # and also when a PR includes more than one commit.
            if not pulls.get(issue.number):
                pulls[issue.number] = issue.repository.get_pull(issue.number)
        return list(pulls.values())
    def message_pull_request(self, pull_request, message, message_filter, force_message=False):
        """
        Messages a pull request. Will only message the PR if the message has not already been posted to the discussion
        Args:
            pull_request (github.PullRequest.PullRequest or int): the pull request (object or number) to message
            message (str): the message to post to the pull request
            message_filter (str): the message filter used to avoid duplicate messages
            force_message (bool): if set true the message will be posted without duplicate checking
        Returns:
            github.IssueComment.IssueComment
        Raises:
            github.GithubException.GithubException:
            InvalidPullRequestError: When the PR does not exist
        """
        def _not_duplicate(pr_messages, new_message):
            """
            Returns True if the comment does not exist on the PR
            Returns False if the comment exists on the PR
            Args:
                pr_messages (list<str>)
                new_message (str):
            Returns:
                bool
            """
            # Case-insensitive substring match against each existing comment.
            new_message = new_message.lower()
            result = False
            for comment in pr_messages:
                if new_message in comment.body.lower():
                    break
            else:
                result = True
            return result
        if not isinstance(pull_request, PullRequest):
            try:
                pull_request = self.github_repo.get_pull(pull_request)
            except UnknownObjectException:
                raise InvalidPullRequestError('PR #{} does not exist'.format(pull_request))
        if force_message or _not_duplicate(pull_request.get_issue_comments(), message_filter):
            return pull_request.create_issue_comment(message)
        else:
            # Duplicate suppressed: nothing posted.
            return None
    def message_pr_deployed_stage(self, pr_number, deploy_date=None, force_message=False, extra_text=''):
        """
        Sends a message that this PRs commits have been deployed to the staging environment
        Args:
            pr_number (int): The number of the pull request
            deploy_date (datetime): Expected production release date; defaults
                to default_expected_release_date().
            force_message (bool): if set true the message will be posted without duplicate checking
            extra_text (str): Extra text that will be inserted at the end of the PR message
        Returns:
            github.IssueComment.IssueComment
        """
        if deploy_date is None:
            deploy_date = default_expected_release_date()
        return self.message_pull_request(
            pr_number,
            (PR_ON_STAGE_BASE_MESSAGE + PR_ON_STAGE_DATE_MESSAGE).format(date=deploy_date, extra_text=extra_text),
            PR_ON_STAGE_BASE_MESSAGE,
            force_message,
        )
    def message_pr_deployed_prod(self, pr_number, force_message=False, extra_text=''):
        """
        sends a message that this PRs commits have been deployed to the production environment
        Args:
            pr_number (int): The number of the pull request
            force_message (bool): if set true the message will be posted without duplicate checking
            extra_text (str): Extra text that will be inserted at the end of the PR message
        Returns:
            github.IssueComment.IssueComment
        """
        return self.message_pull_request(
            pr_number,
            PR_ON_PROD_MESSAGE.format(extra_text=extra_text),
            PR_ON_PROD_MESSAGE.format(extra_text=''),
            force_message
        )
    def message_pr_release_canceled(self, pr_number, force_message=False, extra_text=''):
        """
        Sends a message that this PRs commits have not made it to production as the release was canceled
        Args:
            pr_number (int): The number of the pull request
            force_message (bool): if set true the message will be posted without duplicate checking
            extra_text (str): Extra text that will be inserted at the end of the PR message
        Returns:
            github.IssueComment.IssueComment
        """
        return self.message_pull_request(
            pr_number,
            PR_RELEASE_CANCELED_MESSAGE.format(extra_text=extra_text),
            PR_RELEASE_CANCELED_MESSAGE.format(extra_text=''),
            force_message
        )
    def message_pr_broke_vagrant(self, pr_number, force_message=False, extra_text=''):
        """
        Sends a message that this PRs commits have broken vagrant devstack
        Args:
            pr_number (int): The number of the pull request
            force_message (bool): if set true the message will be posted without duplicate checking
            extra_text (str): Extra text that will be inserted at the end of the PR message
        Returns:
            github.IssueComment.IssueComment
        """
        return self.message_pull_request(
            pr_number,
            PR_BROKE_VAGRANT_DEVSTACK_MESSAGE.format(extra_text=extra_text),
            PR_BROKE_VAGRANT_DEVSTACK_MESSAGE.format(extra_text=''),
            force_message
        )
    def has_been_merged(self, base, candidate):
        """
        Return whether ``candidate`` has been merged into ``base``.
        """
        try:
            comparison = self.github_repo.compare(base, candidate)
        except UnknownObjectException:
            return False
        # 'behind'/'identical' means base already contains candidate.
        return comparison.status in ('behind', 'identical')
    def find_approved_not_closed_prs(self, pr_base):
        """
        Yield all pull requests in the repo against ``pr_base`` that are approved and not closed.
        """
        query = "type:pr review:approved base:{} state:open state:merged".format(pr_base)
        for issue in self.github_connection.search_issues(query):
            yield self.github_repo.get_pull(issue.number)
|
eltoncarr/tubular
|
tubular/github_api.py
|
Python
|
agpl-3.0
| 31,256
|
#!python
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License
# Version 1.1 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
# License for the specific language governing rights and limitations
# under the License.
#
# The Original Code is Komodo code.
#
# The Initial Developer of the Original Code is ActiveState Software Inc.
# Portions created by ActiveState Software Inc are Copyright (C) 2000-2007
# ActiveState Software Inc. All Rights Reserved.
#
# Contributor(s):
# ActiveState Software Inc
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
import sys
import os
from os.path import (join, dirname, exists, expanduser, splitext, basename,
split, abspath, isabs, isdir, isfile)
import threading
from hashlib import md5
from pprint import pprint, pformat
import logging
import codecs
import weakref
from codeintel2.common import *
#---- globals
log = logging.getLogger("codeintel.db")
#---- Database zone and lib implementations
class ProjectZone(object):
    """Manage a 'db/projs/<proj-hash>/...' area of the database.
    A project zone works with a project object(*) to provide quick
    mapping of a (lang, blobname) to a file in the project, if any.
    # Dealing with updating
    Knowing when a file has been removed from a project is fairly easy:
    we hit it in the cache, then do a quick stat (or query on the
    project) to ensure it it still there.
    Knowing when a file has been added to a project is harder. Fully
    hooking into Komodo's file system-level dir watching and various
    in-Komodo update notifications is hard (doesn't translate well to
    simply requiring an API on the project object) and isn't perfect
    anyway. Ideally .dirs_from_basename() is all handled by the project
    object and we don't have to worry about it. However, Komodo Projects
    aren't currently setup to do this well, so codeintel is taking the
    burden of caching.
    The planned solution is to attempt a reasonable job of creating the
    dirs_from_basename cache and then providing a manual interface
    (perhaps right-click on Project -> "Refresh Status") to update.
    (*) The project object is required to have the following API:
    TODO: spec the API.
    """
    def __init__(self, mgr, db, proj):
        self.mgr = mgr
        self.db = db
        self.proj = proj
        self.name = basename(proj.path)
        # Key this zone's on-disk area by a hash of the project path so
        # distinct projects never share a directory.
        self.base_dir = join(self.db.base_dir, "db", "projs",
                             md5(proj.path).hexdigest())
        # lang -> ProjectLib cache; weak values so unused libs can be GC'd.
        self._proj_lib_from_lang = weakref.WeakValueDictionary()
        # Guards all access to the dirs_from_basename index.
        self._idx_lock = threading.RLock()
        self._dirs_from_basename = None  # lazily loaded {basename: [dirs]} index
        self._is_idx_dirty = False  # True when the index needs saving
    def __repr__(self):
        return "<proj '%s' zone>" % self.name
    def __del__(self):
        # Best-effort persistence of the index when the zone is collected.
        try:
            self.save()
        except:
            log.exception("error saving %s" % self)
    def get_dirs_from_basename(self):
        # Lazily load the pickled index from disk on first request
        # (an empty dict when no index file exists yet).
        self._idx_lock.acquire()
        try:
            if self._dirs_from_basename is None:
                log.debug("fs-read: load %s 'dirs_from_basename' index", self)
                self._dirs_from_basename = self.db.load_pickle(
                    join(self.base_dir, "dirs_from_basename"), {})
            return self._dirs_from_basename
        finally:
            self._idx_lock.release()
    def set_dirs_from_basename(self, value):
        # Replace the index; only mark it dirty when it actually changed.
        self._idx_lock.acquire()
        try:
            old_value = self.dirs_from_basename
            self._dirs_from_basename = value
            if old_value != value:
                # PERF: can this be smarter? Would have to be on
                # .update() for that.
                self._is_idx_dirty = True
        finally:
            self._idx_lock.release()
    dirs_from_basename = property(get_dirs_from_basename,
        set_dirs_from_basename, None, "index of basenames in project")
    def _mk_dbdir(self):
        # Create the zone's db dir and record which project path it is for.
        log.debug("fs-write: mkdir '%s'", self.base_dir)
        os.makedirs(self.base_dir)
        log.debug("fs-write: '%s/path'", self.base_dir)
        fout = codecs.open(join(self.base_dir, "path"), 'wb', 'utf-8')
        try:
            fout.write(self.proj.path)
        finally:
            fout.close()
    def save(self):
        # Persist the index, but only if it changed since the last save.
        self._idx_lock.acquire()
        try:
            if self._is_idx_dirty:
                if not exists(self.base_dir):
                    self._mk_dbdir()
                self.db.save_pickle(join(self.base_dir, "dirs_from_basename"),
                                    self._dirs_from_basename)
                self._is_idx_dirty = False
        finally:
            self._idx_lock.release()
    def update(self, nice=False):
        """Update the index for the list of files in the project.
        "nice" (default False) is a boolean indicating if this
        update process should attempt to keep the CPU load low.
        """
        if nice:
            XXX  # placeholder: "nice" mode is not implemented (raises NameError)
        # XXX Update this to handle includes, excludes,
        # static-project-entries. I.e. move this logic to the
        # project where it can handle this stuff.
        dirs_from_basename = {}
        for dirpath, dirnames, filenames in os.walk(self.proj.base_dir):
            for filename in filenames:
                dirs_from_basename.setdefault(filename, []).append(dirpath)
        self.dirs_from_basename = dirs_from_basename
    def _likely_filename_from_lang_and_blobname(self, lang, blobname):
        # Guess the basename a blob (import name) would live in for `lang`.
        # XXX Need to canonicalize filename.
        # XXX Shouldn't be hardcoding this stuff here. Defer out to the
        # lang_*.py modules.
        # XXX Do we have to worry about multi-level imports here? E.g.,
        # Python: os.path
        # Perl: LWP::UserAgent
        # Ruby: yaml/context
        # PHP: blah/blam.php
        if lang in ("Python", "Python3"):
            return blobname+".py"
        else:
            XXX  # placeholder: only Python is supported so far (raises NameError)
    def has_blob(self, lang, blobname):
        # True iff the project has a file likely to define this blob.
        lang_lib = self._lang_lib_for_blob(lang, blobname)
        if lang_lib is None:
            return False
        return lang_lib.has_blob(blobname)
    def get_blob(self, lang, blobname):
        # Return the blob from the matching lang lib, or None.
        lang_lib = self._lang_lib_for_blob(lang, blobname)
        if lang_lib is None:
            return None
        return lang_lib.get_blob(blobname)
    def _lang_lib_for_blob(self, lang, blobname):
        # Build a lang lib over the project dirs that contain a file with
        # the blob's likely basename; None if no such file is indexed.
        filename = self._likely_filename_from_lang_and_blobname(lang, blobname)
        try:
            dirs = self.dirs_from_basename[filename]
        except KeyError:
            return None
        else:
            # XXX This may be a perf issue because of a possibly large
            # number of created LangDirsLib's -- which was unexpected
            # when the LangDirsLibs caching was designed on LangZone.
            # The cache size may need to be increased or some other
            # scheme considered.
            return self.db.get_lang_lib(lang, "proj '%s' lib" % self.name,
                dirs,
                sublang=lang) # for PHP
    def get_lib(self, lang):
        # Cached per-language facade over this zone; see ProjectLib.
        proj_lib = self._proj_lib_from_lang.get(lang)
        if proj_lib is None:
            proj_lib = ProjectLib(self, lang)
            self._proj_lib_from_lang[lang] = proj_lib
        return proj_lib
class ProjectLib(object):
    """Thin language-specific facade over a ProjectZone (compare CatalogLib).

    Binds one language so callers can query blobs without repeating the
    language argument on every call.
    """

    def __init__(self, proj_zone, lang):
        self.proj_zone = proj_zone
        self.lang = lang

    def __repr__(self):
        return "<proj '{0}' {1} lib>".format(self.proj_zone.name, self.lang)

    def has_blob(self, blobname):
        # Delegate to the zone, supplying the bound language.
        return self.proj_zone.has_blob(self.lang, blobname)

    def get_blob(self, blobname):
        # Delegate to the zone, supplying the bound language.
        return self.proj_zone.get_blob(self.lang, blobname)
|
dostavro/dotfiles
|
sublime2/Packages/SublimeCodeIntel/libs/codeintel2/database/projlib.py
|
Python
|
mit
| 8,984
|
def classify(text):
    """Stub classifier used as a test fixture: accepts every input."""
    return True
|
checkr/fdep
|
tests/fixtures/serve/app.py
|
Python
|
mit
| 36
|
#!/usr/bin/python
# Usage: not_contains.py <names-file> <reference-file>
# Builds a lookup set from the part of each reference line before the first
# '.', then echoes every line of the names file, appending " yes" to those
# found in the reference set.
import sys
li = open(sys.argv[2],'r')
dictionary = {}
for line in li:
    splt = line.strip().split('.')
    # Map the key to itself; the dict is only used for membership tests.
    dictionary[splt[0]]= splt[0]
li.close()
com = open(sys.argv[1],'r')
for line in com:
    # NOTE(review): despite the script's name, lines *contained* in the
    # reference set are the ones flagged with "yes" -- confirm intent.
    if (dictionary.has_key(line.strip())):
        print line.strip() +" yes"
    else :
        print line.strip()
|
germs-lab/RefSoil
|
not_contains.py
|
Python
|
gpl-2.0
| 332
|
'''
test iam2 volume operations by platform admin/operator/member
# 1 create project
# 2 create virtual id (project admin/operator/member)
# 3 operations on volume with virtual id
# 4 delete
@author: quarkonics
'''
import os
import time
import zstackwoodpecker.test_util as test_util
import apibinding.inventory as inventory
import zstackwoodpecker.operations.account_operations as acc_ops
import zstackwoodpecker.operations.iam2_operations as iam2_ops
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.operations.image_operations as img_ops
import zstackwoodpecker.operations.volume_operations as vol_ops
import zstackwoodpecker.operations.account_operations as acc_ops
from zstackwoodpecker.operations import vm_operations as vm_ops
import zstackwoodpecker.operations.net_operations as net_ops
import zstackwoodpecker.operations.scheduler_operations as schd_ops
import zstackwoodpecker.operations.zwatch_operations as zwt_ops
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.zstack_test.zstack_test_vid as test_vid
# uuids of resources created during the test; kept at module scope so
# error_cleanup() can tear them down if test() fails midway.
project_uuid = None
virtual_id_uuid = None
project_admin_uuid = None
project_operator_uuid = None
plain_user_uuid = None
test_stub = test_lib.lib_get_test_stub()
# The CASE_FLAVOR environment variable selects which role runs the
# volume operations.
case_flavor = dict(project_admin= dict(target_role='project_admin'),
                   project_operator= dict(target_role='project_operator'),
                   project_member= dict(target_role='project_member'),
                   system_admin= dict(target_role='system_admin'),
                   )
def test():
    """Exercise basic volume operations under the role chosen by CASE_FLAVOR.

    Depending on the flavor, a project plus a project admin/operator/member
    virtual id (or a separate system admin) is created and logged in; the
    resulting session then creates, stops, starts, attaches, detaches,
    deletes and expunges a data volume, after which all created identities
    and the project are removed.
    """
    global project_uuid, project_admin_uuid, virtual_id_uuid, project_operator_uuid, plain_user_uuid
    flavor = case_flavor[os.environ.get('CASE_FLAVOR')]
    # 1 create project
    if flavor['target_role'] != 'system_admin':
        project_name = 'test_project'
        project = iam2_ops.create_iam2_project(project_name)
        project_uuid = project.uuid
        project_linked_account_uuid = project.linkedAccountUuid
    if flavor['target_role'] == 'project_admin':
        # 2 create virtual id
        project_admin_name = 'username'
        project_admin_password = 'password'
        project_admin_uuid = iam2_ops.create_iam2_virtual_id(project_admin_name, project_admin_password).uuid
        virtual_id_uuid = iam2_ops.create_iam2_virtual_id('usernametwo', 'password').uuid
        # 3 create project admin
        iam2_ops.add_iam2_virtual_ids_to_project([project_admin_uuid],project_uuid)
        attributes = [{"name": "__ProjectAdmin__", "value": project_uuid}]
        iam2_ops.add_attributes_to_iam2_virtual_id(project_admin_uuid, attributes)
        # 4 add the project admin role
        # NOTE(review): hard-coded role uuid -- presumably the well-known
        # built-in project-admin role on the simulator; confirm.
        projectadminrole_uuid='55553cefbbfb42468873897c95408a43'
        iam2_ops.add_roles_to_iam2_virtual_id([projectadminrole_uuid], virtual_id_uuid)
        # login in project by project admin
        project_admin_session_uuid = iam2_ops.login_iam2_virtual_id(project_admin_name, project_admin_password)
        project_login_uuid = iam2_ops.login_iam2_project(project_name, session_uuid=project_admin_session_uuid).uuid
        # iam2_ops.remove_attributes_from_iam2_virtual_id(virtual_id_uuid, attributes)
    elif flavor['target_role'] == 'project_operator':
        project_operator_name = 'username2'
        project_operator_password = 'password'
        attributes = [{"name": "__ProjectOperator__", "value": project_uuid}]
        project_operator_uuid = iam2_ops.create_iam2_virtual_id(project_operator_name,project_operator_password,attributes=attributes).uuid
        virtual_id_uuid = iam2_ops.create_iam2_virtual_id('usernamethree','password').uuid
        # login in project by project operator
        iam2_ops.add_iam2_virtual_ids_to_project([project_operator_uuid],project_uuid)
        project_operator_session_uuid = iam2_ops.login_iam2_virtual_id(project_operator_name,project_operator_password)
        project_login_uuid = iam2_ops.login_iam2_project(project_name,session_uuid=project_operator_session_uuid).uuid
    elif flavor['target_role'] == 'project_member':
        plain_user_name = 'username'
        plain_user_password = 'password'
        plain_user_uuid = iam2_ops.create_iam2_virtual_id(plain_user_name, plain_user_password,
                                                          project_uuid=project_uuid).uuid
        # 3 add virtual id to project
        iam2_ops.add_iam2_virtual_ids_to_project([plain_user_uuid],project_uuid)
        # 4 login in project by plain user
        plain_user_session_uuid = iam2_ops.login_iam2_virtual_id(plain_user_name, plain_user_password)
        # 4 login in project
        #project_inv=iam2_ops.get_iam2_projects_of_virtual_id(plain_user_session_uuid)
        project_login_uuid = iam2_ops.login_iam2_project(project_name, plain_user_session_uuid).uuid
    elif flavor['target_role'] == 'system_admin':
        username = "systemAdmin"
        password = 'b109f3bbbc244eb82441917ed06d618b9008dd09b3befd1b5e07394c706a8bb980b1d7785e5976ec049b46df5f1326af5a2ea6d103fd07c95385ffab0cacbc86'
        vid_tst_obj = test_vid.ZstackTestVid()
        test_stub.create_system_admin(username, password, vid_tst_obj)
        virtual_id_uuid = vid_tst_obj.get_vid().uuid
        # add the system admin role
        systemadminrole_uuid='2069fe8ff0fb49efac0d4db3650a8076'
        iam2_ops.add_roles_to_iam2_virtual_id([systemadminrole_uuid], virtual_id_uuid)
        project_login_uuid = acc_ops.login_by_account(username, password)
    # Image related ops: Add, Delete, Expunge, sync image size, Update QGA, delete, expunge
    if flavor['target_role'] == 'project_member':
        # Plain members need explicit volume/vm permissions granted via a role.
        statements = [{"effect": "Allow", "actions": ["org.zstack.header.volume.**"]}, {"effect": "Allow", "actions": ["org.zstack.header.vm.**"]}]
        role_uuid = iam2_ops.create_role('test_role', statements).uuid
        iam2_ops.add_roles_to_iam2_virtual_id([role_uuid], plain_user_uuid)
    # Volume related ops: Create, Delete, Expunge, Attach, Dettach, Enable, Disable
    disk_offering_uuid = res_ops.query_resource(res_ops.DISK_OFFERING)[0].uuid
    if flavor['target_role'] != 'system_admin':
        acc_ops.share_resources([project_linked_account_uuid], [disk_offering_uuid])
    volume_option = test_util.VolumeOption()
    volume_option.set_disk_offering_uuid(disk_offering_uuid)
    volume_option.set_name('data_volume_project_management')
    volume_option.set_session_uuid(project_login_uuid)
    data_volume = vol_ops.create_volume_from_offering(volume_option)
    vol_ops.stop_volume(data_volume.uuid, session_uuid=project_login_uuid)
    vol_ops.start_volume(data_volume.uuid, session_uuid=project_login_uuid)
    vm_creation_option = test_util.VmOption()
    l3_net_uuid = test_lib.lib_get_l3_by_name(os.environ.get('l3VlanNetwork3')).uuid
    if flavor['target_role'] != 'system_admin':
        acc_ops.share_resources([project_linked_account_uuid], [l3_net_uuid])
    vm_creation_option.set_l3_uuids([l3_net_uuid])
    image_uuid = test_lib.lib_get_image_by_name("centos").uuid
    vm_creation_option.set_image_uuid(image_uuid)
    if flavor['target_role'] != 'system_admin':
        acc_ops.share_resources([project_linked_account_uuid], [image_uuid])
    instance_offering_uuid = test_lib.lib_get_instance_offering_by_name(os.environ.get('instanceOfferingName_s')).uuid
    vm_creation_option.set_instance_offering_uuid(instance_offering_uuid)
    if flavor['target_role'] != 'system_admin':
        acc_ops.share_resources([project_linked_account_uuid], [instance_offering_uuid])
    vm_creation_option.set_name('vm_for_project_management')
    vm_creation_option.set_session_uuid(project_login_uuid)
    vm = test_stub.create_vm(image_uuid = image_uuid, session_uuid=project_login_uuid)
    vm_uuid = vm.get_vm().uuid
    vol_ops.attach_volume(data_volume.uuid, vm_uuid, session_uuid=project_login_uuid)
    vol_ops.detach_volume(data_volume.uuid, vm_uuid, session_uuid=project_login_uuid)
    vol_ops.delete_volume(data_volume.uuid, session_uuid=project_login_uuid)
    vol_ops.expunge_volume(data_volume.uuid, session_uuid=project_login_uuid)
    # 11 delete
    acc_ops.logout(project_login_uuid)
    if virtual_id_uuid != None:
        iam2_ops.delete_iam2_virtual_id(virtual_id_uuid)
    if project_admin_uuid != None:
        iam2_ops.delete_iam2_virtual_id(project_admin_uuid)
    if project_operator_uuid != None:
        iam2_ops.delete_iam2_virtual_id(project_operator_uuid)
    if plain_user_uuid != None:
        iam2_ops.delete_iam2_virtual_id(plain_user_uuid)
    if flavor['target_role'] != 'system_admin':
        iam2_ops.delete_iam2_project(project_uuid)
        iam2_ops.expunge_iam2_project(project_uuid)
    test_util.test_pass('success test iam2 login in by project admin!')
def error_cleanup():
    """Best-effort teardown used when test() aborts midway.

    Deletes whichever virtual ids and project were created before the
    failure; globals still set to None are skipped.
    """
    global project_uuid, project_admin_uuid, virtual_id_uuid
    for vid_uuid in (virtual_id_uuid, project_admin_uuid):
        if vid_uuid:
            iam2_ops.delete_iam2_virtual_id(vid_uuid)
    if project_uuid:
        iam2_ops.delete_iam2_project(project_uuid)
        iam2_ops.expunge_iam2_project(project_uuid)
    if plain_user_uuid is not None:
        iam2_ops.delete_iam2_virtual_id(plain_user_uuid)
|
zstackio/zstack-woodpecker
|
integrationtest/vm/simulator/iam2/test_iam2_project_basic_ops_volume.py
|
Python
|
apache-2.0
| 9,214
|
#!usr/bin/python
# -*- coding: utf-8 -*-
from datetime import datetime
import json
from utils import log
class MessageParser():
    """
    Parse json and create Message object with the parse-method.
    """

    @staticmethod
    def parse(json_string):
        """Decode *json_string*; require a "type" key and wrap the payload."""
        payload = json.loads(json_string)
        msg_type = payload.get("type")
        if not msg_type:
            raise Exception("There is no type defined in the message.")
        return Message(msg_type, payload.get("data"))
class Message():
    """Plain value object pairing a message type with its data payload."""

    def __init__(self, type, data):
        self.type = type
        self.data = data
class JsonComposer():
    """
    Composes a json String, according to our schema:
    {
        "type" : "TYPE",
        "data": DATA
    }
    """
    def to_json(self, type, data):
        """Serialize *type* and *data* into the schema above.

        If serialization fails (e.g. *data* is not json-encodable), the
        error is logged and a fallback ERROR message is returned instead
        of raising.
        """
        try:
            return json.dumps({"type": type, "data": data})
        except Exception as e:
            # Log the actual failure (the previous code dropped the
            # exception and logged a bare, uninformative message).
            log.error("Error while composing Message: {}".format(e))
            return json.dumps(
                {
                    "type": "ERROR",
                    "data": "Error while composing json. Timestamp: {}".format(datetime.now())
                }
            )
|
knutgoetz/evka01
|
gatttool_scripts/parser.py
|
Python
|
mit
| 1,291
|
# Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# This file is part of kiwi.
#
# kiwi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kiwi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kiwi. If not, see <http://www.gnu.org/licenses/>
#
import os
from string import Template
from textwrap import dedent
class PackageManagerTemplateAptGet(object):
    """
    apt-get configuration file template

    Holds the text fragments from which host-scoped and image-scoped
    apt-get configuration files are assembled.
    """
    def __init__(self):
        # Header for apt-get runs on the build host (outside the chroot):
        # state/cache/etc paths are redirected into a shared base dir.
        self.host_header = dedent('''
            # kiwi generated apt-get config file
            Dir "/";
            Dir::State "${apt_shared_base}/";
            Dir::Cache "${apt_shared_base}/";
            Dir::Etc "${apt_shared_base}/";
        ''').strip() + os.linesep
        # Header for apt-get runs chrooted inside the image.
        self.image_header = dedent('''
            # kiwi generated apt-get config file
            Dir "/";
        ''').strip() + os.linesep
        # APT::Get options shared by both variants.
        self.apt = dedent('''
            APT
            {
               Get
               {
                  AllowDowngrades "true";
                  AllowRemoveEssential "true";
                  AllowChangeHeldPackages "true";
                  AllowUnauthenticated "${unauthenticated}";
               }
            };
        ''').strip() + os.linesep
        # Optional dpkg path filters that drop man pages and docs
        # (copyright files are kept).
        self.dpkg_exclude_docs = dedent('''
            DPkg::Options {
               "--path-exclude=/usr/share/man/*";
               "--path-exclude=/usr/share/doc/*";
               "--path-include=/usr/share/doc/*/copyright";
            };
        ''').strip() + os.linesep

    def get_host_template(self, exclude_docs=False):
        """
        apt-get package manager template for apt-get called
        outside of the image, not chrooted

        :rtype: Template
        """
        return self._assemble(self.host_header, exclude_docs)

    def get_image_template(self, exclude_docs=False):
        """
        apt-get package manager template for apt-get called
        inside of the image, chrooted

        :rtype: Template
        """
        return self._assemble(self.image_header, exclude_docs)

    def _assemble(self, header, exclude_docs):
        # Concatenate header + shared APT options, optionally appending
        # the doc-excluding dpkg options, and wrap as a Template.
        template_data = header + self.apt
        if exclude_docs:
            template_data += self.dpkg_exclude_docs
        return Template(template_data)
|
adrianschroeter/kiwi
|
kiwi/repository/template/apt.py
|
Python
|
gpl-3.0
| 2,728
|
# -*- coding: utf-8 -*-
import httplib as http
from flask import request
from modularodm.exceptions import ValidationError, ValidationValueError
from framework import forms, status
from framework.auth import cas
from framework.auth import User
from framework.auth.core import get_user, generate_verification_key
from framework.auth.decorators import block_bing_preview, collect_auth, must_be_logged_in
from framework.auth.forms import PasswordForm, SetEmailAndPasswordForm
from framework.auth.signals import user_registered
from framework.auth.utils import validate_email, validate_recaptcha
from framework.exceptions import HTTPError
from framework.flask import redirect # VOL-aware redirect
from framework.sessions import session
from framework.transactions.handlers import no_auto_transaction
from website import mails, language, settings
from website.models import Node, PreprintService
from website.notifications.utils import check_if_all_global_subscriptions_are_none
from website.profile import utils as profile_utils
from website.project.decorators import (must_have_permission, must_be_valid_project, must_not_be_registration,
must_be_contributor_or_public, must_be_contributor)
from website.project.model import has_anonymous_link
from website.project.signals import unreg_contributor_added, contributor_added
from website.util import sanitize
from website.util import web_url_for, is_json_request
from website.util.permissions import expand_permissions, ADMIN
from website.util.time import get_timestamp, throttle_period_expired
from website.exceptions import NodeStateError
@collect_auth
@must_be_valid_project(retractions_valid=True)
def get_node_contributors_abbrev(auth, node, **kwargs):
    """Return an abbreviated, surname-formatted contributor listing.

    At most ``max_count`` contributors are returned (default 3); when more
    exist, the last listed entry is joined with ' &' and ``others_count``
    reports how many were omitted.

    :raises: HTTPError(403) if the link is anonymous or *auth* cannot
        view the node.
    """
    anonymous = has_anonymous_link(node, auth)
    formatter = 'surname'
    max_count = kwargs.get('max_count', 3)
    if 'user_ids' in kwargs:
        # Restrict to the requested users that actually contribute to the node.
        users = [
            User.load(user_id) for user_id in kwargs['user_ids']
            if node.contributor_set.filter(user__guid__guid=user_id).exists()
        ]
    else:
        users = node.visible_contributors
    if anonymous or not node.can_view(auth):
        raise HTTPError(http.FORBIDDEN)
    contributors = []
    n_contributors = len(users)
    others_count = ''
    for index, user in enumerate(users[:max_count]):
        if index == max_count - 1 and len(users) > max_count:
            separator = ' &'
            # Bug fix: was hard-coded as ``n_contributors - 3``, which was
            # wrong whenever a caller passed a max_count other than 3.
            others_count = str(n_contributors - max_count)
        elif index == len(users) - 1:
            separator = ''
        elif index == len(users) - 2:
            separator = ' &'
        else:
            separator = ','
        contributor = user.get_summary(formatter)
        contributor['user_id'] = user._primary_key
        contributor['separator'] = separator
        contributors.append(contributor)
    return {
        'contributors': contributors,
        'others_count': others_count,
    }
@collect_auth
@must_be_valid_project(retractions_valid=True)
def get_contributors(auth, node, **kwargs):
    """Serialize the visible contributors of *node*.

    An optional ``limit`` query argument caps how many contributors are
    returned; when given, the response also includes a ``more`` count of
    the contributors left out.

    :raises: HTTPError(400) on a non-integer limit, HTTPError(403) for
        anonymous or unauthorized views.
    """
    limit = None
    raw_limit = request.args.get('limit')
    if raw_limit:
        try:
            limit = int(raw_limit)
        except ValueError:
            raise HTTPError(http.BAD_REQUEST, data=dict(
                message_long='Invalid value for "limit": {}'.format(raw_limit)
            ))
    if has_anonymous_link(node, auth) or not node.can_view(auth):
        raise HTTPError(http.FORBIDDEN)
    # A None limit leaves the contributor list unsliced.
    visible = node.visible_contributors
    contribs = profile_utils.serialize_contributors(
        visible[0:limit],
        node=node,
    )
    if not limit:
        return {'contributors': contribs}
    return {
        'contributors': contribs,
        'more': max(0, len(visible) - limit),
    }
@must_be_logged_in
@must_be_valid_project
def get_contributors_from_parent(auth, node, **kwargs):
    """Serialize the visible contributors of *node*'s parent.

    :raises: HTTPError(400) when the node has no parent, HTTPError(403)
        when *auth* cannot view the node.
    """
    parent = node.parent_node
    if not parent:
        raise HTTPError(http.BAD_REQUEST)
    if not node.can_view(auth):
        raise HTTPError(http.FORBIDDEN)
    serialized = []
    for contrib in parent.visible_contributors:
        serialized.append(profile_utils.add_contributor_json(contrib))
    return {'contributors': serialized}
def deserialize_contributors(node, user_dicts, auth, validate=False):
    """View helper that returns a list of User objects from a list of
    serialized users (dicts). The users in the list may be registered or
    unregistered users.
    e.g. ``[{'id': 'abc123', 'registered': True, 'fullname': ..},
    {'id': None, 'registered': False, 'fullname'...},
    {'id': '123ab', 'registered': False, 'fullname': ...}]
    If a dict represents an unregistered user without an ID, creates a new
    unregistered User record.
    :param Node node: The node to add contributors to
    :param list(dict) user_dicts: List of serialized users in the format above.
    :param Auth auth:
    :param bool validate: Whether to validate and sanitize fields (if necessary)
    :returns: list of dicts with 'user', 'visible' and 'permissions' keys,
        suitable for ``Node.add_contributors``.
    """
    # Add the registered contributors
    contribs = []
    for contrib_dict in user_dicts:
        fullname = contrib_dict['fullname']
        visible = contrib_dict['visible']
        email = contrib_dict.get('email')
        if validate is True:
            # Validate and sanitize inputs as needed. Email will raise error if invalid.
            # TODO Edge case bug: validation and saving are performed in same loop, so all in list
            # up to the invalid entry will be saved. (communicate to the user what needs to be retried)
            fullname = sanitize.strip_html(fullname)
            if not fullname:
                raise ValidationValueError('Full name field cannot be empty')
            if email:
                validate_email(email) # Will raise a ValidationError if email invalid
        if contrib_dict['id']:
            contributor = User.load(contrib_dict['id'])
        else:
            # No id given: create a fresh unregistered User; if that fails
            # validation (e.g. the email already exists) look up the
            # existing account by email instead.
            try:
                contributor = User.create_unregistered(
                    fullname=fullname,
                    email=email)
                contributor.save()
            except ValidationError:
                ## FIXME: This suppresses an exception if ID not found & new validation fails; get_user will return None
                contributor = get_user(email=email)
        # Add unclaimed record if necessary
        if (not contributor.is_registered
                and node._primary_key not in contributor.unclaimed_records):
            contributor.add_unclaimed_record(node=node, referrer=auth.user,
                given_name=fullname,
                email=email)
            contributor.save()
        contribs.append({
            'user': contributor,
            'visible': visible,
            'permissions': expand_permissions(contrib_dict.get('permission'))
        })
    return contribs
@unreg_contributor_added.connect
def finalize_invitation(node, contributor, auth, email_template='default'):
    """Signal handler: send the claim email once an unregistered
    contributor has been added to *node*.

    Does nothing when the contributor has no unclaimed record for this
    node, or when the record carries no email address.
    """
    try:
        record = contributor.get_unclaimed_record(node._primary_key)
    except ValueError:
        return
    claim_email = record['email']
    if claim_email:
        send_claim_email(claim_email, contributor, node, notify=True, email_template=email_template)
@must_be_valid_project
@must_have_permission(ADMIN)
@must_not_be_registration
def project_contributors_post(auth, node, **kwargs):
    """ Add contributors to a node.

    Expects a json body with 'users' (serialized contributor dicts) and
    'node_ids' (ids of child nodes that receive the same contributors).
    Returns the serialized visible contributors with status 201, or a
    400-status payload when validation fails.
    """
    user_dicts = request.json.get('users')
    node_ids = request.json.get('node_ids')
    # NOTE(review): this membership test runs before the None check below,
    # so a missing 'node_ids' key raises TypeError here rather than the
    # intended 400 response -- confirm and consider reordering.
    if node._id in node_ids:
        node_ids.remove(node._id)
    if user_dicts is None or node_ids is None:
        raise HTTPError(http.BAD_REQUEST)
    # Prepare input data for `Node::add_contributors`
    try:
        contribs = deserialize_contributors(node, user_dicts, auth=auth, validate=True)
    except ValidationError as e:
        return {'status': 400, 'message': e.message}, 400
    try:
        node.add_contributors(contributors=contribs, auth=auth)
    except NodeStateError as e:
        return {'status': 400, 'message': e.args[0]}, 400
    node.save()
    # Disconnect listener to avoid multiple invite emails
    unreg_contributor_added.disconnect(finalize_invitation)
    for child_id in node_ids:
        child = Node.load(child_id)
        # Only email unreg users once
        try:
            child_contribs = deserialize_contributors(
                child, user_dicts, auth=auth, validate=True
            )
        except ValidationError as e:
            return {'status': 400, 'message': e.message}, 400
        child.add_contributors(contributors=child_contribs, auth=auth)
        child.save()
    # Reconnect listeners
    unreg_contributor_added.connect(finalize_invitation)
    return {
        'status': 'success',
        'contributors': profile_utils.serialize_contributors(
            node.visible_contributors,
            node=node,
        )
    }, 201
@no_auto_transaction
@must_be_valid_project # injects project
@must_have_permission(ADMIN)
@must_not_be_registration
def project_manage_contributors(auth, node, **kwargs):
    """Reorder and remove contributors.

    Returns a redirect URL when the acting user removed herself from the
    project; otherwise an empty dict (stay on the current page).

    :param Auth auth: Consolidated authorization
    :param-json list contributors: Ordered list of contributors represented as
    dictionaries of the form:
    {'id': <id>, 'permission': <One of 'read', 'write', 'admin'>}
    :raises: HTTPError(400) if contributors to be removed are not in list
    or if no admin users would remain after changes were applied
    """
    contributors = request.json.get('contributors')
    # Update permissions and order
    try:
        node.manage_contributors(contributors, auth=auth, save=True)
    except (ValueError, NodeStateError) as error:
        raise HTTPError(http.BAD_REQUEST, data={'message_long': error.args[0]})
    # If user has removed herself from project, alert; redirect to
    # node summary if node is public, else to user's dashboard page
    if not node.is_contributor(auth.user):
        status.push_status_message(
            'You have removed yourself as a contributor from this project',
            kind='success',
            trust=False
        )
        if node.is_public:
            return {'redirectUrl': node.url}
        return {'redirectUrl': web_url_for('dashboard')}
    # Else if user has revoked her admin permissions, alert and stay on
    # current page
    if not node.has_permission(auth.user, ADMIN):
        status.push_status_message(
            'You have removed your administrative privileges for this project',
            kind='success',
            trust=False
        )
    # Else stay on current page
    return {}
@must_be_valid_project # returns project
@must_be_contributor
@must_not_be_registration
def project_remove_contributor(auth, **kwargs):
    """Remove a contributor from a list of nodes.

    Expects a json body with 'contributorID' and 'nodeIDs'. The first id
    in 'nodeIDs' is treated as the parent node; only a self-removal there
    triggers the status message / redirect in the response.

    :param Auth auth: Consolidated authorization
    :raises: HTTPError(400) if contributors to be removed are not in list
    or if no admin users would remain after changes were applied
    """
    contributor_id = request.get_json()['contributorID']
    node_ids = request.get_json()['nodeIDs']
    contributor = User.load(contributor_id)
    if contributor is None:
        raise HTTPError(http.BAD_REQUEST, data={'message_long': 'Contributor not found.'})
    redirect_url = {}
    parent_id = node_ids[0]
    for node_id in node_ids:
        # Update permissions and order
        node = Node.load(node_id)
        # Forbidden unless user is removing herself
        if not node.has_permission(auth.user, 'admin'):
            if auth.user != contributor:
                raise HTTPError(http.FORBIDDEN)
        # Refuse to drop the last bibliographic (visible) contributor.
        if node.visible_contributors.count() == 1 \
                and node.visible_contributors[0] == contributor:
            raise HTTPError(http.FORBIDDEN, data={
                'message_long': 'Must have at least one bibliographic contributor'
            })
        nodes_removed = node.remove_contributor(contributor, auth=auth)
        # remove_contributor returns false if there is not one admin or visible contributor left after the move.
        if not nodes_removed:
            raise HTTPError(http.BAD_REQUEST, data={
                'message_long': 'Could not remove contributor.'})
        # On parent node, if user has removed herself from project, alert; redirect to
        # node summary if node is public, else to user's dashboard page
        if not node.is_contributor(auth.user) and node_id == parent_id:
            status.push_status_message(
                'You have removed yourself as a contributor from this project',
                kind='success',
                trust=False
            )
            if node.is_public:
                redirect_url = {'redirectUrl': node.url}
            else:
                redirect_url = {'redirectUrl': web_url_for('dashboard')}
    return redirect_url
def send_claim_registered_email(claimer, unclaimed_user, node, throttle=24 * 3600):
    """
    A registered user claiming the unclaimed user account as an contributor to a project.
    Send an email for claiming the account to the referrer and notify the claimer.
    :param claimer: the claimer
    :param unclaimed_user: the user account to claim
    :param node: the project node where the user account is claimed
    :param throttle: the time period in seconds before another claim for the account can be made
    :return:
    :raise: http.BAD_REQUEST
    """
    unclaimed_record = unclaimed_user.get_unclaimed_record(node._primary_key)
    # check throttle
    timestamp = unclaimed_record.get('last_sent')
    if not throttle_period_expired(timestamp, throttle):
        raise HTTPError(http.BAD_REQUEST, data=dict(
            message_long='User account can only be claimed with an existing user once every 24 hours'
        ))
    # roll the valid token for each email, thus user cannot change email and approve a different email address
    verification_key = generate_verification_key(verification_type='claim')
    unclaimed_record['token'] = verification_key['token']
    unclaimed_record['expires'] = verification_key['expires']
    unclaimed_record['claimer_email'] = claimer.username
    unclaimed_user.save()
    referrer = User.load(unclaimed_record['referrer_id'])
    claim_url = web_url_for(
        'claim_user_registered',
        uid=unclaimed_user._primary_key,
        pid=node._primary_key,
        token=unclaimed_record['token'],
        _external=True,
    )
    # Send mail to referrer, telling them to forward verification link to claimer
    mails.send_mail(
        referrer.username,
        mails.FORWARD_INVITE_REGISTERED,
        user=unclaimed_user,
        referrer=referrer,
        node=node,
        claim_url=claim_url,
        fullname=unclaimed_record['name'],
    )
    # Record the send time so the throttle above applies to the next attempt.
    unclaimed_record['last_sent'] = get_timestamp()
    unclaimed_user.save()
    # Send mail to claimer, telling them to wait for referrer
    mails.send_mail(
        claimer.username,
        mails.PENDING_VERIFICATION_REGISTERED,
        fullname=claimer.fullname,
        referrer=referrer,
        node=node,
    )
def send_claim_email(email, unclaimed_user, node, notify=True, throttle=24 * 3600, email_template='default'):
    """
    Unregistered user claiming a user account as an contributor to a project. Send an email for claiming the account.
    Either sends to the given email or the referrer's email, depending on the email address provided.
    :param str email: The address given in the claim user form
    :param User unclaimed_user: The User record to claim.
    :param Node node: The node where the user claimed their account.
    :param bool notify: If True and an email is sent to the referrer, an email
    will also be sent to the invited user about their pending verification.
    :param int throttle: Time period (in seconds) after the referrer is
    emailed during which the referrer will not be emailed again.
    :param str email_template: the email template to use
    :return
    :raise http.BAD_REQUEST
    """
    claimer_email = email.lower().strip()
    unclaimed_record = unclaimed_user.get_unclaimed_record(node._primary_key)
    referrer = User.load(unclaimed_record['referrer_id'])
    claim_url = unclaimed_user.get_claim_url(node._primary_key, external=True)
    # Option 1:
    # When adding the contributor, the referrer provides both name and email.
    # The given email is the same provided by user, just send to that email.
    preprint_provider = None
    if unclaimed_record.get('email') == claimer_email:
        # check email template for branded preprints
        if email_template == 'preprint':
            email_template, preprint_provider = find_preprint_provider(node)
            if not email_template or not preprint_provider:
                return
            mail_tpl = getattr(mails, 'INVITE_PREPRINT')(email_template, preprint_provider)
        else:
            # NOTE(review): 'INVITE_DEFAULT'.format(...) contains no
            # placeholder, so the .format() call is a no-op and this always
            # resolves to mails.INVITE_DEFAULT -- possibly meant
            # 'INVITE_{}'.format(email_template.upper()); confirm intent.
            mail_tpl = getattr(mails, 'INVITE_DEFAULT'.format(email_template.upper()))
        to_addr = claimer_email
        unclaimed_record['claimer_email'] = claimer_email
        unclaimed_user.save()
    # Option 2:
    # TODO: [new improvement ticket] this option is disabled from preprint but still available on the project page
    # When adding the contributor, the referred only provides the name.
    # The account is later claimed by some one who provides the email.
    # Send email to the referrer and ask her/him to forward the email to the user.
    else:
        # check throttle
        timestamp = unclaimed_record.get('last_sent')
        if not throttle_period_expired(timestamp, throttle):
            raise HTTPError(http.BAD_REQUEST, data=dict(
                message_long='User account can only be claimed with an existing user once every 24 hours'
            ))
        # roll the valid token for each email, thus user cannot change email and approve a different email address
        verification_key = generate_verification_key(verification_type='claim')
        unclaimed_record['last_sent'] = get_timestamp()
        unclaimed_record['token'] = verification_key['token']
        unclaimed_record['expires'] = verification_key['expires']
        unclaimed_record['claimer_email'] = claimer_email
        unclaimed_user.save()
        claim_url = unclaimed_user.get_claim_url(node._primary_key, external=True)
        # send an email to the invited user without `claim_url`
        if notify:
            pending_mail = mails.PENDING_VERIFICATION
            mails.send_mail(
                claimer_email,
                pending_mail,
                user=unclaimed_user,
                referrer=referrer,
                fullname=unclaimed_record['name'],
                node=node
            )
        mail_tpl = mails.FORWARD_INVITE
        to_addr = referrer.username
    # Send an email to the claimer (Option 1) or to the referrer (Option 2) with `claim_url`
    mails.send_mail(
        to_addr,
        mail_tpl,
        user=unclaimed_user,
        referrer=referrer,
        node=node,
        claim_url=claim_url,
        email=claimer_email,
        fullname=unclaimed_record['name'],
        branded_service_name=preprint_provider
    )
    return to_addr
@contributor_added.connect
def notify_added_contributor(node, contributor, auth=None, throttle=None, email_template='default'):
    """Email a registered contributor when they are added to a node.

    Connected to the ``contributor_added`` signal.

    :param node: the node the contributor was added to
    :param contributor: the contributor that was added (registered or not)
    :param auth: Auth of the user performing the addition, if any
    :param throttle: minimum seconds between emails to the same contributor
        about the same node; defaults to settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE
    :param email_template: 'default' or 'preprint' to select the mail template
    """
    throttle = throttle or settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE
    # Exclude forks and templates because the user forking/templating the project gets added
    # via 'add_contributor' but does not need to get notified.
    # Only email users for projects, or for components where they are not contributors on the parent node.
    if (contributor.is_registered and not node.template_node and not node.is_fork and
            (not node.parent_node or
            (node.parent_node and not node.parent_node.is_contributor(contributor)))):
        preprint_provider = None
        if email_template == 'preprint':
            email_template, preprint_provider = find_preprint_provider(node)
            if not email_template or not preprint_provider:
                return
            email_template = getattr(mails, 'CONTRIBUTOR_ADDED_PREPRINT')(email_template, preprint_provider)
        else:
            # FIX: the original looked up
            # 'CONTRIBUTOR_ADDED_DEFAULT'.format(email_template.upper()); the
            # format string has no placeholder, so .format() was a no-op and the
            # attribute fetched was always CONTRIBUTOR_ADDED_DEFAULT. Make that
            # explicit instead of carrying dead, misleading code.
            email_template = mails.CONTRIBUTOR_ADDED_DEFAULT
        # Throttle: skip if this contributor was emailed about this node recently.
        contributor_record = contributor.contributor_added_email_records.get(node._id, {})
        if contributor_record:
            timestamp = contributor_record.get('last_sent', None)
            if timestamp:
                if not throttle_period_expired(timestamp, throttle):
                    return
        else:
            contributor.contributor_added_email_records[node._id] = {}
        mails.send_mail(
            contributor.username,
            email_template,
            user=contributor,
            node=node,
            referrer_name=auth.user.fullname if auth else '',
            all_global_subscriptions_none=check_if_all_global_subscriptions_are_none(contributor),
            branded_service_name=preprint_provider
        )
        contributor.contributor_added_email_records[node._id]['last_sent'] = get_timestamp()
        contributor.save()
    elif not contributor.is_registered:
        # Unregistered contributors go through the invite/claim email flow instead.
        unreg_contributor_added.send(node, contributor=contributor, auth=auth, email_template=email_template)
def find_preprint_provider(node):
    """Locate the preprint tied to *node* and classify its service provider.

    :param node: the node to which a contributor or preprint author is added
    :return: ('osf', provider_name) for the OSF provider, ('branded',
        provider_name) otherwise, or (None, None) when no preprint/provider
        can be found for the node.
    """
    try:
        provider = PreprintService.objects.get(node=node).provider
        template = 'osf' if provider._id == 'osf' else 'branded'
        return template, provider.name
    # TODO: fine-grained exception handling
    except Exception:
        return None, None
def verify_claim_token(user, token, pid):
    """Check that a claim token is valid for a given user and node ID.

    Returns True when the token is valid; returns False when it is invalid
    for an unregistered user; raises ``HTTPError(400)`` when the token is
    invalid but the account has already been claimed (registered).
    """
    if user.verify_claim_token(token=token, project_id=pid):
        return True
    if not user.is_registered:
        return False
    # Invalid token on an already-registered account: ask the user to log in.
    raise HTTPError(400, data={
        'message_short': 'User has already been claimed.',
        'message_long': 'Please <a href="/login/">log in</a> to continue.'})
@block_bing_preview
@collect_auth
@must_be_valid_project
def claim_user_registered(auth, node, **kwargs):
    """
    View that prompts user to enter their password in order to claim being a contributor on a project.
    A user must be logged in.

    Expects ``uid`` (unregistered user id), ``pid`` (project id) and ``token``
    (claim token) in kwargs from the route. On GET renders the password form;
    on a valid POST replaces the unregistered contributor with the current
    user and redirects to the project page.
    """
    current_user = auth.user
    # NOTE(review): despite the name, this points at the register view with
    # logout=True (log out, then register/log in) — confirm intended target.
    sign_out_url = web_url_for('auth_register', logout=True, next=request.url)
    if not current_user:
        return redirect(sign_out_url)
    # Logged in user should not already be a contributor to the project
    if node.is_contributor(current_user):
        logout_url = web_url_for('auth_logout', redirect_url=request.url)
        data = {
            'message_short': 'Already a contributor',
            'message_long': ('The logged-in user is already a contributor to this '
                'project. Would you like to <a href="{}">log out</a>?').format(logout_url)
        }
        raise HTTPError(http.BAD_REQUEST, data=data)
    uid, pid, token = kwargs['uid'], kwargs['pid'], kwargs['token']
    unreg_user = User.load(uid)
    # verify_claim_token raises for registered users; a False return here means
    # the token is invalid/expired for an unregistered account.
    if not verify_claim_token(unreg_user, token, pid=node._primary_key):
        error_data = {
            'message_short': 'Invalid url.',
            'message_long': 'The token in the URL is invalid or has expired.'
        }
        raise HTTPError(http.BAD_REQUEST, data=error_data)
    # Store the unreg_user data on the session in case the user registers
    # a new account (see replace_unclaimed_user_with_registered).
    session.data['unreg_user'] = {
        'uid': uid, 'pid': pid, 'token': token
    }
    form = PasswordForm(request.form)
    if request.method == 'POST':
        if form.validate():
            # Re-authenticate with the password before swapping contributors.
            if current_user.check_password(form.password.data):
                node.replace_contributor(old=unreg_user, new=current_user)
                node.save()
                status.push_status_message(
                    'You are now a contributor to this project.',
                    kind='success',
                    trust=False
                )
                return redirect(node.url)
            else:
                status.push_status_message(language.LOGIN_FAILED, kind='warning', trust=False)
        else:
            forms.push_errors_to_status(form.errors)
    # Fall through to (re-)render the form, serialized for JSON clients.
    if is_json_request():
        form_ret = forms.utils.jsonify(form)
        user_ret = profile_utils.serialize_user(current_user, full=False)
    else:
        form_ret = form
        user_ret = current_user
    return {
        'form': form_ret,
        'user': user_ret,
        'signOutUrl': sign_out_url
    }
@user_registered.connect
def replace_unclaimed_user_with_registered(user):
    """Listens for the user_registered signal. If unreg_user is stored in the
    session, then the current user is trying to claim themselves as a contributor.
    Replaces the old, unregistered contributor with the newly registered
    account.
    """
    claim_info = session.data.get('unreg_user')
    if not claim_info:
        return
    project = Node.load(claim_info['pid'])
    project.replace_contributor(old=User.load(claim_info['uid']), new=user)
    project.save()
    status.push_status_message(
        'Successfully claimed contributor.', kind='success', trust=False)
@block_bing_preview
@collect_auth
def claim_user_form(auth, **kwargs):
    """
    View for rendering the set password page for a claimed user.
    Must have ``token`` as a querystring argument.
    Renders the set password form, validates it, and sets the user's password.
    HTTP Method: GET, POST

    Expects ``uid`` (unregistered user id) and ``pid`` (project id) in kwargs
    from the route.
    """
    uid, pid = kwargs['uid'], kwargs['pid']
    # Token may arrive via the POSTed form or the querystring.
    token = request.form.get('token') or request.args.get('token')
    user = User.load(uid)
    # If unregistered user is not in database, or url bears an invalid token raise HTTP 400 error
    if not user or not verify_claim_token(user, token, pid):
        error_data = {
            'message_short': 'Invalid url.',
            'message_long': 'Claim user does not exists, the token in the URL is invalid or has expired.'
        }
        raise HTTPError(http.BAD_REQUEST, data=error_data)
    # If user is logged in, redirect to 're-enter password' page
    if auth.logged_in:
        return redirect(web_url_for('claim_user_registered',
            uid=uid, pid=pid, token=token))
    unclaimed_record = user.unclaimed_records[pid]
    user.fullname = unclaimed_record['name']
    user.update_guessed_names()
    # The email can be the original referrer email if no claimer email has been specified.
    claimer_email = unclaimed_record.get('claimer_email') or unclaimed_record.get('email')
    # If there is a registered user with this email, redirect to 're-enter password' page
    try:
        user_from_email = User.objects.get(emails__icontains=claimer_email) if claimer_email else None
    except User.DoesNotExist:
        user_from_email = None
    if user_from_email and user_from_email.is_registered:
        return redirect(web_url_for('claim_user_registered', uid=uid, pid=pid, token=token))
    form = SetEmailAndPasswordForm(request.form, token=token)
    if request.method == 'POST':
        if not form.validate():
            forms.push_errors_to_status(form.errors)
        elif settings.RECAPTCHA_SITE_KEY and not validate_recaptcha(request.form.get('g-recaptcha-response'), remote_ip=request.remote_addr):
            status.push_status_message('Invalid captcha supplied.', kind='error')
        else:
            username, password = claimer_email, form.password.data
            # An account can only be registered against a known email address.
            if not username:
                raise HTTPError(http.BAD_REQUEST, data=dict(
                    message_long='No email associated with this account. Please claim this '
                        'account on the project to which you were invited.'
                ))
            user.register(username=username, password=password)
            # Clear unclaimed records
            user.unclaimed_records = {}
            user.verification_key = generate_verification_key()
            user.save()
            # Authenticate user and redirect to project page
            status.push_status_message(language.CLAIMED_CONTRIBUTOR, kind='success', trust=True)
            # Redirect to CAS and authenticate the user with a verification key.
            return redirect(cas.get_login_url(
                web_url_for('view_project', pid=pid, _absolute=True),
                username=user.username,
                verification_key=user.verification_key
            ))
    # GET, or failed POST validation: render the set-password form.
    return {
        'firstname': user.given_name,
        'email': claimer_email if claimer_email else '',
        'fullname': user.fullname,
        'form': forms.utils.jsonify(form) if is_json_request() else form,
    }
@must_be_valid_project
@must_have_permission(ADMIN)
@must_not_be_registration
def invite_contributor_post(node, **kwargs):
    """API view for inviting an unregistered user. Performs validation, but does not actually invite the user.
    Expects JSON arguments with 'fullname' (required) and email (not required).

    :param node: the project node (injected by the decorators)
    :return: dict with the serialized contributor on success, or an error
        payload plus HTTP 400 status on validation failure
    """
    # FIX: guard against a missing/null 'fullname' key. The original called
    # .strip() directly on request.json.get('fullname'), which raised an
    # AttributeError (HTTP 500) instead of returning the intended 400 below.
    fullname = (request.json.get('fullname') or '').strip()
    email = request.json.get('email')
    # Validate and sanitize inputs as needed. Email will raise error if invalid.
    fullname = sanitize.strip_html(fullname)
    if email:
        email = email.lower().strip()
        try:
            validate_email(email)
        except ValidationError as e:
            return {'status': 400, 'message': e.message}, 400
    if not fullname:
        return {'status': 400, 'message': 'Full name field cannot be empty'}, 400
    # Check if email is in the database
    user = get_user(email=email)
    if user:
        if user.is_registered:
            msg = 'User is already in database. Please go back and try your search again.'
            return {'status': 400, 'message': msg}, 400
        elif node.is_contributor(user):
            msg = 'User with this email address is already a contributor to this project.'
            return {'status': 400, 'message': msg}, 400
        else:
            serialized = profile_utils.add_contributor_json(user)
            # use correct display name
            serialized['fullname'] = fullname
            serialized['email'] = email
    else:
        # Create a placeholder for a brand-new unregistered user
        serialized = profile_utils.serialize_unregistered(fullname, email)
    return {'status': 'success', 'contributor': serialized}
@must_be_contributor_or_public
def claim_user_post(node, **kwargs):
    """
    View for claiming a user from the X-editable form on a project page.

    Handles two cases:
      1. an anonymous claimer submits their email ('value' in the JSON body);
      2. a logged-in claimer is identified by 'claimerId' in the JSON body.
    :param node: the project node
    :return: dict with status, the email that was notified, and the
        unclaimed contributor's full name
    """
    request_data = request.json
    # The unclaimed user
    unclaimed_user = User.load(request_data['pk'])
    unclaimed_data = unclaimed_user.get_unclaimed_record(node._primary_key)
    # Claimer is not logged in and submit her/his email through X-editable, stored in `request_data['value']`
    if 'value' in request_data:
        email = request_data['value'].lower().strip()
        claimer = get_user(email=email)
        # registered user: email the registered account so they can claim
        if claimer and claimer.is_registered:
            send_claim_registered_email(claimer, unclaimed_user, node)
        # unregistered user: send the normal claim email
        else:
            send_claim_email(email, unclaimed_user, node, notify=True)
    # Claimer is logged in with confirmed identity stored in `request_data['claimerId']`
    elif 'claimerId' in request_data:
        claimer_id = request_data['claimerId']
        claimer = User.load(claimer_id)
        send_claim_registered_email(claimer, unclaimed_user, node)
        email = claimer.username
    else:
        # Neither an email nor a claimer id was supplied.
        raise HTTPError(http.BAD_REQUEST)
    return {
        'status': 'success',
        'email': email,
        'fullname': unclaimed_data['name']
    }
|
hmoco/osf.io
|
website/project/views/contributor.py
|
Python
|
apache-2.0
| 32,645
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, unicode_literals
import cPickle as pickle
import os
import sys
import mozpack.path as mozpath
from mozpack.copier import FileCopier
from mozpack.manifests import InstallManifest
from .base import MozbuildObject
from .util import OrderedDefaultDict
from collections import defaultdict
import manifestparser
def rewrite_test_base(test, new_base, honor_install_to_subdir=False):
    """Rewrite paths in a test to be under a new base path.
    This is useful for running tests from a separate location from where they
    were defined.
    honor_install_to_subdir and the underlying install-to-subdir field are a
    giant hack intended to work around the restriction where the mochitest
    runner can't handle single test files with multiple configurations. This
    argument should be removed once the mochitest runner talks manifests
    (bug 984670).
    """
    test['here'] = mozpath.join(new_base, test['dir_relpath'])
    subdir = test.get('install-to-subdir')
    if honor_install_to_subdir and subdir:
        # Path relative to the including manifest, re-rooted under the subdir.
        rel_to_manifest = mozpath.relpath(test['path'],
                                          mozpath.dirname(test['manifest']))
        test['path'] = mozpath.join(new_base, test['dir_relpath'], subdir,
                                    rel_to_manifest)
    else:
        test['path'] = mozpath.join(new_base, test['file_relpath'])
    return test
class TestMetadata(object):
    """Holds information about tests.
    This class provides an API to query tests active in the build
    configuration.

    Data is loaded from the pickled ``all-tests`` file produced by the build
    backend, optionally merged with per-manifest defaults from
    ``test-defaults``.
    """
    def __init__(self, all_tests, test_defaults=None):
        # Map of test path -> list of test metadata dicts (one entry per
        # configuration/manifest that includes the file).
        self._tests_by_path = OrderedDefaultDict(list)
        # Map of flavor name -> set of test paths having that flavor.
        self._tests_by_flavor = defaultdict(set)
        # Set of directories known to contain at least one test.
        self._test_dirs = set()
        with open(all_tests, 'rb') as fh:
            test_data = pickle.load(fh)
        defaults = None
        if test_defaults:
            with open(test_defaults, 'rb') as fh:
                defaults = pickle.load(fh)
        for path, tests in test_data.items():
            for metadata in tests:
                if defaults:
                    # Manifest-level defaults are stored separately; fold the
                    # defaults of the including manifest (and any ancestor
                    # manifest) back into each test's metadata.
                    defaults_manifests = [metadata['manifest']]
                    ancestor_manifest = metadata.get('ancestor-manifest')
                    if ancestor_manifest:
                        defaults_manifests.append(ancestor_manifest)
                    for manifest in defaults_manifests:
                        manifest_defaults = defaults.get(manifest)
                        if manifest_defaults:
                            metadata = manifestparser.combine_fields(manifest_defaults,
                                                                     metadata)
                self._tests_by_path[path].append(metadata)
                self._test_dirs.add(os.path.dirname(path))
                flavor = metadata.get('flavor')
                self._tests_by_flavor[flavor].add(path)
    def tests_with_flavor(self, flavor):
        """Obtain all tests having the specified flavor.
        This is a generator of dicts describing each test.
        """
        # NOTE(review): this yields the *list* of metadata dicts stored for
        # each path, not individual dicts — the docstring may overstate;
        # confirm against callers.
        for path in sorted(self._tests_by_flavor.get(flavor, [])):
            yield self._tests_by_path[path]
    def resolve_tests(self, paths=None, flavor=None, subsuite=None, under_path=None,
                      tags=None):
        """Resolve tests from an identifier.
        This is a generator of dicts describing each test.
        ``paths`` can be an iterable of values to use to identify tests to run.
        If an entry is a known test file, tests associated with that file are
        returned (there may be multiple configurations for a single file). If
        an entry is a directory, or a prefix of a directory containing tests,
        all tests in that directory are returned. If the string appears in a
        known test file, that test file is considered. If the path contains
        a wildcard pattern, tests matching that pattern are returned.
        If ``under_path`` is a string, it will be used to filter out tests that
        aren't in the specified path prefix relative to topsrcdir or the
        test's installed dir.
        If ``flavor`` is a string, it will be used to filter returned tests
        to only be the flavor specified. A flavor is something like
        ``xpcshell``.
        If ``subsuite`` is a string, it will be used to filter returned tests
        to only be in the subsuite specified.
        If ``tags`` are specified, they will be used to filter returned tests
        to only those with a matching tag.
        """
        if tags:
            tags = set(tags)
        def fltr(tests):
            # Apply flavor/subsuite/tags/under_path filters to an iterable of
            # test dicts, yielding copies of the ones that survive.
            for test in tests:
                if flavor:
                    # 'devtools' is treated as an alias that matches
                    # 'browser-chrome'-flavored tests.
                    if (flavor == 'devtools' and test.get('flavor') != 'browser-chrome') or \
                        (flavor != 'devtools' and test.get('flavor') != flavor):
                        continue
                if subsuite and test.get('subsuite') != subsuite:
                    continue
                if tags and not (tags & set(test.get('tags', '').split())):
                    continue
                if under_path \
                    and not test['file_relpath'].startswith(under_path):
                    continue
                # Make a copy so modifications don't change the source.
                yield dict(test)
        paths = paths or []
        paths = [mozpath.normpath(p) for p in paths]
        if not paths:
            # No identifiers given: consider every known test path.
            paths = [None]
        candidate_paths = set()
        for path in sorted(paths):
            if path is None:
                candidate_paths |= set(self._tests_by_path.keys())
                continue
            if '*' in path:
                candidate_paths |= {p for p in self._tests_by_path
                                    if mozpath.match(p, path)}
                continue
            # If the path is a directory, or the path is a prefix of a directory
            # containing tests, pull in all tests in that directory.
            if (path in self._test_dirs or
                any(p.startswith(path) for p in self._tests_by_path)):
                candidate_paths |= {p for p in self._tests_by_path
                                    if p.startswith(path)}
                continue
            # If it's a test file, add just that file.
            candidate_paths |= {p for p in self._tests_by_path if path in p}
        for p in sorted(candidate_paths):
            tests = self._tests_by_path[p]
            for test in fltr(tests):
                yield test
class TestResolver(MozbuildObject):
    """Helper to resolve tests from the current environment to test files."""
    def __init__(self, *args, **kwargs):
        MozbuildObject.__init__(self, *args, **kwargs)
        # If installing tests is going to result in re-generating the build
        # backend, we need to do this here, so that the updated contents of
        # all-tests.pkl make it to the set of tests to run.
        self._run_make(target='run-tests-deps', pass_thru=True,
            print_directory=False)
        self._tests = TestMetadata(os.path.join(self.topobjdir,
                                                'all-tests.pkl'),
                                   test_defaults=os.path.join(self.topobjdir,
                                                              'test-defaults.pkl'))
        # Map of test flavor -> objdir directory where that flavor's tests are
        # installed; used by resolve_tests to rewrite srcdir-relative paths.
        self._test_rewrites = {
            'a11y': os.path.join(self.topobjdir, '_tests', 'testing',
                                 'mochitest', 'a11y'),
            'browser-chrome': os.path.join(self.topobjdir, '_tests', 'testing',
                                           'mochitest', 'browser'),
            'jetpack-package': os.path.join(self.topobjdir, '_tests', 'testing',
                                            'mochitest', 'jetpack-package'),
            'jetpack-addon': os.path.join(self.topobjdir, '_tests', 'testing',
                                          'mochitest', 'jetpack-addon'),
            'chrome': os.path.join(self.topobjdir, '_tests', 'testing',
                                   'mochitest', 'chrome'),
            'mochitest': os.path.join(self.topobjdir, '_tests', 'testing',
                                      'mochitest', 'tests'),
            'web-platform-tests': os.path.join(self.topobjdir, '_tests', 'testing',
                                               'web-platform'),
            'xpcshell': os.path.join(self.topobjdir, '_tests', 'xpcshell'),
        }
    def resolve_tests(self, cwd=None, **kwargs):
        """Resolve tests in the context of the current environment.
        This is a more intelligent version of TestMetadata.resolve_tests().
        This function provides additional massaging and filtering of low-level
        results.
        Paths in returned tests are automatically translated to the paths in
        the _tests directory under the object directory.
        If cwd is defined, we will limit our results to tests under the
        directory specified. The directory should be defined as an absolute
        path under topsrcdir or topobjdir for it to work properly.
        """
        rewrite_base = None
        if cwd:
            norm_cwd = mozpath.normpath(cwd)
            norm_srcdir = mozpath.normpath(self.topsrcdir)
            norm_objdir = mozpath.normpath(self.topobjdir)
            reldir = None
            # Check the objdir first: the objdir is commonly nested under the
            # srcdir, so this ordering gives the more specific match.
            if norm_cwd.startswith(norm_objdir):
                reldir = norm_cwd[len(norm_objdir)+1:]
            elif norm_cwd.startswith(norm_srcdir):
                reldir = norm_cwd[len(norm_srcdir)+1:]
            result = self._tests.resolve_tests(under_path=reldir,
                                               **kwargs)
        else:
            result = self._tests.resolve_tests(**kwargs)
        for test in result:
            # Rewrite paths to the installed (objdir) location when we know
            # where this flavor gets installed.
            rewrite_base = self._test_rewrites.get(test['flavor'], None)
            if rewrite_base:
                yield rewrite_test_base(test, rewrite_base,
                                        honor_install_to_subdir=True)
            else:
                yield test
# These definitions provide a single source of truth for modules attempting
# to get a view of all tests for a build. Used by the emitter to figure out
# how to read/install manifests and by test dependency annotations in Files()
# entries to enumerate test flavors.
# While there are multiple test manifests, the behavior is very similar
# across them. We enforce this by having common handling of all
# manifests and outputting a single class type with the differences
# described inside the instance.
#
# Keys are variable prefixes and values are tuples describing how these
# manifests should be handled:
#
#  (flavor, install_root, install_subdir, package_tests)
#
# flavor identifies the flavor of this test.
# install_root is the path prefix to install the files starting from the root
#     directory and not as specified by the manifest location. (bug 972168)
# install_subdir is the path of where to install the files in
#     the tests directory.
# package_tests indicates whether to package test files into the test
#     package; suites that compile the test files should not install
#     them into the test package.
#
# NOTE: several keys deliberately share the same flavor string (e.g. the
# MARIONETTE_* variants and PUPPETEER_FIREFOX/FIREFOX_UI_FUNCTIONAL).
TEST_MANIFESTS = dict(
    A11Y=('a11y', 'testing/mochitest', 'a11y', True),
    BROWSER_CHROME=('browser-chrome', 'testing/mochitest', 'browser', True),
    ANDROID_INSTRUMENTATION=('instrumentation', 'instrumentation', '.', False),
    JETPACK_PACKAGE=('jetpack-package', 'testing/mochitest', 'jetpack-package', True),
    JETPACK_ADDON=('jetpack-addon', 'testing/mochitest', 'jetpack-addon', False),
    FIREFOX_UI_FUNCTIONAL=('firefox-ui-functional', 'firefox-ui', '.', False),
    FIREFOX_UI_UPDATE=('firefox-ui-update', 'firefox-ui', '.', False),
    PUPPETEER_FIREFOX=('firefox-ui-functional', 'firefox-ui', '.', False),
    PYTHON_UNITTEST=('python', 'python', '.', False),
    # marionette tests are run from the srcdir
    # TODO(ato): make packaging work as for other test suites
    MARIONETTE=('marionette', 'marionette', '.', False),
    MARIONETTE_UNIT=('marionette', 'marionette', '.', False),
    MARIONETTE_WEBAPI=('marionette', 'marionette', '.', False),
    METRO_CHROME=('metro-chrome', 'testing/mochitest', 'metro', True),
    MOCHITEST=('mochitest', 'testing/mochitest', 'tests', True),
    MOCHITEST_CHROME=('chrome', 'testing/mochitest', 'chrome', True),
    WEBRTC_SIGNALLING_TEST=('steeplechase', 'steeplechase', '.', True),
    XPCSHELL_TESTS=('xpcshell', 'xpcshell', '.', True),
)
# Reftests have their own manifest format and are processed separately.
REFTEST_FLAVORS = ('crashtest', 'reftest')
# Web platform tests have their own manifest format and are processed separately.
WEB_PLATFORM_TESTS_FLAVORS = ('web-platform-tests',)
def all_test_flavors():
    """Return every known test flavor: manifest-based suites plus the
    separately-processed reftest and web-platform-tests flavors."""
    flavors = [entry[0] for entry in TEST_MANIFESTS.values()]
    flavors.extend(REFTEST_FLAVORS)
    flavors.extend(WEB_PLATFORM_TESTS_FLAVORS)
    return flavors
class TestInstallInfo(object):
    """Accumulator for the install actions derived from test objects."""
    def __init__(self):
        # (field, pattern, out_dir) keys already processed (memoization).
        self.seen = set()
        # (base, pattern, dest) triples for glob-based installs.
        self.pattern_installs = []
        # (source, dest) pairs for plain file installs.
        self.installs = []
        # Destinations produced by the build rather than copied from srcdir.
        self.external_installs = set()
        # '!'-prefixed cross-directory entries, resolved later.
        self.deferred_installs = set()
    def __ior__(self, other):
        """Merge another TestInstallInfo's accumulated installs into this one."""
        self.pattern_installs += other.pattern_installs
        self.installs += other.installs
        self.external_installs.update(other.external_installs)
        self.deferred_installs.update(other.deferred_installs)
        return self
class SupportFilesConverter(object):
    """Processes a "support-files" entry from a test object, either from
    a parsed object from a test manifest or its representation in
    moz.build and returns the installs to perform for this test object.
    Processing the same support files multiple times will not have any further
    effect, and the structure of the parsed objects from manifests will have a
    lot of repeated entries, so this class takes care of memoizing.
    """
    def __init__(self):
        # Fields of a test object that can list extra files, each paired with
        # the set of (field, pattern, out_dir) keys already handled globally.
        self._fields = (('head', set()),
                        ('support-files', set()),
                        ('generated-files', set()))
    def convert_support_files(self, test, install_root, manifest_dir, out_dir):
        # Arguments:
        #  test - The test object to process.
        #  install_root - The directory under $objdir/_tests that will contain
        #                 the tests for this harness (examples are "testing/mochitest",
        #                 "xpcshell").
        #  manifest_dir - Absolute path to the (srcdir) directory containing the
        #                 manifest that included this test
        #  out_dir - The path relative to $objdir/_tests used as the destination for the
        #            test, based on the relative path to the manifest in the srcdir,
        #            the install_root, and 'install-to-subdir', if present in the manifest.
        # Returns a TestInstallInfo describing the installs for this test.
        info = TestInstallInfo()
        for field, seen in self._fields:
            value = test.get(field, '')
            for pattern in value.split():
                # We track uniqueness locally (per test) where duplicates are forbidden,
                # and globally, where they are permitted. If a support file appears multiple
                # times for a single test, there are unnecessary entries in the manifest. But
                # many entries will be shared across tests that share defaults.
                # We need to memoize on the basis of both the path and the output
                # directory for the benefit of tests specifying 'install-to-subdir'.
                key = field, pattern, out_dir
                if key in info.seen:
                    raise ValueError("%s appears multiple times in a test manifest under a %s field,"
                                     " please omit the duplicate entry." % (pattern, field))
                info.seen.add(key)
                if key in seen:
                    continue
                seen.add(key)
                if field == 'generated-files':
                    info.external_installs.add(mozpath.normpath(mozpath.join(out_dir, pattern)))
                # '!' indicates our syntax for inter-directory support file
                # dependencies. These receive special handling in the backend.
                elif pattern[0] == '!':
                    info.deferred_installs.add(pattern)
                # We only support globbing on support-files because
                # the harness doesn't support * for head.
                elif '*' in pattern and field == 'support-files':
                    info.pattern_installs.append((manifest_dir, pattern, out_dir))
                # "absolute" paths identify files that are to be
                # placed in the install_root directory (no globs)
                elif pattern[0] == '/':
                    full = mozpath.normpath(mozpath.join(manifest_dir,
                                                         mozpath.basename(pattern)))
                    info.installs.append((full, mozpath.join(install_root, pattern[1:])))
                else:
                    full = mozpath.normpath(mozpath.join(manifest_dir, pattern))
                    dest_path = mozpath.join(out_dir, pattern)
                    # If the path resolves to a different directory
                    # tree, we take special behavior depending on the
                    # entry type.
                    if not full.startswith(manifest_dir):
                        # If it's a support file, we install the file
                        # into the current destination directory.
                        # This implementation makes installing things
                        # with custom prefixes impossible. If this is
                        # needed, we can add support for that via a
                        # special syntax later.
                        if field == 'support-files':
                            dest_path = mozpath.join(out_dir,
                                                     os.path.basename(pattern))
                        # If it's not a support file, we ignore it.
                        # This preserves old behavior so things like
                        # head files doesn't get installed multiple
                        # times.
                        else:
                            continue
                    info.installs.append((full, mozpath.normpath(dest_path)))
        return info
def _resolve_installs(paths, topobjdir, manifest):
"""Using the given paths as keys, find any unresolved installs noted
by the build backend corresponding to those keys, and add them
to the given manifest.
"""
filename = os.path.join(topobjdir, 'test-installs.pkl')
with open(filename, 'rb') as fh:
resolved_installs = pickle.load(fh)
for path in paths:
path = path[2:]
if path not in resolved_installs:
raise Exception('A cross-directory support file path noted in a '
'test manifest does not appear in any other manifest.\n "%s" '
'must appear in another test manifest to specify an install '
'for "!/%s".' % (path, path))
installs = resolved_installs[path]
for install_info in installs:
try:
if len(install_info) == 3:
manifest.add_pattern_symlink(*install_info)
if len(install_info) == 2:
manifest.add_symlink(*install_info)
except ValueError:
# A duplicate value here is pretty likely when running
# multiple directories at once, and harmless.
pass
def install_test_files(topsrcdir, topobjdir, tests_root, test_objs):
    """Installs the requested test files to the objdir. This is invoked by
    test runners to avoid installing tens of thousands of test files when
    only a few tests need to be run.

    :param topsrcdir: absolute path to the source directory root
    :param topobjdir: absolute path to the object directory root
    :param tests_root: path of the tests directory relative to topobjdir
    :param test_objs: iterable of test metadata dicts (as produced by
        TestResolver/TestMetadata) to install
    """
    # Map flavor -> (install_root, install_subdir, package_tests) for quick lookup.
    flavor_info = {flavor: (root, prefix, install)
                   for (flavor, root, prefix, install) in TEST_MANIFESTS.values()}
    objdir_dest = mozpath.join(topobjdir, tests_root)
    converter = SupportFilesConverter()
    install_info = TestInstallInfo()
    for o in test_objs:
        flavor = o['flavor']
        if flavor not in flavor_info:
            # This is a test flavor that isn't installed by the build system.
            continue
        root, prefix, install = flavor_info[flavor]
        if not install:
            # This flavor isn't installed to the objdir.
            continue
        manifest_path = o['manifest']
        manifest_dir = mozpath.dirname(manifest_path)
        # Destination directory mirrors the manifest's srcdir-relative location.
        out_dir = mozpath.join(root, prefix, manifest_dir[len(topsrcdir) + 1:])
        file_relpath = o['file_relpath']
        source = mozpath.join(topsrcdir, file_relpath)
        dest = mozpath.join(root, prefix, file_relpath)
        if 'install-to-subdir' in o:
            # See rewrite_test_base: install-to-subdir relocates the test
            # under an extra subdirectory of its manifest's destination.
            out_dir = mozpath.join(out_dir, o['install-to-subdir'])
            manifest_relpath = mozpath.relpath(source, mozpath.dirname(manifest_path))
            dest = mozpath.join(out_dir, manifest_relpath)
        install_info.installs.append((source, dest))
        install_info |= converter.convert_support_files(o, root,
                                                        manifest_dir,
                                                        out_dir)
    manifest = InstallManifest()
    for source, dest in set(install_info.installs):
        # Generated files are produced by the build, not copied from srcdir.
        if dest in install_info.external_installs:
            continue
        manifest.add_symlink(source, dest)
    for base, pattern, dest in install_info.pattern_installs:
        manifest.add_pattern_symlink(base, pattern, dest)
    _resolve_installs(install_info.deferred_installs, topobjdir, manifest)
    # Harness files are treated as a monolith and installed each time we run tests.
    # Fortunately there are not very many.
    manifest |= InstallManifest(mozpath.join(topobjdir,
                                '_build_manifests',
                                'install', tests_root))
    copier = FileCopier()
    manifest.populate_registry(copier)
    # remove_unaccounted=False: leave previously-installed files in place.
    copier.copy(objdir_dest,
        remove_unaccounted=False)
# Convenience methods for test manifest reading.
def read_manifestparser_manifest(context, manifest_path):
path = mozpath.normpath(mozpath.join(context.srcdir, manifest_path))
return manifestparser.TestManifest(manifests=[path], strict=True,
rootdir=context.config.topsrcdir,
finder=context._finder,
handle_defaults=False)
def read_reftest_manifest(context, manifest_path):
    """Load a reftest/crashtest manifest resolved against *context*'s srcdir."""
    import reftest
    full_path = mozpath.normpath(mozpath.join(context.srcdir, manifest_path))
    result = reftest.ReftestManifest(finder=context._finder)
    result.load(full_path)
    return result
def read_wpt_manifest(context, paths):
    """Load the web-platform-tests manifest.

    :param context: moz.build reader context providing srcdir, config and finder
    :param paths: (manifest_path, tests_root) tuple; manifest_path is resolved
        relative to the context's srcdir
    :return: the parsed wpt manifest object
    """
    manifest_path, tests_root = paths
    full_path = mozpath.normpath(mozpath.join(context.srcdir, manifest_path))
    old_path = sys.path[:]
    try:
        # Setup sys.path to include all the dependencies required to import
        # the web-platform-tests manifest parser. web-platform-tests provides
        # the localpaths.py to do the path manipulation, which we load,
        # providing the __file__ variable so it can resolve the relative
        # paths correctly.
        paths_file = os.path.join(context.config.topsrcdir, "testing",
                                  "web-platform", "tests", "tools", "localpaths.py")
        _globals = {"__file__": paths_file}
        # NOTE: execfile is Python 2 only, consistent with this module's use
        # of cPickle at the top of the file.
        execfile(paths_file, _globals)
        import manifest as wptmanifest
    finally:
        # Always restore sys.path, even if loading/importing raised.
        sys.path = old_path
    f = context._finder.get(full_path)
    return wptmanifest.manifest.load(tests_root, f)
|
Yukarumya/Yukarum-Redfoxes
|
python/mozbuild/mozbuild/testing.py
|
Python
|
mpl-2.0
| 23,808
|
#!/usr/bin/env python
from flask import Flask, request
import couchdb, json, datetime
app = Flask(__name__)
''' *** List of accessible api urls ***
***NOTE: until Akash finishes user authentication/security, <string:id> will be the name of the user
1. '/' is a get request that tests accessibility to the flask server
2. '/view/' is a get request that returns all the documents (with _id = 'id') in the database
3. '/get/<string:userid>/' is a get request that returns all of the information in the database corresponding to a specific userid user including goals, _id, _rev, etc.
4. '/getapps/<string:userid>/' is a get request that returns only the application data dictionary
5. '/newuser/<string:userid>/' is a put request that creates a new user
6. '/app/<string:userid>/<string:app>' is open to both PUT and DELETE requests
a. the PUT request places that app in the database if not already present, otherwise does nothing to prevent overriding data that may exist
b. the DELETE request removes the app from the database if present
7. '/getgoal/<string:userid>/' is a get request that retrieves the Daily and Weekly Goals from the server
8. '/newgoal/<string:userid>/<int:daily>/<int:weekly>/' is a put requests that can be used to set new goals
9. '/usage/<string:userid>/' is a put request that takes in json usage data and if that app does not yet exist, creates it in the database, and then updates the current day to reflect the json usage data sent by the put request
Database Structure - refer to indents as hierarchy
** Inside each document is a dictionary of dictionaries. **
Couchdb Server
-> database userid
-> user titled documents within database
-> _id
-> _rev
-> Appdata
-> Total
-> S : value
-> M : value
-> T : value
-> W : value
-> R : value
-> F : value
-> Tot : value
-> Repeat for each app
-> Goals
-> Daily : value
-> Weekly : value
'''
# Connect to the local CouchDB server; all user documents live in 'test'.
server = couchdb.Server()
db = server['test']
# Default JSON response headers (not referenced by the visible routes —
# presumably kept for future use; confirm before removing).
headers = {'Content-Type': 'application/json'}
# Template: one zeroed bucket per weekday letter plus a running total 'Tot'.
# Weekly time holders for each app.
Weekly = {'S': 0, 'M': 0, 'T': 0, 'W': 0, 'R': 0, 'F': 0, 'Sa': 0, 'Tot': 0}
# Default goals are set deliberately high ("unreachable") so notifications
# only fire after the user explicitly sets their own goals.
Goals = {'Daily': 24, 'Weekly': 150}
def dayToIndex(day):
    """Map an abbreviated weekday name ('Mon'..'Sun') to its one-letter
    database key; unknown names map to the string "nothing"."""
    day_key_map = {
        'Mon': 'M',
        'Tue': 'T',
        'Wed': 'W',
        'Thu': 'R',
        'Fri': 'F',
        'Sat': 'Sa',
        'Sun': 'S',
    }
    return day_key_map.get(day, "nothing")
def appTotal(Appdata):
    """Recompute all derived totals in ``Appdata`` in place and return it.

    Every app entry (everything except the 'Total' row) gets its weekly
    total ('Tot') refreshed from its per-day entries; then the 'Total' row
    is rebuilt as the per-day sum over all apps plus the overall weekly sum.

    Parameters
    ----------
    Appdata : dict
        Mapping of app name -> {day-letter: hours, 'Tot': hours}; must
        contain a 'Total' entry with the same day keys.

    Returns
    -------
    dict
        The same ``Appdata`` object, with totals updated.
    """
    # Take the day keys from the 'Total' row itself so this function no
    # longer depends on the module-level Weekly template.
    day_keys = [d for d in Appdata['Total'] if d != 'Tot']
    for app, data in Appdata.items():
        if app != 'Total':
            # Refresh this app's weekly total from its per-day entries.
            data['Tot'] = sum(hours for day, hours in data.items()
                              if day != 'Tot')
    weekly = 0
    for day in day_keys:
        # BUG FIX: the old inner filter was `day != 'Tot'` (always true
        # here), which summed the stale 'Total' row into its own total.
        # Exclude the 'Total' entry by app name instead.
        today = sum(data[day] for app, data in Appdata.items()
                    if app != 'Total')
        Appdata['Total'][day] = today  # updates total for the day
        weekly += today
    Appdata['Total']['Tot'] = weekly  # updates weekly total
    return Appdata
@app.route('/')
def welcome():
    """Health-check endpoint confirming the Flask server is reachable."""
    greeting = "Welcome to Focus"
    return greeting + "\n"
@app.route('/view/', methods = ['GET'])
#curl -X GET http://localhost:5000/view/
def getDocs():
    """Return the database's document listing as JSON."""
    # NOTE(review): db.get('_all_docs') fetches a *document* literally named
    # '_all_docs'; the CouchDB view API (db.view('_all_docs')) is presumably
    # what was intended — confirm.
    return json.dumps(db.get('_all_docs')) + "\n"
@app.route('/get/<string:userid>/', methods = ['GET'])
#curl -X GET http://localhost:5000/get/<userid>/
def getName(userid):
    """Return the full user document (goals, app data, _id/_rev, ...) as JSON.

    Side effect: recomputes all usage totals and persists them back to the
    database before returning the refreshed document.
    """
    if userid in db:
        doc = db.get(userid)
        doc['Appdata'] = appTotal(doc['Appdata'])  # refresh derived totals
        db[userid] = doc  # persist the recomputed totals
        return json.dumps(db.get(userid)) + "\n"
    else:
        return "User not in database"
@app.route('/getapps/<string:userid>/', methods = ['GET'])
#curl -X GET http://localhost:5000/get/<userid>/
def getApps(userid):
    """Return only the user's 'Appdata' dictionary as JSON.

    Side effect: recomputes all usage totals and persists them back to the
    database before returning.
    """
    if userid in db:
        doc = db.get(userid)
        doc['Appdata'] = appTotal(doc['Appdata'])  # refresh derived totals
        db[userid] = doc  # persist the recomputed totals
        return json.dumps(db.get(userid)['Appdata']) + "\n"
    else:
        return "User not in database"
@app.route('/newuser/<string:userid>/', methods = ['PUT'])
#curl -X PUT http://localhost:5000/newuser/<userid>/
def newUser(userid):
    """Create a new user document seeded with an empty usage table and the
    default goals; refuses to overwrite an existing user."""
    if userid in db:
        return userid + " already in db!" + "\n"
    # BUG FIX: insert *copies* of the module-level templates. The old code
    # aliased the shared Weekly/Goals dicts directly, so any later in-memory
    # mutation of a fetched document could corrupt the defaults for everyone.
    db[userid] = {'userid': userid,
                  'Appdata': {'Total': dict(Weekly)},
                  'Goals': dict(Goals)}
    if userid in db:
        return "Successfully inserted " + userid + "\n"
    else:
        return "Failed to insert"
@app.route('/app/<string:userid>/<string:app>/', methods = ['PUT','DELETE'])
def App(userid, app):
    """Add (PUT) or remove (DELETE) a tracked app for the given user.

    PUT is a no-op when the app already exists, to avoid overwriting data.
    """
    doc = db.get(userid)
    if request.method == "PUT":
        #curl -X PUT http://localhost:5000/app/<userid>/<app>/
        if app in doc['Appdata']:
            return app + " already in Appdata" + "\n"
        else:
            # BUG FIX: store a *copy* of the Weekly template. The old code
            # aliased the shared module-level dict, so mutating this app's
            # entry in memory would have altered the template for all apps.
            doc['Appdata'][app] = dict(Weekly)
            db[userid] = doc
            return "Successfully inserted " + app + "\n"
    elif request.method == "DELETE":
        #curl -X DELETE http://localhost:5000/app/<userid>/<app>/
        if app in doc['Appdata']:
            del doc['Appdata'][app]
            db[userid] = doc
            return "Successfully deleted " + app + "\n"
        else:
            return app + " not in Appdata" + "\n"
@app.route('/getgoal/<string:userid>/', methods = ['GET'])
#curl -X GET http://localhost:5000/getgoal/<userid>/
def getGoal(userid):
    """Return the user's daily and weekly goals as a plain-text report."""
    # NOTE(review): unlike getName/getApps there is no existence check, so an
    # unknown userid makes db.get() return None and this raises TypeError —
    # confirm whether a "User not in database" response was intended.
    doc = db.get(userid)
    return "Daily Goal is : " + str(doc['Goals']['Daily']) + "\n" + "Weekly Goal is : " + str(doc['Goals']['Weekly']) + "\n"
@app.route('/newgoal/<string:userid>/<int:daily>/<int:weekly>/', methods = ['PUT'])
#curl -X PUT http://localhost:5000/newgoal/<userid>/<daily>/<weekly>/
def newGoal(userid, daily, weekly):
    """Overwrite the user's daily and weekly goals with the given values
    and persist the document."""
    doc = db.get(userid)
    doc['Goals']['Daily'] = daily
    doc['Goals']['Weekly'] = weekly
    db[userid] = doc
    return "New Daily is: " + str(daily) + "\n" + "New Weekly is: " + str(weekly) + "\n"
@app.route('/compare/<string:userid>/', methods = ['GET'])
#curl -X GET http://localhost:5000/compare/<userid>
def checker(userid):
    """Compare the user's recorded usage against their daily/weekly goals.

    Returns a plain-text report stating which limits (if any) are exceeded
    and by how much, based on the stored 'Total' row.
    """
    #https://www.tutorialspoint.com/python/time_strftime.htm
    now = datetime.datetime.now()
    day = now.strftime("%a")  # gives current day of week abbrev
    doc = db.get(userid)
    data = doc['Appdata']['Total']
    w_excess = data['Tot'] - doc['Goals']['Weekly']
    today_index = dayToIndex(day)
    d_excess = data[today_index] - doc['Goals']['Daily']
    if w_excess > 0 and d_excess > 0:
        # BUG FIX: d_excess is an int and must be wrapped in str() before
        # concatenation; the old code raised TypeError on these branches.
        return "Weekly limit exceeded by " + str(w_excess) + "\n" + \
               "Daily limit exceeded by " + str(d_excess) + "\n"
    elif d_excess > 0:
        return "Daily limit exceeded by " + str(d_excess) + "\n"
    elif w_excess > 0:
        return "Weekly limit exceeded by " + str(w_excess) + "\n"
    else:
        return "Daily Time = " + str(data[today_index]) + "\n" + \
               "Weekly Time = " + str(data['Tot']) + "\n" + \
               "No goals exceeded! Good job not procrastinating!" + "\n"
@app.route('/usage/<string:userid>/', methods = ['PUT'])
# curl -H "Content-type: application/json" -X PUT http://127.0.0.1:5000/usage/Byron/ -d '{"Instagram": 5}'
def takeJson(userid):
    """Record today's usage for one or more apps from a JSON payload.

    Any app not yet tracked for the user is created first (via App), then
    today's bucket for each app is overwritten with the submitted value.
    """
    now = datetime.datetime.now()
    day = now.strftime("%a")  # current day-of-week abbreviation
    today_index = dayToIndex(day)  # converts to day of week as referenced in the database
    doc = db.get(userid)
    jdata = json.loads(request.data)  # incoming JSON body as a dict
    for app in jdata:
        # BUG FIX: apps live under doc['Appdata'], not at the top level of
        # the document, so the old check `app not in doc` was always True
        # and App() was invoked on every request.
        if app not in doc['Appdata']:
            App(userid, app)
            doc = db.get(userid)  # have to update doc in this case
        index = str(app)
        doc['Appdata'][index][today_index] = jdata[index]
    db[userid] = doc
    return "Hello " + str(jdata) + "\n"  # confirmation that the json data was received
if __name__ == '__main__':
    # NOTE(review): debug mode exposes the Werkzeug debugger and 0.0.0.0
    # binds to all interfaces — disable both for production deployments.
    app.debug = True
    app.run(host='0.0.0.0')
|
ByronBecker/rmp
|
newUser.py
|
Python
|
mit
| 8,411
|
from django import forms
from django.utils.translation import ugettext as _
import autocomplete_light
from .models import GardenItem
class AddProduceForm(autocomplete_light.ModelForm):
    """Form for creating a GardenItem; the owner field is excluded and is
    presumably set by the view from the request user — confirm against callers."""

    class Meta:
        model = GardenItem
        exclude = ['owner',]
class ProduceSearchForm(autocomplete_light.ModelForm):
    """Search form for produce, restricted to either gardens or wishlists,
    with an optional 5-digit zip-code filter."""

    # Which collection to search against.
    choices = (
        ('gardens', _('Gardens')),
        ('wishlists', _('Wishlists')),
    )
    type = forms.ChoiceField(choices=choices)
    zip = forms.CharField(required=False, max_length=5, label=_('Zip Code'),
                          widget=forms.TextInput(attrs={'placeholder': _('Zip Code')}))

    class Meta:
        model = GardenItem
        fields = ['produce',]
|
jmickela/stalkexchange
|
produce/forms.py
|
Python
|
apache-2.0
| 721
|
#!/usr/bin/python
"""
Solution based on Algorithm L by Donald Knuth,
see The Art of Computer Programming vol. 4
"""
def next(perm):
    """Return the permutation following *perm* in lexicographic order.

    The result is a list of the elements of *perm*; the empty string is
    returned once *perm* is already the last (fully descending)
    permutation. NOTE: keeps the original name, which shadows builtin next().
    """
    seq = list(perm)
    pivot = lastNotLast(seq)
    if not pivot:
        return ''
    return first(pivot, increase(pivot, seq))
def lastNotLast(perm):
    """Return the start index of the longest decreasing suffix of *perm*.

    A result of 0 means the whole sequence is decreasing, i.e. *perm* is
    already the final permutation.
    """
    idx = len(perm) - 1
    while idx > 0 and perm[idx] < perm[idx - 1]:
        idx -= 1
    return idx
def increase(k, perm):
    """Swap the pivot perm[k-1] with the rightmost suffix element larger
    than it, returning a new list (Knuth's Algorithm L, step L3)."""
    seq = list(perm)
    j = k
    # Scan forward while suffix elements still exceed the pivot; the swap
    # partner is the element just before the first non-larger one.
    while j < len(seq) and seq[j] > seq[k - 1]:
        j += 1
    seq[k - 1], seq[j - 1] = seq[j - 1], seq[k - 1]
    return seq
def first(k, perm):
tmp = list(perm[:k])
tmp1 = list(perm[k:])
tmp1.reverse()
tmp.extend(tmp1)
return tmp
# Walk forward through permutations of '0123456789'; after 999,999 steps
# `word` holds the 1,000,000th lexicographic permutation (Project Euler 24).
word = '0123456789'
for i in range(1, 1000000):
    word = next(word)
# NOTE(review): after the first step `word` is a list of characters, so this
# prints e.g. ['2', '7', ...] rather than a joined digit string.
print(word)
"""
Really fast solution, we don't need to generate all the permutations
but we solve with a bit of analisys
"""
from math import factorial
word = '0123456789'
perm = []
limit = 1000000
fact = len(word) - 1
for i in range(fact, -1, -1):
count = 0
while limit > factorial(i):
limit -= factorial(i)
count += 1
perm.append(word[count])
word = word[:count] + word[count + 1:]
print(perm)
|
cifvts/PyEuler
|
euler024.py
|
Python
|
mit
| 1,259
|
#!/usr/bin/env python3
# Copyright (c) 2015-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test multisig RPCs"""
import binascii
import decimal
import itertools
import json
import os
from test_framework.authproxy import JSONRPCException
from test_framework.descriptors import descsum_create, drop_origins
from test_framework.key import ECPubKey, ECKey
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_raises_rpc_error,
assert_equal,
)
from test_framework.wallet_util import bytes_to_wif
class RpcCreateMultiSigTest(BitcoinTestFramework):
    """Exercise createmultisig/addmultisigaddress across combinations of key
    counts, signature thresholds and address types, then spend from the
    resulting multisig and verify balances and error cases."""

    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 3
        self.supports_cli = False

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    def get_keys(self):
        # Generate self.nkeys fresh keypairs (hex pubkeys + WIF privkeys);
        # node2 supplies the destination address for the final spend.
        self.pub = []
        self.priv = []
        node0, node1, node2 = self.nodes
        for _ in range(self.nkeys):
            k = ECKey()
            k.generate()
            self.pub.append(k.get_pubkey().get_bytes().hex())
            self.priv.append(bytes_to_wif(k.get_bytes(), k.is_compressed))
        self.final = node2.getnewaddress()

    def run_test(self):
        node0, node1, node2 = self.nodes
        self.check_addmultisigaddress_errors()

        self.log.info('Generating blocks ...')
        node0.generate(249)
        self.sync_all()

        self.moved = 0
        # Exercise every (nkeys, nsigs, address-type) combination.
        for self.nkeys in [3, 5]:
            for self.nsigs in [2, 3]:
                for self.output_type in ["bech32", "p2sh-segwit", "legacy"]:
                    self.get_keys()
                    self.do_multisig()

        self.checkbalances()

        # Test mixed compressed and uncompressed pubkeys
        self.log.info('Mixed compressed and uncompressed multisigs are not allowed')
        pk0 = node0.getaddressinfo(node0.getnewaddress())['pubkey']
        pk1 = node1.getaddressinfo(node1.getnewaddress())['pubkey']
        pk2 = node2.getaddressinfo(node2.getnewaddress())['pubkey']

        # decompress pk2
        pk_obj = ECPubKey()
        pk_obj.set(binascii.unhexlify(pk2))
        pk_obj.compressed = False
        pk2 = binascii.hexlify(pk_obj.get_bytes()).decode()

        node0.createwallet(wallet_name='wmulti0', disable_private_keys=True)
        wmulti0 = node0.get_wallet_rpc('wmulti0')
        # Check all permutations of keys because order matters apparently
        for keys in itertools.permutations([pk0, pk1, pk2]):
            # Results should be the same as this legacy one
            legacy_addr = node0.createmultisig(2, keys, 'legacy')['address']
            assert_equal(legacy_addr, wmulti0.addmultisigaddress(2, keys, '', 'legacy')['address'])

            # Generate addresses with the segwit types. These should all make legacy addresses
            assert_equal(legacy_addr, wmulti0.createmultisig(2, keys, 'bech32')['address'])
            assert_equal(legacy_addr, wmulti0.createmultisig(2, keys, 'p2sh-segwit')['address'])
            assert_equal(legacy_addr, wmulti0.addmultisigaddress(2, keys, '', 'bech32')['address'])
            assert_equal(legacy_addr, wmulti0.addmultisigaddress(2, keys, '', 'p2sh-segwit')['address'])

        self.log.info('Testing sortedmulti descriptors with BIP 67 test vectors')
        with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/rpc_bip67.json'), encoding='utf-8') as f:
            vectors = json.load(f)

        for t in vectors:
            key_str = ','.join(t['keys'])
            desc = descsum_create('sh(sortedmulti(2,{}))'.format(key_str))
            assert_equal(self.nodes[0].deriveaddresses(desc)[0], t['address'])

            sorted_key_str = ','.join(t['sorted_keys'])
            sorted_key_desc = descsum_create('sh(multi(2,{}))'.format(sorted_key_str))
            assert_equal(self.nodes[0].deriveaddresses(sorted_key_desc)[0], t['address'])

    def check_addmultisigaddress_errors(self):
        # Only relevant for legacy (non-descriptor) wallets.
        if self.options.descriptors:
            return
        self.log.info('Check that addmultisigaddress fails when the private keys are missing')
        addresses = [self.nodes[1].getnewaddress(address_type='legacy') for _ in range(2)]
        assert_raises_rpc_error(-5, 'no full public key for address', lambda: self.nodes[0].addmultisigaddress(nrequired=1, keys=addresses))
        for a in addresses:
            # Importing all addresses should not change the result
            self.nodes[0].importaddress(a)
        assert_raises_rpc_error(-5, 'no full public key for address', lambda: self.nodes[0].addmultisigaddress(nrequired=1, keys=addresses))

    def checkbalances(self):
        # Verify coinbase rewards and moved multisig funds ended up where
        # expected after maturing the coinbases.
        node0, node1, node2 = self.nodes
        node0.generate(240)
        self.sync_all()

        bal0 = node0.getbalance()
        bal1 = node1.getbalance()
        bal2 = node2.getbalance()

        height_remaining = node0.getblockchaininfo()["blocks"] - 240
        total = 0
        reward = 500000.0
        # Sum block subsidies; the reward halves every 149 blocks on this
        # regtest schedule.
        while height_remaining > 0:
            total = total + min(149, height_remaining) * reward
            reward = reward / 2
            height_remaining = max(0, height_remaining - 149)
        assert bal1 == 0
        assert bal2 == self.moved
        assert bal0 + bal1 + bal2 == total

    def do_multisig(self):
        # Create a multisig with the current (nkeys, nsigs, output_type),
        # fund it, then sign and spend from it, checking error handling of
        # redeemScript/witnessScript mismatches along the way.
        node0, node1, node2 = self.nodes

        if 'wmulti' not in node1.listwallets():
            try:
                node1.loadwallet('wmulti')
            except JSONRPCException as e:
                path = os.path.join(self.options.tmpdir, "node1", "regtest", "wallets", "wmulti")
                if e.error['code'] == -18 and "Wallet file verification failed. Failed to load database path '{}'. Path does not exist.".format(path) in e.error['message']:
                    node1.createwallet(wallet_name='wmulti', disable_private_keys=True)
                else:
                    raise
        wmulti = node1.get_wallet_rpc('wmulti')

        # Construct the expected descriptor
        desc = 'multi({},{})'.format(self.nsigs, ','.join(self.pub))
        if self.output_type == 'legacy':
            desc = 'sh({})'.format(desc)
        elif self.output_type == 'p2sh-segwit':
            desc = 'sh(wsh({}))'.format(desc)
        elif self.output_type == 'bech32':
            desc = 'wsh({})'.format(desc)
        desc = descsum_create(desc)

        msig = node2.createmultisig(self.nsigs, self.pub, self.output_type)
        madd = msig["address"]
        mredeem = msig["redeemScript"]
        assert_equal(desc, msig['descriptor'])
        if self.output_type == 'bech32':
            assert madd[0:4] == "dcrt"  # actually a bech32 address

        # compare against addmultisigaddress
        msigw = wmulti.addmultisigaddress(self.nsigs, self.pub, None, self.output_type)
        maddw = msigw["address"]
        mredeemw = msigw["redeemScript"]
        assert_equal(desc, drop_origins(msigw['descriptor']))
        # addmultisigiaddress and createmultisig work the same
        assert maddw == madd
        assert mredeemw == mredeem

        txid = node0.sendtoaddress(madd, 40)

        tx = node0.getrawtransaction(txid, True)
        vout = [v["n"] for v in tx["vout"] if madd in v["scriptPubKey"].get("addresses", [])]
        assert len(vout) == 1
        vout = vout[0]
        scriptPubKey = tx["vout"][vout]["scriptPubKey"]["hex"]
        value = tx["vout"][vout]["value"]
        prevtxs = [{"txid": txid, "vout": vout, "scriptPubKey": scriptPubKey, "redeemScript": mredeem, "amount": value}]

        node0.generate(1)

        outval = value - decimal.Decimal("0.00001000")
        rawtx = node2.createrawtransaction([{"txid": txid, "vout": vout}], [{self.final: outval}])

        prevtx_err = dict(prevtxs[0])
        del prevtx_err["redeemScript"]

        assert_raises_rpc_error(-8, "Missing redeemScript/witnessScript", node2.signrawtransactionwithkey, rawtx, self.priv[0:self.nsigs-1], [prevtx_err])

        # if witnessScript specified, all ok
        prevtx_err["witnessScript"] = prevtxs[0]["redeemScript"]
        node2.signrawtransactionwithkey(rawtx, self.priv[0:self.nsigs-1], [prevtx_err])

        # both specified, also ok
        prevtx_err["redeemScript"] = prevtxs[0]["redeemScript"]
        node2.signrawtransactionwithkey(rawtx, self.priv[0:self.nsigs-1], [prevtx_err])

        # redeemScript mismatch to witnessScript
        prevtx_err["redeemScript"] = "6a"  # OP_RETURN
        assert_raises_rpc_error(-8, "redeemScript does not correspond to witnessScript", node2.signrawtransactionwithkey, rawtx, self.priv[0:self.nsigs-1], [prevtx_err])

        # redeemScript does not match scriptPubKey
        del prevtx_err["witnessScript"]
        assert_raises_rpc_error(-8, "redeemScript/witnessScript does not match scriptPubKey", node2.signrawtransactionwithkey, rawtx, self.priv[0:self.nsigs-1], [prevtx_err])

        # witnessScript does not match scriptPubKey
        prevtx_err["witnessScript"] = prevtx_err["redeemScript"]
        del prevtx_err["redeemScript"]
        assert_raises_rpc_error(-8, "redeemScript/witnessScript does not match scriptPubKey", node2.signrawtransactionwithkey, rawtx, self.priv[0:self.nsigs-1], [prevtx_err])

        rawtx2 = node2.signrawtransactionwithkey(rawtx, self.priv[0:self.nsigs - 1], prevtxs)
        rawtx3 = node2.signrawtransactionwithkey(rawtx2["hex"], [self.priv[-1]], prevtxs)
        self.moved += outval
        tx = node0.sendrawtransaction(rawtx3["hex"], 0)
        blk = node0.generate(1)[0]
        assert tx in node0.getblock(blk)["tx"]

        txinfo = node0.getrawtransaction(tx, True, blk)
        self.log.info("n/m=%d/%d %s size=%d vsize=%d weight=%d" % (self.nsigs, self.nkeys, self.output_type, txinfo["size"], txinfo["vsize"], txinfo["weight"]))

        wmulti.unloadwallet()
|
rnicoll/dogecoin
|
test/functional/rpc_createmultisig.py
|
Python
|
mit
| 9,973
|
from landlab import Component
from ...utils.decorators import use_file_name_or_kwds
import numpy as np
_VALID_METHODS = set(['Grid'])
def _assert_method_is_valid(method):
if method not in _VALID_METHODS:
raise ValueError('%s: Invalid method name' % method)
class Radiation(Component):
    """Compute 1D and 2D total incident shortwave radiation.

    Landlab component that computes 1D and 2D total incident shortwave
    radiation. This code also computes relative incidence shortwave radiation
    compared to a flat surface.

    .. codeauthor:: Sai Nudurupati & Erkan Istanbulluoglu

    Construction::

        Radiation(grid, method='Grid', cloudiness=0.2, latitude=34.,
                  albedo=0.2, solar_constant=1366.67,
                  clearsky_turbidity=2., opt_airmass=0.)

    Parameters
    ----------
    grid: RasterModelGrid
        A grid.
    method: {'Grid'}, optional
        Currently, only default is available.
    cloudiness: float, optional
        Cloudiness.
    latitude: float, optional
        Latitude (radians).
    albedo: float, optional
        Albedo.
    solar_constant: float, optional
        Solar Constant (W/m^2).
    clearsky_turbidity: float, optional
        Clear sky turbidity.
    opt_airmass: float, optional
        Optical air mass.

    Examples
    --------
    >>> from landlab import RasterModelGrid
    >>> from landlab.components import Radiation
    >>> import numpy as np
    >>> grid = RasterModelGrid((5, 4), spacing=(0.2, 0.2))
    >>> rad = Radiation(grid)
    >>> rad.name
    'Radiation'
    >>> rad.input_var_names
    ('topographic__elevation',)
    >>> sorted(rad.output_var_names) # doctest: +NORMALIZE_WHITESPACE
    ['radiation__incoming_shortwave_flux',
     'radiation__net_shortwave_flux',
     'radiation__ratio_to_flat_surface']
    >>> sorted(rad.units) # doctest: +NORMALIZE_WHITESPACE
    [('radiation__incoming_shortwave_flux', 'W/m^2'),
     ('radiation__net_shortwave_flux', 'W/m^2'),
     ('radiation__ratio_to_flat_surface', 'None'),
     ('topographic__elevation', 'm')]
    >>> rad.grid.number_of_node_rows
    5
    >>> rad.grid.number_of_node_columns
    4
    >>> rad.grid is grid
    True
    >>> np.all(grid.at_cell['radiation__ratio_to_flat_surface'] == 0.)
    True
    >>> np.all(grid.at_node['topographic__elevation'] == 0.)
    True

    >>> grid['node']['topographic__elevation'] = np.array([
    ...     0., 0., 0., 0.,
    ...     1., 1., 1., 1.,
    ...     2., 2., 2., 2.,
    ...     3., 4., 4., 3.,
    ...     4., 4., 4., 4.])
    >>> current_time = 0.5
    >>> rad.update(current_time)
    >>> np.all(grid.at_cell['radiation__ratio_to_flat_surface'] == 0.)
    False
    """

    _name = 'Radiation'

    _input_var_names = (
        'topographic__elevation',
    )

    _output_var_names = (
        'radiation__incoming_shortwave_flux',
        'radiation__ratio_to_flat_surface',
        'radiation__net_shortwave_flux',
    )

    _var_units = {
        'topographic__elevation': 'm',
        'radiation__incoming_shortwave_flux': 'W/m^2',
        'radiation__ratio_to_flat_surface': 'None',
        'radiation__net_shortwave_flux': 'W/m^2',
    }

    _var_mapping = {
        'topographic__elevation': 'node',
        'radiation__incoming_shortwave_flux': 'cell',
        'radiation__ratio_to_flat_surface': 'cell',
        'radiation__net_shortwave_flux': 'cell',
    }

    _var_doc = {
        'topographic__elevation':
            'elevation of the ground surface relative to some datum',
        'radiation__incoming_shortwave_flux':
            'total incident shortwave radiation over the time step',
        'radiation__ratio_to_flat_surface':
            'ratio of total incident shortwave radiation on sloped surface \
to flat surface',
        'radiation__net_shortwave_flux':
            'net incident shortwave radiation over the time step',
    }

    @use_file_name_or_kwds
    def __init__(self, grid, method='Grid', cloudiness=0.2,
                 latitude=34., albedo=0.2, solar_constant=1366.67,
                 clearsky_turbidity=2., opt_airmass=0., **kwds):
        """
        Parameters
        ----------
        grid : RasterModelGrid
            A grid.
        method : {'Grid'}, optional
            Currently, only default is available.
        cloudiness: float, optional
            Cloudiness.
        latitude: float, optional
            Latitude (Radians).
        albedo: float, optional
            Albedo.
        solar_constant: float, optional
            Solar Constant (W/m^2).
        clearsky_turbidity: float, optional
            Clear sky turbidity.
        opt_airmass: float, optional
            Optical air mass.
        """
        self._method = method
        self._N = cloudiness          # cloudiness fraction
        self._latitude = latitude
        self._A = albedo
        self._Io = solar_constant
        self._n = clearsky_turbidity
        self._m = opt_airmass

        _assert_method_is_valid(self._method)

        super(Radiation, self).__init__(grid, **kwds)

        # Ensure all declared input/output fields exist on the grid.
        for name in self._input_var_names:
            if name not in self.grid.at_node:
                self.grid.add_zeros(name, at='node',
                                    units=self._var_units[name])

        for name in self._output_var_names:
            if name not in self.grid.at_cell:
                self.grid.add_zeros(name, at='cell',
                                    units=self._var_units[name])

        if 'Slope' not in self.grid.at_cell:
            self.grid.add_zeros('Slope', at='cell', units='radians')

        if 'Aspect' not in self.grid.at_cell:
            self.grid.add_zeros('Aspect', at='cell', units='radians')

        self._nodal_values = self.grid['node']
        self._cell_values = self.grid['cell']
        # Slope and aspect are computed once at construction from the
        # initial topography.
        self._slope, self._aspect = \
            grid.calculate_slope_aspect_at_nodes_burrough(
                vals='topographic__elevation')
        # self._slope = grid.calc_slope_of_node( \
        #     elevs = 'topographic__elevation')
        # self._aspect =
        self._cell_values['Slope'] = self._slope
        self._cell_values['Aspect'] = self._aspect

    def update(self, current_time, hour=12., **kwds):
        """Update fields with current loading conditions.

        Parameters
        ----------
        current_time: float
            Current time (years).
        hour: float, optional
            Hour of the day.
        """
        self._t = hour
        self._radf = self._cell_values['radiation__ratio_to_flat_surface']
        self._Rs = self._cell_values['radiation__incoming_shortwave_flux']
        self._Rnet = self._cell_values['radiation__net_shortwave_flux']

        self._julian = np.floor((current_time - np.floor(current_time)) *
                                365.25)  # Julian day
        self._phi = np.radians(self._latitude)  # Latitude in Radians
        self._delta = 23.45 * np.radians(
            np.cos(2*np.pi / 365 * (172 - self._julian)))  # Declination angle
        self._tau = (self._t + 12.0) * np.pi / 12.0  # Hour angle
        self._alpha = np.arcsin(np.sin(self._delta) * np.sin(self._phi) +
                                np.cos(self._delta) * np.cos(self._phi) *
                                np.cos(self._tau))  # Solar Altitude

        if self._alpha <= 0.25 * np.pi / 180.0:  # If altitude is -ve,
            self._alpha = 0.25 * np.pi / 180.0  # sun is beyond the horizon

        # Clear-sky global radiation after atmospheric attenuation.
        self._Rgl = (self._Io * np.exp((-1) * self._n * (
            0.128 - 0.054 * np.log10(1. / np.sin(self._alpha)))*(
                1. / np.sin(self._alpha))))
        # Counting for Albedo, Cloudiness and Atmospheric turbidity
        self._phisun = (np.arctan(- np.sin(self._tau) / (np.tan(self._delta) *
                        np.cos(self._phi) - np.sin(self._phi) *
                        np.cos(self._tau))))  # Sun's Azimuth
        # Shift azimuth into the correct half-plane (arctan is ambiguous).
        if (self._phisun >= 0 and - np.sin(self._tau) <= 0):
            self._phisun = self._phisun + np.pi
        elif (self._phisun <= 0 and - np.sin(self._tau) >= 0):
            self._phisun = self._phisun + np.pi

        self._flat = (np.cos(np.arctan(0)) * np.sin(self._alpha) +
                      np.sin(np.arctan(0)) * np.cos(self._alpha) *
                      np.cos(self._phisun - 0))  # flat surface reference

        self._Rsflat = self._Rgl * self._flat
        # flat surface total incoming shortwave radiation

        self._Rnetflat = ((1 - self._A) * (1 - 0.65 * (self._N ** 2)) *
                          self._Rsflat)
        # flat surface Net incoming shortwave radiation

        self._sloped = (np.cos(self._slope) * np.sin(self._alpha) +
                        np.sin(self._slope) * np.cos(self._alpha) *
                        np.cos(self._phisun - self._aspect))
        self._radf = self._sloped / self._flat

        # Clamp the ratio to a physically plausible range.
        self._radf[self._radf <= 0.] = 0.
        self._radf[self._radf > 6.] = 6.

        self._Rs = self._Rsflat * self._radf
        # Sloped surface Total Incoming Shortwave Radiation
        self._Rnet = self._Rnetflat * self._radf

        self._cell_values['radiation__ratio_to_flat_surface'] = self._radf
        self._cell_values['radiation__incoming_shortwave_flux'] = self._Rs
        self._cell_values['radiation__net_shortwave_flux'] = self._Rnet
|
laijingtao/landlab
|
landlab/components/radiation/radiation.py
|
Python
|
mit
| 9,349
|
"""
Data Aggregation Layer of the Enrollment API. Collects all enrollment specific data into a single
source to be used throughout the API.
"""
import logging
from django.contrib.auth.models import User # lint-amnesty, pylint: disable=imported-auth-user
from django.db import transaction
from opaque_keys.edx.keys import CourseKey
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.enrollments.errors import (
CourseEnrollmentClosedError,
CourseEnrollmentExistsError,
CourseEnrollmentFullError,
InvalidEnrollmentAttribute,
UserNotFoundError
)
from openedx.core.djangoapps.enrollments.serializers import CourseEnrollmentSerializer, CourseSerializer
from openedx.core.lib.exceptions import CourseNotFoundError
from common.djangoapps.student.models import (
AlreadyEnrolledError,
CourseEnrollment,
CourseEnrollmentAttribute,
CourseFullError,
EnrollmentClosedError,
NonExistentCourseError
)
from common.djangoapps.student.roles import RoleCache
log = logging.getLogger(__name__)
def get_course_enrollments(username, include_inactive=False):
    """Retrieve a list representing all aggregated data for a user's course enrollments.

    Construct a representation of all course enrollment data for a specific user.

    Args:
        username: The name of the user to retrieve course enrollment information for.
        include_inactive (bool): Determines whether inactive enrollments will be included

    Returns:
        A serializable list of dictionaries of all aggregated enrollment data for a user.

    """
    qset = CourseEnrollment.objects.filter(
        user__username=username,
    ).order_by('created')
    if not include_inactive:
        qset = qset.filter(is_active=True)
    enrollments = CourseEnrollmentSerializer(qset, many=True).data

    # Find deleted courses and filter them out of the results
    deleted = []
    valid = []
    for enrollment in enrollments:
        # Missing course_details means the course overview no longer exists
        # (e.g. the course was deleted after the enrollment was created).
        if enrollment.get("course_details") is not None:
            valid.append(enrollment)
        else:
            deleted.append(enrollment)

    if deleted:
        log.warning(
            (
                "Course enrollments for user %s reference "
                "courses that do not exist (this can occur if a course is deleted)."
            ), username,
        )

    return valid
def get_course_enrollment(username, course_id):
    """Retrieve an object representing all aggregated data for a user's course enrollment.

    Get the course enrollment information for a specific user and course.

    Args:
        username (str): The name of the user to retrieve course enrollment information for.
        course_id (str): The course to retrieve course enrollment information for.

    Returns:
        A serializable dictionary representing the course enrollment, or
        None when no enrollment exists for this user and course.
    """
    course_key = CourseKey.from_string(course_id)
    try:
        enrollment = CourseEnrollment.objects.get(
            user__username=username, course_id=course_key
        )
    except CourseEnrollment.DoesNotExist:
        return None
    return CourseEnrollmentSerializer(enrollment).data
def get_user_enrollments(course_key):
    """Based on the course id, return all user enrollments in the course

    Args:
        course_key (CourseKey): Identifier of the course
        from which to retrieve enrollments.

    Returns:
        The course's *active* user enrollments as a queryset, ordered by
        creation time (oldest first).

    Raises:
        CourseEnrollment.DoesNotExist
    """
    return CourseEnrollment.objects.filter(
        course_id=course_key,
        is_active=True
    ).order_by('created')
def create_course_enrollment(username, course_id, mode, is_active):
    """Create a new course enrollment for the given user.

    Creates a new course enrollment for the specified user username.

    Args:
        username (str): The name of the user to create a new course enrollment for.
        course_id (str): The course to create the course enrollment for.
        mode (str): (Optional) The mode for the new enrollment.
        is_active (boolean): (Optional) Determines if the enrollment is active.

    Returns:
        A serializable dictionary representing the new course enrollment.

    Raises:
        UserNotFoundError
        CourseNotFoundError
        CourseEnrollmentFullError
        CourseEnrollmentClosedError
        CourseEnrollmentExistsError
    """
    course_key = CourseKey.from_string(course_id)

    # Reuse the shared lookup helper instead of duplicating the
    # User.DoesNotExist handling (consistent with the other callers).
    user = _get_user(username)

    try:
        enrollment = CourseEnrollment.enroll(user, course_key, check_access=True)
        return _update_enrollment(enrollment, is_active=is_active, mode=mode)
    except NonExistentCourseError as err:
        raise CourseNotFoundError(str(err)) from err
    except EnrollmentClosedError as err:
        raise CourseEnrollmentClosedError(str(err)) from err
    except CourseFullError as err:
        raise CourseEnrollmentFullError(str(err)) from err
    except AlreadyEnrolledError as err:
        # Surface the existing enrollment so callers can recover from it.
        enrollment = get_course_enrollment(username, course_id)
        raise CourseEnrollmentExistsError(str(err), enrollment) from err
def update_course_enrollment(username, course_id, mode=None, is_active=None):
    """Modify a course enrollment for a user.

    Allows updates to a specific course enrollment.

    Args:
        username (str): The name of the user to retrieve course enrollment information for.
        course_id (str): The course to retrieve course enrollment information for.
        mode (str): (Optional) If specified, modify the mode for this enrollment.
        is_active (boolean): (Optional) Determines if the enrollment is active.

    Returns:
        A serializable dictionary representing the modified course enrollment,
        or None when no enrollment exists for this user and course.

    Raises:
        UserNotFoundError
    """
    course_key = CourseKey.from_string(course_id)

    # Reuse the shared lookup helper instead of duplicating the
    # User.DoesNotExist handling (consistent with the other callers).
    user = _get_user(username)

    try:
        enrollment = CourseEnrollment.objects.get(user=user, course_id=course_key)
    except CourseEnrollment.DoesNotExist:
        return None
    return _update_enrollment(enrollment, is_active=is_active, mode=mode)
def add_or_update_enrollment_attr(username, course_id, attributes):
    """Set enrollment attributes for the enrollment of given user in the
    course provided.

    Args:
        course_id (str): The Course to set enrollment attributes for.
        username: The User to set enrollment attributes for.
        attributes (list): Attributes to be set.

    Raises:
        UserNotFoundError: if no user with that username exists.
        InvalidEnrollmentAttribute: if any attribute dict is malformed.

    Example:
        >>>add_or_update_enrollment_attr(
            "Bob",
            "course-v1-edX-DemoX-1T2015",
            [
                {
                    "namespace": "credit",
                    "name": "provider_id",
                    "value": "hogwarts",
                },
            ]
        )
    """
    course_key = CourseKey.from_string(course_id)
    user = _get_user(username)
    enrollment = CourseEnrollment.get_enrollment(user, course_key)
    # _invalid_attribute raises on malformed input; silently no-op when the
    # user has no enrollment in this course.
    if not _invalid_attribute(attributes) and enrollment is not None:
        CourseEnrollmentAttribute.add_enrollment_attr(enrollment, attributes)
def get_enrollment_attributes(username, course_id):
    """Retrieve enrollment attributes for given user for provided course.

    Args:
        username: The User to get enrollment attributes for
        course_id (str): The Course to get enrollment attributes for.

    Raises:
        UserNotFoundError: if no user with that username exists.

    Example:
        >>>get_enrollment_attributes("Bob", "course-v1-edX-DemoX-1T2015")
        [
            {
                "namespace": "credit",
                "name": "provider_id",
                "value": "hogwarts",
            },
        ]

    Returns: list
    """
    course_key = CourseKey.from_string(course_id)
    user = _get_user(username)
    enrollment = CourseEnrollment.get_enrollment(user, course_key)
    return CourseEnrollmentAttribute.get_enrollment_attributes(enrollment)
def unenroll_user_from_all_courses(username):
    """
    Set all of a user's enrollments to inactive.
    :param username: The user being unenrolled.
    :return: The set of org names (not full course ids) from which the user
        was unenrolled.
    """
    user = _get_user(username)
    enrollments = CourseEnrollment.objects.filter(user=user)
    # All deactivations succeed together or roll back together.
    with transaction.atomic():
        for enrollment in enrollments:
            _update_enrollment(enrollment, is_active=False)

    return {str(enrollment.course_id.org) for enrollment in enrollments}  # lint-amnesty, pylint: disable=consider-using-set-comprehension
def _get_user(username):
    """Retrieve the User with the provided username.

    Args:
        username: username of the user to look up.

    Returns:
        User: the matching Django user.

    Raises:
        UserNotFoundError: if no user with that username exists.
    """
    try:
        return User.objects.get(username=username)
    except User.DoesNotExist as err:
        # BUG FIX: the old message read "Not user with username ... found.";
        # also chain the original exception for easier debugging.
        msg = f"No user with username '{username}' found."
        log.warning(msg)
        raise UserNotFoundError(msg) from err
def _update_enrollment(enrollment, is_active=None, mode=None):
    """Apply mode/active changes to an enrollment, persist it, and return
    the serialized result."""
    enrollment.update_enrollment(is_active=is_active, mode=mode)
    enrollment.save()
    return CourseEnrollmentSerializer(enrollment).data
def _invalid_attribute(attributes):
    """Validate enrollment attributes.

    Each attribute dict must contain the keys 'namespace', 'name' and
    'value' (checked in that order, matching the original behavior).

    Args:
        attributes (list): List of attribute dicts.

    Returns:
        list: names of missing keys. In practice this is always empty,
        because the first missing key raises immediately.

    Raises:
        InvalidEnrollmentAttribute: as soon as any required key is missing.
    """
    invalid_attributes = []
    # DRY: the three copy-pasted per-key checks collapsed into one loop.
    for attribute in attributes:
        for key in ("namespace", "name", "value"):
            if key not in attribute:
                msg = f"'{key}' not in enrollment attribute"
                log.warning(msg)
                invalid_attributes.append(key)
                raise InvalidEnrollmentAttribute(msg)
    return invalid_attributes
def get_course_enrollment_info(course_id, include_expired=False):
    """Return all course enrollment information for the given course.

    Looks up the course overview for ``course_id`` and serializes its
    enrollment information.

    Args:
        course_id (str): The course to retrieve enrollment information for.
        include_expired (bool): Whether expired course modes should be
            included in the returned data.

    Returns:
        A serializable dictionary representing the course's enrollment
        information.

    Raises:
        CourseNotFoundError: If no course overview exists for ``course_id``.
    """
    course_key = CourseKey.from_string(course_id)
    try:
        course = CourseOverview.get_from_id(course_key)
    except CourseOverview.DoesNotExist:
        msg = f"Requested enrollment information for unknown course {course_id}"
        log.warning(msg)
        raise CourseNotFoundError(msg)  # lint-amnesty, pylint: disable=raise-missing-from
    return CourseSerializer(course, include_expired=include_expired).data
def get_user_roles(username):
    """
    Return all course-access roles held by the given user.

    The roles are read from (and lazily cached on) the user's ``RoleCache``.

    :param username: The username of the selected user.
    :return: All roles for all courses that this user has.
    """
    # pylint: disable=protected-access
    user = _get_user(username)
    # Lazily attach a RoleCache the first time roles are requested.
    if not hasattr(user, '_roles'):
        user._roles = RoleCache(user)
    return user._roles._roles
def serialize_enrollments(enrollments):
    """
    Serialize a collection of CourseEnrollment objects into a list of dicts.
    """
    serializer = CourseEnrollmentSerializer(enrollments, many=True)
    return serializer.data
|
EDUlib/edx-platform
|
openedx/core/djangoapps/enrollments/data.py
|
Python
|
agpl-3.0
| 12,273
|
import argparse
import os
from gittalk import GitTalk
from gittalk.utils import which, make_sure_path_exists
def run():
"""
`run` drives the command line interface for Git Talk.
It exposes a command line interface through which users
can interact with Git Talk to configure or invoke various
functionalities.
"""
# do explict dependency checks
try:
import Tkinter
except Exception as e:
print 'Make sure your Python has Tkinter installed before using GitTalk!'
if not which('ffmpeg'):
print 'Please make sure FFmpeg is installed before using GitTalk!'
# create a folder to be used by GitTalk
make_sure_path_exists(os.path.join(os.environ['HOME'], '.gittalk'))
parser = argparse.ArgumentParser(description='Audio & Video annotations to your code via Git')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('-e', '--enable', action='store_true', required=False,
help='Enable Git Talk in the current Git repository.',
dest='enable')
group.add_argument('-d', '--disable', action='store_true', required=False,
help='Disable Git Talk in the current Git repository.',
dest='disable')
group.add_argument('-t', '--trigger', action='store_true', required=False,
help='Trigger Git Talk.',
dest='trigger')
args = parser.parse_args()
gt = GitTalk()
if args.enable:
gt.enable()
elif args.disable:
gt.disable()
elif args.trigger:
gt.trigger()
# Allow invoking this module directly as a script.
if __name__ == '__main__':
    run()
|
sandeepraju/git-talk
|
cli.py
|
Python
|
bsd-3-clause
| 1,686
|
import modules.options_helper as opt_helper
from modules.file.file_helper import File
import sys
def main(options):
# available config keys
options_registry = ["path","find","replace_with"]
# verify config option provided match registry
opt_helper.check_options(options, options_registry)
path = options.get("path", False)
find = options.get("find", False)
replace_with = options.get("replace_with", False)
# see if all required fields are present
if path and find and replace_with:
f = File(path)
is_find_in_file = f.is_in_file(find)
filetype = f.get_ftype()
# only supporting files right now, no links, directories
if filetype == "file" and is_find_in_file:
# check if the change was applied already to avoid replacing duplicate lines if any
if f.is_in_file(replace_with) and is_find_in_file:
print "Will not replace. Looks like following is already in file " + path + ": " + replace_with
else:
print "Replacing content in file: " + path
f.replace_in_file(find, replace_with)
else:
if filetype != "file":
print "Can't run this playbook because provided 'path' is not a file, it's a " + filetype
# TODO: raise exception
sys.exit()
if not is_find_in_file:
print "Didn't find " + find + " in the file " + path + ". Nothing to replace."
if __name__ == '__main__':
    # NOTE(review): `options` is not defined at module level, so running this
    # file directly raises NameError.  It appears the playbook runner injects
    # `options` before importing/executing -- confirm before relying on this.
    main(options)
|
dkoudlo/py-manage-server
|
modules/file/replace.py
|
Python
|
apache-2.0
| 1,593
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.23
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1RuleWithOperations(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """
    # NOTE(review): generated model -- regenerate from the OpenAPI spec rather
    # than hand-editing; the comments below are review annotations only.

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Maps python attribute name -> declared OpenAPI type (driven by to_dict()).
    openapi_types = {
        'api_groups': 'list[str]',
        'api_versions': 'list[str]',
        'operations': 'list[str]',
        'resources': 'list[str]',
        'scope': 'str'
    }

    # Maps python attribute name -> JSON field name in the API definition.
    attribute_map = {
        'api_groups': 'apiGroups',
        'api_versions': 'apiVersions',
        'operations': 'operations',
        'resources': 'resources',
        'scope': 'scope'
    }

    def __init__(self, api_groups=None, api_versions=None, operations=None, resources=None, scope=None, local_vars_configuration=None):  # noqa: E501
        """V1RuleWithOperations - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._api_groups = None
        self._api_versions = None
        self._operations = None
        self._resources = None
        self._scope = None
        self.discriminator = None

        # Only assign fields that were explicitly provided; unset fields stay
        # None and are still emitted by to_dict().
        if api_groups is not None:
            self.api_groups = api_groups
        if api_versions is not None:
            self.api_versions = api_versions
        if operations is not None:
            self.operations = operations
        if resources is not None:
            self.resources = resources
        if scope is not None:
            self.scope = scope

    @property
    def api_groups(self):
        """Gets the api_groups of this V1RuleWithOperations.  # noqa: E501

        APIGroups is the API groups the resources belong to. '*' is all groups. If '*' is present, the length of the slice must be one. Required.  # noqa: E501

        :return: The api_groups of this V1RuleWithOperations.  # noqa: E501
        :rtype: list[str]
        """
        return self._api_groups

    @api_groups.setter
    def api_groups(self, api_groups):
        """Sets the api_groups of this V1RuleWithOperations.

        APIGroups is the API groups the resources belong to. '*' is all groups. If '*' is present, the length of the slice must be one. Required.  # noqa: E501

        :param api_groups: The api_groups of this V1RuleWithOperations.  # noqa: E501
        :type: list[str]
        """

        self._api_groups = api_groups

    @property
    def api_versions(self):
        """Gets the api_versions of this V1RuleWithOperations.  # noqa: E501

        APIVersions is the API versions the resources belong to. '*' is all versions. If '*' is present, the length of the slice must be one. Required.  # noqa: E501

        :return: The api_versions of this V1RuleWithOperations.  # noqa: E501
        :rtype: list[str]
        """
        return self._api_versions

    @api_versions.setter
    def api_versions(self, api_versions):
        """Sets the api_versions of this V1RuleWithOperations.

        APIVersions is the API versions the resources belong to. '*' is all versions. If '*' is present, the length of the slice must be one. Required.  # noqa: E501

        :param api_versions: The api_versions of this V1RuleWithOperations.  # noqa: E501
        :type: list[str]
        """

        self._api_versions = api_versions

    @property
    def operations(self):
        """Gets the operations of this V1RuleWithOperations.  # noqa: E501

        Operations is the operations the admission hook cares about - CREATE, UPDATE, DELETE, CONNECT or * for all of those operations and any future admission operations that are added. If '*' is present, the length of the slice must be one. Required.  # noqa: E501

        :return: The operations of this V1RuleWithOperations.  # noqa: E501
        :rtype: list[str]
        """
        return self._operations

    @operations.setter
    def operations(self, operations):
        """Sets the operations of this V1RuleWithOperations.

        Operations is the operations the admission hook cares about - CREATE, UPDATE, DELETE, CONNECT or * for all of those operations and any future admission operations that are added. If '*' is present, the length of the slice must be one. Required.  # noqa: E501

        :param operations: The operations of this V1RuleWithOperations.  # noqa: E501
        :type: list[str]
        """

        self._operations = operations

    @property
    def resources(self):
        """Gets the resources of this V1RuleWithOperations.  # noqa: E501

        Resources is a list of resources this rule applies to.  For example: 'pods' means pods. 'pods/log' means the log subresource of pods. '*' means all resources, but not subresources. 'pods/*' means all subresources of pods. '*/scale' means all scale subresources. '*/*' means all resources and their subresources.  If wildcard is present, the validation rule will ensure resources do not overlap with each other.  Depending on the enclosing object, subresources might not be allowed. Required.  # noqa: E501

        :return: The resources of this V1RuleWithOperations.  # noqa: E501
        :rtype: list[str]
        """
        return self._resources

    @resources.setter
    def resources(self, resources):
        """Sets the resources of this V1RuleWithOperations.

        Resources is a list of resources this rule applies to.  For example: 'pods' means pods. 'pods/log' means the log subresource of pods. '*' means all resources, but not subresources. 'pods/*' means all subresources of pods. '*/scale' means all scale subresources. '*/*' means all resources and their subresources.  If wildcard is present, the validation rule will ensure resources do not overlap with each other.  Depending on the enclosing object, subresources might not be allowed. Required.  # noqa: E501

        :param resources: The resources of this V1RuleWithOperations.  # noqa: E501
        :type: list[str]
        """

        self._resources = resources

    @property
    def scope(self):
        """Gets the scope of this V1RuleWithOperations.  # noqa: E501

        scope specifies the scope of this rule. Valid values are \"Cluster\", \"Namespaced\", and \"*\" \"Cluster\" means that only cluster-scoped resources will match this rule. Namespace API objects are cluster-scoped. \"Namespaced\" means that only namespaced resources will match this rule. \"*\" means that there are no scope restrictions. Subresources match the scope of their parent resource. Default is \"*\".  # noqa: E501

        :return: The scope of this V1RuleWithOperations.  # noqa: E501
        :rtype: str
        """
        return self._scope

    @scope.setter
    def scope(self, scope):
        """Sets the scope of this V1RuleWithOperations.

        scope specifies the scope of this rule. Valid values are \"Cluster\", \"Namespaced\", and \"*\" \"Cluster\" means that only cluster-scoped resources will match this rule. Namespace API objects are cluster-scoped. \"Namespaced\" means that only namespaced resources will match this rule. \"*\" means that there are no scope restrictions. Subresources match the scope of their parent resource. Default is \"*\".  # noqa: E501

        :param scope: The scope of this V1RuleWithOperations.  # noqa: E501
        :type: str
        """

        self._scope = scope

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively convert nested models/lists/dicts; plain values pass
        # through unchanged.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, V1RuleWithOperations):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, V1RuleWithOperations):
            return True

        return self.to_dict() != other.to_dict()
|
kubernetes-client/python
|
kubernetes/client/models/v1_rule_with_operations.py
|
Python
|
apache-2.0
| 9,436
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from django.contrib.auth.forms import SetPasswordForm
# URL routes wiring django.contrib.auth's function-based views into the
# registration flow: login/logout, password change, and the password-reset
# sequence (request -> confirm -> complete/done).
# NOTE(review): these function-based auth views (auth_views.login,
# auth_views.logout, auth_views.password_change, ...) were deprecated in
# Django 1.11 and removed in Django 2.1 -- confirm the project's Django
# version before upgrading.
urlpatterns = [
    url(
        r"^login/$",
        auth_views.login,
        {"template_name": "registration/login.html"},
        name="auth_login",
    ),
    url(
        r"^logout/$",
        auth_views.logout,
        {"template_name": "registration/logout.html"},
        name="auth_logout",
    ),
    url(
        r"^password/change/$",
        auth_views.password_change,
        {
            # After a change, send the user back to their profile editor.
            "post_change_redirect": "/network/profiles/edit/",
            # SetPasswordForm does not ask for the old password.
            "password_change_form": SetPasswordForm,
        },
        name="auth_password_change",
    ),
    url(
        r"^password/change/done/$",
        auth_views.password_change_done,
        name="auth_password_change_done",
    ),
    url(r"^password/reset/$", auth_views.password_reset, name="auth_password_reset"),
    url(
        r"^password/reset/confirm/(?P<uidb64>[0-9A-Za-z]+)-(?P<token>.+)/$",
        auth_views.password_reset_confirm,
        name="auth_password_reset_confirm",
    ),
    url(
        r"^password/reset/complete/$",
        auth_views.password_reset_complete,
        name="password_reset_complete",
    ),
    url(
        r"^password/reset/done/$",
        auth_views.password_reset_done,
        name="password_reset_done",
    ),
]
|
hzlf/openbroadcast.org
|
website/tools/registration/auth_urls.py
|
Python
|
gpl-3.0
| 1,439
|
# -*- coding: utf-8 -*-
#
# hl_api_simulation.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Functions for simulation control
"""
from contextlib import contextmanager
import warnings
from ..ll_api import *
from .hl_api_helper import *
from .hl_api_parallel_computing import Rank
# Public API of this module (kept in alphabetical order).
__all__ = [
    'Cleanup',
    'DisableStructuralPlasticity',
    'EnableStructuralPlasticity',
    'GetKernelStatus',
    'Install',
    'Prepare',
    'ResetKernel',
    'Run',
    'RunManager',
    'SetKernelStatus',
    'Simulate',
]
@check_stack
def Simulate(t):
    """Simulate the network for `t` milliseconds.

    Parameters
    ----------
    t : float
        Time to simulate in ms

    See Also
    --------
    RunManager
    """
    # Push the duration onto the SLI stack, then execute the interpreter's
    # "ms Simulate" command; sps() must precede sr() (stack machine).
    sps(float(t))
    sr('ms Simulate')
@check_stack
def Run(t):
    """Simulate the network for `t` milliseconds.

    Parameters
    ----------
    t : float
        Time to simulate in ms

    Notes
    ------
    Call between `Prepare` and `Cleanup` calls, or within a
    ``with RunManager`` clause.

    Simulate(t): t' = t/m; Prepare(); for _ in range(m): Run(t'); Cleanup()

    `Prepare` must be called before `Run` to calibrate the system, and
    `Cleanup` must be called after `Run` to close files, cleanup handles, and
    so on. After `Cleanup`, `Prepare` can and must be called before more `Run`
    calls.

    Be careful about modifying the network or neurons between `Prepare` and `Cleanup`
    calls. In particular, do not call `Create`, `Connect`, or `SetKernelStatus`.
    Calling `SetStatus` to change membrane potential `V_m` of neurons or synaptic
    weights (but not delays!) will in most cases work as expected, while changing
    membrane or synaptic times constants will not work correctly. If in doubt, assume
    that changes may cause undefined behavior and check these thoroughly.

    See Also
    --------
    Prepare, Cleanup, RunManager, Simulate
    """
    # Push the duration, then invoke the SLI "ms Run" command; sps() must
    # precede sr() (the interpreter pops its argument from the stack).
    sps(float(t))
    sr('ms Run')
@check_stack
def Prepare():
    """Calibrate the system before a `Run` call. Not needed for `Simulate`.

    Call before the first `Run` call, or before calling `Run` after changing
    the system, calling `SetStatus` or `Cleanup`.

    See Also
    --------
    Run, Cleanup
    """
    # Delegates directly to the SLI interpreter's Prepare command.
    sr('Prepare')
@check_stack
def Cleanup():
    """Cleans up resources after a `Run` call. Not needed for `Simulate`.

    Closes state for a series of runs, such as flushing and closing files.
    A `Prepare` is needed after a `Cleanup` before any more calls to `Run`.

    See Also
    --------
    Run, Prepare
    """
    # Delegates directly to the SLI interpreter's Cleanup command.
    sr('Cleanup')
@contextmanager
def RunManager():
    """ContextManager for `Run`

    Calls `Prepare` before a series of `Run` calls, and calls `Cleanup` at end.

    E.g.:

    ::

        with RunManager():
            for _ in range(10):
                Run(100)
            # extract results

    Notes
    -----
    Be careful about modifying the network or neurons inside the `RunManager` context.
    In particular, do not call `Create`, `Connect`, or `SetKernelStatus`. Calling `SetStatus`
    to change membrane potential `V_m` of neurons or synaptic weights (but not delays!)
    will in most cases work as expected, while changing membrane or synaptic times
    constants will not work correctly. If in doubt, assume that changes may cause
    undefined behavior and check these thoroughly.

    See Also
    --------
    Prepare, Run, Cleanup, Simulate
    """
    Prepare()
    try:
        yield
    finally:
        # Guarantee Cleanup() runs even if the managed body raises.
        Cleanup()
@check_stack
def ResetKernel():
    """Reset the simulation kernel.

    This will destroy the network as well as all custom models created with
    :py:func:`.CopyModel`. Calling this function is equivalent to restarting NEST.

    In particular,

    * all network nodes
    * all connections
    * all user-defined neuron and synapse models

    are deleted, and

    * time
    * random generators

    are reset. The only exception is that dynamically loaded modules are not
    unloaded. This may change in a future version of NEST.
    """
    # Delegates directly to the SLI interpreter's ResetKernel command.
    sr('ResetKernel')
@check_stack
def SetKernelStatus(params):
    r"""Set parameters for the simulation kernel.

    Parameters
    ----------

    params : dict
        Dictionary of parameters to set.

    **Note**

    All NEST kernel parameters are described below, grouped by topic.
    Some of them only provide information about the kernel status and
    cannot be set by the user. These are marked as *read only* and can
    be accessed using ``GetKernelStatus``.

    **Time and resolution**

    Parameters
    ----------
    resolution : float, default: 0.1
        The resolution of the simulation (in ms)
    time : float
        The current simulation time (in ms)
    to_do : int, read only
        The number of steps yet to be simulated
    max_delay : float, default: 0.1
        The maximum delay in the network
    min_delay : float, default: 0.1
        The minimum delay in the network
    ms_per_tic : float, default: 0.001
        The number of milliseconds per tic
    tics_per_ms : float, default: 1000.0
        The number of tics per millisecond
    tics_per_step : int, default: 100
        The number of tics per simulation time step
    T_max : float, read only
        The largest representable time value
    T_min : float, read only
        The smallest representable time value

    **Random number generators**

    Parameters
    ----------
    rng_types : list, read only
        Names of random number generator types available.
        Types: "Philox_32", "Philox_64", "Threefry_32", "Threefry_64", "mt19937", "mt19937_64"
    rng_type : str, default: mt19937_64
        Name of random number generator type used by NEST.
    rng_seed : int, default: 143202461
        Seed value used as base for seeding NEST random number generators
        (:math:`1 \leq s \leq 2^{32}-1`).

    **Parallel processing**

    Parameters
    ----------
    total_num_virtual_procs : int, default: 1
        The total number of virtual processes
    local_num_threads : int, default: 1
        The local number of threads
    num_processes : int, read only
        The number of MPI processes
    off_grid_spiking : bool, read only
        Whether to transmit precise spike times in MPI communication

    **MPI buffers**

    Parameters
    ----------
    adaptive_spike_buffers : bool, default: True
        Whether MPI buffers for communication of spikes resize on the fly
    adaptive_target_buffers : bool, default: True
        Whether MPI buffers for communication of connections resize on the fly
    buffer_size_secondary_events : int, read only
        Size of MPI buffers for communicating secondary events (in bytes, per
        MPI rank, for developers)
    buffer_size_spike_data : int, default: 2
        Total size of MPI buffer for communication of spikes
    buffer_size_target_data : int, default: 2
        Total size of MPI buffer for communication of connections
    growth_factor_buffer_spike_data : float, default: 1.5
        If MPI buffers for communication of spikes resize on the fly, grow
        them by this factor each round
    growth_factor_buffer_target_data : float, default: 1.5
        If MPI buffers for communication of connections resize on the fly, grow
        them by this factor each round
    max_buffer_size_spike_data : int, default: 8388608
        Maximal size of MPI buffers for communication of spikes.
    max_buffer_size_target_data : int, default: 16777216
        Maximal size of MPI buffers for communication of connections

    **Gap junctions and rate models (waveform relaxation method)**

    Parameters
    ----------
    use_wfr : bool, default: True
        Whether to use waveform relaxation method
    wfr_comm_interval : float, default: 1.0
        Desired waveform relaxation communication interval
    wfr_tol : float, default: 0.0001
        Convergence tolerance of waveform relaxation method
    wfr_max_iterations : int, default: 15
        Maximal number of iterations used for waveform relaxation
    wfr_interpolation_order : int, default: 3
        Interpolation order of polynomial used in wfr iterations

    **Synapses**

    Parameters
    ----------
    max_num_syn_models : int, read only
        Maximal number of synapse models supported
    sort_connections_by_source : bool, default: True
        Whether to sort connections by their source; increases construction
        time of presynaptic data structures, decreases simulation time if the
        average number of outgoing connections per neuron is smaller than the
        total number of threads
    structural_plasticity_synapses : dict
        Defines all synapses which are plastic for the structural plasticity
        algorithm. Each entry in the dictionary is composed of a synapse model,
        the pre synaptic element and the postsynaptic element
    structural_plasticity_update_interval : int, default: 10000.0
        Defines the time interval in ms at which the structural plasticity
        manager will make changes in the structure of the network (creation
        and deletion of plastic synapses)
    use_compressed_spikes : bool, default: True
        Whether to use spike compression; if a neuron has targets on
        multiple threads of a process, this switch makes sure that only
        a single packet is sent to the process instead of one packet per
        target thread; requires sort_connections_by_source = true

    **Output**

    Parameters
    -------
    data_path : str
        A path, where all data is written to (default is the current
        directory)
    data_prefix : str
        A common prefix for all data files
    overwrite_files : bool, default: False
        Whether to overwrite existing data files
    print_time : bool, default: False
        Whether to print progress information during the simulation
    network_size : int, read only
        The number of nodes in the network
    num_connections : int, read only, local only
        The number of connections in the network
    local_spike_counter : int, read only
        Number of spikes fired by neurons on a given MPI rank during the most
        recent call to :py:func:`.Simulate`. Only spikes from "normal" neurons
        are counted, not spikes generated by devices such as ``poisson_generator``.
    recording_backends : list of str
        List of available backends for recording devices:
        "memory", "ascii", "screen"

    **Miscellaneous**

    Parameters
    ----------
    dict_miss_is_error : bool, default: True
        Whether missed dictionary entries are treated as errors
    keep_source_table : bool, default: True
        Whether to keep source table after connection setup is complete
    min_update_time: double, read only
        Shortest wall-clock time measured so far for a full update step [seconds].
    max_update_time: double, read only
        Longest wall-clock time measured so far for a full update step [seconds].
    update_time_limit: double
        Maximum wall-clock time for one full update step in seconds, default +inf.
        This can be used to terminate simulations that slow down significantly.
        Simulations may still get stuck if the slowdown occurs within a single update
        step.

    See Also
    --------
    GetKernelStatus
    """
    # NOTE: the docstring above is load-bearing -- module-level code below
    # this function parses it (lines containing ' : ') to build the set of
    # valid/read-only kernel parameters.  Do not reformat it casually.
    # NOTE(review): entries written as "name: type" with no space before the
    # colon (min_update_time, max_update_time, update_time_limit) are missed
    # by that ' : ' parser, so e.g. update_time_limit would be rejected as
    # "not a valid kernel parameter" -- confirm and fix upstream.

    # Resolve if missing entries should raise errors
    raise_errors = params.get('dict_miss_is_error')
    if raise_errors is None:
        raise_errors = GetKernelStatus('dict_miss_is_error')

    # Check validity of passed parameters
    keys = list(params.keys())
    for key in keys:
        readonly = _sks_params.get(key)
        msg = None
        if readonly is None:
            # If the parameter is not in the docstring
            msg = f'`{key}` is not a valid kernel parameter, ' + \
                  'valid parameters are: ' + \
                  ', '.join(f"'{p}'" for p in _sks_params.keys())
        elif readonly:
            # If the parameter is tagged as read only
            msg = f'`{key}` is a read only parameter and cannot ' + \
                  'be defined using SetKernelStatus'
        # Raise error or warn the user
        if msg is not None:
            if raise_errors:
                raise ValueError(msg)
            else:
                warnings.warn(msg + f' \n`{key}` has been ignored')
                del params[key]

    sps(params)
    sr('SetKernelStatus')
# Parse the `SetKernelStatus` docstring to obtain all valid and readonly
# params.  Every "name : type" line after the initial `params` entry becomes a
# recognized kernel parameter; the phrase "read only" marks it non-settable.
doc_lines = SetKernelStatus.__doc__.split('\n')
# Get the lines describing parameters
param_lines = (line.strip() for line in doc_lines if ' : ' in line)
# Exclude the first parameter `params`.
next(param_lines)
_sks_params = {ln.split(" :")[0]: "read only" in ln for ln in param_lines}
# Drop the parsing intermediates; only the mapping stays at module scope.
del doc_lines, param_lines
@check_stack
def GetKernelStatus(keys=None):
    """Obtain parameters of the simulation kernel.

    Parameters
    ----------
    keys : str or list, optional
        Single parameter name or `list` of parameter names

    Returns
    -------
    dict:
        Parameter dictionary, if called without argument
    type:
        Single parameter value, if called with single parameter name
    list:
        List of parameter values, if called with list of parameter names

    Raises
    ------
    TypeError
        If `keys` are of the wrong type.

    Notes
    -----
    See SetKernelStatus for documentation on each parameter key.

    See Also
    --------
    SetKernelStatus
    """
    # Fetch the full status dictionary from the interpreter, then narrow it
    # down according to what the caller asked for.
    sr('GetKernelStatus')
    status = spp()
    if keys is None:
        return status
    if is_literal(keys):
        return status[keys]
    if is_iterable(keys):
        return tuple(status[k] for k in keys)
    raise TypeError("keys should be either a string or an iterable")
@check_stack
def Install(module_name):
    """Load a dynamically linked NEST module.

    Parameters
    ----------
    module_name : str
        Name of the dynamically linked module

    Returns
    -------
    handle
        NEST module identifier, required for unloading

    Notes
    -----
    Dynamically linked modules are searched in the NEST library
    directory (``<prefix>/lib/nest``) and in ``LD_LIBRARY_PATH`` (on
    Linux) or ``DYLD_LIBRARY_PATH`` (on OSX).

    **Example**
    ::

        nest.Install("mymodule")
    """
    # Build and run the SLI command "(<module_name>) Install"; the result is
    # the module handle.
    return sr("(%s) Install" % module_name)
@check_stack
def EnableStructuralPlasticity():
    """Enable structural plasticity for the network simulation

    See Also
    --------
    DisableStructuralPlasticity
    """
    # Delegates directly to the SLI interpreter.
    sr('EnableStructuralPlasticity')
@check_stack
def DisableStructuralPlasticity():
    """Disable structural plasticity for the network simulation

    See Also
    --------
    EnableStructuralPlasticity
    """
    # Delegates directly to the SLI interpreter.
    sr('DisableStructuralPlasticity')
|
lekshmideepu/nest-simulator
|
pynest/nest/lib/hl_api_simulation.py
|
Python
|
gpl-2.0
| 15,584
|
# Copyright 2015 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glance_store as store
import webob
import glance.api.v2.image_actions as image_actions
import glance.context
from glance.tests.unit import base
import glance.tests.unit.utils as unit_test_utils
BASE_URI = unit_test_utils.BASE_URI

# Fixed identifiers/checksum so fixtures and assertions are deterministic.
USER1 = '54492ba0-f4df-4e4e-be62-27f4d76b29cf'
UUID1 = 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d'
TENANT1 = '6838eb7b-6ded-434a-882c-b344c77fe8df'
CHKSUM = '93264c3edf5972c9f1cb309543d38a5c'
def _db_fixture(id, **kwargs):
    """Return an image-record dict for seeding the fake DB.

    Starts from a baseline queued, shared-visibility image and applies
    ``kwargs`` as overrides (any field, including 'id', may be overridden).
    """
    base = dict(
        id=id,
        name=None,
        visibility='shared',
        properties={},
        checksum=None,
        owner=None,
        status='queued',
        tags=[],
        size=None,
        virtual_size=None,
        locations=[],
        protected=False,
        disk_format=None,
        container_format=None,
        deleted=False,
        min_ram=None,
        min_disk=None,
    )
    # kwargs win over the defaults, matching the original dict.update().
    return {**base, **kwargs}
class TestImageActionsController(base.IsolatedUnitTest):
def setUp(self):
super(TestImageActionsController, self).setUp()
self.db = unit_test_utils.FakeDB(initialize=False)
self.policy = unit_test_utils.FakePolicyEnforcer()
self.notifier = unit_test_utils.FakeNotifier()
self.store = unit_test_utils.FakeStoreAPI()
for i in range(1, 4):
self.store.data['%s/fake_location_%i' % (BASE_URI, i)] = ('Z', 1)
self.store_utils = unit_test_utils.FakeStoreUtils(self.store)
self.controller = image_actions.ImageActionsController(
self.db,
self.policy,
self.notifier,
self.store)
self.controller.gateway.store_utils = self.store_utils
store.create_stores()
def _get_fake_context(self, user=USER1, tenant=TENANT1, roles=None,
is_admin=False):
if roles is None:
roles = ['member']
kwargs = {
'user': user,
'tenant': tenant,
'roles': roles,
'is_admin': is_admin,
}
context = glance.context.RequestContext(**kwargs)
return context
def _create_image(self, status):
self.images = [
_db_fixture(UUID1, owner=TENANT1, checksum=CHKSUM,
name='1', size=256, virtual_size=1024,
visibility='public',
locations=[{'url': '%s/%s' % (BASE_URI, UUID1),
'metadata': {}, 'status': 'active'}],
disk_format='raw',
container_format='bare',
status=status),
]
context = self._get_fake_context()
[self.db.image_create(context, image) for image in self.images]
def test_deactivate_from_active(self):
self._create_image('active')
request = unit_test_utils.get_fake_request()
self.controller.deactivate(request, UUID1)
image = self.db.image_get(request.context, UUID1)
self.assertEqual('deactivated', image['status'])
def test_deactivate_from_deactivated(self):
self._create_image('deactivated')
request = unit_test_utils.get_fake_request()
self.controller.deactivate(request, UUID1)
image = self.db.image_get(request.context, UUID1)
self.assertEqual('deactivated', image['status'])
def _test_deactivate_from_wrong_status(self, status):
# deactivate will yield an error if the initial status is anything
# other than 'active' or 'deactivated'
self._create_image(status)
request = unit_test_utils.get_fake_request()
self.assertRaises(webob.exc.HTTPForbidden, self.controller.deactivate,
request, UUID1)
def test_deactivate_from_queued(self):
self._test_deactivate_from_wrong_status('queued')
def test_deactivate_from_saving(self):
self._test_deactivate_from_wrong_status('saving')
def test_deactivate_from_killed(self):
self._test_deactivate_from_wrong_status('killed')
def test_deactivate_from_pending_delete(self):
self._test_deactivate_from_wrong_status('pending_delete')
def test_deactivate_from_deleted(self):
self._test_deactivate_from_wrong_status('deleted')
def test_reactivate_from_active(self):
self._create_image('active')
request = unit_test_utils.get_fake_request()
self.controller.reactivate(request, UUID1)
image = self.db.image_get(request.context, UUID1)
self.assertEqual('active', image['status'])
def test_reactivate_from_deactivated(self):
self._create_image('deactivated')
request = unit_test_utils.get_fake_request()
self.controller.reactivate(request, UUID1)
image = self.db.image_get(request.context, UUID1)
self.assertEqual('active', image['status'])
def _test_reactivate_from_wrong_status(self, status):
# reactivate will yield an error if the initial status is anything
# other than 'active' or 'deactivated'
self._create_image(status)
request = unit_test_utils.get_fake_request()
self.assertRaises(webob.exc.HTTPForbidden, self.controller.reactivate,
request, UUID1)
    def test_reactivate_from_queued(self):
        """Reactivate is rejected while the image is still 'queued'."""
        self._test_reactivate_from_wrong_status('queued')
    def test_reactivate_from_saving(self):
        """Reactivate is rejected while the image is 'saving'."""
        self._test_reactivate_from_wrong_status('saving')
    def test_reactivate_from_killed(self):
        """Reactivate is rejected for a 'killed' image."""
        self._test_reactivate_from_wrong_status('killed')
    def test_reactivate_from_pending_delete(self):
        """Reactivate is rejected for a 'pending_delete' image."""
        self._test_reactivate_from_wrong_status('pending_delete')
    def test_reactivate_from_deleted(self):
        """Reactivate is rejected for a 'deleted' image."""
        self._test_reactivate_from_wrong_status('deleted')
|
openstack/glance
|
glance/tests/unit/v2/test_image_actions_resource.py
|
Python
|
apache-2.0
| 6,487
|
# coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class Amount(object):
    """Swagger model: a monetary value paired with its currency code.

    NOTE: originally emitted by swagger-codegen; this is a behaviorally
    identical, hand-reviewed equivalent.
    """

    def __init__(self, value=None, currency_code=None):
        """Create an Amount.

        :param value: monetary value, serialized as a string.
        :param currency_code: currency code, serialized as a string.
        """
        # Serialization metadata used by to_dict(): attribute -> swagger
        # type, and attribute -> JSON key name.
        self.swagger_types = {
            'value': 'str',
            'currency_code': 'str'
        }
        self.attribute_map = {
            'value': 'value',
            'currency_code': 'currency-code'
        }
        self._value = value
        self._currency_code = currency_code

    @property
    def value(self):
        """Monetary value of this Amount.

        :rtype: str
        """
        return self._value

    @value.setter
    def value(self, value):
        """Set the monetary value of this Amount.

        :type value: str
        """
        self._value = value

    @property
    def currency_code(self):
        """Currency code of this Amount.

        :rtype: str
        """
        return self._currency_code

    @currency_code.setter
    def currency_code(self, currency_code):
        """Set the currency code of this Amount.

        :type currency_code: str
        """
        self._currency_code = currency_code

    def to_dict(self):
        """Return the model's properties as a plain dict."""
        def _unpack(obj):
            # Recurse into nested swagger models; leave plain values alone.
            return obj.to_dict() if hasattr(obj, "to_dict") else obj

        serialized = {}
        for attr in self.swagger_types:
            val = getattr(self, attr)
            if isinstance(val, list):
                serialized[attr] = [_unpack(item) for item in val]
            elif hasattr(val, "to_dict"):
                serialized[attr] = val.to_dict()
            elif isinstance(val, dict):
                serialized[attr] = {k: _unpack(v) for k, v in val.items()}
            else:
                serialized[attr] = val
        return serialized

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """True when *other* is an Amount with identical attributes."""
        return isinstance(other, Amount) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
|
Royal-Society-of-New-Zealand/NZ-ORCID-Hub
|
orcid_api/models/amount.py
|
Python
|
mit
| 3,493
|
#!/usr/bin/env python
import os
import pprint
import xml.dom.minidom
_MIN_PROJECT_ID = 0
_MAX_PROJECT_ID = 255
_MIN_FEATURE_ID = 0
_MAX_FEATURE_ID = 255
_MIN_CLASS_ID = 0
_MAX_CLASS_ID = 255
_MIN_CMD_ID = 0
_MAX_CMD_ID = 65535
_FTR_GEN = 'generic'
#===============================================================================
#===============================================================================
class ArParserError(Exception):
    """Raised when an arsdk command/feature XML file fails validation."""
#===============================================================================
#===============================================================================
class ArCmdListType(object):
    """How a command carries its payload: single shot (NONE), element of a
    list (LIST) or entry of a keyed map (MAP). Mirrors the 'type' XML
    attribute values NONE / LIST_ITEM / MAP_ITEM."""
    NONE = 0
    LIST = 1
    MAP = 2
    TO_STRING = {NONE: "NONE", LIST: "LIST_ITEM", MAP: "MAP_ITEM"}
    FROM_STRING = {"NONE": NONE, "LIST_ITEM": LIST, "MAP_ITEM": MAP}
#===============================================================================
#===============================================================================
class ArCmdBufferType(object):
    """Transport buffer used to send a command; mirrors the 'buffer' XML
    attribute (default is ACK in the parsers below)."""
    NON_ACK = 0
    ACK = 1
    HIGH_PRIO = 2
    TO_STRING = {NON_ACK: "NON_ACK", ACK: "ACK", HIGH_PRIO: "HIGH_PRIO"}
    FROM_STRING = {"NON_ACK": NON_ACK, "ACK": ACK, "HIGH_PRIO": HIGH_PRIO}
#===============================================================================
#===============================================================================
class ArCmdTimeoutPolicy(object):
    """What to do when a command times out: drop it (POP) or resend (RETRY);
    mirrors the 'timeout' XML attribute (default POP)."""
    POP = 0
    RETRY = 1
    TO_STRING = {POP: "POP", RETRY: "RETRY"}
    FROM_STRING = {"POP": POP, "RETRY": RETRY}
#===============================================================================
#===============================================================================
class ArCmdContent(object):
    """Semantic kind of a message; mirrors the 'content' XML attribute
    (default UPDATE)."""
    UPDATE = 0
    NOTIFICATION = 1
    TO_STRING = {UPDATE: "UPDATE", NOTIFICATION: "NOTIFICATION"}
    FROM_STRING = {"UPDATE": UPDATE, "NOTIFICATION": NOTIFICATION}
#===============================================================================
#===============================================================================
class ArCmdDeprecation(object):
    """Boolean <-> string mapping for the 'deprecated' XML attribute."""
    TO_STRING = {True: "true", False: "false"}
    FROM_STRING = {"true": True, "false": False}
#===============================================================================
#===============================================================================
class ArArgType(object):
    """Wire types an argument may have; mirrors the 'type' XML attribute.
    ENUM, BITFIELD and MULTISETTING are later resolved to ArEnum,
    ArBitfield and ArMultiSetting objects by the parsers."""
    I8 = 0
    U8 = 1
    I16 = 2
    U16 = 3
    I32 = 4
    U32 = 5
    I64 = 6
    U64 = 7
    FLOAT = 8
    DOUBLE = 9
    STRING = 10
    ENUM = 11
    BITFIELD = 12
    MULTISETTING = 13
    TO_STRING = {I8: "i8", U8: "u8", I16: "i16", U16: "u16",
            I32: "i32", U32: "u32", I64: "i64", U64: "u64",
            FLOAT: "float", DOUBLE: "double", STRING: "string",
            ENUM: "enum", BITFIELD: "bitfield", MULTISETTING: "multisetting"}
    FROM_STRING = {"i8": I8, "u8": U8, "i16": I16, "u16": U16,
            "i32": I32, "u32": U32, "i64": I64, "u64": U64,
            "float": FLOAT, "double": DOUBLE, "string": STRING,
            "enum": ENUM, "bitfield": BITFIELD, "multisetting": MULTISETTING}
#===============================================================================
#===============================================================================
class ArParserCtx(object):
    """Aggregates every parsed project and feature, with id/name indexes."""

    def __init__(self):
        # Insertion-ordered lists plus two lookup tables each.
        self.projects = []
        self.projectsById = {}
        self.projectsByName = {}
        self.features = []
        self.featuresById = {}
        self.featuresByName = {}

    def walk_classes(self):
        """Yield (project, class) pairs over every parsed project."""
        for prj in self.projects:
            for cls in prj.classes:
                yield (prj, cls)

    def walk_cmds(self):
        """Yield (project, class, command) triples over every parsed project."""
        for prj in self.projects:
            for cls in prj.classes:
                for cmd in cls.cmds:
                    yield (prj, cls, cmd)

    def walk_msgs(self):
        """Yield (feature, message) pairs over every parsed feature."""
        for ftr in self.features:
            for msg in ftr.getMsgs():
                yield (ftr, msg)
#===============================================================================
#===============================================================================
class ArProject(object):
    """A legacy 'project' XML document: id, documentation and its classes."""

    def __init__(self, name, projectId, doc):
        self.name = name
        self.projectId = projectId
        self.doc = doc
        # Classes in declaration order plus id/name lookup tables.
        self.classes = []
        self.classesById = {}
        self.classesByName = {}

    def __repr__(self):
        fields = (self.name, self.projectId, repr(self.doc),
                  pprint.pformat(self.classes))
        return "{name='%s', projectId=%d, doc='%s', classes=%s}" % fields
#===============================================================================
#===============================================================================
class ArFeature(object):
    """A 'feature' (new-style message container), or a legacy project
    converted to one via from_project()."""
    def __init__(self, name, featureId, doc):
        self.name = name
        self.featureId = featureId
        self.doc = doc
        # Enums and multisettings declared by the feature.
        self.enums = []
        self.enumsByName = {}
        self.multisets = []
        self.multisetsByName = {}
        # Commands and events, with id/name indexes.
        self.cmds = []
        self.cmdsById = {} #only for real feature, empty for project
        self.cmdsByName = {} #only for real feature, empty for project
        self.evts = []
        self.evtsById = {}
        self.evtsByName = {}
        self.classes = None #only for project conversion
        self.classesById = {} #only for project conversion
        self.classesByName = {} #only for project conversion
    def getMsgs (self):
        """Return all messages (commands then events) as one list."""
        return self.cmds + self.evts
    def getMsgsById (self):
        """Return a merged id -> message dict (real features only)."""
        #only for feature
        return dict(self.cmdsById, **self.evtsById)
    def getMsgsByName (self):
        """Return a merged name -> message dict (real features only)."""
        #only for feature
        return dict(self.cmdsByName, **self.evtsByName)
    def __repr__(self):
        return ("{name='%s', featureId=%d, doc='%s', enums='%s', "
                "multisets='%s', cmds='%s', evts='%s'}" % (
                self.name,
                self.featureId,
                repr(self.doc),
                pprint.pformat(self.enums),
                pprint.pformat(self.multisets),
                pprint.pformat(self.cmds),
                pprint.pformat(self.evts)))
    @staticmethod
    def from_project(prj):
        """Convert a legacy ArProject into an equivalent ArFeature.

        Commands of classes whose name contains 'event' or 'state' become
        events; inline per-argument enums are promoted to feature-level
        ArEnum objects named '<class>_<Cmd>_<Arg>'.
        """
        ftrObj = ArFeature (prj.name, prj.projectId, prj.doc)
        ftrObj.classes = prj.classes
        ftrObj.classesById = prj.classesById
        ftrObj.classesByName = prj.classesByName
        for cl in prj.classes:
            for cmd in cl.cmds:
                msgId = cmd.cmdId
                msgName = cmd.name
                if "event" in cl.name.lower() or "state" in cl.name.lower():
                    msgObj = ArEvt(msgName, msgId, cmd.doc, cmd.listType,
                            cmd.bufferType, cmd.timeoutPolicy, cmd.content,
                            cmd.isDeprecated, ftrObj)
                else:
                    msgObj = ArCmd(msgName, msgId, cmd.doc, cmd.listType,
                            cmd.bufferType, cmd.timeoutPolicy, cmd.content,
                            cmd.isDeprecated, ftrObj)
                # Legacy MAP commands key on their first argument.
                if cmd.listType == ArCmdListType.MAP:
                    msgObj.mapKey = cmd.args[0]
                msgObj.cls = cl
                msgObj.args = cmd.args
                msgObj.argsByName = cmd.argsByName
                # Create enums
                for arg in msgObj.args:
                    if len(arg.enums) > 0:
                        enumName = cl.name + '_' +\
                                cmd.name[0].upper()+cmd.name[1:]+'_' +\
                                arg.name[0].upper()+arg.name[1:]
                        enumObj = ArEnum(enumName, arg.doc)
                        enumObj.msg = msgObj
                        for val in arg.enums:
                            eValObj = ArEnumValue(val.name, val.value, val.doc)
                            enumObj.values.append(eValObj)
                            enumObj.valuesByName[val.name] = eValObj
                        ftrObj.enums.append(enumObj)
                        ftrObj.enumsByName[enumName] = enumObj
                        # The argument now references the promoted enum.
                        arg.argType = enumObj
                        arg.doc = ''
                if isinstance(msgObj, ArCmd):
                    ftrObj.cmds.append(msgObj)
                else:
                    ftrObj.evts.append(msgObj)
        return ftrObj
#===============================================================================
#===============================================================================
class ArClass(object):
    """A command class inside a legacy project: groups related commands."""

    def __init__(self, name, classId, doc):
        self.name = name
        self.classId = classId
        self.doc = doc
        # Commands in declaration order plus id/name lookup tables.
        self.cmds = []
        self.cmdsById = {}
        self.cmdsByName = {}

    def __repr__(self):
        return "{name='%s', classId=%d, doc='%s', cmds=%s}" % (
            self.name, self.classId, repr(self.doc),
            pprint.pformat(self.cmds))
#===============================================================================
#===============================================================================
class ArMsg(object):
    """Base class for a command (ArCmd) or event (ArEvt) message."""
    def __init__(self, name, cmdId, doc, listType, bufferType, timeoutPolicy,
            content, isDeprecated, ftr):
        self.name = name
        self.cmdId = cmdId
        self.doc = doc
        self.listType = listType
        self.bufferType = bufferType
        self.timeoutPolicy = timeoutPolicy
        self.content = content
        # For MAP-typed messages: the ArArg used as key (set by the parser).
        self.mapKey = None
        self.args = []
        self.argsByName = {}
        self.cls = None #only for project conversion
        self.isDeprecated = isDeprecated
        self.ftr = ftr
    def __repr__(self):
        return ("{name='%s', cmdId=%d, doc='%s', listType='%s', "
                "bufferType='%s', timeoutPolicy='%s', content='%s', "
                "args=%s isDeprecated=%r}" % (
                self.name,
                self.cmdId,
                repr(self.doc),
                ArCmdListType.TO_STRING[self.listType],
                ArCmdBufferType.TO_STRING[self.bufferType],
                ArCmdTimeoutPolicy.TO_STRING[self.timeoutPolicy],
                ArCmdContent.TO_STRING[self.content],
                pprint.pformat(self.args),
                self.isDeprecated))
#===============================================================================
#===============================================================================
class ArCmd(ArMsg):
    """A command message (controller -> device); thin ArMsg specialization."""
    def __init__(self, name, cmdId, doc, listType, bufferType, timeoutPolicy,
            content, isDeprecated, ftr):
        ArMsg.__init__(self, name, cmdId, doc, listType, bufferType,
                timeoutPolicy, content, isDeprecated, ftr)
#===============================================================================
#===============================================================================
class ArEvt(ArMsg):
    """An event message (device -> controller); thin ArMsg specialization."""
    def __init__(self, name, cmdId, doc, listType, bufferType, timeoutPolicy,
            content, isDeprecated, ftr):
        ArMsg.__init__(self, name, cmdId, doc, listType, bufferType,
                timeoutPolicy, content, isDeprecated, ftr)
#===============================================================================
#===============================================================================
class ArComment(object):
    """Structured documentation attached to a message (<comment> node).

    'triggered' and 'result' are None when the XML omits those attributes.
    """

    def __init__(self, title, desc, support, triggered, result):
        self.title = title
        self.desc = desc
        self.support = support
        self.triggered = triggered
        self.result = result

    def __repr__(self):
        return ("{title='%s', desc=%s, support='%s', triggered='%s', "
                "result='%s'}" % (self.title, self.desc, self.support,
                                  self.triggered, self.result))
#===============================================================================
#===============================================================================
class ArArg(object):
    """An argument of a message: name, type and documentation."""
    def __init__(self, name, argType, doc):
        self.name = name
        # Either an ArArgType int constant, or a resolved ArEnum /
        # ArBitfield / ArMultiSetting object.
        self.argType = argType
        self.doc = doc
        # Inline enum values (legacy project files only).
        self.enums = []
        self.enumsByName = {}
    def __repr__(self):
        # NOTE(review): argType is normally an int constant or a resolved
        # object, yet this branch tests for str and indexes TO_STRING
        # (which is keyed by int) -- looks suspect; confirm intent.
        if isinstance(self.argType, str):
            argTypeRep = ArArgType.TO_STRING[self.argType]
        else:
            argTypeRep = pprint.pformat(self.argType)
        return ("{name='%s', argType='%s', doc='%s', enums=%s}" % (
                self.name,
                argTypeRep,
                repr(self.doc),
                pprint.pformat(self.enums)))
#===============================================================================
#===============================================================================
class ArMultiSetting(object):
    """A named bundle of settings referenced by 'multisetting' arguments."""

    def __init__(self, name, doc):
        self.name = name
        self.doc = doc
        self.links = []  # raw "feature.msg" link strings from the XML
        self.msgs = []   # resolved message objects (filled at finalize time)

    def __repr__(self):
        return "{name='%s', doc='%s', msgs=%s}" % (
            self.name, repr(self.doc), pprint.pformat(self.msgs))
#===============================================================================
#===============================================================================
class ArEnumValue(object):
    """A single enum entry: name, integer value and documentation.

    Instances order by their numeric value so max()/sorted() pick the
    highest-valued entry (used by ArEnum.getMaxBitfieldVal).
    """
    def __init__(self, name, value, doc):
        self.name = name
        self.doc = doc
        self.value = value
    def __cmp__(self, other):
        # Python 2 comparison protocol, kept for backward compatibility.
        # Bug fix: the builtin cmp() does not exist on Python 3, so use the
        # equivalent subtraction-of-booleans idiom instead.
        return (self.value > other.value) - (self.value < other.value)
    def __lt__(self, other):
        # Rich comparison so ordering (and max()) also works on Python 3,
        # where __cmp__ is ignored.
        return self.value < other.value
    def __repr__(self):
        return ("{name='%s', value=%d, doc='%s'}" % (
            self.name,
            self.value,
            repr(self.doc)))
#===============================================================================
#===============================================================================
class ArEnum(object):
    """An enum declared by a feature; also records bitfield usage."""
    def __init__(self, name, doc):
        self.name = name
        self.doc = doc
        self.values = []        # ArEnumValue entries in declaration order
        self.valuesByName = {}
        self.usedLikeBitfield = False
        self.msg = None #only for project conversion
    def getMaxBitfieldVal(self):
        """Return 2**max(value): the highest bit a bitfield of this enum sets.

        Bug fix: computed from the raw integer values instead of
        max(self.values), which relied on ArEnumValue ordering support
        (Python 2's __cmp__) and broke on Python 3.
        """
        return 2 ** max(v.value for v in self.values)
    def __repr__(self):
        return ("{name='%s', doc='%s', values='%s'}" % (
            self.name,
            repr(self.doc),
            pprint.pformat(self.values)))
#===============================================================================
#===============================================================================
class ArBitfield(object):
    """A bitfield argument type: an enum carried in an unsigned integer."""
    # Largest representable bit (2**(bits-1)) for each allowed carrier type;
    # used to reject enums whose highest bit would not fit.
    TYPE_TO_LENGTH = {ArArgType.U8:2**7, ArArgType.U16:2**15, ArArgType.U32:2**31, ArArgType.U64:2**63}
    def __init__(self, enum, btfType):
        self.enum = enum          # the ArEnum providing the bit names
        self.btfType = btfType    # carrier type (an ArArgType constant)
    def __repr__(self):
        return ("{enum='%s', type='%s'}" % (
            pprint.pformat(self.enum),
            pprint.pformat(self.btfType)))
#===============================================================================
#===============================================================================
def _get_node_content(node):
try:
content = node.childNodes[0].nodeValue.strip()
lines = [l.strip() for l in content.split('\n')]
return '\n'.join(lines)
except:
return ''
#===============================================================================
#===============================================================================
def _parse_project_node(filePath, projectNode, projectObj):
    """Parse every <class> child of a legacy <project> node into *projectObj*.

    Raises ArParserError on duplicate or out-of-range class ids/names.
    """
    for classNode in projectNode.getElementsByTagName("class"):
        className = classNode.getAttribute("name")
        classId = int(classNode.getAttribute("id"))
        classDoc = _get_node_content(classNode).strip()
        # Check class id/name
        if classId in projectObj.classesById:
            raise ArParserError("%s: Duplicate class id %d" % (
                filePath, classId))
        if className in projectObj.classesByName:
            raise ArParserError("%s: Duplicate class name '%s'" % (
                filePath, className))
        if classId < _MIN_CLASS_ID or classId > _MAX_CLASS_ID:
            raise ArParserError("%s: Invalid class id %d" % (
                filePath, classId))
        # Create class object
        classObj = ArClass(className, classId, classDoc)
        projectObj.classes.append(classObj)
        projectObj.classesById[classId] = classObj
        projectObj.classesByName[className] = classObj
        # Parse class node
        _parse_class_node(filePath, classNode, classObj)
#===============================================================================
#===============================================================================
def _parse_feature_node(ctx, filePath, featureNode, featureObj):
    """Parse a <feature> node: its enums, then multisettings, then messages.

    Enums must be parsed first so later argument types can resolve them.
    Raises ArParserError on duplicated enum names.
    """
    for enumsNode in featureNode.getElementsByTagName("enums"):
        for enumNode in enumsNode.getElementsByTagName("enum"):
            enumName = enumNode.getAttribute("name")
            enumDoc = _get_node_content(enumNode).strip()
            # Check enum name
            if enumName in featureObj.enumsByName:
                raise ArParserError("%s: Duplicate enum name '%s'" % (
                    filePath, enumName))
            # Create enum object
            enumObj = ArEnum(enumName, enumDoc)
            featureObj.enums.append(enumObj)
            featureObj.enumsByName[enumName] = enumObj
            # Parse enum node
            _parse_enum_node(filePath, enumNode, enumObj)
    _parse_feature_node_multisets(ctx, filePath, featureNode, featureObj)
    _parse_feature_node_msgs(ctx, filePath, featureNode, featureObj)
#===============================================================================
#===============================================================================
def _parse_feature_node_multisets(ctx, filePath, featureNode, featureObj):
    """Parse <multisetting> declarations of a feature into *featureObj*.

    Raises ArParserError on duplicated multisetting names.
    """
    for multisetsNode in featureNode.getElementsByTagName("multisettings"):
        for multisetNode in multisetsNode.getElementsByTagName("multisetting"):
            multisetName = multisetNode.getAttribute("name")
            multisetDoc = _get_node_content(multisetNode).strip()
            # Check multiset name
            if multisetName in featureObj.multisetsByName:
                raise ArParserError("%s: Duplicate multiset name '%s'" % (
                    filePath, multisetName))
            # Create multiset object
            multisetObj = ArMultiSetting(multisetName, multisetDoc)
            featureObj.multisets.append(multisetObj)
            featureObj.multisetsByName[multisetName] = multisetObj
            # Parse multiset node
            _parse_multiset_node(filePath, multisetNode, multisetObj)
#===============================================================================
#===============================================================================
def _parse_multiset_node(filePath, multisetNode, multisetObj):
for memberNode in multisetNode.getElementsByTagName("member"):
multisetObj.links.append(memberNode.getAttribute("link"))
#===============================================================================
#===============================================================================
def _parse_feature_node_msgs(ctx, filePath, featureNode, featureObj):
    """Parse every <cmd> and <evt> child of a feature's <msgs> nodes.

    Validates ids/names, decodes the optional type/buffer/timeout/content/
    deprecated attributes, parses arguments, resolves the MAP key, and
    registers each message on *featureObj*. Raises ArParserError on any
    invalid or duplicated declaration.
    """
    for msgsNode in featureNode.getElementsByTagName("msgs"):
        for msgNode in msgsNode.getElementsByTagName("cmd") + \
                msgsNode.getElementsByTagName("evt"):
            msgName = msgNode.getAttribute("name")
            msgId = int(msgNode.getAttribute("id"))
            msgDoc = _get_node_content(msgNode).strip()
            if msgId < _MIN_CMD_ID or msgId > _MAX_CMD_ID:
                raise ArParserError("%s: Invalid msg id %d" % (
                    filePath, msgId))
            # Check msg name
            if msgName in featureObj.getMsgsByName():
                raise ArParserError("%s: Duplicate message name '%s'" % (
                    filePath, msgName))
            # Check msg id
            if msgId in featureObj.getMsgsById():
                raise ArParserError("%s: Duplicate message id '%s'" % (
                    filePath, msgName))
            # Get type ("LIST_ITEM" or "MAP_ITEM:<keyArgName>")
            msgType = ArCmdListType.NONE
            mapKey = None
            if msgNode.hasAttribute("type"):
                attr, _, mapKey = msgNode.getAttribute("type").partition(':')
                if attr not in ArCmdListType.FROM_STRING:
                    raise ArParserError("%s: Invalid list type '%s'" % (
                        filePath, attr))
                msgType = ArCmdListType.FROM_STRING[attr]
            # Get buffer type
            msgBufferType = ArCmdBufferType.ACK
            if msgNode.hasAttribute("buffer"):
                attr = msgNode.getAttribute("buffer")
                if attr not in ArCmdBufferType.FROM_STRING:
                    raise ArParserError("%s: Invalid buffer type '%s'" % (
                        filePath, attr))
                msgBufferType = ArCmdBufferType.FROM_STRING[attr]
            # Get timeout policy
            msgTimeoutPolicy = ArCmdTimeoutPolicy.POP
            if msgNode.hasAttribute("timeout"):
                attr = msgNode.getAttribute("timeout")
                if attr not in ArCmdTimeoutPolicy.FROM_STRING:
                    raise ArParserError("%s: Invalid timout policy '%s'" % (
                        filePath, attr))
                msgTimeoutPolicy = ArCmdTimeoutPolicy.FROM_STRING[attr]
            # Get Content
            msgContent = ArCmdContent.UPDATE
            if msgNode.hasAttribute("content"):
                attr = msgNode.getAttribute("content")
                if attr not in ArCmdContent.FROM_STRING:
                    raise ArParserError("%s: Invalid notification '%s'" % (
                        filePath, attr))
                msgContent = ArCmdContent.FROM_STRING[attr]
            # Get if the message is deprecated.
            # Bug fix: the original tested 'attr not in FROM_STRING' before
            # indexing FROM_STRING[attr], so valid values ("true"/"false")
            # were silently ignored and invalid ones raised KeyError.
            mgsIsDeprecated = False
            if msgNode.hasAttribute("deprecated"):
                attr = msgNode.getAttribute("deprecated")
                if attr in ArCmdDeprecation.FROM_STRING:
                    mgsIsDeprecated = ArCmdDeprecation.FROM_STRING[attr]
            # Create msg object
            if msgNode in msgsNode.getElementsByTagName("cmd"):
                #is command
                msgObj = ArCmd (msgName, msgId, msgDoc,
                        msgType, msgBufferType, msgTimeoutPolicy, msgContent,
                        mgsIsDeprecated, featureObj)
            else:
                #is event
                msgObj = ArEvt(msgName, msgId, msgDoc,
                        msgType, msgBufferType, msgTimeoutPolicy, msgContent,
                        mgsIsDeprecated, featureObj)
            # Parse msg node
            _parse_msg_node(ctx, filePath, featureObj, msgNode, msgObj)
            # Find map key (must name one of the parsed arguments)
            if mapKey :
                if mapKey not in msgObj.argsByName:
                    raise ArParserError("%s: Invalid Map Key '%s'" % (
                        filePath, mapKey))
                msgObj.mapKey = msgObj.argsByName[mapKey]
            if isinstance(msgObj, ArCmd):
                featureObj.cmds.append(msgObj)
                featureObj.cmdsById[msgId] = msgObj
                featureObj.cmdsByName[msgName] = msgObj
            else:
                featureObj.evts.append(msgObj)
                featureObj.evtsById[msgId] = msgObj
                featureObj.evtsByName[msgName] = msgObj
#===============================================================================
#===============================================================================
def _parse_class_node(filePath, classNode, classObj):
    """Parse every <cmd> child of a legacy <class> node into *classObj*.

    Decodes the optional type/buffer/timeout/content/deprecated attributes
    and delegates argument parsing to _parse_prj_cmd_node. Raises
    ArParserError on invalid or duplicated declarations.
    """
    for cmdNode in classNode.getElementsByTagName("cmd"):
        cmdName = cmdNode.getAttribute("name")
        cmdId = int(cmdNode.getAttribute("id"))
        cmdDoc = _get_cmt_node(cmdNode)
        if cmdId < _MIN_CMD_ID or cmdId > _MAX_CMD_ID:
            raise ArParserError("%s: Invalid cmd id %d" % (
                filePath, cmdId))
        # Get list type
        cmdListType = ArCmdListType.NONE
        if cmdNode.hasAttribute("type"):
            attr = cmdNode.getAttribute("type")
            if attr not in ArCmdListType.FROM_STRING:
                raise ArParserError("%s: Invalid list type '%s'" % (
                    filePath, attr))
            cmdListType = ArCmdListType.FROM_STRING[attr]
        # Get buffer type
        cmdBufferType = ArCmdBufferType.ACK
        if cmdNode.hasAttribute("buffer"):
            attr = cmdNode.getAttribute("buffer")
            if attr not in ArCmdBufferType.FROM_STRING:
                raise ArParserError("%s: Invalid buffer type '%s'" % (
                    filePath, attr))
            cmdBufferType = ArCmdBufferType.FROM_STRING[attr]
        # Get timeout policy
        cmdTimeoutPolicy = ArCmdTimeoutPolicy.POP
        if cmdNode.hasAttribute("timeout"):
            attr = cmdNode.getAttribute("timeout")
            if attr not in ArCmdTimeoutPolicy.FROM_STRING:
                raise ArParserError("%s: Invalid timout policy '%s'" % (
                    filePath, attr))
            cmdTimeoutPolicy = ArCmdTimeoutPolicy.FROM_STRING[attr]
        # Check cmd name
        if cmdName in classObj.cmdsByName:
            raise ArParserError("%s: Duplicate cmd name '%s'" % (
                filePath, cmdName))
        # Get cmd Content
        cmdContent = ArCmdContent.UPDATE
        if cmdNode.hasAttribute("content"):
            attr = cmdNode.getAttribute("content")
            if attr not in ArCmdContent.FROM_STRING:
                raise ArParserError("%s: Invalid notification '%s'" % (
                    filePath, attr))
            cmdContent = ArCmdContent.FROM_STRING[attr]
        # Get if the message is deprecated
        mgsIsDeprecated = False
        if cmdNode.hasAttribute("deprecated"):
            attr = cmdNode.getAttribute("deprecated")
            if attr == "true":
                mgsIsDeprecated = True
        # Create cmd object (legacy commands have no owning feature yet)
        cmdObj = ArCmd(cmdName, cmdId, cmdDoc, cmdListType, cmdBufferType,
                cmdTimeoutPolicy, cmdContent, mgsIsDeprecated, None)
        cmdObj.cls = classObj
        classObj.cmds.append(cmdObj)
        classObj.cmdsById[cmdId] = cmdObj
        classObj.cmdsByName[cmdName] = cmdObj
        # Parse cmd node
        _parse_prj_cmd_node(filePath, cmdNode, cmdObj)
#===============================================================================
#===============================================================================
def _parse_prj_cmd_node(filePath, cmdNode, cmdObj):
    """Parse every <arg> child of a legacy project <cmd> into *cmdObj*.

    Legacy argument types are plain ArArgType strings (no enum/bitfield
    resolution here). Raises ArParserError on invalid types or duplicated
    argument names.
    """
    for argNode in cmdNode.getElementsByTagName("arg"):
        argName = argNode.getAttribute("name")
        argDoc = _get_node_content(argNode).strip()
        # Arg type
        attr = argNode.getAttribute("type")
        if attr not in ArArgType.FROM_STRING:
            raise ArParserError("%s: Invalid arg type '%s'" % (
                filePath, attr))
        argType = ArArgType.FROM_STRING[attr]
        # Check arg name
        if argName in cmdObj.argsByName:
            raise ArParserError("%s: Duplicate arg name '%s'" % (
                filePath, argName))
        # Create arg object
        argObj = ArArg(argName, argType, argDoc)
        cmdObj.args.append(argObj)
        cmdObj.argsByName[argName] = argObj
        # Parse arg node
        _parse_arg_node(filePath, argNode, argObj)
def _fmt_cmt_node(raw_cmt):
one_line = ' '.join(raw_cmt.split())
lines = [l.strip() for l in one_line.split(r'\n')]
res = '\n'.join(lines)
return res
def _get_cmt_node(msgNode):
    """Build an ArComment for a message node.

    Prefers a structured <comment> child (title/desc/support plus optional
    triggered/result attributes); otherwise falls back to the node's raw
    text content, using its first line as the title.

    NOTE(review): the fallback indexes splitlines()[0], which raises
    IndexError when the node text is empty -- presumably legacy files
    always carry a comment; confirm.
    """
    if msgNode.getElementsByTagName("comment"):
        commentNode = msgNode.getElementsByTagName("comment")[0]
        cmtTitle = commentNode.getAttribute("title")
        cmtSupport = commentNode.getAttribute("support")
        cmtDesc = _fmt_cmt_node(commentNode.getAttribute("desc"))
        if commentNode.hasAttribute("triggered"):
            cmtTriggered = _fmt_cmt_node(commentNode.getAttribute("triggered"))
        else:
            cmtTriggered = None
        if commentNode.hasAttribute("result"):
            cmtResult = _fmt_cmt_node(commentNode.getAttribute("result"))
        else:
            cmtResult = None
        # Create comment object
        return ArComment(cmtTitle, cmtDesc, cmtSupport,
                cmtTriggered, cmtResult)
    else:
        oldComment = _get_node_content(msgNode)
        return ArComment(oldComment.splitlines()[0], oldComment, None,
                None, None)
#===============================================================================
#===============================================================================
def _parse_msg_node(ctx, filePath, ftr, msgNode, msgObj):
    """Fill *msgObj*'s documentation and arguments from its <cmd>/<evt> node.

    Documentation comes from a structured <comment> child or, for legacy
    files, from the node's raw text content.
    """
    # The comment-extraction logic previously inlined here was an exact
    # duplicate of _get_cmt_node (structured <comment> child with optional
    # triggered/result attributes, else raw-text fallback); reuse it.
    msgObj.doc = _get_cmt_node(msgNode)
    _parse_msg_node_args(ctx, filePath, ftr, msgNode, msgObj)
#===============================================================================
#===============================================================================
def _parse_msg_node_args(ctx, filePath, ftr, msgNode, msgObj):
    """Parse every <arg> child of a feature message into *msgObj*.

    Resolves enum / bitfield / multisetting argument types against the
    owning feature first, then against the shared 'generic' feature.
    Raises ArParserError on unknown types, invalid bitfield carriers,
    too-small bitfield lengths or duplicated argument names.
    """
    for argNode in msgNode.getElementsByTagName("arg"):
        argName = argNode.getAttribute("name")
        argDoc = _get_node_content(argNode).strip()
        # Get type attrs: "base[:extra1[:extra2]]"
        attr1, _, flw = argNode.getAttribute("type").partition(':')
        attr2, _, attr3 = flw.partition(':')
        # Check arg type
        if attr1 not in ArArgType.FROM_STRING:
            raise ArParserError("%s: Invalid arg type '%s'" % (
                filePath, attr1))
        if ArArgType.FROM_STRING[attr1] == ArArgType.ENUM:
            # Find Enum (feature-local first, then the 'generic' feature)
            if attr2 not in ftr.enumsByName and \
                    (_FTR_GEN not in ctx.featuresByName or \
                    attr2 not in ctx.featuresByName[_FTR_GEN].enumsByName):
                raise ArParserError("%s: Invalid enum arg type '%s'" % (
                    filePath, attr2))
            if attr2 in ftr.enumsByName:
                argType = ftr.enumsByName[attr2]
            else:
                argType = ctx.featuresByName[_FTR_GEN].enumsByName[attr2]
        elif ArArgType.FROM_STRING[attr1] == ArArgType.BITFIELD:
            # Find Enum
            if attr3 not in ftr.enumsByName and \
                    (_FTR_GEN not in ctx.featuresByName or \
                    attr3 not in ctx.featuresByName[_FTR_GEN].enumsByName):
                raise ArParserError("%s: Invalid bitfield enum arg type '%s'"
                        % (filePath, attr3))
            # Check bitfield carrier type.
            # Bug fix: the original used 'and' with an inverted first clause
            # ('attr2 not in FROM_STRING and FROM_STRING[attr2] in ...'),
            # which raised KeyError for unknown types and never rejected
            # signed carriers; the carrier must be one of u8/u16/u32/u64.
            if attr2 not in ArArgType.FROM_STRING or \
                    ArArgType.FROM_STRING[attr2] not in \
                    ArBitfield.TYPE_TO_LENGTH:
                raise ArParserError("%s: Invalid bitfield enum arg length '%s'"
                        % (filePath, attr2))
            if attr3 in ftr.enumsByName:
                btfEnum = ftr.enumsByName[attr3]
            else:
                btfEnum = ctx.featuresByName[_FTR_GEN].enumsByName[attr3]
            btfType = ArArgType.FROM_STRING[attr2]
            # Check Compatibility between Enum max value and bitfield length
            if ArBitfield.TYPE_TO_LENGTH[btfType] < btfEnum.getMaxBitfieldVal():
                raise ArParserError("%s: Too Small bitfield length '%s.%s'"
                        % (filePath, msgObj.name, argName))
            argType = ArBitfield(btfEnum, btfType)
            btfEnum.usedLikeBitfield = True
        elif ArArgType.FROM_STRING[attr1] == ArArgType.MULTISETTING:
            # Find multi setting
            if attr2 not in ftr.multisetsByName and \
                    (_FTR_GEN not in ctx.featuresByName or \
                    attr2 not in ctx.featuresByName[_FTR_GEN].multisetsByName):
                raise ArParserError("%s: Invalid multisetting arg type '%s'"
                        % (filePath, attr2))
            if attr2 in ftr.multisetsByName:
                argType = ftr.multisetsByName[attr2]
            else:
                argType = ctx.featuresByName[_FTR_GEN].multisetsByName[attr2]
        else:
            argType = ArArgType.FROM_STRING[attr1]
        # Check arg name
        if argName in msgObj.argsByName:
            raise ArParserError("%s: Duplicate arg name '%s'" % (
                filePath, argName))
        # Create arg object
        argObj = ArArg(argName, argType, argDoc)
        msgObj.args.append(argObj)
        msgObj.argsByName[argName] = argObj
        # Parse arg node
        _parse_arg_node(filePath, argNode, argObj)
#===============================================================================
#===============================================================================
def _parse_arg_node(filePath, argNode, argObj):
    """Parse the inline <enum> children of an <arg> node into *argObj*.

    Inline enum values are implicit and sequential, starting at 0, in
    document order (legacy project files only). Raises ArParserError on
    duplicated enum names.
    """
    for value, enumNode in enumerate(argNode.getElementsByTagName("enum")):
        enumName = enumNode.getAttribute("name")
        enumDoc = _get_node_content(enumNode).strip()
        # Reject duplicated enum value names inside one argument.
        if enumName in argObj.enumsByName:
            raise ArParserError("%s: Duplicate enum name '%s'" % (
                filePath, enumName))
        enumObj = ArEnumValue(enumName, value, enumDoc)
        argObj.enums.append(enumObj)
        argObj.enumsByName[enumName] = enumObj
#===============================================================================
#===============================================================================
def _parse_enum_node(filePath, enumNode, enumObj):
    """Parse the <value> children of a feature <enum> into *enumObj*.

    A value may carry an explicit 'val' attribute; otherwise it takes the
    successor of the previous value, starting at 0 (C-enum semantics).
    Raises ArParserError on duplicated value names.
    """
    nextValue = 0
    for eValNode in enumNode.getElementsByTagName("value"):
        eValName = eValNode.getAttribute("name")
        eValDoc = _get_node_content(eValNode).strip()
        if eValNode.hasAttribute("val"):
            eValVal = int(eValNode.getAttribute("val"))
        else:
            eValVal = nextValue
        # Implicit numbering continues after the last value, explicit or
        # not. (A redundant 'nextValue += 1' in the original was dead:
        # it was unconditionally overwritten by this assignment.)
        nextValue = eValVal + 1
        # Check enum value name
        if eValName in enumObj.valuesByName:
            raise ArParserError("%s: Duplicate enum value name '%s'" % (
                filePath, eValName))
        # Create enum value object
        eValObj = ArEnumValue(eValName, eValVal, eValDoc)
        enumObj.values.append(eValObj)
        enumObj.valuesByName[eValName] = eValObj
#===============================================================================
#===============================================================================
def parse_prj_xml(ctx, filePath):
    """Parse a legacy project XML file and register it in *ctx*.

    The project is validated (unique, in-range id; unique name), parsed,
    and additionally converted to an ArFeature so downstream code can
    treat projects and features uniformly. Raises ArParserError on any
    load or validation failure.
    """
    # Parse project xml file
    try:
        xmlDom = xml.dom.minidom.parse(filePath)
    except Exception as ex:
        raise ArParserError("Error while loading '%s': %s" % (
            filePath, str(ex)))
    # Get project node
    projectNode = xmlDom.documentElement
    if projectNode.tagName != "project":
        raise ArParserError("%s: Bad root element: '%s'" % (
            filePath, projectNode.tagName))
    projectName = projectNode.getAttribute("name")
    projectId = int(projectNode.getAttribute("id"))
    projectDoc = _get_node_content(projectNode).strip()
    # Check project id/name
    if projectId in ctx.projectsById:
        raise ArParserError("%s: Duplicate project id %d" % (
            filePath, projectId))
    if projectId < _MIN_PROJECT_ID or projectId > _MAX_PROJECT_ID:
        raise ArParserError("%s: Invalid project id %d" % (
            filePath, projectId))
    if projectName in ctx.projectsByName:
        raise ArParserError("%s: Duplicate project name '%s'" % (
            filePath, projectName))
    # Create project object
    projectObj = ArProject(projectName, projectId, projectDoc)
    ctx.projects.append(projectObj)
    ctx.projectsById[projectId] = projectObj
    ctx.projectsByName[projectName] = projectObj
    # Parse project node
    _parse_project_node(filePath, projectNode, projectObj)
    # Convert project to feature object
    featureObj = ArFeature.from_project(projectObj)
    ctx.features.append(featureObj)
    ctx.featuresById[featureObj.featureId] = featureObj
    ctx.featuresByName[featureObj.name] = featureObj
#===============================================================================
#===============================================================================
def parse_ftr_xml(ctx, filePath):
    """Parse a feature xml file and register it in the parser context.

    Validates the feature id (unique, within the allowed range) and the
    feature name (unique), then records the feature in every ctx index and
    parses its contents.
    """
    try:
        dom = xml.dom.minidom.parse(filePath)
    except Exception as ex:
        raise ArParserError("Error while loading '%s': %s" % (
            filePath, str(ex)))
    root = dom.documentElement
    if root.tagName != "feature":
        raise ArParserError("%s: Bad root element: '%s'" % (
            filePath, root.tagName))
    ftrName = root.getAttribute("name")
    ftrId = int(root.getAttribute("id"))
    ftrDoc = _get_node_content(root).strip()
    # Reject duplicated or out-of-range identifiers before registering anything
    if ftrId in ctx.featuresById:
        raise ArParserError("%s: Duplicate feature id %d" % (
            filePath, ftrId))
    if not _MIN_FEATURE_ID <= ftrId <= _MAX_FEATURE_ID:
        raise ArParserError("%s: Invalid feature id %d" % (
            filePath, ftrId))
    if ftrName in ctx.featuresByName:
        raise ArParserError("%s: Duplicate feature name '%s'" % (
            filePath, ftrName))
    # Register the feature in every context index
    featureObj = ArFeature(ftrName, ftrId, ftrDoc)
    ctx.features.append(featureObj)
    ctx.featuresById[ftrId] = featureObj
    ctx.featuresByName[ftrName] = featureObj
    # Parse the feature contents
    _parse_feature_node(ctx, filePath, root, featureObj)
#===============================================================================
#===============================================================================
def _link_to_msg(ctx, link):
    """Resolve a multiset link string to a command/event message object.

    Supported link forms:
      "<feature>.<msg>"          -> looked up in the feature's message table
      "<feature>.<class>.<msg>"  -> project-style lookup by class name + command name

    Returns None when the link is malformed, the feature is unknown, or no
    matching class/command pair is found.
    """
    parts = link.split(".")
    if len(parts) < 2:
        return None
    if not parts[0] in ctx.featuresByName:
        return None
    ftr = ctx.featuresByName[parts[0]]
    if len(parts) == 2:
        # NOTE(review): unlike the other failure paths this does not return
        # None on an unknown message name -- if parts[1] is missing from
        # getMsgsByName this raises a KeyError. Confirm that is intended.
        return ftr.getMsgsByName[parts[1]]
    # Project part: match by class name and command name over both commands
    # and events of the feature.
    clsName = parts[1]
    cmdName = parts[2]
    for cmd in ftr.cmds + ftr.evts:
        if cmd.name == cmdName and cmd.cls and cmd.cls.name == clsName:
            return cmd
    return None
#===============================================================================
#===============================================================================
def finalize_ftrs(ctx):
    """Resolve every multiset link of every feature into message objects.

    Must run after all xml files have been parsed so that cross-feature
    links can be resolved.

    :param ctx: the parser context holding the parsed features.
    :raises ArParserError: when a multiset link does not resolve to a message.
    """
    for ftr in ctx.features:
        # Finalize multi settings
        for multiset in ftr.multisets:
            for link in multiset.links:
                msg = _link_to_msg(ctx, link)
                if not msg:
                    # BUG FIX: the original formatted this message with an
                    # undefined name 'filePath' (no such variable in this
                    # scope), so every bad link raised a NameError instead
                    # of the intended ArParserError. Report the feature
                    # name instead.
                    raise ArParserError("%s: Bad multisetting link '%s'" % (
                        ftr.name, link))
                multiset.msgs.append(msg)
#===============================================================================
#===============================================================================
def parse_xml(ctx, filePath):
    """Parse an arsdk xml file, dispatching on its root element.

    Files whose root element is 'feature' are handled by parse_ftr_xml,
    files whose root is 'project' by parse_prj_xml; any other root element
    is silently ignored.
    """
    try:
        dom = xml.dom.minidom.parse(filePath)
    except Exception as ex:
        raise ArParserError("Error while loading '%s': %s" % (
            filePath, str(ex)))
    # Dispatch table keyed by root tag name
    parsers = {
        "feature": parse_ftr_xml,
        "project": parse_prj_xml,
    }
    parser = parsers.get(dom.documentElement.tagName)
    if parser is not None:
        parser(ctx, filePath)
#===============================================================================
#===============================================================================
def main():
    """Parse every arsdk xml file found in the 'xml' dir next to this script."""
    ctx = ArParserCtx()
    xmlDirPath = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "xml")
    # generic.xml is always parsed first
    parse_xml(ctx, os.path.join(xmlDirPath, "generic.xml"))
    for entry in sorted(os.listdir(xmlDirPath)):
        if entry.endswith(".xml") and entry != "generic.xml":
            parse_xml(ctx, os.path.join(xmlDirPath, entry))
    # Resolve cross-feature references (multisettings)
    finalize_ftrs(ctx)
#===============================================================================
#===============================================================================
if __name__ == "__main__":
main()
|
2016-Capstone/PythonController
|
arsdk-xml/arsdkparser.py
|
Python
|
bsd-3-clause
| 41,476
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pogoprotos/networking/requests/messages/use_item_move_reroll_message.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from pogoprotos.inventory.item import item_id_pb2 as pogoprotos_dot_inventory_dot_item_dot_item__id__pb2
# Build this module's file descriptor from the serialized .proto definition.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='pogoprotos/networking/requests/messages/use_item_move_reroll_message.proto',
  package='pogoprotos.networking.requests.messages',
  syntax='proto3',
  serialized_pb=_b('\nJpogoprotos/networking/requests/messages/use_item_move_reroll_message.proto\x12\'pogoprotos.networking.requests.messages\x1a\'pogoprotos/inventory/item/item_id.proto\"b\n\x18UseItemMoveRerollMessage\x12\x32\n\x07item_id\x18\x01 \x01(\x0e\x32!.pogoprotos.inventory.item.ItemId\x12\x12\n\npokemon_id\x18\x02 \x01(\x06\x62\x06proto3')
  ,
  dependencies=[pogoprotos_dot_inventory_dot_item_dot_item__id__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Message descriptor for UseItemMoveRerollMessage: field 1 'item_id' (typed
# by the imported ItemId enum, see the fix-up below) and field 2 'pokemon_id'.
_USEITEMMOVEREROLLMESSAGE = _descriptor.Descriptor(
  name='UseItemMoveRerollMessage',
  full_name='pogoprotos.networking.requests.messages.UseItemMoveRerollMessage',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='item_id', full_name='pogoprotos.networking.requests.messages.UseItemMoveRerollMessage.item_id', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pokemon_id', full_name='pogoprotos.networking.requests.messages.UseItemMoveRerollMessage.pokemon_id', index=1,
      number=2, type=6, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=160,
  serialized_end=258,
)
# Link the enum-typed field to the ItemId enum descriptor, then register the
# message type with the file descriptor and the symbol database.
_USEITEMMOVEREROLLMESSAGE.fields_by_name['item_id'].enum_type = pogoprotos_dot_inventory_dot_item_dot_item__id__pb2._ITEMID
DESCRIPTOR.message_types_by_name['UseItemMoveRerollMessage'] = _USEITEMMOVEREROLLMESSAGE
# Concrete message class generated through the reflection machinery.
UseItemMoveRerollMessage = _reflection.GeneratedProtocolMessageType('UseItemMoveRerollMessage', (_message.Message,), dict(
  DESCRIPTOR = _USEITEMMOVEREROLLMESSAGE,
  __module__ = 'pogoprotos.networking.requests.messages.use_item_move_reroll_message_pb2'
  # @@protoc_insertion_point(class_scope:pogoprotos.networking.requests.messages.UseItemMoveRerollMessage)
  ))
_sym_db.RegisterMessage(UseItemMoveRerollMessage)
# @@protoc_insertion_point(module_scope)
|
bellowsj/aiopogo
|
aiopogo/pogoprotos/networking/requests/messages/use_item_move_reroll_message_pb2.py
|
Python
|
mit
| 3,241
|
#!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
# ============================================================================
# Project Name : iTrade
# Module Name : itrade_quotes_hong-kong.py
#
# Description: List of quotes from http://www.hkex.com.hk/
# The Original Code is iTrade code (http://itrade.sourceforge.net).
#
# The Initial Developer of the Original Code is Gilles Dumortier.
# New code for HKEX is from Michel Legrand.
# Portions created by the Initial Developer are Copyright (C) 2004-2007 the
# Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see http://www.gnu.org/licenses/gpl.html
#
# History Rev Description
# 2007-05-15 dgil Wrote it from scratch
# ============================================================================
# ============================================================================
# Imports
# ============================================================================
# python system
from __future__ import print_function
import logging
import string
# iTrade system
import itrade_config
import itrade_excel
from itrade_logging import setLevel, info
from itrade_defs import QList, QTag
from itrade_ext import gListSymbolRegistry
from itrade_connection import ITradeConnection
# ============================================================================
# Import_ListOfQuotes_HKG()
# ============================================================================
def Import_ListOfQuotes_HKG(quotes,market='HONG KONG EXCHANGE',dlg=None,x=0):
    """Download the HKEX instrument lists (.xls) and register the symbols.

    :param quotes: quotes registry; each imported symbol is added through
        quotes.addQuote().
    :param market: only 'HONG KONG EXCHANGE' is supported.
    :param dlg: unused here (kept for the registry callback signature).
    :param x: unused here (kept for the registry callback signature).
    :return: True on success, False on unsupported market or download error.
    """
    if itrade_config.verbose:
        print('Update %s list of symbols' % market)
    connection = ITradeConnection(cookies=None,
                                  proxy=itrade_config.proxyHostname,
                                  proxyAuth=itrade_config.proxyAuthentication)
    import xlrd
    if market == 'HONG KONG EXCHANGE':
        # Two urls for download list of HONG KONG EXCHANGE
        urls = ['https://www.hkex.com.hk/eng/market/sec_tradinfo/isincode/documents/isino.xls',
                'https://www.hkex.com.hk/eng/market/sec_tradinfo/isincode/documents/isinsehk.xls']
        n = 0
    else:
        # Unknown market: nothing we can import.
        return False
    # NOTE: a dead inner helper (splitLines) was removed here -- it was never
    # called and relied on string.split(), which no longer exists in Python 3.
    for url in urls:
        info('Import_ListOfQuotes_HKG_%s:connect to %s' % (market,url))
        try:
            data = connection.getDataFromUrl(url)
        except Exception:
            # FIX: was a bare 'except:' that also swallowed KeyboardInterrupt
            # and SystemExit.
            info('Import_ListOfQuotes_HKG_%s:unable to connect :-(' % market)
            return False
        # Parse the downloaded workbook; the symbols are on the first sheet.
        book = itrade_excel.open_excel(file=None, content=data)
        sh = book.sheet_by_index(0)
        for line in range(sh.nrows):
            if sh.cell_type(line, 1) == xlrd.XL_CELL_EMPTY:
                continue
            # Keep only the tradable share/right categories.
            if sh.cell_value(line, 3) not in ('ORD SH', 'PREF SH', 'TRT', 'RTS'):
                continue
            isin = sh.cell_value(line, 1)
            ticker = sh.cell_value(line, 2)
            # FIX: isinstance() instead of type(...) == float.
            if isinstance(ticker, float):
                ticker = '%s' % int(ticker)
            # HKEX tickers are zero-padded to four characters.
            if 1 <= len(ticker) <= 3:
                ticker = ticker.zfill(4)
            name = sh.cell_value(line, 0)
            if ticker == '0657':
                # Workaround for a badly encoded name in the source file.
                name = 'G-VISION INTERNATIONAL (HOLDINGS) LTD'
            name = name.decode().encode('utf8')
            name = name.replace(',',' ')
            quotes.addQuote(isin = isin,name = name,ticker = ticker,market = 'HONG KONG EXCHANGE',currency = 'HKD',place = 'HKG',country = 'HK')
            n = n + 1
    if itrade_config.verbose:
        print('Imported %d lines from %s ' % (n,market))
    return True
# ============================================================================
# Export me
# ============================================================================
# Register the importer only when Excel support (xlrd) is available, since
# the HKEX symbol lists are distributed as .xls workbooks.
if itrade_excel.canReadExcel:
    gListSymbolRegistry.register('HONG KONG EXCHANGE','HKG',QList.any,QTag.list,Import_ListOfQuotes_HKG)
# ============================================================================
# Test ME
# ============================================================================
if __name__ == '__main__':
    setLevel(logging.INFO)
    from itrade_quotes import quotes
    if itrade_excel.canReadExcel:
        # One-shot import, then persist the refreshed list of quotes.
        Import_ListOfQuotes_HKG(quotes,'HKG')
        quotes.saveListOfQuotes()
    else:
        print('XLRD package not installed :-(')
# ============================================================================
# That's all folks !
# ============================================================================
|
eternallyBaffled/itrade
|
ext/itrade_quotes_hong-kong.py
|
Python
|
gpl-3.0
| 5,840
|
from suds.client import Client
from suds.xsd.doctor import ImportDoctor, Import
import difflib
class Status(object):
    """Execution stage of an Ideone submission (see Ideone.submission_status)."""
    AWAITING_COMPILATION = -1  # program awaits compilation
    DONE = 0                   # program finished
    COMPILING = 1              # program is being compiled
    RUNNING = 3                # program is running
class Result(object):
    """How an Ideone submission finished (see Ideone.submission_status)."""
    NOT_RUN = 0                  # submitted with run=False
    COMPILATION_ERROR = 11
    RUNTIME_ERROR = 12
    TIME_LIMIT_EXCEEDED = 13
    SUCCESS = 15
    MEMORY_LIMIT_EXCEEDED = 17
    ILLEGAL_SYSTEM_CALL = 19
    INTERNAL_ERROR = 20          # Ideone internal error; submit a bug report
class IdeoneError(Exception):
    """Raised when the Ideone web service reports any error other than 'OK'."""
    pass
class Ideone(object):
    """Minimal client for the Ideone.com code-execution SOAP (WSDL) API.

    Wraps the suds SOAP client and converts the key/value arrays the service
    returns into plain Python dictionaries.  Any service response whose
    'error' entry is not 'OK' is raised as an IdeoneError.
    """
    # Error string the service returns when a call succeeded.
    ERROR_OK = 'OK'
    def __init__(self, user, password, api_url=None):
        self.user = user
        self.password = password
        self.api_url = api_url if api_url else 'https://ideone.com/api/1/service.wsdl'
        # The SOAP encoding schema is not imported by the WSDL itself, so it
        # is "doctored" into the suds client here.
        self._import = Import('http://schemas.xmlsoap.org/soap/encoding/')
        self._doctor = ImportDoctor(self._import)
        self.client = Client(self.api_url, doctor=self._doctor)
        # Lazily-filled cache of the language id -> name mapping; see languages().
        self._language_dict = None
    @staticmethod
    def _transform_to_dict(result):
        """
        Transform the array from Ideone into a Python dictionary.
        """
        result_dict = {}
        property_list = result.item
        for item in property_list:
            result_dict[item.key[0]] = item.value[0]
        return result_dict
    @staticmethod
    def _handle_error(result_dict):
        """
        Raise an exception if the Ideone gave us an error.
        """
        error = result_dict['error']
        if error == Ideone.ERROR_OK:
            return
        else:
            raise IdeoneError(error)
    @staticmethod
    def _collapse_language_array(language_array):
        """
        Convert the Ideone language list into a Python dictionary.
        """
        language_dict = {}
        for language in language_array.item:
            key = language.key[0]
            value = language.value[0]
            language_dict[key] = value
        return language_dict
    def _translate_language_name(self, language_name):
        """
        Translate a human readable langauge name into its Ideone
        integer representation.
        Keyword Arguments
        -----------------
        * langauge_name: a string of the language (e.g. "c++")
        Returns
        -------
        An integer representation of the language.
        Notes
        -----
        We use a local cache of languages if available, else we grab
        the list of languages from Ideone. We test for a string match
        by comparing prefixes because Ideone includes the language
        compiler name and version number. Both strings are converted
        to lower case before the comparison.
        Examples
        --------
        >>> ideone_object = Ideone('username', 'password')
        >>> ideone_object._translate_language_name('ada')
        7
        """
        languages = self.languages()
        language_id = None
        # Check for exact match first including the whole version
        # string
        for ideone_index, ideone_language in list(languages.items()):
            if ideone_language.lower() == language_name.lower():
                return ideone_index
        # Check for a match of just the language name without any
        # version information
        simple_languages = dict((k,v.split('(')[0].strip())
                                for (k,v) in list(languages.items()))
        for ideone_index, simple_name in list(simple_languages.items()):
            if simple_name.lower() == language_name.lower():
                return ideone_index
        # Give up, but first find a similar name, suggest it and error
        # out
        language_choices = list(languages.values()) + list(simple_languages.values())
        similar_choices = difflib.get_close_matches(language_name,
                                                    language_choices,
                                                    n=3,
                                                    cutoff=0.3)
        # Add quotes and delimit with strings for easier to read
        # output
        similar_choices_string = ", ".join(["'" + s + "'"
                                            for s in similar_choices])
        error_string = ("Couldn't match '%s' to an Ideone accepted language.\n"
                        "Did you mean one of the following: %s")
        raise IdeoneError(error_string % (language_name, similar_choices_string))
    def create_submission(self, source_code, language_name=None, language_id=None,
                          std_input="", run=True, private=False):
        """
        Create a submission and upload it to Ideone.
        Keyword Arguments
        -----------------
        * source_code: a string of the programs source code
        * language_name: the human readable language string (e.g. 'python')
        * language_id: the ID of the programming language
        * std_input: the string to pass to the program on stdin
        * run: a boolean flag to signifying if Ideone should compile and
          run the program
        * private: a boolean flag signifying the code is private
        Returns
        -------
        A dictionary with the keys error and link. The link is the
        unique id of the program. The URL of the submission is
        http://ideone.com/LINK.
        Examples
        --------
        >>> ideone_object = Ideone('username', 'password')
        >>> ideone_object.create_submission('print(42)', language_name='python')
        {'error': 'OK',
         'link' : 'LsSbo'}
        """
        language_id = language_id or self._translate_language_name(language_name)
        result = self.client.service.createSubmission(self.user, self.password,
                                                      source_code, language_id,
                                                      std_input, run, private)
        result_dict = Ideone._transform_to_dict(result)
        Ideone._handle_error(result_dict)
        return result_dict
    def submission_status(self, link):
        """
        Given the unique link of a submission, returns its current
        status.
        Keyword Arguments
        -----------------
        * link: the unique id string of a submission
        Returns
        -------
        A dictionary of the error, the result code and the status
        code.
        Notes
        -----
        Status specifies the stage of execution.
        * status < 0 means the program awaits compilation
        * status == 0 means the program is done
        * status == 1 means the program is being compiled
        * status == 3 means the program is running
        Result specifies how the program finished.
        * result == 0 means not running, the program was submitted
          with run=False
        * result == 11 means compilation error
        * result == 12 means runtime error
        * result == 13 means timelimit exceeded
        * result == 15 means success
        * result == 17 means memory limit exceeded
        * result == 19 means illegal system call
        * result == 20 means Ideone internal error, submit a bug report
        Examples
        --------
        >>> ideone_object = Ideone('username', 'password')
        >>> ideone_object.submission_status('LsSbo')
        {'error': 'OK',
         'result': 15,
         'status': 0}
        """
        result = self.client.service.getSubmissionStatus(self.user, self.password, link)
        result_dict = Ideone._transform_to_dict(result)
        Ideone._handle_error(result_dict)
        return result_dict
    def submission_details(self, link, with_source=True,
                           with_input=True, with_output=True,
                           with_stderr=True, with_compilation_info=True):
        """
        Return a dictionary of requested details about a submission
        with the id of link.
        Keyword Arguments
        -----------------
        * link: the unique string ID of a submission
        * with_source: should we request the source code
        * with_input: request the program input
        * with_output: request the program output
        * with_stderr: request the error output
        * with_compilation_info: request compilation flags
        Examples
        --------
        >>> ideone_object = Ideone('username', 'password')
        >>> ideone_object.submission_details('LsSbo')
        {'cmpinfo': "",
         'date': "2011-04-18 15:24:14",
         'error': "OK",
         'input': "",
         'langId': 116,
         'langName': "Python 3",
         'langVersion': "python-3.1.2",
         'memory': 5852,
         'output': 42,
         'public': True,
         'result': 15,
         'signal': 0,
         'source': "print(42)",
         'status': 0,
         'stderr': "",
         'time': 0.02}
        """
        result = self.client.service.getSubmissionDetails(self.user, self.password,
                                                          link,
                                                          with_source, with_input,
                                                          with_output, with_stderr,
                                                          with_compilation_info)
        result_dict = Ideone._transform_to_dict(result)
        Ideone._handle_error(result_dict)
        return result_dict
    def languages(self):
        """
        Get a list of supported languages and cache it.
        Examples
        --------
        >>> ideone_object.languages()
        {'error': 'OK',
         'languages': {1: "C++ (gcc-4.3.4)",
                       2: "Pascal (gpc) (gpc 20070904)",
                       ...
                       ...
                       ...
                       125: "Falcon (falcon-0.9.6.6)"}}
        """
        # Only hit the service once; subsequent calls use the cached mapping.
        if self._language_dict is None:
            result = self.client.service.getLanguages(self.user, self.password)
            result_dict = Ideone._transform_to_dict(result)
            Ideone._handle_error(result_dict)
            languages = result_dict['languages']
            result_dict['languages'] = Ideone._collapse_language_array(languages)
            self._language_dict = result_dict['languages']
        return self._language_dict
    def test(self):
        """
        A test function that always returns the same thing.
        >>> ideone_object = Ideone('username', 'password')
        >>> ideone_object.test()
        {'answerToLifeAndEverything': 42,
         'error': "OK",
         'moreHelp': "ideone.com",
         'oOok': True,
         'pi': 3.14}
        """
        result = self.client.service.testFunction(self.user, self.password)
        result_dict = Ideone._transform_to_dict(result)
        Ideone._handle_error(result_dict)
        return result_dict
|
ronreiter/interactive-tutorials
|
ideone/__init__.py
|
Python
|
apache-2.0
| 10,805
|
"""
Automatically generate marking helpers functions.
"""
import sys
from .objects import Mark
class SimpleHelpers(object):
    """
    A class that is designed to act as a module and implement magic helper
    generation.
    """
    def __init__(self):
        # Cache of generated helper functions, keyed by color tag, so that
        # repeated attribute access returns the same function object.
        self.__helpers = {}
    def make_helper(self, color_tag):
        """
        Make a simple helper.
        :param color_tag: The color tag to make a helper for.
        :returns: The helper function.
        """
        helper = self.__helpers.get(color_tag)
        if not helper:
            def helper(obj):
                return Mark(obj=obj, color_tag=color_tag)
            # Give the generated function a name and docstring matching the
            # requested color tag, so it reads like a hand-written helper.
            helper.__name__ = color_tag
            helper.__doc__ = """
            Mark an object for coloration.
            The color tag is set to {color_tag!r}.
            :param obj: The object to mark for coloration.
            :returns: A :class:`Mark<chromalog.mark.objects.Mark>` instance.
            >>> from chromalog.mark.helpers.simple import {color_tag}
            >>> {color_tag}(42).color_tag
            ['{color_tag}']
            """.format(color_tag=color_tag)
            self.__helpers[color_tag] = helper
        return helper
    def __getattr__(self, name):
        """
        Get a magic helper.
        :param name: The name of the helper to get.
        >>> SimpleHelpers().alpha(42).color_tag
        ['alpha']
        >>> getattr(SimpleHelpers(), '_incorrect', None)
        """
        # Underscore-prefixed attributes are never treated as color tags.
        if name.startswith('_'):
            raise AttributeError(name)
        return self.make_helper(color_tag=name)
class ConditionalHelpers(object):
    """
    A class that is designed to act as a module and implement magic helper
    generation.

    Attribute access of the form ``a_or_b`` produces a helper that marks an
    object with color tag ``a`` when a condition is truthy and ``b``
    otherwise.
    """
    def __init__(self):
        # Cache of generated helpers, keyed by (color_tag_true, color_tag_false).
        self.__helpers = {}
    def make_helper(self, color_tag_true, color_tag_false):
        """
        Make a conditional helper.
        :param color_tag_true: The color tag if the condition is met.
        :param color_tag_false: The color tag if the condition is not met.
        :returns: The helper function.
        """
        helper = self.__helpers.get(
            (color_tag_true, color_tag_false),
        )
        if not helper:
            def helper(obj, condition=None):
                # When no explicit condition is given, the object itself is
                # evaluated for truthiness.
                if condition is None:
                    condition = obj
                return Mark(
                    obj=obj,
                    color_tag=color_tag_true if condition else color_tag_false,
                )
            helper.__name__ = '_or_'.join((color_tag_true, color_tag_false))
            helper.__doc__ = """
            Convenience helper method that marks an object with the
            {color_tag_true!r} color tag if `condition` is truthy, and with the
            {color_tag_false!r} color tag otherwise.
            :param obj: The object to mark for coloration.
            :param condition: The condition to verify. If `condition` is
                :const:`None`, the `obj` is evaluated instead.
            :returns: A :class:`Mark<chromalog.mark.objects.Mark>` instance.
            >>> from chromalog.mark.helpers.conditional import {name}
            >>> {name}(42, True).color_tag
            ['{color_tag_true}']
            >>> {name}(42, False).color_tag
            ['{color_tag_false}']
            >>> {name}(42).color_tag
            ['{color_tag_true}']
            >>> {name}(0).color_tag
            ['{color_tag_false}']
            """.format(
                name=helper.__name__,
                color_tag_true=color_tag_true,
                color_tag_false=color_tag_false,
            )
            # BUG FIX: the original stored the helper under the key
            # ((color_tag_true, color_tag_false),) -- note the stray
            # trailing comma inside the subscript -- while the lookup above
            # uses (color_tag_true, color_tag_false).  The keys never
            # matched, so the cache never hit and a fresh helper was built
            # on every attribute access.
            self.__helpers[(color_tag_true, color_tag_false)] = helper
        return helper
    def __getattr__(self, name):
        """
        Get a magic helper.
        :param name: The name of the helper to get. Must be of the form
            'a_or_b' where `a` and `b` are color tags.
        >>> ConditionalHelpers().alpha_or_beta(42, True).color_tag
        ['alpha']
        >>> ConditionalHelpers().alpha_or_beta(42, False).color_tag
        ['beta']
        >>> ConditionalHelpers().alpha_or_beta(42).color_tag
        ['alpha']
        >>> ConditionalHelpers().alpha_or_beta(0).color_tag
        ['beta']
        >>> getattr(ConditionalHelpers(), 'alpha_beta', None)
        >>> getattr(ConditionalHelpers(), '_incorrect', None)
        """
        # Underscore-prefixed attributes are never treated as color tags.
        if name.startswith('_'):
            raise AttributeError(name)
        try:
            color_tag_true, color_tag_false = name.split('_or_')
        except ValueError:
            raise AttributeError(name)
        return self.make_helper(
            color_tag_true=color_tag_true,
            color_tag_false=color_tag_false,
        )
# Singleton helper factories exposed as pseudo-modules below.
simple = SimpleHelpers()
simple.__doc__ = """
Pseudo-module that generates simple helpers.
See :class:`SimpleHelpers<chromalog.mark.helpers.SimpleHelpers>`.
"""
conditional = ConditionalHelpers()
conditional.__doc__ = """
Pseudo-module that generates conditional helpers.
See :class:`ConditionalHelpers<chromalog.mark.helpers.ConditionalHelpers>`.
"""
# Register the instances in sys.modules so that statements like
# `from chromalog.mark.helpers.simple import red` resolve through the
# objects' __getattr__ magic.
sys.modules['.'.join([__name__, 'simple'])] = simple
sys.modules['.'.join([__name__, 'conditional'])] = conditional
|
freelan-developers/chromalog
|
chromalog/mark/helpers.py
|
Python
|
mit
| 5,224
|
#===========================================================================================================================
# aims : This script takes an input frame file and writes it back with context frames added around each frame
#
# input : input_filepath : folder containing subjects' cepstra. It takes the subject_path set in 1_main_process_subjects.py
#
# output_filepath: folder where it writes back the cepstra with context
# Nframes: number of context frames added to both sides of each single frame. It takes the ctx_frames set in 1_main_process_subjects.py
#
# return : void
#===========================================================================================================================
import os
import re
import numpy as np
from numpy import genfromtxt
from . import utilities
def create_context_file(input_filepath, output_filepath, Nframes):
    """Rewrite a frame file adding temporal context around every frame.

    Each row (frame) of the input file is replaced by the concatenation of
    the Nframes previous frames, the frame itself and the Nframes following
    frames, so the output has data.shape[1] * (2 * Nframes + 1) columns.
    At the sentence boundaries the first/last frame is replicated to fill
    the missing context.

    :param input_filepath: path of the input frame file (whitespace-separated
        floats, one frame per row; at least two columns are assumed -- a
        single-column file would make genfromtxt return a 1-D array).
    :param output_filepath: path where the context-augmented file is written.
    :param Nframes: number of context frames added on each side of a frame.
    """
    # Load the whole frame matrix (rows = frames, columns = coefficients).
    # FIX: the original also opened the file a second time just to count its
    # lines -- and never closed either handle.  For a well-formed file the
    # row count of the parsed matrix is the same number.
    data = genfromtxt(input_filepath)
    startSent = 0                  # index of the first frame
    endSent = data.shape[0] - 1    # index of the last frame
    Newdata = np.zeros([data.shape[0], data.shape[1] * ((2 * Nframes) + 1)])
    indexData1 = startSent
    indexData2 = endSent
    # First frame: replicate itself Nframes+1 times, then the right context.
    Newdata[indexData1, :] = np.concatenate((np.tile(data[indexData1, :], Nframes + 1),
                                             data[indexData1 + 1:indexData1 + Nframes + 1, :].reshape(
                                                 data.shape[1] * Nframes)))
    indexData1 += 1
    # Second frame: Nframes copies of frame 0 as left context.
    Newdata[indexData1, :] = np.concatenate((np.tile(data[startSent, :], Nframes), data[indexData1, :],
                                             data[indexData1 + 1:indexData1 + Nframes + 1, :].reshape(
                                                 data.shape[1] * Nframes)))
    indexData1 += 1
    # Frames 3..Nframes: left context partially filled with copies of frame 0.
    while indexData1 < Nframes:
        diff1 = indexData1
        DummyFramesL = np.concatenate((np.tile(data[startSent, :], Nframes - diff1 + 1),
                                       data[indexData1 - diff1 + 1:indexData1, :].reshape(data.shape[1] * (diff1 - 1))))
        Newdata[indexData1, :] = np.concatenate((DummyFramesL, data[indexData1, :],
                                                 data[indexData1 + 1:indexData1 + Nframes + 1, :].reshape(
                                                     data.shape[1] * Nframes)))
        indexData1 += 1
    # Central frames: full context available on both sides.
    for index in range(indexData1, indexData2 - Nframes):
        Newdata[index, :] = data[max(0, index - Nframes): index + Nframes + 1, :].reshape(
            data.shape[1] * ((2 * Nframes) + 1))
    # Last frame: replicate itself Nframes+1 times after the left context.
    Newdata[indexData2, :] = np.concatenate((data[indexData2 - Nframes:indexData2, :].reshape(data.shape[1] * Nframes),
                                             np.tile(data[indexData2, :], Nframes + 1)))
    indexData2 -= 1
    # Penultimate frame: one replicated frame on the right.
    Newdata[indexData2, :] = np.concatenate((data[indexData2 - Nframes:indexData2, :].reshape(data.shape[1] * Nframes),
                                             data[indexData2, :], np.tile(data[indexData2 + 1, :], Nframes)))
    indexData2 -= 1
    # Frames near the end: right context partially filled with the last frame.
    while endSent - indexData2 < Nframes:
        diff2 = endSent - indexData2
        DummyFramesR = np.concatenate((data[indexData2 + 1:endSent, :].reshape(data.shape[1] * (diff2 - 1)),
                                       np.tile(data[endSent, :], Nframes - diff2 + 1)))
        Newdata[indexData2, :] = np.concatenate(
            (data[indexData2 - Nframes:indexData2, :].reshape(data.shape[1] * Nframes), data[indexData2, :], DummyFramesR))
        indexData2 -= 1
    # FIX: write through a context manager so the handle is always closed.
    with open(output_filepath, "wb") as ctx_file:
        np.savetxt(ctx_file, Newdata, fmt='%.4f')
def createSubjectContext(subject_path, ctx_frames):
    """Create a context-augmented copy (ctx_<name>.dat) of every raw .dat
    file found in subject_path, using ctx_frames frames of context."""
    raw_files = [entry for entry in os.listdir(subject_path)
                 if entry.endswith(".dat") and not entry.startswith("ctx")]
    for entry in raw_files:
        source_path = subject_path + "/" + entry
        target_path = subject_path + "/" + "ctx_" + entry
        create_context_file(source_path, target_path, ctx_frames)
    print("subject context created")
|
allspeak/api.allspeak.eu
|
web/project/training_api/libs/context.py
|
Python
|
mit
| 4,350
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" 工程文件生成
so_ 模块方案
pj_ 模块工程
tp_ 测试工程
"""
import basebuilder
import os
import hashlib as md5
import web.template
# Template renderer rooted at <this file's directory>/template; 'type' and
# 'hasattr' are exposed to the templates as globals.
render = web.template.render(os.path.join( os.path.split(os.path.realpath(__file__))[0], "template") ,
                             globals={'type':type,"hasattr":hasattr})
class Msvc2008SolutionBuilder(object):
    """Generates an MSVC 2008 solution (.sln) file from a property dictionary.

    NOTE(review): this class targets Python 2 -- see the `unicode` check in
    GenerateUUIDByName -- and will not run unmodified on Python 3.
    """
    def __init__(self):
        # Solution properties (so_*/pj_*/tp_* keys); set by BuildSolution().
        self.props = {}
    def GenerateUUIDByName(self, name, ns=None):
        """ Generate UUID """
        # Optionally qualify the name with a namespace prefix.
        if ns != None:
            name = ns + "." + name
        if type(name) == unicode:
            name = name.encode('utf-8')
        # Double MD5 of the name ('md5' is an alias for hashlib), formatted
        # in the 8-4-4-4-12 GUID layout. Deterministic: the same name always
        # yields the same id.
        s = md5.md5(md5.md5(name).hexdigest()).hexdigest()
        return "-".join( (s[:8],s[8:12], s[12:16], s[16:20],s[20:]) ).upper()
    def BuildProjectFolder(self):
        """ Create the project and test-project directories (root/code/{so_folder}). """
        if not os.path.exists(self.props["pj_path"]):
            os.makedirs(self.props["pj_path"])
        if not os.path.exists(self.props["tp_path"]):
            os.makedirs(self.props["tp_path"])
        return
    def BuildSolutionFile(self):
        """Render the .sln template with self.props and write it to disk.

        :returns: the path of the written solution file.
        """
        #print self.props.keys
        ctx = render.msvc2008_sln_tmpl(self.props)
        name = os.path.join(self.props["so_path"], "%s.sln" % self.props["so_name"])
        f=open(name, "w")
        f.write( str(ctx) )
        f.close()
        return name
    def BuildSolution(self, pps):
        """Store the property dict, create the folders and write the .sln.

        :param pps: property dictionary (so_path, so_name, pj_path, tp_path, ...).
        :returns: the path of the written solution file.
        """
        self.props = pps
        self.BuildProjectFolder()
        return self.BuildSolutionFile()
|
LMiceOrg/Modelingtools
|
autotools/builder/msvc2008solutionbuilder.py
|
Python
|
mit
| 1,544
|
from __future__ import division
from collections import defaultdict
from operator import itemgetter
from readdata import read_data
import csv
import sys
# Roman-numeral chord roots used throughout the analysis (12 chromatic
# degrees plus a 'NonHarmonic' bucket).
RN = ['I', 'bII', 'II', 'bIII', 'III', 'IV', 'bV', 'V', 'bVI', 'VI', 'bVII', 'VII', 'NonHarmonic']
# NOTE(review): this loop only reassigns z and t; after it finishes, j == 3,
# so the code below effectively filters on z[3] == '4' and t[3] == '4'.
# It looks as if the loop was meant to wrap the whole analysis over all four
# bar positions -- confirm the intended indentation.
for j in range(0,4):
    z = ['4','4','4','4']
    t = ['1','2','3','4']
def transition_probs_by_song(chord_lists):
    """
    Return a dictionary where the keys are song names, and the values are
    dictionaries with transitional probabilities.

    Only transitions out of chords located at bar t[j] of z[j]-bar phrases
    (module-level globals) are counted.
    """
    song_transition_probs = {}
    # for every song in the corpus, 'chords' will be a list of the chords
    for chords in chord_lists:
        if not chords:
            # FIX: skip empty songs -- the original would have keyed the
            # result with a stale inner-loop variable below (see next fix).
            continue
        chord_counts = defaultdict(lambda: 0)
        transition_counts = defaultdict(lambda: 0)
        # count all the transitions and root occurrences for this song
        for i in range(len(chords) - 1):
            if ((chords[i]['bars_per_phrase'] == z[j]) and (chords[i]['bar_of_phrase'] == t[j])):
                transition = (chords[i]['root'], chords[i + 1]['root'])
                transition_counts[transition] += 1
                chord_counts[chords[i]['root']] += 1
        # FIX: key on chords[0]['song_name']. The original used chords[i],
        # where 'i' leaked out of the inner loop -- undefined (NameError) for
        # the first song when it has fewer than two chords, and potentially
        # out of range for later short songs.
        song_transition_probs[chords[0]['song_name']] = get_transition_probs(chord_counts, transition_counts)
    return song_transition_probs
def get_transition_probs(chord_counts, transition_counts):
    """
    Returns a dictionary of transition probabilities based on counts for chords
    and transitions.

    :param chord_counts: mapping from root -> number of occurrences
    :param transition_counts: mapping from (root, root) -> number of occurrences
    :return: dict with one entry per (root, root) pair over RN; pairs whose
        first chord was never seen get probability 0.
    """
    probs = {}
    # go through all possible transitions between roots in RN
    for first in RN:
        for second in RN:
            count = chord_counts.get(first, 0)
            if count:
                probs[(first, second)] = transition_counts.get((first, second), 0) / count
            else:
                # FIX: the original wrapped the division in a bare 'except:'
                # to default unseen chords to probability 0 -- which also
                # silently swallowed every other error.  Test the
                # denominator explicitly instead.
                probs[(first, second)] = 0
    return probs
def write_csv(probabilities):
    """
    Append one CSV row per song: the song name followed by its harmonic
    transition probabilities (non-harmonic transitions are dropped).

    :param probabilities: dict mapping song name -> transition probability
        dict, as produced by transition_probs_by_song().
    """
    with open('outputeachsongperbar.csv', 'a') as csvfile:
        writer = csv.writer(csvfile, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
        # FIX: the original iterated the module-level global
        # 'transition_probs' and silently ignored its 'probabilities'
        # parameter.
        for song_name, probs in probabilities.items():
            # get all probabilities in sorted order, and get rid of non-harmonic transitions
            transitions = [(RN.index(c1), RN.index(c2)) for c1, c2 in probs if c1 != 'NonHarmonic' and c2 != 'NonHarmonic']
            line = [probs[(RN[c1], RN[c2])] for c1, c2 in sorted(transitions)]
            # add the song name as the first value in the csv
            line = [song_name] + line
            # write to csv
            writer.writerow(line)
if __name__ == '__main__':
    # NOTE(review): bare 'except:' -- it also hides errors other than a
    # missing argv entry; an explicit IndexError would be safer.
    try:
        datafile = sys.argv[1]
    except:
        datafile = 'AlldataWithNonHarmonics.csv'
    data = read_data(datafile)
    transition_probs = transition_probs_by_song(data)
    write_csv(transition_probs)
    # Pretty-print the non-zero transition probabilities of every song.
    for song_name, probs in transition_probs.items():
        print song_name + '\n' + ('-' * len(song_name)) + 'Bar Phrase ' + t[j] + ' of ' + z[j]
        # map roman numerals to integers for sorting, and convert back to display
        # this isn't actually necessary, just makes printing the results look nicer
        transitions = [(RN.index(c1), RN.index(c2)) for c1, c2 in probs]
        for c1, c2 in sorted(transitions):
            probability = probs[(RN[c1], RN[c2])]
            if probability != 0:
                print '({} -> {}): {:.4f}'.format(RN[c1], RN[c2], probability)
        print #newline
|
corpusmusic/billboardcorpus
|
4Bars_PerSong_EachBar.py
|
Python
|
gpl-3.0
| 4,066
|
"""Testing for the boost module (sklearn.ensemble.boost)."""
import numpy as np
import pytest
from scipy.sparse import csc_matrix
from scipy.sparse import csr_matrix
from scipy.sparse import coo_matrix
from scipy.sparse import dok_matrix
from scipy.sparse import lil_matrix
from sklearn.utils._testing import assert_array_equal, assert_array_less
from sklearn.utils._testing import assert_array_almost_equal
from sklearn.base import BaseEstimator
from sklearn.base import clone
from sklearn.dummy import DummyClassifier, DummyRegressor
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sklearn.model_selection import GridSearchCV
from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import AdaBoostRegressor
from sklearn.ensemble._weight_boosting import _samme_proba
from sklearn.svm import SVC, SVR
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
from sklearn.utils import shuffle
from sklearn.utils._mocking import NoSampleWeightWrapper
from sklearn import datasets
# Common random state
# NOTE(review): this RNG is consumed in order by the fixtures below, so the
# relative order of these module-level statements is significant.
rng = np.random.RandomState(0)
# Toy sample: six 2-D points split into two obvious clusters.
X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]]
y_class = ["foo", "foo", "foo", 1, 1, 1] # test string class labels
y_regr = [-1, -1, -1, 1, 1, 1]
# Held-out toy test points with their expected labels/targets.
T = [[-1, -1], [2, 2], [3, 2]]
y_t_class = ["foo", 1, 1]
y_t_regr = [-1, 1, 1]
# Load the iris dataset and randomly permute it
# NOTE(review): `perm` is computed but `shuffle` draws fresh numbers from
# `rng` instead of using it -- presumably dead code; confirm before removal.
iris = datasets.load_iris()
perm = rng.permutation(iris.target.size)
iris.data, iris.target = shuffle(iris.data, iris.target, random_state=rng)
# Load the diabetes dataset and randomly permute it
diabetes = datasets.load_diabetes()
diabetes.data, diabetes.target = shuffle(diabetes.data, diabetes.target,
                                         random_state=rng)
def test_samme_proba():
    """Sanity-check the `_samme_proba` helper on degenerate probabilities."""
    # Deliberately malformed `predict_proba` output: zeros, negatives and
    # tiny values that would break a naive log transform.
    bad_probs = np.array(
        [[1, 1e-6, 0],
         [0.19, 0.6, 0.2],
         [-999, 0.51, 0.5],
         [1e-6, 1, 1e-9]])
    bad_probs /= np.abs(bad_probs.sum(axis=1))[:, np.newaxis]

    class _FakeEstimator:
        # `_samme_proba` only needs `predict_proba`; serve the canned array.
        def predict_proba(self, X):
            assert_array_equal(X.shape, bad_probs.shape)
            return bad_probs

    out = _samme_proba(_FakeEstimator(), 3, np.ones_like(bad_probs))
    assert_array_equal(out.shape, bad_probs.shape)
    assert np.isfinite(out).all()
    # The per-row ordering of the input probabilities must be preserved.
    assert_array_equal(np.argmin(out, axis=1), [2, 0, 0, 2])
    assert_array_equal(np.argmax(out, axis=1), [0, 1, 1, 1])
def test_oneclass_adaboost_proba():
    """predict_proba must not crash when training data has a single class.

    Regression test for issue #7501
    (https://github.com/scikit-learn/scikit-learn/issues/7501).
    """
    single_class_y = np.ones(len(X))
    model = AdaBoostClassifier().fit(X, single_class_y)
    expected = np.ones((len(X), 1))
    assert_array_almost_equal(model.predict_proba(X), expected)
@pytest.mark.parametrize("algorithm", ["SAMME", "SAMME.R"])
def test_classification_toy(algorithm):
    """AdaBoost classifies the toy data and exposes consistent shapes."""
    model = AdaBoostClassifier(algorithm=algorithm, random_state=0)
    model.fit(X, y_class)
    assert_array_equal(model.predict(T), y_t_class)
    assert_array_equal(np.unique(np.asarray(y_t_class)), model.classes_)
    n_test = len(T)
    # Binary problem: two probability columns, 1-D decision values.
    assert model.predict_proba(T).shape == (n_test, 2)
    assert model.decision_function(T).shape == (n_test,)
def test_regression_toy():
    """AdaBoost regression reproduces the toy targets exactly."""
    model = AdaBoostRegressor(random_state=0).fit(X, y_regr)
    assert_array_equal(model.predict(T), y_t_regr)
def test_iris():
    """Check classifier consistency on the iris dataset.

    Fits both boosting algorithms, verifying class bookkeeping, output
    shapes, accuracy, ensemble size and per-estimator random states, then
    runs a SAMME vs. SAMME.R probability regression check.
    """
    classes = np.unique(iris.target)
    clf_samme = prob_samme = None
    for alg in ['SAMME', 'SAMME.R']:
        clf = AdaBoostClassifier(algorithm=alg)
        clf.fit(iris.data, iris.target)
        assert_array_equal(classes, clf.classes_)
        proba = clf.predict_proba(iris.data)
        if alg == "SAMME":
            # Keep the fitted SAMME model for the regression check below.
            clf_samme = clf
            prob_samme = proba
        assert proba.shape[1] == len(classes)
        assert clf.decision_function(iris.data).shape[1] == len(classes)
        score = clf.score(iris.data, iris.target)
        assert score > 0.9, "Failed with algorithm %s and score = %f" % \
            (alg, score)
        # Check we used multiple estimators
        assert len(clf.estimators_) > 1
        # Check for distinct random states (see issue #7408)
        assert (len(set(est.random_state for est in clf.estimators_)) ==
                len(clf.estimators_))
    # Somewhat hacky regression test: prior to
    # ae7adc880d624615a34bafdb1d75ef67051b8200,
    # predict_proba returned SAMME.R values for SAMME.
    # Flipping the algorithm flag on the already-fitted SAMME model must
    # change every predicted probability.
    clf_samme.algorithm = "SAMME.R"
    assert_array_less(0,
                      np.abs(clf_samme.predict_proba(iris.data) - prob_samme))
@pytest.mark.parametrize('loss', ['linear', 'square', 'exponential'])
def test_diabetes(loss):
    """Check regressor consistency on diabetes for every loss function."""
    model = AdaBoostRegressor(loss=loss, random_state=0)
    model.fit(diabetes.data, diabetes.target)
    assert model.score(diabetes.data, diabetes.target) > 0.6
    # The ensemble must contain more than one estimator ...
    assert len(model.estimators_) > 1
    # ... each seeded with its own random state (regression test for #7408).
    seeds = {est.random_state for est in model.estimators_}
    assert len(seeds) == len(model.estimators_)
@pytest.mark.parametrize("algorithm", ["SAMME", "SAMME.R"])
def test_staged_predict(algorithm):
    """Staged predictions, probabilities and scores must match the one-shot
    values at the final stage, for both the classifier (iris) and the
    regressor (diabetes), trained with integer sample weights."""
    rng = np.random.RandomState(0)  # local RNG, shadows the module-level one
    iris_weights = rng.randint(10, size=iris.target.shape)
    diabetes_weights = rng.randint(10, size=diabetes.target.shape)
    clf = AdaBoostClassifier(algorithm=algorithm, n_estimators=10)
    clf.fit(iris.data, iris.target, sample_weight=iris_weights)
    predictions = clf.predict(iris.data)
    staged_predictions = [p for p in clf.staged_predict(iris.data)]
    proba = clf.predict_proba(iris.data)
    staged_probas = [p for p in clf.staged_predict_proba(iris.data)]
    score = clf.score(iris.data, iris.target, sample_weight=iris_weights)
    staged_scores = [
        s for s in clf.staged_score(
            iris.data, iris.target, sample_weight=iris_weights)]
    # One stage per estimator, and the last stage equals the direct call.
    assert len(staged_predictions) == 10
    assert_array_almost_equal(predictions, staged_predictions[-1])
    assert len(staged_probas) == 10
    assert_array_almost_equal(proba, staged_probas[-1])
    assert len(staged_scores) == 10
    assert_array_almost_equal(score, staged_scores[-1])
    # AdaBoost regression
    clf = AdaBoostRegressor(n_estimators=10, random_state=0)
    clf.fit(diabetes.data, diabetes.target, sample_weight=diabetes_weights)
    predictions = clf.predict(diabetes.data)
    staged_predictions = [p for p in clf.staged_predict(diabetes.data)]
    score = clf.score(diabetes.data, diabetes.target,
                      sample_weight=diabetes_weights)
    staged_scores = [
        s for s in clf.staged_score(
            diabetes.data, diabetes.target, sample_weight=diabetes_weights)]
    assert len(staged_predictions) == 10
    assert_array_almost_equal(predictions, staged_predictions[-1])
    assert len(staged_scores) == 10
    assert_array_almost_equal(score, staged_scores[-1])
def test_gridsearch():
    """Base-estimator hyper-parameters must be reachable via grid search."""
    # Classification: search over ensemble size, tree depth and algorithm.
    param_grid = {'n_estimators': (1, 2),
                  'base_estimator__max_depth': (1, 2),
                  'algorithm': ('SAMME', 'SAMME.R')}
    search = GridSearchCV(
        AdaBoostClassifier(base_estimator=DecisionTreeClassifier()),
        param_grid)
    search.fit(iris.data, iris.target)
    # Regression: same idea, without the algorithm axis.
    param_grid = {'n_estimators': (1, 2),
                  'base_estimator__max_depth': (1, 2)}
    search = GridSearchCV(
        AdaBoostRegressor(base_estimator=DecisionTreeRegressor(),
                          random_state=0),
        param_grid)
    search.fit(diabetes.data, diabetes.target)
def test_pickle():
    """Fitted boosting models survive a pickle round-trip unchanged."""
    import pickle

    def _roundtrip(model, data, target):
        # Fit, serialize, restore, then compare type and training score.
        model.fit(data, target)
        restored = pickle.loads(pickle.dumps(model))
        assert type(restored) == model.__class__
        assert model.score(data, target) == restored.score(data, target)

    for algorithm in ['SAMME', 'SAMME.R']:
        _roundtrip(AdaBoostClassifier(algorithm=algorithm),
                   iris.data, iris.target)
    _roundtrip(AdaBoostRegressor(random_state=0),
               diabetes.data, diabetes.target)
def test_importances():
    """Informative features must receive the largest importances."""
    data, target = datasets.make_classification(n_samples=2000,
                                                n_features=10,
                                                n_informative=3,
                                                n_redundant=0,
                                                n_repeated=0,
                                                shuffle=False,
                                                random_state=1)
    for algorithm in ['SAMME', 'SAMME.R']:
        model = AdaBoostClassifier(algorithm=algorithm).fit(data, target)
        importances = model.feature_importances_
        assert importances.shape[0] == 10
        # With shuffle=False the 3 informative features come first and must
        # dominate every remaining feature.
        assert (importances[:3, np.newaxis] >= importances[3:]).all()
def test_error():
    """Invalid hyper-parameters and weights raise ValueError at fit time."""
    for bad_clf in (AdaBoostClassifier(learning_rate=-1),
                    AdaBoostClassifier(algorithm="foo")):
        with pytest.raises(ValueError):
            bad_clf.fit(X, y_class)
    # A sample_weight vector of the wrong length is rejected too.
    with pytest.raises(ValueError):
        AdaBoostClassifier().fit(X, y_class, sample_weight=np.asarray([-1]))
def test_base_estimator():
    """AdaBoost accepts a variety of base estimators."""
    from sklearn.ensemble import RandomForestClassifier
    from sklearn.ensemble import RandomForestRegressor

    # XXX doesn't work with y_class because RF doesn't support classes_
    # Shouldn't AdaBoost run a LabelBinarizer?
    AdaBoostClassifier(RandomForestClassifier()).fit(X, y_regr)
    AdaBoostClassifier(SVC(), algorithm="SAMME").fit(X, y_class)
    AdaBoostRegressor(RandomForestRegressor(), random_state=0).fit(X, y_regr)
    AdaBoostRegressor(SVR(), random_state=0).fit(X, y_regr)

    # An ensemble that cannot beat random chance must fail in fit, not in
    # predict.
    X_fail = [[1, 1], [1, 1], [1, 1], [1, 1]]
    y_fail = ["foo", "bar", 1, 2]
    with pytest.raises(ValueError, match="worse than random"):
        AdaBoostClassifier(SVC(), algorithm="SAMME").fit(X_fail, y_fail)
def test_sparse_classification():
    """Sparse and dense inputs must give identical classification results.

    Every public prediction API (one-shot and staged) is compared between a
    model trained on sparse data and one trained on dense data, for all five
    scipy sparse formats.
    """
    class CustomSVC(SVC):
        """SVC variant that records the nature of the training set."""
        def fit(self, X, y, sample_weight=None):
            """Modification on fit caries data type for later verification."""
            super().fit(X, y, sample_weight=sample_weight)
            self.data_type_ = type(X)
            return self
    X, y = datasets.make_multilabel_classification(n_classes=1, n_samples=15,
                                                   n_features=5,
                                                   random_state=42)
    # Flatten y to a 1d array
    y = np.ravel(y)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
    for sparse_format in [csc_matrix, csr_matrix, lil_matrix, coo_matrix,
                          dok_matrix]:
        X_train_sparse = sparse_format(X_train)
        X_test_sparse = sparse_format(X_test)
        # Trained on sparse format
        sparse_classifier = AdaBoostClassifier(
            base_estimator=CustomSVC(probability=True),
            random_state=1,
            algorithm="SAMME"
        ).fit(X_train_sparse, y_train)
        # Trained on dense format
        dense_classifier = AdaBoostClassifier(
            base_estimator=CustomSVC(probability=True),
            random_state=1,
            algorithm="SAMME"
        ).fit(X_train, y_train)
        # predict
        sparse_results = sparse_classifier.predict(X_test_sparse)
        dense_results = dense_classifier.predict(X_test)
        assert_array_equal(sparse_results, dense_results)
        # decision_function
        sparse_results = sparse_classifier.decision_function(X_test_sparse)
        dense_results = dense_classifier.decision_function(X_test)
        assert_array_almost_equal(sparse_results, dense_results)
        # predict_log_proba
        sparse_results = sparse_classifier.predict_log_proba(X_test_sparse)
        dense_results = dense_classifier.predict_log_proba(X_test)
        assert_array_almost_equal(sparse_results, dense_results)
        # predict_proba
        sparse_results = sparse_classifier.predict_proba(X_test_sparse)
        dense_results = dense_classifier.predict_proba(X_test)
        assert_array_almost_equal(sparse_results, dense_results)
        # score
        sparse_results = sparse_classifier.score(X_test_sparse, y_test)
        dense_results = dense_classifier.score(X_test, y_test)
        assert_array_almost_equal(sparse_results, dense_results)
        # staged_decision_function
        sparse_results = sparse_classifier.staged_decision_function(
            X_test_sparse)
        dense_results = dense_classifier.staged_decision_function(X_test)
        # NOTE(review): "sprase_res" below is a typo for "sparse_res"
        # (cosmetic only; the loops behave correctly).
        for sprase_res, dense_res in zip(sparse_results, dense_results):
            assert_array_almost_equal(sprase_res, dense_res)
        # staged_predict
        sparse_results = sparse_classifier.staged_predict(X_test_sparse)
        dense_results = dense_classifier.staged_predict(X_test)
        for sprase_res, dense_res in zip(sparse_results, dense_results):
            assert_array_equal(sprase_res, dense_res)
        # staged_predict_proba
        sparse_results = sparse_classifier.staged_predict_proba(X_test_sparse)
        dense_results = dense_classifier.staged_predict_proba(X_test)
        for sprase_res, dense_res in zip(sparse_results, dense_results):
            assert_array_almost_equal(sprase_res, dense_res)
        # staged_score
        sparse_results = sparse_classifier.staged_score(X_test_sparse,
                                                        y_test)
        dense_results = dense_classifier.staged_score(X_test, y_test)
        for sprase_res, dense_res in zip(sparse_results, dense_results):
            assert_array_equal(sprase_res, dense_res)
        # Verify sparsity of data is maintained during training
        types = [i.data_type_ for i in sparse_classifier.estimators_]
        assert all([(t == csc_matrix or t == csr_matrix)
                    for t in types])
def test_sparse_regression():
    """Sparse and dense inputs must give identical regression results."""
    class CustomSVR(SVR):
        """SVR variant that records the nature of the training set."""
        def fit(self, X, y, sample_weight=None):
            """Modification on fit carries data type for later verification."""
            super().fit(X, y, sample_weight=sample_weight)
            self.data_type_ = type(X)
            return self
    X, y = datasets.make_regression(n_samples=15, n_features=50, n_targets=1,
                                    random_state=42)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
    for sparse_format in [csc_matrix, csr_matrix, lil_matrix, coo_matrix,
                          dok_matrix]:
        X_train_sparse = sparse_format(X_train)
        X_test_sparse = sparse_format(X_test)
        # Trained on sparse format.  Renamed from "*_classifier": these are
        # regressors.
        sparse_regressor = AdaBoostRegressor(
            base_estimator=CustomSVR(),
            random_state=1
        ).fit(X_train_sparse, y_train)
        # Trained on dense format.  Bug fix: dropped the accidental extra
        # `dense_results =` alias from this assignment.
        dense_regressor = AdaBoostRegressor(
            base_estimator=CustomSVR(),
            random_state=1
        ).fit(X_train, y_train)
        # predict
        sparse_results = sparse_regressor.predict(X_test_sparse)
        dense_results = dense_regressor.predict(X_test)
        assert_array_almost_equal(sparse_results, dense_results)
        # staged_predict
        sparse_results = sparse_regressor.staged_predict(X_test_sparse)
        dense_results = dense_regressor.staged_predict(X_test)
        for sparse_res, dense_res in zip(sparse_results, dense_results):
            assert_array_almost_equal(sparse_res, dense_res)
        # Sparsity must be preserved during training (CSC or CSR internally).
        types = [i.data_type_ for i in sparse_regressor.estimators_]
        assert all([(t == csc_matrix or t == csr_matrix)
                    for t in types])
def test_sample_weight_adaboost_regressor():
    """
    AdaBoostRegressor must work when the base estimator ignores
    sample_weight: the random weighted resampling is done internally in
    AdaBoostRegressor._boost, not in the base estimator.
    """
    class WeightlessEstimator(BaseEstimator):
        # Minimal estimator: no sample_weight in fit, constant predictions.
        def fit(self, X, y):
            pass

        def predict(self, X):
            return np.zeros(X.shape[0])

    booster = AdaBoostRegressor(WeightlessEstimator(), n_estimators=3)
    booster.fit(X, y_regr)
    assert len(booster.estimator_weights_) == len(booster.estimator_errors_)
def test_multidimensional_X():
    """
    Both AdaBoost estimators must accept an n-dimensional data matrix.
    """
    local_rng = np.random.RandomState(0)
    data = local_rng.randn(50, 3, 3)
    targets_clf = local_rng.choice([0, 1], 50)
    targets_reg = local_rng.randn(50)

    clf = AdaBoostClassifier(DummyClassifier(strategy='most_frequent'))
    clf.fit(data, targets_clf)
    clf.predict(data)
    clf.predict_proba(data)

    reg = AdaBoostRegressor(DummyRegressor())
    reg.fit(data, targets_reg)
    reg.predict(data)
@pytest.mark.parametrize("algorithm", ['SAMME', 'SAMME.R'])
def test_adaboostclassifier_without_sample_weight(algorithm):
    """Fitting must fail loudly when the base estimator rejects weights."""
    data, target = iris.data, iris.target
    wrapped = NoSampleWeightWrapper(DummyClassifier())
    model = AdaBoostClassifier(base_estimator=wrapped, algorithm=algorithm)
    expected_msg = "{} doesn't support sample_weight".format(
        wrapped.__class__.__name__)
    with pytest.raises(ValueError, match=expected_msg):
        model.fit(data, target)
def test_adaboostregressor_sample_weight():
    """A zero sample weight must fully cancel an outlier's influence."""
    # check that giving weight will have an influence on the error computed
    # for a weak learner
    rng = np.random.RandomState(42)
    X = np.linspace(0, 100, num=1000)
    y = (.8 * X + 0.2) + (rng.rand(X.shape[0]) * 0.0001)
    X = X.reshape(-1, 1)
    # add an arbitrary outlier
    X[-1] *= 10
    y[-1] = 10000
    # random_state=0 ensure that the underlying bootstrap will use the outlier
    regr_no_outlier = AdaBoostRegressor(
        base_estimator=LinearRegression(), n_estimators=1, random_state=0
    )
    regr_with_weight = clone(regr_no_outlier)
    regr_with_outlier = clone(regr_no_outlier)
    # fit 3 models:
    # - a model containing the outlier
    # - a model without the outlier
    # - a model containing the outlier but with a null sample-weight
    regr_with_outlier.fit(X, y)
    regr_no_outlier.fit(X[:-1], y[:-1])
    sample_weight = np.ones_like(y)
    sample_weight[-1] = 0
    regr_with_weight.fit(X, y, sample_weight=sample_weight)
    # Scores are evaluated with the outlier point excluded.
    score_with_outlier = regr_with_outlier.score(X[:-1], y[:-1])
    score_no_outlier = regr_no_outlier.score(X[:-1], y[:-1])
    score_with_weight = regr_with_weight.score(X[:-1], y[:-1])
    # The outlier must hurt; a zero weight must neutralise it completely.
    assert score_with_outlier < score_no_outlier
    assert score_with_outlier < score_with_weight
    assert score_no_outlier == pytest.approx(score_with_weight)
@pytest.mark.parametrize("algorithm", ["SAMME", "SAMME.R"])
def test_adaboost_consistent_predict(algorithm):
    """predict() must agree with the argmax of predict_proba().

    Regression test for
    https://github.com/scikit-learn/scikit-learn/issues/14084
    """
    X_train, X_test, y_train, y_test = train_test_split(
        *datasets.load_digits(return_X_y=True), random_state=42
    )
    model = AdaBoostClassifier(algorithm=algorithm, random_state=42)
    model.fit(X_train, y_train)
    proba_argmax = np.argmax(model.predict_proba(X_test), axis=1)
    assert_array_equal(proba_argmax, model.predict(X_test))
@pytest.mark.parametrize(
    'model, X, y',
    [(AdaBoostClassifier(), iris.data, iris.target),
     (AdaBoostRegressor(), diabetes.data, diabetes.target)]
)
def test_adaboost_negative_weight_error(model, X, y):
    """A negative sample weight must be rejected with a clear message."""
    weights = np.ones_like(y)
    weights[-1] = -10
    with pytest.raises(ValueError,
                       match="sample_weight cannot contain negative weight"):
        model.fit(X, y, sample_weight=weights)
|
kevin-intel/scikit-learn
|
sklearn/ensemble/tests/test_weight_boosting.py
|
Python
|
bsd-3-clause
| 21,053
|
import os
import hashlib
import random
import warnings
from tempfile import mkdtemp, TemporaryFile
from shutil import rmtree
from twisted.trial import unittest
from scrapy.item import Item, Field
from scrapy.http import Request, Response
from scrapy.settings import Settings
from scrapy.pipelines.images import ImagesPipeline
from scrapy.utils.python import to_bytes
# Decide whether trial should skip this module: a non-False string in `skip`
# is interpreted by twisted.trial as the skip reason.
skip = False
try:
    from PIL import Image
except ImportError as e:
    # Pillow missing entirely.
    skip = 'Missing Python Imaging Library, install https://pypi.python.org/pypi/Pillow'
else:
    # Pillow present but possibly built without JPEG support.
    encoders = set(('jpeg_encoder', 'jpeg_decoder'))
    if not encoders.issubset(set(Image.core.__dict__)):
        skip = 'Missing JPEG encoders'
def _mocked_download_func(request, info):
response = request.meta.get('response')
return response() if callable(response) else response
class ImagesPipelineTestCase(unittest.TestCase):
    """Exercise ImagesPipeline path generation and image conversion."""
    skip = skip

    def setUp(self):
        # Fresh store directory and a pipeline whose downloads are served
        # from request.meta instead of the network.
        self.tempdir = mkdtemp()
        self.pipeline = ImagesPipeline(self.tempdir, download_func=_mocked_download_func)

    def tearDown(self):
        rmtree(self.tempdir)

    def test_file_path(self):
        """file_path() must map a URL to a stable 'full/<sha1>.jpg' path."""
        file_path = self.pipeline.file_path
        self.assertEqual(file_path(Request("https://dev.mydeco.com/mydeco.gif")),
                         'full/3fd165099d8e71b8a48b2683946e64dbfad8b52d.jpg')
        self.assertEqual(file_path(Request("http://www.maddiebrown.co.uk///catalogue-items//image_54642_12175_95307.jpg")),
                         'full/0ffcd85d563bca45e2f90becd0ca737bc58a00b2.jpg')
        self.assertEqual(file_path(Request("https://dev.mydeco.com/two/dirs/with%20spaces%2Bsigns.gif")),
                         'full/b250e3a74fff2e4703e310048a5b13eba79379d2.jpg')
        self.assertEqual(file_path(Request("http://www.dfsonline.co.uk/get_prod_image.php?img=status_0907_mdm.jpg")),
                         'full/4507be485f38b0da8a0be9eb2e1dfab8a19223f2.jpg')
        self.assertEqual(file_path(Request("http://www.dorma.co.uk/images/product_details/2532/")),
                         'full/97ee6f8a46cbbb418ea91502fd24176865cf39b2.jpg')
        self.assertEqual(file_path(Request("http://www.dorma.co.uk/images/product_details/2532")),
                         'full/244e0dd7d96a3b7b01f54eded250c9e272577aa1.jpg')
        # Passing response/info must not change the generated path.
        self.assertEqual(file_path(Request("http://www.dorma.co.uk/images/product_details/2532"),
                                   response=Response("http://www.dorma.co.uk/images/product_details/2532"),
                                   info=object()),
                         'full/244e0dd7d96a3b7b01f54eded250c9e272577aa1.jpg')

    def test_thumbnail_name(self):
        """thumb_path() must map a URL to 'thumbs/<name>/<sha1>.jpg'."""
        thumb_path = self.pipeline.thumb_path
        name = '50'
        self.assertEqual(thumb_path(Request("file:///tmp/foo.jpg"), name),
                         'thumbs/50/38a86208c36e59d4404db9e37ce04be863ef0335.jpg')
        self.assertEqual(thumb_path(Request("file://foo.png"), name),
                         'thumbs/50/e55b765eba0ec7348e50a1df496040449071b96a.jpg')
        self.assertEqual(thumb_path(Request("file:///tmp/foo"), name),
                         'thumbs/50/0329ad83ebb8e93ea7c7906d46e9ed55f7349a50.jpg')
        self.assertEqual(thumb_path(Request("file:///tmp/some.name/foo"), name),
                         'thumbs/50/850233df65a5b83361798f532f1fc549cd13cbe9.jpg')
        self.assertEqual(thumb_path(Request("file:///tmp/some.name/foo"), name,
                                    response=Response("file:///tmp/some.name/foo"),
                                    info=object()),
                         'thumbs/50/850233df65a5b83361798f532f1fc549cd13cbe9.jpg')

    def test_convert_image(self):
        """convert_image() must normalise to RGB and keep thumbnail ratios."""
        # Bug fix throughout: assertEquals is a deprecated alias (removed in
        # Python 3.12); use assertEqual.
        SIZE = (100, 100)
        # straightforward case: RGB and JPEG
        COLOUR = (0, 127, 255)
        im = _create_image('JPEG', 'RGB', SIZE, COLOUR)
        converted, _ = self.pipeline.convert_image(im)
        self.assertEqual(converted.mode, 'RGB')
        self.assertEqual(converted.getcolors(), [(10000, COLOUR)])
        # check that thumbnail keeps the image ratio
        thumbnail, _ = self.pipeline.convert_image(converted, size=(10, 25))
        self.assertEqual(thumbnail.mode, 'RGB')
        self.assertEqual(thumbnail.size, (10, 10))
        # transparency case: RGBA and PNG
        COLOUR = (0, 127, 255, 50)
        im = _create_image('PNG', 'RGBA', SIZE, COLOUR)
        converted, _ = self.pipeline.convert_image(im)
        self.assertEqual(converted.mode, 'RGB')
        self.assertEqual(converted.getcolors(), [(10000, (205, 230, 255))])
class DeprecatedImagesPipeline(ImagesPipeline):
    """Pipeline overriding the legacy key methods, used to exercise the
    backwards-compatibility shims in ImagesPipeline."""

    def file_key(self, url):
        # Legacy alias: delegate to image_key().
        return self.image_key(url)

    def image_key(self, url):
        digest = hashlib.sha1(to_bytes(url)).hexdigest()
        return 'empty/%s.jpg' % (digest)

    def thumb_key(self, url, thumb_id):
        digest = hashlib.sha1(to_bytes(url)).hexdigest()
        return 'thumbsup/%s/%s.jpg' % (thumb_id, digest)
class DeprecatedImagesPipelineTestCase(unittest.TestCase):
    """Check the deprecation warnings for the legacy image_key/file_key/
    thumb_key APIs, and that overridden legacy methods are still honoured
    by file_path()/thumb_path()."""
    def setUp(self):
        self.tempdir = mkdtemp()
    def init_pipeline(self, pipeline_class):
        # Helper: build and open a pipeline of the given class.
        self.pipeline = pipeline_class(self.tempdir, download_func=_mocked_download_func)
        self.pipeline.open_spider(None)
    def test_default_file_key_method(self):
        self.init_pipeline(ImagesPipeline)
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            # Legacy call still works but must emit exactly one warning.
            self.assertEqual(self.pipeline.file_key("https://dev.mydeco.com/mydeco.gif"),
                             'full/3fd165099d8e71b8a48b2683946e64dbfad8b52d.jpg')
            self.assertEqual(len(w), 1)
            self.assertTrue('image_key(url) and file_key(url) methods are deprecated' in str(w[-1].message))
    def test_default_image_key_method(self):
        self.init_pipeline(ImagesPipeline)
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            self.assertEqual(self.pipeline.image_key("https://dev.mydeco.com/mydeco.gif"),
                             'full/3fd165099d8e71b8a48b2683946e64dbfad8b52d.jpg')
            self.assertEqual(len(w), 1)
            self.assertTrue('image_key(url) and file_key(url) methods are deprecated' in str(w[-1].message))
    def test_overridden_file_key_method(self):
        # A subclass's legacy file_key() must drive file_path()'s output.
        self.init_pipeline(DeprecatedImagesPipeline)
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            self.assertEqual(self.pipeline.file_path(Request("https://dev.mydeco.com/mydeco.gif")),
                             'empty/3fd165099d8e71b8a48b2683946e64dbfad8b52d.jpg')
            self.assertEqual(len(w), 1)
            self.assertTrue('image_key(url) and file_key(url) methods are deprecated' in str(w[-1].message))
    def test_default_thumb_key_method(self):
        self.init_pipeline(ImagesPipeline)
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            self.assertEqual(self.pipeline.thumb_key("file:///tmp/foo.jpg", 50),
                             'thumbs/50/38a86208c36e59d4404db9e37ce04be863ef0335.jpg')
            self.assertEqual(len(w), 1)
            self.assertTrue('thumb_key(url) method is deprecated' in str(w[-1].message))
    def test_overridden_thumb_key_method(self):
        # A subclass's legacy thumb_key() must drive thumb_path()'s output.
        self.init_pipeline(DeprecatedImagesPipeline)
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            self.assertEqual(self.pipeline.thumb_path(Request("file:///tmp/foo.jpg"), 50),
                             'thumbsup/50/38a86208c36e59d4404db9e37ce04be863ef0335.jpg')
            self.assertEqual(len(w), 1)
            self.assertTrue('thumb_key(url) method is deprecated' in str(w[-1].message))
    def tearDown(self):
        rmtree(self.tempdir)
class ImagesPipelineTestCaseFields(unittest.TestCase):
    """Verify which item fields feed image URLs and receive results,
    both for Item subclasses and plain dicts."""
    def test_item_fields_default(self):
        # Default field names: 'image_urls' in, 'images' out.
        class TestItem(Item):
            name = Field()
            image_urls = Field()
            images = Field()
        for cls in TestItem, dict:
            url = 'http://www.example.com/images/1.jpg'
            item = cls({'name': 'item1', 'image_urls': [url]})
            pipeline = ImagesPipeline.from_settings(Settings({'IMAGES_STORE': 's3://example/images/'}))
            requests = list(pipeline.get_media_requests(item, None))
            self.assertEqual(requests[0].url, url)
            results = [(True, {'url': url})]
            pipeline.item_completed(results, item, None)
            self.assertEqual(item['images'], [results[0][1]])
    def test_item_fields_override_settings(self):
        # Field names overridden via IMAGES_URLS_FIELD / IMAGES_RESULT_FIELD.
        class TestItem(Item):
            name = Field()
            image = Field()
            stored_image = Field()
        for cls in TestItem, dict:
            url = 'http://www.example.com/images/1.jpg'
            item = cls({'name': 'item1', 'image': [url]})
            pipeline = ImagesPipeline.from_settings(Settings({
                'IMAGES_STORE': 's3://example/images/',
                'IMAGES_URLS_FIELD': 'image',
                'IMAGES_RESULT_FIELD': 'stored_image'
            }))
            requests = list(pipeline.get_media_requests(item, None))
            self.assertEqual(requests[0].url, url)
            results = [(True, {'url': url})]
            pipeline.item_completed(results, item, None)
            self.assertEqual(item['stored_image'], [results[0][1]])
class ImagesPipelineTestCaseCustomSettings(unittest.TestCase):
img_cls_attribute_names = [
# Pipeline attribute names with corresponding setting names.
("EXPIRES", "IMAGES_EXPIRES"),
("MIN_WIDTH", "IMAGES_MIN_WIDTH"),
("MIN_HEIGHT", "IMAGES_MIN_HEIGHT"),
("IMAGES_URLS_FIELD", "IMAGES_URLS_FIELD"),
("IMAGES_RESULT_FIELD", "IMAGES_RESULT_FIELD"),
("THUMBS", "IMAGES_THUMBS")
]
# This should match what is defined in ImagesPipeline.
default_pipeline_settings = dict(
MIN_WIDTH=0,
MIN_HEIGHT=0,
EXPIRES=90,
THUMBS={},
IMAGES_URLS_FIELD='image_urls',
IMAGES_RESULT_FIELD='images'
)
def setUp(self):
self.tempdir = mkdtemp()
def tearDown(self):
rmtree(self.tempdir)
def _generate_fake_settings(self, prefix=None):
"""
:param prefix: string for setting keys
:return: dictionary of image pipeline settings
"""
def random_string():
return "".join([chr(random.randint(97, 123)) for _ in range(10)])
settings = {
"IMAGES_EXPIRES": random.randint(1, 1000),
"IMAGES_STORE": self.tempdir,
"IMAGES_RESULT_FIELD": random_string(),
"IMAGES_URLS_FIELD": random_string(),
"IMAGES_MIN_WIDTH": random.randint(1, 1000),
"IMAGES_MIN_HEIGHT": random.randint(1, 1000),
"IMAGES_THUMBS": {
'small': (random.randint(1, 1000), random.randint(1, 1000)),
'big': (random.randint(1, 1000), random.randint(1, 1000))
}
}
if not prefix:
return settings
return {prefix.upper() + "_" + k if k != "IMAGES_STORE" else k: v for k, v in settings.items()}
def _generate_fake_pipeline_subclass(self):
"""
:return: ImagePipeline class will all uppercase attributes set.
"""
class UserDefinedImagePipeline(ImagesPipeline):
# Values should be in different range than fake_settings.
MIN_WIDTH = random.randint(1000, 2000)
MIN_HEIGHT = random.randint(1000, 2000)
THUMBS = {
'small': (random.randint(1000, 2000), random.randint(1000, 2000)),
'big': (random.randint(1000, 2000), random.randint(1000, 2000))
}
EXPIRES = random.randint(1000, 2000)
IMAGES_URLS_FIELD = "field_one"
IMAGES_RESULT_FIELD = "field_two"
return UserDefinedImagePipeline
def test_different_settings_for_different_instances(self):
"""
If there are two instances of ImagesPipeline class with different settings, they should
have different settings.
"""
custom_settings = self._generate_fake_settings()
default_settings = Settings()
default_sts_pipe = ImagesPipeline(self.tempdir, settings=default_settings)
user_sts_pipe = ImagesPipeline.from_settings(Settings(custom_settings))
for pipe_attr, settings_attr in self.img_cls_attribute_names:
expected_default_value = self.default_pipeline_settings.get(pipe_attr)
custom_value = custom_settings.get(settings_attr)
self.assertNotEqual(expected_default_value, custom_value)
self.assertEqual(getattr(default_sts_pipe, pipe_attr.lower()), expected_default_value)
self.assertEqual(getattr(user_sts_pipe, pipe_attr.lower()), custom_value)
def test_subclass_attrs_preserved_default_settings(self):
"""
If image settings are not defined at all subclass of ImagePipeline takes values
from class attributes.
"""
pipeline_cls = self._generate_fake_pipeline_subclass()
pipeline = pipeline_cls.from_settings(Settings({"IMAGES_STORE": self.tempdir}))
for pipe_attr, settings_attr in self.img_cls_attribute_names:
# Instance attribute (lowercase) must be equal to class attribute (uppercase).
attr_value = getattr(pipeline, pipe_attr.lower())
self.assertNotEqual(attr_value, self.default_pipeline_settings[pipe_attr])
self.assertEqual(attr_value, getattr(pipeline, pipe_attr))
def test_subclass_attrs_preserved_custom_settings(self):
"""
If image settings are defined but they are not defined for subclass default
values taken from settings should be preserved.
"""
pipeline_cls = self._generate_fake_pipeline_subclass()
settings = self._generate_fake_settings()
pipeline = pipeline_cls.from_settings(Settings(settings))
for pipe_attr, settings_attr in self.img_cls_attribute_names:
# Instance attribute (lowercase) must be equal to
# value defined in settings.
value = getattr(pipeline, pipe_attr.lower())
self.assertNotEqual(value, self.default_pipeline_settings[pipe_attr])
setings_value = settings.get(settings_attr)
self.assertEqual(value, setings_value)
def test_no_custom_settings_for_subclasses(self):
"""
If there are no settings for subclass and no subclass attributes, pipeline should use
attributes of base class.
"""
class UserDefinedImagePipeline(ImagesPipeline):
pass
user_pipeline = UserDefinedImagePipeline.from_settings(Settings({"IMAGES_STORE": self.tempdir}))
for pipe_attr, settings_attr in self.img_cls_attribute_names:
# Values from settings for custom pipeline should be set on pipeline instance.
custom_value = self.default_pipeline_settings.get(pipe_attr.upper())
self.assertEqual(getattr(user_pipeline, pipe_attr.lower()), custom_value)
def test_custom_settings_for_subclasses(self):
"""
If there are custom settings for subclass and NO class attributes, pipeline should use custom
settings.
"""
class UserDefinedImagePipeline(ImagesPipeline):
pass
prefix = UserDefinedImagePipeline.__name__.upper()
settings = self._generate_fake_settings(prefix=prefix)
user_pipeline = UserDefinedImagePipeline.from_settings(Settings(settings))
for pipe_attr, settings_attr in self.img_cls_attribute_names:
# Values from settings for custom pipeline should be set on pipeline instance.
custom_value = settings.get(prefix + "_" + settings_attr)
self.assertNotEqual(custom_value, self.default_pipeline_settings[pipe_attr])
self.assertEqual(getattr(user_pipeline, pipe_attr.lower()), custom_value)
def test_custom_settings_and_class_attrs_for_subclasses(self):
"""
If there are custom settings for subclass AND class attributes
setting keys are preferred and override attributes.
"""
pipeline_cls = self._generate_fake_pipeline_subclass()
prefix = pipeline_cls.__name__.upper()
settings = self._generate_fake_settings(prefix=prefix)
user_pipeline = pipeline_cls.from_settings(Settings(settings))
for pipe_attr, settings_attr in self.img_cls_attribute_names:
custom_value = settings.get(prefix + "_" + settings_attr)
self.assertNotEqual(custom_value, self.default_pipeline_settings[pipe_attr])
self.assertEqual(getattr(user_pipeline, pipe_attr.lower()), custom_value)
def test_cls_attrs_with_DEFAULT_prefix(self):
    """Class attributes named ``DEFAULT_*`` are picked up by the pipeline."""
    class UserDefinedImagePipeline(ImagesPipeline):
        DEFAULT_IMAGES_URLS_FIELD = "something"
        DEFAULT_IMAGES_RESULT_FIELD = "something_else"

    pipeline = UserDefinedImagePipeline.from_settings(
        Settings({"IMAGES_STORE": self.tempdir}))
    self.assertEqual(pipeline.images_urls_field, "something")
    self.assertEqual(pipeline.images_result_field, "something_else")
def test_user_defined_subclass_default_key_names(self):
    """Subclass of ImagesPipeline configured with unprefixed setting names.

    When the user does not prefix settings with the subclass name, the
    default (unprefixed) keys must still be honored.
    """
    fake_settings = self._generate_fake_settings()

    class UserPipe(ImagesPipeline):
        pass

    pipeline = UserPipe.from_settings(Settings(fake_settings))
    for attr_name, settings_key in self.img_cls_attribute_names:
        self.assertEqual(getattr(pipeline, attr_name.lower()),
                         fake_settings.get(settings_key))
def _create_image(format, *a, **kw):
    """Build a fresh PIL image, round-trip it through a temp file and reopen it.

    Returns the reopened image, backed by the (still-open) temporary file.
    """
    tmp = TemporaryFile()
    image = Image.new(*a, **kw)
    image.save(tmp, format)
    tmp.seek(0)  # rewind so Image.open reads from the start of the file
    return Image.open(tmp)
# Allow running this test module directly (outside the test runner).
if __name__ == "__main__":
    unittest.main()
|
wenyu1001/scrapy
|
tests/test_pipeline_images.py
|
Python
|
bsd-3-clause
| 18,224
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2017 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Modis level 1b hdf-eos format reader.
Introduction
------------
The ``modis_l1b`` reader reads and calibrates Modis L1 image data in hdf-eos format. Files often have
a pattern similar to the following one:
.. parsed-literal::
M[O/Y]D02[1/H/Q]KM.A[date].[time].[collection].[processing_time].hdf
Other patterns where "collection" and/or "proccessing_time" are missing might also work
(see the readers yaml file for details). Geolocation files (MOD03) are also supported.
Geolocation files
-----------------
For the 1km data (mod021km) geolocation files (mod03) are optional. If not given to the reader
1km geolocations will be interpolated from the 5km geolocation contained within the file.
For the 500m and 250m data geolocation files are needed.
References:
- Modis gelocation description: http://www.icare.univ-lille1.fr/wiki/index.php/MODIS_geolocation
"""
import logging
import numpy as np
import xarray as xr
from satpy import CHUNK_SIZE
from satpy.readers.hdf4_utils import from_sds
from satpy.readers.hdfeos_base import HDFEOSBaseFileReader, HDFEOSGeoReader
logger = logging.getLogger(__name__)
class HDFEOSBandReader(HDFEOSBaseFileReader):
    """Handler for the regular band channels."""

    # Resolution letter embedded in the ECS collection short name
    # (e.g. "MOD021KM" -> "1", "MOD02HKM" -> "H", "MOD02QKM" -> "Q"),
    # mapped to the pixel resolution in meters.
    res = {"1": 1000,
           "Q": 250,
           "H": 500}

    def __init__(self, filename, filename_info, filetype_info):
        """Init the file handler.

        Derives ``self.resolution`` from the third-to-last character of
        the collection SHORTNAME found in the inventory metadata.
        """
        HDFEOSBaseFileReader.__init__(self, filename, filename_info, filetype_info)
        ds = self.metadata['INVENTORYMETADATA'][
            'COLLECTIONDESCRIPTIONCLASS']['SHORTNAME']['VALUE']
        self.resolution = self.res[ds[-3]]

    def get_dataset(self, key, info):
        """Read data from file and return the corresponding projectables.

        Searches the SDS variables valid for this file's resolution for
        the requested band, masks invalid/uncertain pixels and applies the
        calibration requested in ``key['calibration']``.

        Returns None when the file's resolution does not match the request
        or when the band is not present in any candidate variable.

        Raises:
            ValueError: if the requested calibration is unknown.
        """
        # SDS variable names that may hold band data, per resolution.
        datadict = {
            1000: ['EV_250_Aggr1km_RefSB',
                   'EV_500_Aggr1km_RefSB',
                   'EV_1KM_RefSB',
                   'EV_1KM_Emissive'],
            500: ['EV_250_Aggr500_RefSB',
                  'EV_500_RefSB'],
            250: ['EV_250_RefSB']}
        if self.resolution != key['resolution']:
            return
        datasets = datadict[self.resolution]
        for dataset in datasets:
            subdata = self.sd.select(dataset)
            var_attrs = subdata.attributes()
            band_names = var_attrs["band_names"].split(",")
            # get the relative indices of the desired channel
            try:
                index = band_names.index(key['name'])
            except ValueError:
                # Band not stored in this SDS variable; try the next one.
                continue
            uncertainty = self.sd.select(dataset + "_Uncert_Indexes")
            array = xr.DataArray(from_sds(subdata, chunks=CHUNK_SIZE)[index, :, :],
                                 dims=['y', 'x']).astype(np.float32)
            valid_range = var_attrs['valid_range']

            # Fill values:
            # Data Value Meaning
            # 65535 Fill Value (includes reflective band data at night mode
            # and completely missing L1A scans)
            # 65534 L1A DN is missing within a scan
            # 65533 Detector is saturated
            # 65532 Cannot compute zero point DN, e.g., SV is saturated
            # 65531 Detector is dead (see comments below)
            # 65530 RSB dn** below the minimum of the scaling range
            # 65529 TEB radiance or RSB dn** exceeds the maximum of the
            # scaling range
            # 65528 Aggregation algorithm failure
            # 65527 Rotation of Earth view Sector from nominal science
            # collection position
            # 65526 Calibration coefficient b1 could not be computed
            # 65525 Subframe is dead
            # 65524 Both sides of the PCLW electronics on simultaneously
            # 65501 - 65523 (reserved for future use)
            # 65500 NAD closed upper limit

            # Mask pixels outside the valid range (replaced with NaN by
            # DataArray.where) and pixels with high uncertainty index.
            array = array.where(array >= np.float32(valid_range[0]))
            array = array.where(array <= np.float32(valid_range[1]))
            array = array.where(from_sds(uncertainty, chunks=CHUNK_SIZE)[index, :, :] < 15)

            if key['calibration'] == 'brightness_temperature':
                projectable = calibrate_bt(array, var_attrs, index, key['name'])
                info.setdefault('units', 'K')
                info.setdefault('standard_name', 'toa_brightness_temperature')
            elif key['calibration'] == 'reflectance':
                projectable = calibrate_refl(array, var_attrs, index)
                info.setdefault('units', '%')
                info.setdefault('standard_name',
                                'toa_bidirectional_reflectance')
            elif key['calibration'] == 'radiance':
                projectable = calibrate_radiance(array, var_attrs, index)
                info.setdefault('units', var_attrs.get('radiance_units'))
                info.setdefault('standard_name',
                                'toa_outgoing_radiance_per_unit_wavelength')
            elif key['calibration'] == 'counts':
                projectable = calibrate_counts(array, var_attrs, index)
                info.setdefault('units', 'counts')
                info.setdefault('standard_name', 'counts')  # made up
            else:
                raise ValueError("Unknown calibration for "
                                 "key: {}".format(key))
            projectable.attrs = info

            # NOTE(review): the commented-out code below looks like legacy
            # masking/trimming logic kept for reference -- confirm before
            # removing.
            # if ((platform_name == 'Aqua' and key['name'] in ["6", "27", "36"]) or
            # (platform_name == 'Terra' and key['name'] in ["29"])):
            # height, width = projectable.shape
            # row_indices = projectable.mask.sum(1) == width
            # if row_indices.sum() != height:
            # projectable.mask[row_indices, :] = True

            # Get the orbit number
            # if not satscene.orbit:
            # mda = self.data.attributes()["CoreMetadata.0"]
            # orbit_idx = mda.index("ORBITNUMBER")
            # satscene.orbit = mda[orbit_idx + 111:orbit_idx + 116]

            # Trimming out dead sensor lines (detectors) on terra:
            # (in addition channel 27, 30, 34, 35, and 36 are nosiy)
            # if satscene.satname == "terra":
            # for band in ["29"]:
            # if not satscene[band].is_loaded() or satscene[band].data.mask.all():
            # continue
            # width = satscene[band].data.shape[1]
            # height = satscene[band].data.shape[0]
            # indices = satscene[band].data.mask.sum(1) < width
            # if indices.sum() == height:
            # continue
            # satscene[band] = satscene[band].data[indices, :]
            # satscene[band].area = geometry.SwathDefinition(
            # lons=satscene[band].area.lons[indices, :],
            # lats=satscene[band].area.lats[indices, :])
            self._add_satpy_metadata(key, projectable)
            return projectable
class MixedHDFEOSReader(HDFEOSGeoReader, HDFEOSBandReader):
    """A file handler for files that carry both band data and geolocation."""

    def __init__(self, filename, filename_info, filetype_info):
        """Initialise both parent handlers on the same file."""
        # Geo reader first, then band reader (matches the MRO order).
        HDFEOSGeoReader.__init__(self, filename, filename_info, filetype_info)
        HDFEOSBandReader.__init__(self, filename, filename_info, filetype_info)

    def get_dataset(self, key, info):
        """Dispatch geolocation datasets to the geo reader, bands to the band reader."""
        if key['name'] not in HDFEOSGeoReader.DATASET_NAMES:
            return HDFEOSBandReader.get_dataset(self, key, info)
        return HDFEOSGeoReader.get_dataset(self, key, info)
def calibrate_counts(array, attributes, index):
    """Calibration for counts channels.

    Applies the per-band linear transform ``(dn - offset) * scale`` using
    the ``corrected_counts_*`` HDF attributes for band ``index``.
    """
    scale = np.float32(attributes["corrected_counts_scales"][index])
    offset = np.float32(attributes["corrected_counts_offsets"][index])
    return (array - offset) * scale
def calibrate_radiance(array, attributes, index):
    """Calibration for radiance channels.

    Applies the per-band linear transform ``(dn - offset) * scale`` using
    the ``radiance_*`` HDF attributes for band ``index``.
    """
    scale = np.float32(attributes["radiance_scales"][index])
    offset = np.float32(attributes["radiance_offsets"][index])
    return (array - offset) * scale
def calibrate_refl(array, attributes, index):
    """Calibration for reflective channels.

    Applies the per-band linear transform using the ``reflectance_*`` HDF
    attributes for band ``index``, then converts from the 0-1 range to
    percent.
    """
    scale = np.float32(attributes["reflectance_scales"][index])
    offset = np.float32(attributes["reflectance_offsets"][index])
    # convert to reflectance and convert from 1 to %
    return (array - offset) * scale * 100
def calibrate_bt(array, attributes, index, band_name):
    """Calibration for the emissive channels.

    Converts scaled counts to brightness temperature (Kelvin): applies the
    per-band linear radiance calibration, inverts the Planck function, and
    applies the MODIS per-band temperature correction coefficients.

    Args:
        array: masked array of counts for one band.
        attributes: HDF variable attributes holding the per-band
            ``radiance_offsets`` and ``radiance_scales``.
        index: position of the band within this SDS variable.
        band_name: MODIS band number as a string, e.g. "31".
    """
    offset = np.float32(attributes["radiance_offsets"][index])
    scale = np.float32(attributes["radiance_scales"][index])

    array = (array - offset) * scale

    # Planck constant (Joule second)
    h__ = np.float32(6.6260755e-34)

    # Speed of light in vacuum (meters per second)
    c__ = np.float32(2.9979246e+8)

    # Boltzmann constant (Joules per Kelvin)
    k__ = np.float32(1.380658e-23)

    # Derived constants
    c_1 = 2 * h__ * c__ * c__
    c_2 = (h__ * c__) / k__

    # Effective central wavenumber (inverse centimeters)
    # One entry per emissive band, in the order of emmissive_channels below.
    cwn = np.array([
        2.641775E+3, 2.505277E+3, 2.518028E+3, 2.465428E+3,
        2.235815E+3, 2.200346E+3, 1.477967E+3, 1.362737E+3,
        1.173190E+3, 1.027715E+3, 9.080884E+2, 8.315399E+2,
        7.483394E+2, 7.308963E+2, 7.188681E+2, 7.045367E+2],
        dtype=np.float32)

    # Temperature correction slope (no units)
    tcs = np.array([
        9.993411E-1, 9.998646E-1, 9.998584E-1, 9.998682E-1,
        9.998819E-1, 9.998845E-1, 9.994877E-1, 9.994918E-1,
        9.995495E-1, 9.997398E-1, 9.995608E-1, 9.997256E-1,
        9.999160E-1, 9.999167E-1, 9.999191E-1, 9.999281E-1],
        dtype=np.float32)

    # Temperature correction intercept (Kelvin)
    tci = np.array([
        4.770532E-1, 9.262664E-2, 9.757996E-2, 8.929242E-2,
        7.310901E-2, 7.060415E-2, 2.204921E-1, 2.046087E-1,
        1.599191E-1, 8.253401E-2, 1.302699E-1, 7.181833E-2,
        1.972608E-2, 1.913568E-2, 1.817817E-2, 1.583042E-2],
        dtype=np.float32)

    # Transfer wavenumber [cm^(-1)] to wavelength [m]
    cwn = 1. / (cwn * 100)

    # Some versions of the modis files do not contain all the bands.
    # Look the band up by name so the table index is independent of the
    # per-file band layout.
    emmissive_channels = ["20", "21", "22", "23", "24", "25", "27", "28", "29",
                          "30", "31", "32", "33", "34", "35", "36"]
    global_index = emmissive_channels.index(band_name)

    cwn = cwn[global_index]
    tcs = tcs[global_index]
    tci = tci[global_index]
    # Invert the Planck function to get temperature from radiance.
    # NOTE(review): the 1000000 factor presumably converts the radiance
    # units to SI (per meter) before inversion -- confirm against the
    # MODIS L1B product specification.
    array = c_2 / (cwn * np.log(c_1 / (1000000 * array * cwn ** 5) + 1))
    # Apply the band correction: T = (T_planck - tci) / tcs
    array = (array - tci) / tcs
    return array
|
pytroll/satpy
|
satpy/readers/modis_l1b.py
|
Python
|
gpl-3.0
| 11,468
|
from __future__ import print_function, division
from functools import reduce
import copy
class Grid:
    """A tic-tac-toe board.

    Cell encoding: 0 = empty, 1 = first player ('x'), 2 = second player ('o').
    Grids are plain nested lists so the static helpers can operate on any
    board, not only ``self.grid``.
    """

    def __init__(self, width=3, height=3):
        """Create a ``height`` x ``width`` board of empty (0) cells."""
        self.grid = [[0 for _ in range(width)] for _ in range(height)]

    @staticmethod
    def make_move(row, column, grid, first_player):
        """Return a deep copy of *grid* with the move applied.

        The input grid is never mutated.

        Raises:
            IndexError: if (row, column) is outside the board.
            ValueError: if the chosen cell is already occupied.
        """
        grid = copy.deepcopy(grid)
        if row < 0 or row >= len(grid) or column < 0 or column >= len(grid[0]):
            raise IndexError('The row or column you specified is not within the correct range.')
        # Bug fix: was `is not 0`, an identity check on an int literal that
        # relies on CPython small-int interning (SyntaxWarning on 3.8+).
        if grid[row][column] != 0:
            raise ValueError('The space you selected is already occupied.')
        grid[row][column] = 1 if first_player else 2
        return grid

    @staticmethod
    def open_spaces(grid):
        """Return a list of (row, column) tuples for every empty cell."""
        return [(row, column)
                for row in range(len(grid))
                for column in range(len(grid[row]))
                if grid[row][column] == 0]

    def render(self):
        """Print the board to stdout."""
        print(self)

    def __str__(self):
        return '\n--+---+--\n'.join(self.decode_keys(self.grid))

    def decode_keys(self, grid):
        """Return one ' | '-joined display string per row of *grid*.

        Bug fix: previously iterated ``self.grid`` and silently ignored the
        *grid* argument.
        """
        return [' | '.join(self.decode_key(cell) for cell in row) for row in grid]

    def decode_key(self, key):
        """Map a cell value to its display character."""
        if key == 1:
            return 'x'
        elif key == 2:
            return 'o'
        else:
            return ' '

    @staticmethod
    def check_win(grid):
        """Return the winner (1 or 2), 0 if the game is still open, -1 for a tie."""
        grid = copy.deepcopy(grid)
        # Check for win conditions.
        for line in win_list:
            first, second, third = (grid[r][c] for r, c in line)
            if first == second == third != 0:
                return first
        # Still open if any cell is empty; otherwise it's a tie.
        for row in range(len(grid)):
            for column in range(len(grid[0])):
                if grid[row][column] == 0:
                    return 0
        return -1
# All eight winning lines of a 3x3 board -- three rows, three columns and
# the two diagonals -- each as a list of (row, column) coordinates.
win_list = [[(0,0),(0,1),(0,2)], [(1,0),(1,1),(1,2)], [(2,0),(2,1),(2,2)],
            [(0,0),(1,0),(2,0)], [(0,1),(1,1),(2,1)], [(0,2),(1,2),(2,2)],
            [(0,0),(1,1),(2,2)], [(0,2),(1,1),(2,0)]]
|
shanesatterfield/tic-tac-toe
|
tic-tac-toe/grid.py
|
Python
|
mit
| 2,134
|
import time
import numpy as np
from openest.curves import basic, ushape_analytic, ushape_numeric
# Regression script comparing the legacy (ordered=False) and new
# (ordered='maintain') behaviour of the numeric u-shaped curves on random
# temperature draws.
curve = basic.ZeroInterceptPolynomialCurve([-10, 30], [1, 1, -.03]) # u-shaped to about 22

# Pre-computed evaluation grid handed to the u-shape construction.
fillxxs = np.arange(-10, 30)
fillyys = curve(fillxxs)

ucurve_old = ushape_numeric.UShapedCurve(curve, 0, lambda x: x, ordered=False, fillxxs=fillxxs, fillyys=fillyys)
ucurve_new = ushape_numeric.UShapedCurve(curve, 0, lambda x: x, ordered='maintain', fillxxs=fillxxs, fillyys=fillyys)

#tas = 40 * np.random.sample(100) - 10 # -10 to 30
#tas = np.array([1, 30, 2, 35, 3, 40])
tas = 40 * np.random.sample(100) - 10 # -10 to 30

results_old = ucurve_old(tas)
results_new = ucurve_new(tas)

## Check that same results from old version
# (as multisets -- the new ordering may permute the outputs)
np.testing.assert_equal(sorted(results_old), sorted(results_new))

## Check that all values match up to plateau
# NOTE(review): points flattened away from the raw curve are expected to
# sit on a single constant plateau per side (above/below 0); verify that
# every flattened value on a side shares that constant.
hiplateau = None
loplateau = None
for ii in range(len(tas)):
    if results_new[ii] != curve(tas[ii]):
        if tas[ii] > 0:
            if hiplateau is None:
                hiplateau = results_new[ii]
            else:
                np.testing.assert_equal(results_new[ii], hiplateau)
        else:
            if loplateau is None:
                loplateau = results_new[ii]
            else:
                np.testing.assert_equal(results_new[ii], loplateau)

# Repeat the comparison with clipping layered on top of the u-shape.
ucurveclip_old = ushape_numeric.UShapedClipping(ucurve_old, curve, 0, lambda x: x, ordered=False)
ucurveclip_new = ushape_numeric.UShapedClipping(ucurve_new, curve, 0, lambda x: x, ordered='maintain')

results_old = ucurveclip_old(tas)
results_new = ucurveclip_new(tas)

## Check that same results from old version
np.testing.assert_equal(sorted(results_old), sorted(results_new))

## Check that all values match up to plateau
# Clipped results must be exactly 0 wherever the base curve is negative.
hiplateau = None
for ii in range(len(tas)):
    if curve(tas[ii]) < 0:
        np.testing.assert_equal(results_new[ii], 0)
    elif results_new[ii] != curve(tas[ii]):
        if hiplateau is None:
            hiplateau = results_new[ii]
        else:
            np.testing.assert_equal(results_new[ii], hiplateau)
|
ClimateImpactLab/open-estimate
|
openest/test/curves/test_ucurve.py
|
Python
|
gpl-3.0
| 2,048
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.