# coding=utf-8
"""Definitions for basic report.
"""
from __future__ import absolute_import
from safe.utilities.i18n import tr
__copyright__ = "Copyright 2016, The InaSAFE Project"
__license__ = "GPL version 3"
__email__ = "info@inasafe.org"
__revision__ = '$Format:%H$'
# Meta description about component
# component generation type
jinja2_component_type = {
'key': 'jinja2_component_type',
'name': 'Jinja2',
'description': tr('A component that is generated using Jinja2 API.')
}
qgis_composer_component_type = {
'key': 'qgis_composer_component_type',
'name': 'QGISComposer',
'description': tr('A component that is generated using QGISComposer API.')
}
qt_renderer_component_type = {
'key': 'qt_renderer_component_type',
'name': 'QtRenderer',
'description': tr('A component that is generated using QtRenderer API.')
}
available_component_type = [
jinja2_component_type,
qgis_composer_component_type,
qt_renderer_component_type
]
# Tags
# Tags is a way to categorize different component quickly for easy
# retrieval
final_product_tag = {
'key': 'final_product_tag',
'name': tr('Final Product'),
'description': tr(
'Tag this component as a Final Product of report generation.')
}
infographic_product_tag = {
'key': 'infographic_product_tag',
'name': tr('Infographic'),
'description': tr(
'Tag this component as an Infographic related product.')
}
map_product_tag = {
'key': 'map_product_tag',
'name': tr('Map'),
'description': tr(
'Tag this component as a product mainly to show map.')
}
table_product_tag = {
'key': 'table_product_tag',
'name': tr('Table'),
'description': tr(
'Tag this component as a product mainly with table.')
}
template_product_tag = {
'key': 'template_product_tag',
'name': tr('Template'),
'description': tr(
'Tag this component as a QGIS Template product.')
}
product_type_tag = [
table_product_tag,
map_product_tag,
template_product_tag,
infographic_product_tag
]
html_product_tag = {
'key': 'html_product_tag',
'name': tr('HTML'),
'description': tr('Tag this product as HTML output.')
}
pdf_product_tag = {
'key': 'pdf_product_tag',
'name': tr('PDF'),
'description': tr('Tag this product as PDF output.')
}
qpt_product_tag = {
'key': 'qpt_product_tag',
'name': tr('QPT'),
'description': tr('Tag this product as QPT output.')
}
png_product_tag = {
'key': 'png_product_tag',
'name': tr('PNG'),
'description': tr('Tag this product as PNG output.')
}
svg_product_tag = {
'key': 'svg_product_tag',
'name': tr('SVG'),
'description': tr('Tag this product as SVG output.')
}
product_output_type_tag = [
html_product_tag,
pdf_product_tag,
qpt_product_tag,
png_product_tag,
svg_product_tag
]
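# Tags are plain dicts, so categorising and retrieving components reduces to
# a membership test. A minimal illustrative sketch (the `components_by_tag`
# helper and the 'tags' field on component metadata are assumptions for
# illustration, not part of the InaSAFE API):
def components_by_tag(components, tag):
    """Return the components whose 'tags' list contains the given tag."""
    return [c for c in components if tag in c.get('tags', [])]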
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys
if sys.version_info > (3,):
import http.client
from http.client import BAD_REQUEST, CONFLICT, NOT_FOUND, OK
else:
import httplib
from httplib import BAD_REQUEST, CONFLICT, NOT_FOUND, OK
from flask import session, request, make_response
from flask_restful import Resource
from cairis.data.ConceptReferenceDAO import ConceptReferenceDAO
from cairis.tools.JsonConverter import json_serialize
from cairis.tools.MessageDefinitions import ConceptReferenceMessage
from cairis.tools.ModelDefinitions import ConceptReferenceModel
from cairis.tools.SessionValidator import get_session_id
__author__ = 'Shamal Faily'
class ConceptReferencesAPI(Resource):
def get(self):
session_id = get_session_id(session, request)
constraint_id = request.args.get('constraint_id', -1)
dao = ConceptReferenceDAO(session_id)
crs = dao.get_concept_references(constraint_id=constraint_id)
dao.close()
resp = make_response(json_serialize(crs, session_id=session_id))
resp.headers['Content-Type'] = "application/json"
return resp
def post(self):
session_id = get_session_id(session, request)
dao = ConceptReferenceDAO(session_id)
new_cr = dao.from_json(request)
dao.add_concept_reference(new_cr)
dao.close()
resp_dict = {'message': 'Concept Reference successfully added'}
resp = make_response(json_serialize(resp_dict, session_id=session_id), OK)
resp.headers['Content-Type'] = 'application/json'
return resp
class ConceptReferenceByNameAPI(Resource):
def get(self, name):
session_id = get_session_id(session, request)
dao = ConceptReferenceDAO(session_id)
found_cr = dao.get_concept_reference(name)
dao.close()
resp = make_response(json_serialize(found_cr, session_id=session_id))
resp.headers['Content-Type'] = "application/json"
return resp
def put(self, name):
session_id = get_session_id(session, request)
dao = ConceptReferenceDAO(session_id)
upd_cr = dao.from_json(request)
dao.update_concept_reference(upd_cr, name)
dao.close()
resp_dict = {'message': 'Concept Reference successfully updated'}
resp = make_response(json_serialize(resp_dict), OK)
resp.headers['Content-Type'] = 'application/json'
return resp
def delete(self, name):
session_id = get_session_id(session, request)
dao = ConceptReferenceDAO(session_id)
dao.delete_concept_reference(name)
dao.close()
resp_dict = {'message': 'Concept Reference successfully deleted'}
resp = make_response(json_serialize(resp_dict), OK)
resp.headers['Content-Type'] = 'application/json'
return resp
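# These Resource classes only become routable once they are registered with a
# flask_restful.Api instance. A minimal wiring sketch (the URL paths below are
# illustrative assumptions; CAIRIS keeps its real routing table elsewhere):
#
#   api = flask_restful.Api(app)
#   api.add_resource(ConceptReferencesAPI, '/api/concept_references')
#   api.add_resource(ConceptReferenceByNameAPI,
#                    '/api/concept_references/name/<string:name>')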
import json
import os
import shutil
import zipfile
from build import cd
def create_template(name, path, **kw):
os.makedirs(os.path.join(path, 'module'))
with open(os.path.join(path, 'module', 'manifest.json'), 'w') as manifest_file:
manifest = {
"name": name,
"version": "0.1",
"description": "My module template"
}
with open(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'platform_version.txt'))) as platform_version_file:
manifest['platform_version'] = platform_version_file.read()
json.dump(manifest, manifest_file, indent=4, sort_keys=True)
# Copy template module
template_path = os.path.abspath(os.path.join(os.path.split(__file__)[0], 'templatemodule'))
for root, dirnames, filenames in os.walk(template_path):
for filename in filenames:
relative_path = os.path.join(root, filename)[len(template_path)+1:]
with open(os.path.join(root, filename), 'r') as source:
lines = source.readlines()
new_dir = os.path.split(os.path.join(path, 'module', relative_path.replace('templatemodule', name)))[0]
if not os.path.isdir(new_dir):
os.makedirs(new_dir)
with open(os.path.join(path, 'module', relative_path.replace('templatemodule', name)), 'w') as output:
for line in lines:
output.write(line.replace('templatemodule', name))
return load(path, manifest)
def load(path, manifest, **kw):
module_model = {}
module_model['local_path'] = path
module_model['module_dynamic_path'] = os.path.join(path, ".trigger", "module_dynamic")
module_model['files'] = {
'manifest': os.path.join(path, 'module', 'manifest.json'),
'module_structure': os.path.join(path, ".trigger", "schema", "module_structure.json")
}
module_model['rawfiles'] = {
'dynamic_platform_version': os.path.join(path, ".trigger", "platform_version.txt")
}
module_model['directories'] = {
'module_directory': os.path.join(path, 'module')
}
return module_model
def create_upload_zip(path, subdirs=None, **kw):
module_path = os.path.abspath(os.path.join(path, 'module'))
zip_base = os.path.abspath(os.path.join(path, '.trigger', 'upload_tmp'))
if os.path.exists(zip_base+".zip"):
os.unlink(zip_base+".zip")
if subdirs:
zip_path = _make_partial_archive(zip_base, subdirs, root_dir=module_path)
else:
zip_path = shutil.make_archive(zip_base, 'zip', root_dir=module_path)
return zip_path
def _make_partial_archive(zip_base, subdirs, root_dir):
zip = zipfile.ZipFile(zip_base + ".zip", "w")
with cd(root_dir):
for subdir in subdirs:
if not os.path.exists(subdir):
continue
for root, dirs, files in os.walk(subdir):
for file in files:
zip.write(os.path.join(root, file))
zip.close()
return zip_base + ".zip"
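# A hedged end-to-end sketch of how these helpers compose (the module name
# and workspace path are illustrative assumptions):
#
#   model = create_template('mymodule', '/tmp/mymodule-workspace')
#   zip_path = create_upload_zip(model['local_path'], subdirs=['mymodule'])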
"""
WSGI config for server_admin project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os, sys
sys.path.append('/home/terrywong/server_admin')
sys.path.append('/home/terrywong/server_admin/server_admin')
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "server_admin.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "server_admin.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
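# A minimal sketch of that middleware pattern: wrap the Django application in
# a callable that can act before and after each request (the environ key used
# here is purely illustrative).
# def middleware(app):
#     def wrapped(environ, start_response):
#         environ['X_EXAMPLE'] = 'set-by-middleware'  # pre-request work
#         return app(environ, start_response)
#     return wrapped
# application = middleware(application)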
event_id = raven.captureException(request=request)
context = {
'detail': 'Internal Error',
'errorId': event_id,
}
response = Response(context, status=500)
response.exception = True
return response
def create_audit_entry(self, request, transaction_id=None, **kwargs):
return create_audit_entry(request, transaction_id, audit_logger, **kwargs)
def initialize_request(self, request, *args, **kwargs):
rv = super(Endpoint, self).initialize_request(request, *args, **kwargs)
# If our request is being made via our internal API client, we need to
# stitch back on auth and user information
if getattr(request, '__from_api_client__', False):
if rv.auth is None:
rv.auth = getattr(request, 'auth', None)
if rv.user is None:
rv.user = getattr(request, 'user', None)
return rv
@csrf_exempt
def dispatch(self, request, *args, **kwargs):
"""
Identical to rest framework's dispatch except we add the ability
to convert arguments (for common URL params).
"""
self.args = args
self.kwargs = kwargs
request = self.initialize_request(request, *args, **kwargs)
self.request = request
self.headers = self.default_response_headers # deprecate?
if settings.SENTRY_API_RESPONSE_DELAY:
time.sleep(settings.SENTRY_API_RESPONSE_DELAY / 1000.0)
origin = request.META.get('HTTP_ORIGIN', 'null')
# A "null" value should be treated as no Origin for us.
# See RFC6454 for more information on this behavior.
if origin == 'null':
origin = None
try:
if origin and request.auth:
allowed_origins = request.auth.get_allowed_origins()
if not is_valid_origin(origin, allowed=allowed_origins):
response = Response('Invalid origin: %s' %
(origin, ), status=400)
self.response = self.finalize_response(
request, response, *args, **kwargs)
return self.response
self.initial(request, *args, **kwargs)
if getattr(request, 'user', None) and request.user.is_authenticated():
raven.user_context({
'id': request.user.id,
'username': request.user.username,
'email': request.user.email,
})
# Get the appropriate handler method
if request.method.lower() in self.http_method_names:
handler = getattr(self, request.method.lower(),
self.http_method_not_allowed)
(args, kwargs) = self.convert_args(request, *args, **kwargs)
self.args = args
self.kwargs = kwargs
else:
handler = self.http_method_not_allowed
if getattr(request, 'access', None) is None:
# setup default access
request.access = access.from_request(request)
response = handler(request, *args, **kwargs)
except Exception as exc:
response = self.handle_exception(request, exc)
if origin:
self.add_cors_headers(request, response)
self.response = self.finalize_response(
request, response, *args, **kwargs)
return self.response
def add_cors_headers(self, request, response):
response['Access-Control-Allow-Origin'] = request.META['HTTP_ORIGIN']
response['Access-Control-Allow-Methods'] = ', '.join(
self.http_method_names)
def add_cursor_headers(self, request, response, cursor_result):
if cursor_result.hits is not None:
response['X-Hits'] = cursor_result.hits
if cursor_result.max_hits is not None:
response['X-Max-Hits'] = cursor_result.max_hits
response['Link'] = ', '.join(
[
self.build_cursor_link(
request, 'previous', cursor_result.prev),
self.build_cursor_link(request, 'next', cursor_result.next),
]
)
def respond(self, context=None, **kwargs):
return Response(context, **kwargs)
def paginate(
self, request, on_results=None, paginator=None,
paginator_cls=Paginator, default_per_page=100, **paginator_kwargs
):
assert (paginator and not paginator_kwargs) or (paginator_cls and paginator_kwargs)
per_page = int(request.GET.get('per_page', default_per_page))
input_cursor = request.GET.get('cursor')
if input_cursor:
input_cursor = Cursor.from_string(input_cursor)
else:
input_cursor = None
assert per_page <= max(100, default_per_page)
if not paginator:
paginator = paginator_cls(**paginator_kwargs)
cursor_result = paginator.get_result(
limit=per_page,
cursor=input_cursor,
)
# map results based on callback
if on_results:
results = on_results(cursor_result.results)
response = Response(results)
self.add_cursor_headers(request, response, cursor_result)
return response
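# A hedged sketch of how an endpoint subclass would typically call paginate()
# (the model, queryset kwarg and serializer are illustrative assumptions; the
# queryset is simply forwarded to paginator_cls as a keyword argument):
#
#   class ExampleListEndpoint(Endpoint):
#       def get(self, request):
#           return self.paginate(
#               request,
#               queryset=ExampleModel.objects.all(),
#               on_results=lambda results: serialize(results, request.user),
#           )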
class EnvironmentMixin(object):
def _get_environment_func(self, request, organization_id):
"""\
Creates a function that when called returns the ``Environment``
associated with a request object, or ``None`` if no environment was
provided. If the environment doesn't exist, an ``Environment.DoesNotExist``
exception will be raised.
This returns as a callable since some objects outside of the API
endpoint need to handle the "environment was provided but does not
exist" state in addition to the two non-exceptional states (the
environment was provided and exists, or the environment was not
provided.)
"""
return functools.partial(
self._get_environment_from_request,
request,
organization_id,
)
def _get_environment_id_from_request(self, request, organization_id):
environment = self._get_environment_from_request(request, organization_id)
return environment and environment.id
def _get_environment_from_request(self, request, organization_id):
if not hasattr(request, '_cached_environment'):
environment_param = request.GET.get('environment')
if environment_param is None:
environment = None
else:
environment = Environment.get_for_organization_id(
name=environment_param,
organization_id=organization_id,
)
request._cached_environment = environment
return request._cached_environment
class StatsMixin(object):
def _parse_args(self, request, environment_id=None):
resolution = request.GET.get('resolution')
if resolution:
resolution = self._parse_resolution(resolution)
assert resolution in tsdb.get_rollups()
end = request.GET.get('until')
if end:
end = to_datetime(float(end))
else:
end = datetime.utcnow().replace(tzinfo=utc)
start = request.GET.get('since')
if start:
start = to_datetime(float(start))
assert start <= end, 'start must be before or equal to end'
else:
start = end - timedelta(days=1, seconds=-1)
return {
'start': start,
'end': end,
'rollup': resolution,
'environment_id': environment_id,
}
def _parse_resolution(self, value):
if value.endswith('h'):
return int(value[:-1]) * ONE_HOUR
elif value.endswith('d'):
return int(value[:-1]) * ONE_DAY
elif value.endswith('m'):
return int(value[:-1]) * ONE_MINUTE
elif value.endswith('s'):
return int(value[:-1])
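# Worked examples of the resolution grammar above, assuming ONE_MINUTE,
# ONE_HOUR and ONE_DAY are plain second counts (60, 3600, 86400):
#   '10s' -> 10
#   '15m' -> 15 * 60   == 900
#   '2h'  -> 2 * 3600  == 7200
#   '1d'  -> 1 * 86400 == 86400
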
import json
from PIL import Image
import collections
with open('../config/nodes.json') as data_file:
nodes = json.load(data_file)
# pre-allocate one slot per node
ordered_nodes = [None] * len(nodes)
# fill each slot with its [x, y] position
for i, pos in nodes.items():
ordered_nodes[int(i)] = [pos['x'], pos['y']]
filename = "04_rgb_vertical_lines"
im = Image.open("../gif_generators/output/"+filename+".gif") #Can be many different formats.
target_size = 400, 400
resize = False
if target_size != im.size:
resize = True
data = []
# To iterate through the entire gif
try:
frame_num = 0
while True:
im.seek(frame_num)
frame_data = []
# do something to im
img = im.convert('RGB')
if resize:
print "Resizing"
img.thumbnail(target_size, Image.ANTIALIAS)
for x, y in ordered_nodes:
frame_data.append(img.getpixel((x, y)))
#print r, g, b
data.append(frame_data)
# write to json
print frame_num
frame_num += 1
except EOFError:
pass # end of sequence
#print data
#print r, g, b
with open(filename+'.json', 'w') as outfile:
json.dump({
"meta": {},
"data": data
}, outfile)
print im.size  # Get the width and height of the image for iterating over
#print pix[x, y]  # Get the RGBA value of a pixel of the image
import os.path
from crumbs.utils.bin_utils import create_get_binary_path
from bam_crumbs.settings import get_setting
BIN_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', '..', 'bin'))
get_binary_path = create_get_binary_path(os.path.split(__file__)[0],
get_setting)
from django.contrib import admin
from .models import Line
# Register your models here.
admin.site.register(Line)
"""
Runs peaktrough.py, which generates Cooley-Rupert figures for specified
series from FRED.
Execute peaktrough.py first, then run this program.
Written by Dave Backus under the watchful eye of Chase Coleman and Spencer Lyon
Date: July 10, 2014
"""
# import functions from peaktrough.py. * means all of them
# generates the msg "UMD has deleted: peaktrough" which means it reloads
from peaktrough import *
# do plots one at a time
manhandle_freddata("GD | PC1", saveshow="show")
print("aaaa")
# do plots all at once with map
fred_series = ["GDPC1", "PCECC96", "GPDIC96", "OPHNFB"]
# uses default saveshow parameter
gdpc1, pcecc96, gpdic96, ophnfb = map(manhandle_freddata, fred_series)
print("xxxx")
# lets us change saveshow parameter
gdpc1, pcecc96, gpdic96, ophnfb = map(lambda s:
manhandle_freddata(s, saveshow="save"), fred_series)
print("yyyy")
# skip lhs: in Python 3, map() is lazy, so a discarded map object never
# actually calls the function; wrap it in list() to force evaluation
list(map(lambda s:
manhandle_freddata(s, saveshow="show"), fred_series))
print("zzzz")
# -*- coding: utf-8 -*-
from django.apps import AppConfig
class PostsConfig(AppConfig):
name = 'posts'
verbose_name = '图片列表'
#!/usr/bin/env python
#
# PyGab - Python Jabber Framework
# Copyright (c) 2008, Patrick Kennedy
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from common import utils
from core.mounts import *
try:
exec(utils.get_import(
mod=utils.get_module(), from_=['mounts'], import_=['*']))
except ImportError, e:
# If the bot module doesn't overwrite anything, no problem.
pass
#raise
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from google.appengine.api.app_identity.app_identity import get_default_gcs_bucket_name
from google.appengine.ext.blobstore import blobstore
from blob_app import blob_facade
from config.template_middleware import TemplateResponse
from gaecookie.decorator import no_csrf
from tekton import router
from routes.updown import upload, download
from tekton.gae.middleware.redirect import RedirectResponse
@no_csrf
def index(_logged_user):
success_url = router.to_path(upload)
bucket = get_default_gcs_bucket_name()
url = blobstore.create_upload_url(success_url, gs_bucket_name=bucket)
cmd = blob_facade.list_blob_files_cmd(_logged_user)
blob_files = cmd()
delete_path = router.to_path(delete)
download_path = router.to_path(download)
blob_file_form = blob_facade.blob_file_form()
def localize_blob_file(blob_file):
blob_file_dct = blob_file_form.fill_with_model(blob_file, 64)
blob_file_dct['delete_path'] = router.to_path(delete_path, blob_file_dct['id'])
blob_file_dct['download_path'] = router.to_path(download_path,
blob_file.blob_key,
blob_file_dct['filename'])
return blob_file_dct
localized_blob_files = [localize_blob_file(blob_file) for blob_file in blob_files]
context = {'upload_url': url,
'blob_files': localized_blob_files}
return TemplateResponse(context, 'updown/home.html')
def delete(blob_file_id):
blob_facade.delete_blob_file_cmd(blob_file_id).execute()
return RedirectResponse(router.to_path(index))

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests that all containers are imported correctly."""
import unittest
from tests import test_lib
class ContainersImportTest(test_lib.ImportCheckTestCase):
"""Tests that container classes are imported correctly."""
_IGNORABLE_FILES = frozenset(['manager.py', 'interface.py'])
def testContainersImported(self):
"""Tests that all containers are imported."""
self._AssertFilesImportedInInit(
test_lib.CONTAINERS_PATH, self._IGNORABLE_FILES)
if __name__ == '__main__':
unittest.main()
#!/usr/bin/env python3
import unittest
from gppylib.operations.test.regress.test_package import GppkgTestCase, GppkgSpec, BuildGppkg, RPMSpec, BuildRPM, run_command, run_remote_command
class SimpleGppkgTestCase(GppkgTestCase):
"""Covers simple build/install/remove/update test cases"""
def test00_simple_build(self):
self.build(self.alpha_spec, self.A_spec)
def test01_simple_install(self):
gppkg_file = self.alpha_spec.get_filename()
self.install(gppkg_file)
#Check RPM database
self.check_rpm_install(self.A_spec.get_package_name())
def test02_simple_update(self):
gppkg_file = self.alpha_spec.get_filename()
self.install(gppkg_file)
update_rpm_spec = RPMSpec("A", "1", "2")
update_gppkg_spec = GppkgSpec("alpha", "1.1")
update_gppkg_file = self.build(update_gppkg_spec, update_rpm_spec)
self.update(update_gppkg_file)
#Check for the packages
self.check_rpm_install(update_rpm_spec.get_package_name())
def test03_simple_uninstall(self):
gppkg_file = self.alpha_spec.get_filename()
self.install(gppkg_file)
self.remove(gppkg_file)
results = run_command("gppkg -q --all")
results = results.split('\n')[self.start_output:self.end_output]
self.assertEqual(results, [])
def test04_help(self):
help_options = ["--help", "-h", "-?"]
for opt in help_options:
results = run_command("gppkg " + opt)
self.assertNotEqual(results, "")
def test05_version(self):
results = run_command("gppkg --version")
self.assertNotEqual(results, "")
if __name__ == "__main__":
unittest.main()
"""
Test basic std::vector functionality but with a declaration from
the debug info (the Foo struct) as content.
"""
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestDbgInfoContentVector(TestBase):
mydir = TestBase.compute_mydir(__file__)
# FIXME: This should work on more setups, so remove these
# skipIf's in the future.
@add_test_categories(["libc++"])
@skipIf(compiler=no_match("clang"))
@skipIf(oslist=no_match(["linux"]))
@skipIf(debug_info=no_match(["dwarf"]))
def test(self):
self.build()
lldbutil.run_to_source_breakpoint(self,
"// Set break point at this line.", lldb.SBFileSpec("main.cpp"))
self.runCmd("settings set target.import-std-module true")
self.expect("expr (size_t)a.size()", substrs=['(size_t) $0 = 3'])
self.expect("expr (int)a.front().a", substrs=['(int) $1 = 3'])
self.expect("expr (int)a[1].a", substrs=['(int) $2 = 1'])
self.expect("expr (int)a.back().a", substrs=['(int) $3 = 2'])
self.expect("expr std::reverse(a.begin(), a.end())")
self.expect("expr (int)a.front().a", substrs=['(int) $4 = 2'])
self.expect("expr (int)(a.begin()->a)", substrs=[' | (int) $5 = 2'])
self.expect("expr (int)(a.rbegin()->a)", substrs=['(int) $6 = 3'])
self.expect("expr a.pop_back()")
self.expect("expr (int)a.back().a", substrs=['(int) $7 = 1'])
self.expect("expr (size_t)a.size()", substrs=['(size_t) $8 = 2'])
self.expect("expr (int)a.at(0).a", substrs=['(int) $9 = 2'])
self.expect("expr a.push | _back({4})")
self.expect("expr (int)a.back().a", substrs=['(int) $10 = 4'])
self.expect("expr (size_t)a.size()", substrs=['(size_t) $11 = 3'])
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import inselect
REQUIREMENTS = [
# TODO How to specify OpenCV? 'cv2>=3.1.0',
'numpy>=1.11.1,<1.12',
'Pillow>=3.4.2,<3.5',
'python-dateutil>=2.6.0,<2.7',
'pytz>=2016.7',
'PyYAML>=3.12,<3.13',
'schematics>=1.1.1,<1.2',
'scikit-learn>=0.18.1,<0.19',
'scipy>=0.18.1,<0.19',
'unicodecsv>=0.14.1,<0.15',
]
SCRIPTS = ('export_metadata', 'ingest', 'read_barcodes', 'save_crops', 'segment')
setup_data = {
'name': 'inselect',
'version': inselect.__version__,
'author': (u'Lawrence Hudson, Alice Heaton, Pieter Holtzhausen, '
u'Stéfan van der Walt'),
'author_email': 'l.hudson@nhm.ac.uk',
'maintainer': 'Lawrence Hudson',
'maintainer_email': 'l.hudson@nhm.ac.uk',
'url': 'https://github.com/NaturalHistoryMuseum/inselect/',
'license': 'Modified BSD',
'description': inselect.__doc__,
'long_description': inselect.__doc__,
'packages': [
'inselect', 'inselect.gui', 'inselect.gui.plugins',
'inselect.gui.views', 'inselect.gui.views.boxes', 'inselect.lib',
'inselect.lib.templates', 'inselect.scripts',
],
'include_package_data': True,
'test_suite': 'inselect.tests',
'scripts': ['inselect/scripts/{0}.py'.format(script) for script in SCRIPTS],
'install_requires': REQUIREMENTS,
'extras_require': {
'gui': [
'ExifRead>=2.1.2', 'humanize>=0.5.1', 'psutil>=5.0.0',
'PyQt5>=5.6.0'
],
'barcodes': ['gouda>=0.1.13', 'pylibdmtx>=0.1.6', 'pyzbar>=0.1.3'],
'windows': ['pywin32>=220'],
'development': ['coveralls>=1.1', 'mock>=2.0.0', 'nose>=1.3.7'],
},
'entry_points': {
'gui_scripts':
['inselect = inselect.gui.app:main'],
'console_scripts':
['{0} = inselect.scripts.{0}:main'.format(script) for script in SCRIPTS],
},
'classifiers': [
'Development Status :: 4 - Beta',
'Topic :: Utilities',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Programming Language :: Python :: 3.5',
],
}
def setuptools_setup():
"""setuptools setup"""
from setuptools import setup
setup(**setup_data)
def _qt_files(site_packages):
"""Returns a list of tuples (src, dest) of Qt dependencies to be installed.
Elements are instances of Path.
site_packages should be an instance of Path to the site-packages directory.
If we leave cx_Freeze to do its thing then the entirety of PyQt5, Qt5 and
uic are included in the installer. The only way to avoid horrible bloat is
to hand-tune which files we include.
This whole system is hopelessly fragile.
"""
from pathlib import Path
return [
# Qt DLLs
(
site_packages.joinpath('PyQt5/Qt/bin').joinpath(dep),
dep
)
for dep in ('Qt5Core.dll', 'Qt5Gui.dll', 'Qt5Widgets.dll')
] + [
# Qt plugins
(
site_packages.joinpath('PyQt5/Qt/plugins/platforms').joinpath(dep),
Path('platforms').joinpath(dep)
)
for dep in ('qwindows.dll',)
] + [
# PyQt extension modules
(
site_packages.joinpath('PyQt5').joinpath(dep),
Path('PyQt5').joinpath(dep)
)
for dep in ('__init__.py', 'Qt.pyd', 'QtCore.pyd', 'QtGui.pyd', 'QtWidgets.pyd')
]
def cx_setup():
"""cx_Freeze setup. Used for building Windows installers"""
import scipy
from pathlib import Path
from distutils.sysconfig import get_python_lib
from cx_Freeze import setup, Executable
from pylibdmtx import pylibdmtx
from pyzbar import pyzbar
# Useful paths
environment_root = Path(sys.executable).parent
site_packages = Path(get_python_lib())
project_root = Path(__file__).parent
# Files as tuples (source, dest)
include_files = [
# Evil, evil, evil
# cx_Freeze breaks pywintypes and pythoncom on Python 3.5
# https://bitbucket.org/anthony_tuininga/cx_freeze/issues/194/error-with-frozen-executable-using-35-and
(site_packages.joinpath('win32/lib/pywintypes.py'), 'pywintypes.py'),
(site_packages.joinpath('pythoncom.py'), 'pythoncom.py'),
# Binary dependencies that are not detected
(environment_root.joinpath('Library/bin/mkl_core.dll'), 'mkl_core.dll'),
(environment_root.joinpath('Library/bin/mkl_intel_thread.dll'), 'mkl_intel_thread.dll'),
(environment_root.joinpath('Library/bin/libiomp5md.dll'), 'libiomp5md.dll'),
# Stylesheet
(project_root.joinpath('inselect/gui/inselect.qss'), 'inselect.qss'),
] + [
# DLLs that are not detected because they are loaded by ctypes
(dep._name, Path(dep._name).name)
for dep in pylibdmtx.EXTERNAL_DEPENDENCIES + pyzbar.EXTERNAL_DEPENDENCIES
] + _qt_files(site_packages)
# Convert instances of Path to strs
include_files = [(str(source), str(dest)) for source, dest in include_files]
# Directories as strings
include_files += [
# Fixes scipy freeze
# http://stackoverflow.com/a/32822431/1773758
str(Path(scipy.__file__).parent),
]
# Packages to exclude.
exclude_packages = [
str(p.relative_to(site_packages)).replace('\\', '.') for p in
site_packages.rglob('*/tests')
]
setup(
name=setup_data['name'],
version=setup_data['version'],
options={
'build_exe': {
'packages':
setup_data.get('packages', []) + [
'urllib', 'sklearn.neighbors', 'win32com.gen_py',
'win32timezone',
],
'excludes': [
# '_bz2', # Required by sklearn
'_decimal', '_elementtree', '_hashlib', '_lzma',
'_ssl', 'curses',
'distutils', 'email', 'http', 'lib2to3', 'mock', 'nose',
'PyQt5',
# 'pydoc', # Required by sklearn
'tcl', 'Tkinter', 'ttk', 'Tkconstants',
# 'unittest', # Required by numpy.core.multiarray
'win32com.HTML', 'win32com.test', 'win32evtlog', 'win32pdh',
'win32trace', 'win32ui', 'win32wnet',
'xml', 'xmlrpc',
'inselect.tests',
] + exclude_packages,
'includes': [
],
'include_files': include_files,
'include_msvcr': True,
'optimize': 2,
},
'bdist_msi': {
'upgrade_code': '{fe2ed61d-cd5e-45bb-9d16-146f725e522f}'
}
},
executables=[
Executable(
script='inselect/scripts/inselect.py',
targetName='inselect.exe',
icon='icons/inselect.ico',
base='Win32GUI',
shortcutName='Inselect', # See http://stackoverflow.com/a/15736406
shortcutDir='ProgramMenuFolder'
)
] + [
Executable(
script='inselect/scripts/{0}.py'.format(script),
targetName='{0}.exe'.format(script),
icon='icons/inselect.ico',
base='Console'
)
for script in SCRIPTS
],
)
if (3, 5) <= sys.version_info:
if 'bdist_msi' in sys.argv:
cx_setup()
else:
setuptools_setup()
else:
sys.exit('Only Python >= 3.5 is supported')
def test_bool_false(self):
luigi.run(['--local-scheduler', '--no-lock', 'Baz'])
self.assertEqual(Baz._val, False)
def test_bool_true(self):
luigi.run(['--local-scheduler', '--no-lock', 'Baz', '--bool'])
self.assertEqual(Baz._val, True)
def test_forgot_param(self):
self.assertRaises(luigi.parameter.MissingParameterException, luigi.run, ['--local-scheduler', '--no-lock', 'ForgotParam'],)
@email_patch
def test_forgot_param_in_dep(self, emails):
# A programmatic missing parameter will cause an error email to be sent
luigi.run(['--local-scheduler', '--no-lock', 'ForgotParamDep'])
self.assertNotEquals(emails, [])
def test_default_param_cmdline(self):
luigi.run(['--local-scheduler', '--no-lock', 'WithDefault'])
self.assertEqual(WithDefault().x, 'xyz')
def test_global_param_defaults(self):
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 123)
self.assertEqual(h.global_bool_param, False)
def test_global_param_cmdline(self):
luigi.run(['--local-scheduler', '--no-lock', 'HasGlobalParam', '--x', 'xyz', '--global-param', '124'])
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 124)
self.assertEqual(h.global_bool_param, False)
def test_global_param_cmdline_flipped(self):
luigi.run(['--local-scheduler', '--no-lock', '--global-param', '125', 'HasGlobalParam', '--x', 'xyz'])
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 125)
self.assertEqual(h.global_bool_param, False)
def test_global_param_override(self):
h1 = HasGlobalParam(x='xyz', global_param=124)
h2 = HasGlobalParam(x='xyz')
self.assertEquals(h1.global_param, 124)
self.assertEquals(h2.global_param, 123)
def test_global_param_dep_cmdline(self):
luigi.run(['--local-scheduler', '--no-lock', 'HasGlobalParamDep', '--x', 'xyz', '--global-param', '124'])
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 124)
self.assertEqual(h.global_bool_param, False)
def test_global_param_dep_cmdline_optparse(self):
luigi.run(['--local-scheduler', '--no-lock', '--task', 'HasGlobalParamDep', '--x', 'xyz', '--global-param', '124'], use_optparse=True)
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 124)
self.assertEqual(h.global_bool_param, False)
def test_global_param_dep_cmdline_bool(self):
luigi.run(['--local-scheduler', '--no-lock', 'HasGlobalParamDep', '--x', 'xyz', '--global-bool-param'])
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 123)
self.assertEqual(h.global_bool_param, True)
def test_global_param_shared(self):
luigi.run(['--local-scheduler', '--no-lock', 'SharedGlobalParamA', '--shared-global-param', 'abc'])
b = SharedGlobalParamB()
self.assertEqual(b.shared_global_param, 'abc')
def test_insignificant_parameter(self):
class InsignificantParameterTask(luigi.Task):
foo = luigi.Parameter(significant=False, default='foo_default')
bar = luigi.Parameter()
t1 = InsignificantParameterTask(foo='x', bar='y')
self.assertEqual(t1.task_id, 'InsignificantParameterTask(bar=y)')
t2 = InsignificantParameterTask('u', 'z')
self.assertEqual(t2.foo, 'u')
self.assertEqual(t2.bar, 'z')
self.assertEqual(t2.task_id, 'InsignificantParameterTask(bar=z)')
def test_local_significant_param(self):
""" Obviously, if anything should be positional, so should local
significant parameters """
class MyTask(luigi.Task):
# This could typically be "--label-company=disney"
x = luigi.Parameter(significant=True)
MyTask('arg')
self.assertRaises(luigi.parameter.MissingParameterException,
lambda: MyTask())
def test_local_insignificant_param(self):
""" Ensure we have the same behavior as in before a78338c """
class MyTask(luigi.Task):
# This could typically be "--num-threads=True"
x = luigi.Parameter(significant=False)
MyTask('arg')
self.assertRaises(luigi.parameter.MissingParameterException,
lambda: MyTask())
class TestNewStyleGlobalParameters(unittest.TestCase):
def setUp(self):
super(TestNewStyleGlobalParameters, self).setUp()
MockTarget.fs.clear()
BananaDep.y.reset_global()
def expect_keys(self, expected):
self.assertEquals(set(MockTarget.fs.get_all_data().keys()), set(expected))
def test_x_arg(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg'])
self.expect_keys(['banana-foo-bar', 'banana-dep-foo-def'])
def test_x_arg_override(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg', '--BananaDep-y', 'xyz'])
self.expect_keys(['banana-foo-bar', 'banana-dep-foo-xyz'])
def test_x_arg_override_stupid(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg', '--BananaDep-x', 'blabla'])
self.expect_keys(['banana-foo-bar', 'banana-dep-foo-def'])
def test_x_arg_y_arg(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg-y-arg'])
self.expect_keys(['banana-foo-bar', 'banana-dep-foo-bar'])
def test_x_arg_y_arg_override(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg-y-arg', '--BananaDep-y', 'xyz'])
self.expect_keys(['banana-foo-bar', 'banana-dep-foo-bar'])
def test_x_arg_y_arg_override_all(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg-y-arg', '--BananaDep-y', 'xyz', '--BananaDep-x', 'blabla'])
self.expect_keys(['banana-foo-bar', 'banana-dep-foo-bar'])
def test_y_arg_override(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--x', 'foo', '--y', 'bar', '--style', 'y-kwarg', '--BananaDep-x', 'xyz'])
self.expect_keys(['banana-foo-bar', 'banana-dep-xyz-bar'])
def test_y_arg_override_both(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--x', 'foo', '--y', 'bar', '--style', 'y-kwarg', '--BananaDep-x', 'xyz', '--BananaDep-y', 'blah'])
self.expect_keys(['banana-foo-bar', 'banana-dep-xyz-bar'])
def test_y_arg_override_banana(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--y', 'bar', '--style', 'y-kwarg', '--BananaDep-x', 'xyz', '--Banana-x', 'baz'])
self.expect_keys(['banana-baz-bar', 'banana-dep-xyz-bar'])
class TestRemoveGlobalParameters(unittest.TestCase):
def setUp(self):
super(TestRemoveGlobalParameters, self).setUp()
MyConfig.mc_p.reset_global()
MyConfig.mc_q.reset_global()
MyConfigWithoutSection.mc_r.reset_global()
MyConfigWithoutSection.mc_s.reset_global()
def run_and_check(self, args):
run_exit_status = luigi.run(['--local-scheduler', '--no-lock'] + args)
self.assertTrue(run_exit_status)
return run_exit_status
def test_use_config_class_1(self):
self.run_and_check(['--MyConfig-mc-p', '99', '--mc-r', '55', 'NoopTask'])
self.assertEqual(MyConfig().mc_p, 99)
self.assertEqual(MyConfig().mc_q, 73)
self.assertEqual(MyConfigWithoutSection().mc_r, 55)
self.assertEqual(MyConfigWithoutSection().mc_s, 99)
def test_use_config_class_2(self):
self.run_and_check(['NoopTask', '--MyConfig-mc-p', '99', '--mc-r', '55'])
self.assertEqual(MyConfig().mc_p, 99)
self.assertEqual(MyConfig().mc_q, 73)
self.assertEqual(MyConfigWithoutSection().mc_r, 55)
self.assertEqual(MyConfigWithoutSection().mc_s, 99)
def test_use_config_class_more_args(self):
self.run_and_check(['--MyConfig-mc-p', '99', '--mc-r', '55', 'NoopTask'])

# -*- python -*-
# -*- coding: utf-8 -*-
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2011-2012 Serge Noiraud
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Python modules
#
#-------------------------------------------------------------------------
import os
from gi.repository import GObject
#------------------------------------------------------------------------
#
# Set up logging
#
#------------------------------------------------------------------------
import logging
_LOG = logging.getLogger("maps.dummylayer")
#-------------------------------------------------------------------------
#
# Gramps Modules
#
#-------------------------------------------------------------------------
#-------------------------------------------------------------------------
#
# osmGpsMap
#
#-------------------------------------------------------------------------
from gi.repository import OsmGpsMap as osmgpsmap
class DummyLayer(GObject.GObject, osmgpsmap.MapLayer):
def __init__(self):
"""
Initialize the dummy layer
"""
GObject.GObject.__init__(self)
def do_draw(self, gpsmap, gdkdrawable):
"""
Draw the layer
"""
pass
def do_render(self, gpsmap):
"""
Render the layer
"""
pass
def do_busy(self):
"""
The layer is busy
"""
return False
def do_button_press(self, gpsmap, gdkeventbutton):
"""
Someone pressed a button
"""
return False
GObject.type_register(DummyLayer)
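# A hedged sketch of attaching this layer to an osm-gps-map widget (the
# `osm` variable is an illustrative assumption):
#
#   osm = osmgpsmap.Map()
#   osm.layer_add(DummyLayer())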
def __prepare_class__(cls):
cls.__dispatch = {}
for resultclass in resultclasses:
cls.__dispatch[resultclass] = {}
for type_ in reversed(resultclass.mro()):
for (k, v) in type_.__dict__.items():
# All __promise__ return the same wrapper method, but they
# also do setup, inserting the method into the dispatch
# dict.
meth = cls.__promise__(resultclass, k, v)
if hasattr(cls, k):
continue
setattr(cls, k, meth)
cls._delegate_str = str in resultclasses
cls._delegate_unicode = unicode in resultclasses
assert not (cls._delegate_str and cls._delegate_unicode), "Cannot call lazy() with both str and unicode return types."
if cls._delegate_unicode:
cls.__unicode__ = cls.__unicode_cast
elif cls._delegate_str:
cls.__str__ = cls.__str_cast
__prepare_class__ = classmethod(__prepare_class__)
def __promise__(cls, klass, funcname, method):
# Builds a wrapper around some magic method and registers that magic
# method for the given type and method name.
def __wrapper__(self, *args, **kw):
# Automatically triggers the evaluation of a lazy value and
# applies the given magic method of the result type.
res = func(*self.__args, **self.__kw)
for t in type(res).mro():
if t in self.__dispatch:
return self.__dispatch[t][funcname](res, *args, **kw)
raise TypeError("Lazy object returned unexpected type.")
if klass not in cls.__dispatch:
cls.__dispatch[klass] = {}
cls.__dispatch[klass][funcname] = method
return __wrapper__
__promise__ = classmethod(__promise__)
def __unicode_cast(self):
return func(*self.__args, **self.__kw)
def __str_cast(self):
return str(func(*self.__args, **self.__kw))
def __cmp__(self, rhs):
if self._delegate_str:
s = str(func(*self.__args, **self.__kw))
elif self._delegate_unicode:
s = unicode(func(*self.__args, **self.__kw))
else:
s = func(*self.__args, **self.__kw)
if isinstance(rhs, Promise):
return -cmp(rhs, s)
else:
return cmp(s, rhs)
def __mod__(self, rhs):
if self._delegate_str:
return str(self) % rhs
elif self._delegate_unicode:
return unicode(self) % rhs
else:
raise AssertionError('__mod__ not supported for non-string types')
def __deepcopy__(self, memo):
# Instances of this class are effectively immutable. It's just a
# collection of functions. So we don't need to do anything
# complicated for copying.
memo[id(self)] = self
return self
@wraps(func)
def __wrapper__(*args, **kw):
# Creates the proxy object, instead of the actual value.
return __proxy__(args, kw)
return __wrapper__
def _lazy_proxy_unpickle(func, args, kwargs, *resultclasses):
return lazy(func, *resultclasses)(*args, **kwargs)
def allow_lazy(func, *resultclasses):
"""
A decorator that allows a function to be called with one or more lazy
arguments. If none of the args are lazy, the function is evaluated
immediately, otherwise a __proxy__ is returned that will evaluate the
function when needed.
"""
@wraps(func)
def wrapper(*args, **kwargs):
for arg in list(args) + kwargs.values():
if isinstance(arg, Promise):
break
else:
return func(*args, **kwargs)
return lazy(func, *resultclasses)(*args, **kwargs)
return wrapper
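# A hedged usage sketch for allow_lazy() (mirrors the classic
# django.utils.text pattern; treat the names as illustrative):
#
#   def capfirst(x):
#       return x and x[0].upper() + x[1:]
#   capfirst = allow_lazy(capfirst, unicode)
#
# capfirst() can now accept lazily translated strings (e.g. ugettext_lazy
# results) and defers evaluation until the value is actually needed.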
empty = object()
def new_method_proxy(func):
def inner(self, *args):
if self._wrapped is empty:
self._setup()
return func(self._wrapped, *args)
return inner
class LazyObject(object):
"""
A wrapper for another class that can be used to delay instantiation of the
wrapped class.
By subclassing, you have the opportunity to intercept and alter the
instantiation. If you don't need to do that, use SimpleLazyObject.
"""
def __init__(self):
self._wrapped = empty
__getattr__ = new_method_proxy(getattr)
def __setattr__(self, name, value):
if name == "_wrapped":
# Assign to __dict__ to avoid infinite __setattr__ loops.
self.__dict__["_wrapped"] = value
else:
if self._wrapped is empty:
self._setup()
setattr(self._wrapped, name, value)
def __delattr__(self, name):
if name == "_wrapped":
raise TypeError("can't delete _wrapped.")
if self._wrapped is empty:
self._setup()
delattr(self._wrapped, name)
def _setup(self):
"""
Must be implemented by subclasses to initialise the wrapped object.
"""
raise NotImplementedError
# introspection support:
__members__ = property(lambda self: self.__dir__())
__dir__ = new_method_proxy(dir)
class SimpleLazyObject(LazyObject):
"""
A lazy object initialised from any function.
Designed for compound objects of unknown type. For builtins or objects of
known type, use django.utils.functional.lazy.
"""
def __init__(self, func):
"""
Pass in a callable that returns the object to be wrapped.
If copies are made of the resulting SimpleLazyObject, which can happen
in various circumstances within Django, then you must ensure that the
callable can be safely run more than once and will return the same
value.
"""
self.__dict__['_setupfunc'] = func
super(SimpleLazyObject, self).__init__()
def _setup(self):
self._wrapped = self._setupfunc()
__str__ = new_method_proxy(str)
__unicode__ = new_method_proxy(unicode)
def __deepcopy__(self, memo):
if self._wrapped is empty:
# We have to use SimpleLazyObject, not self.__class__, because the
# latter is proxied.
result = SimpleLazyObject(self._setupfunc)
memo[id(self)] = result
return result
else:
return copy.deepcopy(self._wrapped, memo)
# Because we have messed with __class__ below, we confuse pickle as to what
# class we are pickling. It also appears to stop __reduce__ from being
# called. So, we define __getstate__ in a way that cooperates with the way
# that pickle interprets this class. This fails when the wrapped class is a
# builtin, but it is better than nothing.
def __getstate__(self):
if self._wrapped is empty:
self._setup()
return self._wrapped.__dict__
# Need to pretend to be the wrapped class, for the sake of objects that care
# about this (especially in equality tests)
__class__ = property(new_method_proxy(operator.attrgetter("__class__")))
__eq__ = new_method_proxy(operator.eq)
__hash__ = new_method_proxy(hash)
__nonzero__ = new_method_proxy(bool)
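# A hedged usage sketch (mirrors the well-known django.contrib.auth
# middleware pattern; `get_user` is assumed for illustration):
#
#   request.user = SimpleLazyObject(lambda: get_user(request))
#
# The real user object is then only built on first attribute access.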
class lazy_property(property):
"""
A property that works with subclasses by wrapping the decorated
functions of the base class.
"""
def __new__(cls, fget=None, fset=None, fdel=None, doc=None):
if fget is not None:
@wraps(fget)
def fget(instance, instance_type=None, name=fget.__name__):
return getattr(instance, name)()
if fset is not None:
@wraps(fset)
def fset(instance, value, name=fset.__name__):
return getattr(instance, name)(value)
if fdel is not None:
@wraps(fdel)
def fdel(instance, name=fdel.__name__):
return getattr(instance, name)()
return super(lazy_property, cls).__new__(cls, fget, fset, fdel, doc)

invalid' : recID is an error code, i.e. in the interval [-99,-1]
@param return: body of the page
"""
_ = gettext_set_language(ln)
if status == 'inexistant':
body = _("Sorry, the record %s does not seem to exist.") % (recID,)
elif status in ('nan', 'invalid'):
body = _("Sorry, %s is not a valid ID value.") % (recID,)
else:
body = _("Sorry, no record ID was provided.")
body += "<br /><br />"
link = "<a href=\"%s?ln=%s\">%s</a>." % (CFG_SITE_URL, ln, CFG_SITE_NAME_INTL.get(ln, CFG_SITE_NAME))
body += _("You may want to start browsing from %s") % link
return body
def tmpl_get_first_comments_with_ranking(self, recID, ln, comments=None, nb_comments_total=None, avg_score=None, warnings=[]):
"""
@param recID: record id
@param ln: language
@param comments: tuple as returned from webcomment.py/query_retrieve_comments_or_remarks
@param nb_comments_total: total number of comments for this record
@param avg_score: average score of all reviews
@param warnings: list of warning tuples (warning_msg, arg1, arg2, ...)
@return: html of comments
"""
# load the right message language
_ = gettext_set_language(ln)
# naming data fields of comments
c_nickname = 0
c_user_id = 1
c_date_creation = 2
c_body = 3
c_nb_votes_yes = 4
c_nb_votes_total = 5
c_star_score = 6
c_title = 7
c_id = 8
warnings = self.tmpl_warnings(warnings, ln)
#stars
if avg_score > 0:
avg_score_img = 'stars-' + str(avg_score).split('.')[0] + '-' + str(avg_score).split('.')[1] + '.png'
else:
avg_score_img = "stars-0-0.png"
# voting links
useful_dict = { 'siteurl' : CFG_SITE_URL,
'recID' : recID,
'ln' : ln,
'yes_img' : 'smchk_gr.gif', #'yes.gif',
'no_img' : 'iconcross.gif' #'no.gif'
}
link = '<a href="%(siteurl)s/record/%(recID)s/reviews/vote?ln=%(ln)s&comid=%%(comid)s' % useful_dict
useful_yes = link + '&com_value=1">' + _("Yes") + '</a>'
useful_no = link + '&com_value=-1">' + _("No") + '</a>'
#comment row
comment_rows = ' '
max_comment_round_name = comments[-1][0]
for comment_round_name, comments_list in comments:
comment_rows += '<div id="cmtRound%i" class="cmtRound">' % (comment_round_name)
comment_rows += _('%(x_nb)i comments for round "%(x_name)s"') % {'x_nb': len(comments_list), 'x_name': comment_round_name} + "<br/>"
for comment in comments_list:
if comment[c_nickname]:
nickname = comment[c_nickname]
display = nickname
else:
(uid, nickname, display) = get_user_info(comment[c_user_id])
messaging_link = self.create_messaging_link(nickname, display, ln)
comment_rows += '''
<tr>
<td>'''
report_link = '%s/record/%s/reviews/report?ln=%s&comid=%s' % (CFG_SITE_URL, recID, ln, comment[c_id])
comment_rows += self.tmpl_get_comment_with_ranking(None, ln=ln, nickname=messaging_link,
comment_uid=comment[c_user_id],
date_creation=comment[c_date_creation],
body=comment[c_body],
status='', nb_reports=0,
nb_votes_total=comment[c_nb_votes_total],
nb_votes_yes=comment[c_nb_votes_yes],
star_score=comment[c_star_score],
title=comment[c_title], report_link=report_link, recID=recID)
comment_rows += '''
%s %s / %s<br />''' % (_("Was this review helpful?"), useful_yes % {'comid':comment[c_id]}, useful_no % {'comid':comment[c_id]})
comment_rows += '''
<br />
</td>
</tr>'''
# Close comment round
comment_rows += '</div>'
# write button
write_button_link = '''%s/record/%s/reviews/add''' % (CFG_SITE_URL, recID)
write_button_form = ' <input type="hidden" name="ln" value="%s"/>' % ln
write_button_form = self.createhiddenform(action=write_button_link, method="get", text=write_button_form, button=_("Write a review"))
if nb_comments_total > 0:
avg_score_img = str(avg_score_img)
avg_score = str(avg_score)
nb_comments_total = str(nb_comments_total)
score = '<b>'
score += _("Average review score: %(x_nb_score)s based on %(x_nb_reviews)s reviews") % \
{'x_nb_score': '</b><img src="' + CFG_SITE_URL + '/img/' + avg_score_img + '" alt="' + avg_score + '" />',
'x_nb_reviews': nb_comments_total}
useful_label = _("Readers found the following %s reviews to be most helpful.")
useful_label %= len(comments) > 1 and len(comments) or ""
view_all_comments_link ='<a href="%s/record/%s/reviews/display?ln=%s&do=hh">' % (CFG_SITE_URL, recID, ln)
view_all_comments_link += _("View all %s reviews") % nb_comments_total
view_all_comments_link += '</a><br />'
out = warnings + """
<!-- review title table -->
<table>
<tr>
<td class="blocknote">%(comment_title)s:</td>
</tr>
</table>
%(score_label)s<br />
%(useful_label)s
<!-- review table -->
<table style="border: 0px; border- | collapse: separate; border-spacing: 5px; padding: 5px; width: 100%%">
| %(comment_rows)s
</table>
%(view_all_comments_link)s
%(write_button_form)s<br />
""" % \
{ 'comment_title' : _("Rate this document"),
'score_label' : score,
'useful_label' : useful_label,
'recID' : recID,
'view_all_comments' : _("View all %s reviews") % (nb_comments_total,),
'write_comment' : _("Write a review"),
'comment_rows' : comment_rows,
'tab' : ' '*4,
'siteurl' : CFG_SITE_URL,
'view_all_comments_link': nb_comments_total>0 and view_all_comments_link or "",
'write_button_form' : write_button_form
}
else:
out = '''
<!-- review title table -->
<table>
<tr>
<td class="blocknote">%s:</td>
</tr>
</table>
%s<br />
%s
<br />''' % (_("Rate this document"),
_("Be the first to review this document."),
write_button_form)
return out
def tmpl_get_comment_without_ranking(self, req, ln, nickname, comment_uid, date_creation, body, status, nb_reports, reply_link=None, report_link=None, undelete_link=None, delete_links=None, unreport_link=None, recID=-1, com_id='', attached_files=None):
"""
private function
@param req: request object to fetch user info
@param ln: language

from sopel import module
from sopel.tools import Identifier
import time
import re
TIMEOUT = 36000
@module.rule('^(</?3)\s+([a-zA-Z0-9\[\]\\`_\^\{\|\}-]{1,32})\s*$')
@module.intent('ACTION')
@module.require_chanmsg("You may only modify someone's rep in a channel.")
def heart_cmd(bot, trigger):
luv_h8(bot, trigger, trigger.group(2), 'h8' if '/' in trigger.group(1) else 'luv')
@module.rule('.*?(?:([a-zA-Z0-9\[\]\\`_\^\{\|\}-]{1,32})(\+{2}|-{2})).*?')
@module.require_chanmsg("You may only modify someone's rep in a channel.")
def karma_cmd(bot, trigger):
if re.match('^({prefix})({cmds})'.format(prefix=bot.config.core.prefix, cmds='|'.join(luv_h8_cmd.commands)),
trigger.group(0)):
return # avoid processing commands if people try to be tricky
for (nick, act) in re.findall('(?:([a-zA-Z0-9\[\]\\`_\^\{\|\}-]{1,32})(\+{2}|-{2}))', trigger.raw):
if luv_h8(bot, trigger, nick, 'luv' if act == '++' else 'h8', warn_nonexistent=False):
break
@module.commands('luv', 'h8')
@module.example(".luv Phixion")
@module.example(".h8 Thaya")
@module.require_chanmsg("You may only modify someone's rep in a channel.")
def luv_h8_cmd(bot, trigger):
if not trigger.group(3):
bot.reply("No user specified.")
return
target = Identifier(trigger.group(3))
luv_h8(bot, trigger, target, trigger.group(1))
def luv_h8(bot, trigger, target, which, warn_nonexistent=True):
target = verified_nick(bot, target, trigger.sender)
which = which.lower() # issue #18
pfx = change = selfreply = None # keep PyCharm & other linters happy
if not target:
if warn_nonexistent:
bot.reply("You can only %s someone who is here." % which)
return False
if rep_too_soon(bot, trigger.nick):
return False
if which == 'luv':
selfreply = "No narcissism allowed!"
pfx, change = 'in', 1
if which == 'h8':
selfreply = "Go to 4chan if you really hate yourself!"
pfx, change = 'de', -1
if not (pfx and change and selfreply): # safeguard against leaving something in the above mass-None assignment
bot.say("Logic error! Please report this to %s." % bot.config.core.owner)
return
if is_self(bot, trigger.nick, target):
bot.reply(selfreply)
return False
rep = mod_rep(bot, trigger.nick, target, change)
bot.say("%s has %screased %s's reputation score to %d" % (trigger.nick, pfx, target, rep))
return True
@module.commands('rep')
@module.example(".rep Phixion")
def show_rep(bot, trigger):
target = trigger.group(3) or trigger.nick
rep = get_rep(bot, target)
if rep is None:
bot.say("%s has no reputation score yet." % target)
return
bot.say("%s's current reputation score is %d." % (target, rep))
# helpers
def get_rep(bot, target):
return bot.db.get_nick_value(Identifier(target), 'rep_score')
def set_rep(bot, caller, target, newrep):
bot.db.set_nick_value(Identifier(target), 'rep_score', newrep)
bot.db.set_nick_value(Identifier(caller), 'rep_used', time.time())
def mod_rep(bot, caller, target, change):
rep = get_rep(bot, target) or 0
rep += change
set_rep(bot, caller, target, rep)
return rep
def get_rep_used(bot, nick):
return bot.db.get_nick_value(Identifier(nick), 'rep_used') or 0
def set_rep_used(bot, nick):
bot.db.set_nick_value(Identifier(nick), 'rep_used', time.time())
def rep_used_since(bot, nick):
now = time.time()
last = get_rep_used(bot, nick)
return abs(last - now)
def rep_too_soon(bot, nick):
since = rep_used_since(bot, nick)
if since < TIMEOUT:
bot.notice("You must wait %d more seconds before changing someone's rep again." % (TIMEOUT - since), nick)
return True
else:
return False
def is_self(bot, nick, target):
nick = Identifier(nick)
target = Identifier(target)
if nick == target:
return True # shortcut to catch common goofballs
try:
nick_id = bot.db.get_nick_id(nick, False)
target_id = bot.db.get_nick_id(target, False)
except ValueError:
return False # if either nick doesn't have an ID, they can't be in a group
return nick_id == target_id
def verified_nick(bot, nick, channel):
    match = re.search(r'([a-zA-Z0-9\[\]\\`_\^\{\|\}-]{1,32})', nick)
    if not match:  # no nick-like token at all; re.search returning None would otherwise crash on .group()
        return None
    nick = match.group(1)
nick = Identifier(nick)
if nick.lower() not in bot.privileges[channel.lower()]:
if nick.endswith('--'):
if Identifier(nick[:-2]).lower() in bot.privileges[channel.lower()]:
return Identifier(nick[:-2])
return None
    return nick
the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import utils
import os
from datetime import datetime
import new
from gettext import dgettext
from ufo.debugger import Debugger
from ufo.constants import ShareDoc
from ufo.database import *
from ufo.utils import get_user_infos
from ufo.user import user
class TranslatableText:
def __init__(self, text):
self.text = text
def __repr__(self):
return dgettext("python-ufo", self.text)
def _(message):
return TranslatableText(message)
class action:
def __init__(self, description):
self.description = description
def __call__(self, func):
func.action = True
func.description = self.description
return func
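# Note on the decorator above: "@action(_('Accept'))" leaves the method callable
# but tags it with .action = True and a lazily translated .description, which the
# NotificationDocument.actions property below collects by introspecting the class.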
class NotificationDocument(Document, Debugger):
doctype = TextField(default="NotificationDocument")
subtype = TextField(default="")
date = DateTimeField(default=datetime.now)
initiator = TextField()
target = TextField()
by_id = ViewField('notification',
language = 'javascript',
map_fun = "function (doc) {" \
"if (doc.doctype === 'NotificationDocument') {" \
"emit(doc._id, doc);" \
"}" | \
"}")
    by_subtype_and_initiator = ViewField('notification',
language = 'javascript',
map_fun = "function (doc) {" \
"if (doc.doctype === 'NotificationDocument' && doc.subtype && doc.initiator) {" \
"emit([doc.subtype, doc.initiator], doc);" \
"}" \
"}")
def __init__(self, *args, **fields):
super(NotificationDocument, self).__init__(*args, **fields)
if fields.get('initiator') and fields.get('target'):
self.initiator = fields['initiator']
self.target = fields['target']
@action(_("Dismiss"))
def dismiss(self):
user.dismiss(self)
def __getitem__(self, key):
try:
value = getattr(self, "pretty_" + key)
        except Exception:
try:
value = getattr(self, key)
            except Exception:
value = super(Document, self).__getitem__(key)
if isinstance(value, TranslatableText):
return repr(value)
else:
return value
@property
def fullname(self):
return get_user_infos(login=self.initiator)['fullname']
@property
def actions(self):
actions = {}
for k, v in self.__class__.__dict__.items():
if type(v) == new.function and getattr(v, "action", False):
actions[k] = repr(v.description)
return actions
@property
    def default_action(self):
        for name in self.actions:  # iterate the method names; the dict values are display descriptions
            if getattr(getattr(self, name), "default", False):
                return name
        return "dismiss"
class NewFriendshipNotification(NotificationDocument):
subtype = TextField(default="NewFriendship")
title = _('New friendship invitation')
body = _('You have been invited by %(fullname)s to be his/her friend.')
summary = _("%(fullname)s wants to be your friend")
def __init__(self, **fields):
super(NewFriendshipNotification, self).__init__()
if fields.get('initiator') and fields.get('target'):
self.initiator = fields['initiator']
self.target = fields['target']
@action(_("Accept"))
def accept_invitation(self):
self.debug("Accepting the friend invitation from '%s' to '%s'"
% (self.initiator, self.target))
user.accept_friend(self.initiator)
@action(_("Refuse"))
def refuse_invitation(self):
self.debug("Refusing the friend invitation from '%s' to '%s'"
% (self.initiator, self.target))
user.refuse_friend(self.initiator)
@action(_("Block user"))
def block_invitation(self):
self.debug("Blocking the friend invitation from '%s' to '%s'"
% (self.initiator, self.target))
user.block_user(self.initiator)
class FollowRequestNotification(NotificationDocument):
subtype = TextField(default="FollowRequest")
title = _('New file sharing request')
body = _('%(fullname)s would like to be in your followers list.')
summary = _("%(fullname)s wants to follow you")
@action(_("Accept"))
def accept_invitation(self):
self.debug("Accepting the follow request from '%s' to '%s'"
% (self.initiator, self.target))
user.accept_following(self.initiator)
@action(_("Refuse"))
def refuse_invitation(self):
self.debug("Refusing the follow request from '%s' to '%s'"
% (self.initiator, self.target))
user.refuse_following(self.initiator)
@action(_("Block user"))
def block_invitation(self):
self.debug("Blocking the follow request from '%s' to '%s'"
% (self.initiator, self.target))
user.block_user(self.initiator)
class AcceptedFriendshipNotification(NotificationDocument):
subtype = TextField(default="AcceptedFriendship")
title = _('Friendship invitation accepted')
body = _('%(fullname)s has accepted your friendship invitation, '
'you can now share some document with him/her.')
summary = _("%(fullname)s has accepted your invitation")
    @action(_("Accept"))
def accept_friend(self):
self.debug("Proceed pending shares from '%s' to '%s'" % (self.initiator, self.target))
# user.accept_friend(self.initiator)
class CanceledFriendshipNotification(NotificationDocument):
subtype = TextField(default="CanceledFriendship")
title = _('A friendship has been canceled')
body = _('%(fullname)s has removed you from his friend list, '
'you can not access his files any more.')
summary = _("%(fullname)s has canceled his friendship with you")
class RefusedFriendshipNotification(NotificationDocument):
subtype = TextField(default="RefusedFriendship")
title = _('%(fullname)s has refused your friend request')
body = _('%(fullname)s would rather be stranger than friends.')
summary = _("%(fullname)s has refused your friend request")
class NewShareNotification(NotificationDocument):
subtype = TextField(default="NewShare")
files = ListField(TextField())
title = _('Someone has shared some files with you')
body = _('%(fullname)s has shared the following files with you : %(files)s')
summary = _("%(fullname)s has shared some files with you")
def __init__(self, **fields):
super(NewShareNotification, self).__init__(**fields)
if fields.get('files'):
self.files = fields['files']
class CanceledShareNotification(NotificationDocument):
subtype = TextField(default="CanceledShare")
files = ListField(TextField())
title = _('A share has been canceled')
body = _('%(fullname)s has canceled the share of \'%(file)s\', '
'you can\'t access the file any more.')
summary = _("%(fullname)s has canceled a share with you")
def __init__(self, **fields):
super(CanceledShareNotification, self).__init__()
if fields.get('files'):
            self.files = fields['files']
    if isinstance(resource['Tags'], dict):
tags = resource['Tags']
else:
tags = {tag['Key']: tag['Value'] for tag in resource['Tags']}
targets = []
for target_tag_key in target_tag_keys:
if target_tag_key in tags:
targets.append(tags[target_tag_key])
return targets
def get_message_subject(sqs_message):
default_subject = 'Custodian notification - %s' % (sqs_message['policy']['name'])
subject = sqs_message['action'].get('subject', default_subject)
jinja_template = jinja2.Template(subject)
subject = jinja_template.render(
account=sqs_message.get('account', ''),
account_id=sqs_message.get('account_id', ''),
partition=sqs_message.get('partition', ''),
event=sqs_message.get('event', None),
action=sqs_message['action'],
policy=sqs_message['policy'],
region=sqs_message.get('region', '')
)
return subject
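# A minimal sketch of the subject templating above, with a hypothetical
# sqs_message payload (only 'policy' and 'action' are required; the other
# fields default to empty strings):
#
#   sqs_message = {
#       'policy': {'name': 'ec2-untagged'},
#       'action': {'subject': 'Account {{ account }}: {{ policy.name }}'},
#       'account': 'dev',
#   }
#   get_message_subject(sqs_message)  # -> 'Account dev: ec2-untagged'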
def setup_defaults(config):
config.setdefault('region', 'us-east-1')
config.setdefault('ses_region', config.get('region'))
config.setdefault('memory', 1024)
config.setdefault('runtime', 'python3.7')
config.setdefault('timeout', 300)
config.setdefault('subnets', None)
config.setdefault('security_groups', None)
config.setdefault('contact_tags', [])
config.setdefault('ldap_uri', None)
config.setdefault('ldap_bind_dn', None)
config.setdefault('ldap_bind_user', None)
config.setdefault('ldap_bind_password', None)
config.setdefault('endpoint_url', None)
config.setdefault('datadog_api_key', None)
config.setdefault('slack_token', None)
config.setdefault('slack_webhook', None)
def date_time_format(utc_str, tz_str='US/Eastern', format='%Y %b %d %H:%M %Z'):
return parser.parse(utc_str).astimezone(gettz(tz_str)).strftime(format)
def get_date_time_delta(delta):
return str(datetime.now().replace(tzinfo=gettz('UTC')) + timedelta(delta))
def get_date_age(date):
return (datetime.now(tz=tzutc()) - parser.parse(date)).days
def format_struct(evt):
return json.dumps(evt, indent=2, ensure_ascii=False)
def get_resource_tag_value(resource, k):
for t in resource.get('Tags', []):
if t['Key'] == k:
return t['Value']
return ''
def strip_prefix(value, prefix):
if value.startswith(prefix):
return value[len(prefix):]
return value
def resource_format(resource, resource_type):
if resource_type.startswith('aws.'):
resource_type = strip_prefix(resource_type, 'aws.')
if resource_type == 'ec2':
tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())}
return "%s %s %s %s %s %s" % (
resource['InstanceId'],
resource.get('VpcId', 'NO VPC!'),
resource['InstanceType'],
resource.get('LaunchTime'),
tag_map.get('Name', ''),
resource.get('PrivateIpAddress'))
elif resource_type == 'ami':
return "%s %s %s" % (
resource.get('Name'), resource['ImageId'], resource['CreationDate'])
elif resource_type == 'sagemaker-notebook':
return "%s" % (resource['NotebookInstanceName'])
elif resource_type == 's3':
return "%s" % (resource['Name'])
elif resource_type == 'ebs':
return "%s %s %s %s" % (
resource['VolumeId'],
resource['Size'],
resource['State'],
            resource['CreateTime'])
elif resource_type == 'rds':
return "%s %s %s %s" % (
resource['DBInstanceIdentifier'],
"%s-%s" % (
resource['Engine'], resource['EngineVersion']),
            resource['DBInstanceClass'],
resource['AllocatedStorage'])
elif resource_type == 'rds-cluster':
return "%s %s %s" % (
resource['DBClusterIdentifier'],
"%s-%s" % (
resource['Engine'], resource['EngineVersion']),
resource['AllocatedStorage'])
elif resource_type == 'asg':
tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())}
return "%s %s %s" % (
resource['AutoScalingGroupName'],
tag_map.get('Name', ''),
"instances: %d" % (len(resource.get('Instances', []))))
elif resource_type == 'elb':
tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())}
if 'ProhibitedPolicies' in resource:
return "%s %s %s %s" % (
resource['LoadBalancerName'],
"instances: %d" % len(resource['Instances']),
"zones: %d" % len(resource['AvailabilityZones']),
"prohibited_policies: %s" % ','.join(
resource['ProhibitedPolicies']))
return "%s %s %s" % (
resource['LoadBalancerName'],
"instances: %d" % len(resource['Instances']),
"zones: %d" % len(resource['AvailabilityZones']))
elif resource_type == 'redshift':
return "%s %s %s" % (
resource['ClusterIdentifier'],
'nodes:%d' % len(resource['ClusterNodes']),
'encrypted:%s' % resource['Encrypted'])
elif resource_type == 'emr':
return "%s status:%s" % (
resource['Id'],
resource['Status']['State'])
elif resource_type == 'cfn':
return "%s" % (
resource['StackName'])
elif resource_type == 'launch-config':
return "%s" % (
resource['LaunchConfigurationName'])
elif resource_type == 'security-group':
name = resource.get('GroupName', '')
for t in resource.get('Tags', ()):
if t['Key'] == 'Name':
name = t['Value']
return "%s %s %s inrules: %d outrules: %d" % (
name,
resource['GroupId'],
resource.get('VpcId', 'na'),
len(resource.get('IpPermissions', ())),
len(resource.get('IpPermissionsEgress', ())))
elif resource_type == 'log-group':
if 'lastWrite' in resource:
return "name: %s last_write: %s" % (
resource['logGroupName'],
resource['lastWrite'])
return "name: %s" % (resource['logGroupName'])
elif resource_type == 'cache-cluster':
return "name: %s created: %s status: %s" % (
resource['CacheClusterId'],
resource['CacheClusterCreateTime'],
resource['CacheClusterStatus'])
elif resource_type == 'cache-snapshot':
cid = resource.get('CacheClusterId')
if cid is None:
cid = ', '.join([
ns['CacheClusterId'] for ns in resource['NodeSnapshots']])
return "name: %s cluster: %s source: %s" % (
resource['SnapshotName'],
cid,
resource['SnapshotSource'])
elif resource_type == 'redshift-snapshot':
return "name: %s db: %s" % (
resource['SnapshotIdentifier'],
resource['DBName'])
elif resource_type == 'ebs-snapshot':
return "name: %s date: %s" % (
resource['SnapshotId'],
resource['StartTime'])
elif resource_type == 'subnet':
return "%s %s %s %s %s %s" % (
resource['SubnetId'],
resource['VpcId'],
resource['AvailabilityZone'],
resource['State'],
resource['CidrBlock'],
resource['AvailableIpAddressCount'])
elif resource_type == 'account':
return " %s %s" % (
resource['account_id'],
resource['account_name'])
elif resource_type == 'cloudtrail':
return "%s" % (
resource['Name'])
elif resource_type == 'vpc':
return "%s " % (
resource['VpcId'])
elif resource_type == 'iam-group':
return " %s %s %s" % (
resource['GroupName'],
resource['Arn'],
resource['CreateDate'])
elif resource_type == 'rds-snapshot':
return " %s %s %s" % (
resource['DBSnapshotIdentifier'],
resource['DBInstanceIdentifier'],
resource['SnapshotCreateTime'])
elif resource_type == 'iam-user':
return " %s " % (
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2019-11-24 03:04
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logger', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='logentry',
name='log_type',
field=models.IntegerField(choices=[(0, 'Debug'), (1, 'Info'), (2, 'Warning'), (3, 'Error'), (4, 'Critical')], default=1),
        ),
migrations.AlterField(
model_name='logentry',
name='message',
field=models.TextField(default=''),
),
]
|
#!/usr/bin/env python
############################################################################
#
# Copyright (C) 2012, 2013 PX4 Development Team. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# 3. Neither the name PX4 nor the names of its contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
############################################################################
#
# PX4 firmware image generator
#
# The PX4 firmware file is a JSON-encoded Python object, containing
# metadata fields and a zlib-compressed base64-encoded firmware image.
#
import sys
import argparse
import json
import base64
import zlib
import time
import subprocess
#
# Construct a basic firmware description
#
def mkdesc():
proto = {}
proto['magic'] = "PX4FWv1"
proto['board_id'] = 0
proto['board_revision'] = 0
proto['version'] = ""
proto['summary'] = ""
proto['description'] = ""
proto['git_identity'] = ""
proto['build_time'] = 0
proto['image'] = bytes()
proto['image_size'] = 0
return proto
# Parse commandline
parser = argparse.ArgumentParser(description="Firmware generator for the PX autopilot system.")
parser.add_argument("--prototype", action="store", help="read a prototype description from a file")
parser.add_argument("--board_id", action="store", help="set the board ID required")
parser.add_argument("--board_revision", action="store", help="set the board revision required")
parser.add_argument("--version", action="store", help="set a version string")
parser.add_argument("--summary", action="store", help="set a brief description")
parser.add_argument("--description", action="store", help="set a longer description")
parser.add_argument("--git_identity", action="store", help="the working directory to check for git identity")
parser.add_argument("--parameter_xml", action="store", help="the parameters.xml file")
parser.add_argument("--airframe_xml", action="store", help="the airframes.xml file")
parser.add_argument("--image", action="store", help="the firmware image")
args = parser.parse_args()
# Fetch the firmware descriptor prototype if specified
if args.prototype != None:
f = open(args.prototype,"r")
desc = json.load(f)
f.close()
else:
desc = mkdesc()
desc['build_time'] = int(time.time())
if args.board_id != None:
desc['board_id'] = int(args.board_id)
if args.board_revision != None:
desc['board_revision'] = int(args.board_revision)
if args.version != None:
desc['version'] = str(args.version)
if args.summary != None:
desc['summary'] = str(args.summary)
if args.description != None:
desc['description'] = str(args.description)
if args.git_identity != None:
cmd = " ".join(["git", | "--git-dir", args.git_identity + "/.git", "describe", "--always", "--dirty"])
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout
desc['git_identity'] = str(p.read().strip())
p.close()
if args.parameter_xml != None:
f = open(args.parameter_xml, "rb")
bytes = f.read()
desc['parameter_xml_size'] = len(bytes)
desc['parameter_xml'] = base64.b64encode(zlib.compress(bytes,9)).decode('utf-8')
if args.airframe_xml != None:
f = open(args.airframe_xml, "rb")
bytes = f.read()
desc['airframe_xml_size'] = len(bytes)
desc['airframe_xml'] = base64.b64encode(zlib.compress(bytes,9)).decode('utf-8')
if args.image != None:
f = open(args.image, "rb")
bytes = f.read()
desc['image_size'] = len(bytes)
desc['image'] = base64.b64encode(zlib.compress(bytes,9)).decode('utf-8')
print(json.dumps(desc, indent=4))
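# A consumer-side sketch (an assumption about downstream use, not part of this
# generator): every payload written above is plain zlib + base64, so it can be
# recovered with the same standard-library modules:
#
#   with open("firmware.px4") as f:
#       desc = json.load(f)
#   image = zlib.decompress(base64.b64decode(desc['image']))
#   assert len(image) == desc['image_size']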
|
# -*- coding: utf-8 -*-
import pytest
import turnstile.models.message as message
from turnstile.checks import CheckIgnore
from turnstile.checks.commit_msg.specification import check
def test_check():
commit_1 = message.CommitMessage('something', 'https://github.com/jmcs/turnstile/issues/42 m€sságe')
result_1 = check(None, {}, commit_1)
assert result_1.successful
assert result_1.details == []
commit_2 = message.CommitMessage('something', 'invalid-1')
result_2 = check(None, {}, commit_2)
assert not result_2.successful
assert result_2.details == ['invalid-1 is not a valid specification.']
# Merge messages are ignored
with pytest.raises(CheckIgnore):
commit_3 = message.CommitMessage('something', 'Merge stuff')
check(None, {}, commit_3)
commit_4 = message.CommitMessage('something', 'ftp://example.com/spec')
    result_4 = check(None, {'specification': {'allowed_schemes': ['https']}}, commit_4)
    assert not result_4.successful
assert result_4.details == ['ftp://example.com/spec is not a valid specification.']
commit_5 = message.CommitMessage('something', 'ftp://example.com/spec')
result_5 = check(None, {'specification': {'allowed_schemes': ['https', 'ftp']}}, commit_5)
assert result_5.successful
assert result_5.details == []
|
import logging, time, commands
from autotest.client.shared import error
from virttest import utils_test, aexpect
def run_timedrift(test, params, env):
"""
Time drift test (mainly for Windows guests):
1) Log into a guest.
2) Take a time reading from the guest and host.
3) Run load on the guest and host.
4) Take a second time reading.
5) Stop the load and rest for a while.
6) Take a third time reading.
7) If the drift immediately after load is higher than a user-
specified value (in %), fail.
If the drift after the rest period is higher than a user-specified value,
fail.
@param test: QEMU test object.
@param params: Dictionary with test parameters.
@param env: Dictionary with the test environment.
"""
# Helper functions
def set_cpu_affinity(pid, mask):
"""
        Set the CPU affinity of all threads of the process with PID pid.
Do this recursively for all child processes as well.
@param pid: The process ID.
        @param mask: The CPU affinity mask.
@return: A dict containing the previous mask for each thread.
"""
tids = commands.getoutput("ps -L --pid=%s -o lwp=" % pid).split()
prev_masks = {}
for tid in tids:
prev_mask = commands.getoutput("taskset -p %s" % tid).split()[-1]
prev_masks[tid] = prev_mask
commands.getoutput("taskset -p %s %s" % (mask, tid))
children = commands.getoutput("ps --ppid=%s -o pid=" % pid).split()
for child in children:
prev_masks.update(set_cpu_affinity(child, mask))
return prev_masks
def restore_cpu_affinity(prev_masks):
"""
Restore the CPU affinity of several threads.
@param prev_masks: A dict containing TIDs as keys and masks as values.
"""
for tid, mask in prev_masks.items():
commands.getoutput("taskset -p %s %s" % (mask, tid))
vm = env.get_vm(params["main_vm"])
vm.verify_alive()
timeout = int(params.get("login_timeout", 360))
session = vm.wait_for_login(timeout=timeout)
# Collect test parameters:
# Command to run to get the current time
time_command = params.get("time_command")
# Filter which should match a string to be passed to time.strptime()
time_filter_re = params.get("time_filter_re")
# Time format for time.strptime()
time_format = params.get("time_format")
guest_load_command = params.get("guest_load_command")
guest_load_stop_command = params.get("guest_load_stop_command")
host_load_command = params.get("host_load_command")
guest_load_instances = int(params.get("guest_load_instances", "1"))
host_load_instances = int(params.get("host_load_instances", "0"))
# CPU affinity mask for taskset
cpu_mask = params.get("cpu_mask", "0xFF")
load_duration = float(params.get("load_duration", "30"))
rest_duration = float(params.get("rest_duration", "10"))
drift_threshold = float(params.get("drift_threshold", "200"))
drift_threshold_after_rest = float(params.get("drift_threshold_after_rest",
"200"))
guest_load_sessions = []
host_load_sessions = []
try:
# Set the VM's CPU affinity
prev_affinity = set_cpu_affinity(vm.get_shell_pid(), cpu_mask)
try:
# Open shell sessions with the guest
logging.info("Starting load on guest...")
for i in range(guest_load_instances):
load_session = vm.login()
# Set output func to None to stop it from being called so we
# can change the callback function and the parameters it takes
# with no problems
load_session.set_output_func(None)
load_session.set_output_params(())
load_session.set_output_prefix("(guest load %d) " % i)
load_session.set_output_func(logging.debug)
guest_load_sessions.append(load_session)
# Get time before load
# (ht stands for host time, gt stands for guest time)
(ht0, gt0) = utils_test.get_time(session,
time_command,
time_filter_re,
time_format)
# Run some load on the guest
for load_session in guest_load_sessions:
load_session.sendline(guest_load_command)
# Run some load on the host
logging.info("Starting load on host...")
for i in range(host_load_instances):
host_load_sessions.append(
aexpect.run_bg(host_load_command,
output_func=logging.debug,
output_prefix="(host load %d) " % i,
timeout=0.5))
# Set the CPU affinity of the load process
pid = host_load_sessions[-1].get_pid()
set_cpu_affinity(pid, cpu_mask)
# Sleep for a while (during load)
logging.info("Sleeping for %s seconds...", load_duration)
time.sleep(load_duration)
# Get time delta after load
(ht1, gt1) = utils_test.get_time(session,
time_command,
time_filter_re,
time_format)
# Report results
host_delta = ht1 - ht0
guest_delta = gt1 - gt0
drift = 100.0 * (host_delta - guest_delta) / host_delta
logging.info("Host duration: %.2f", host_delta)
logging.info("Guest duration: %.2f", guest_delta)
logging.info("Drift: %.2f%%", drift)
finally:
logging.info("Cleaning up...")
# Restore the VM's CPU affinity
restore_cpu_affinity(prev_affinity)
# Stop the guest load
if guest_load_stop_command:
session.cmd_output(guest_load_stop_command)
# Close all load shell sessions
for load_session in guest_load_sessions:
load_session.close()
for load_session in host_load_sessions:
load_session.close()
# Sleep again (rest)
logging.info("Sleeping for %s seconds...", rest_duration)
time.sleep(rest_duration)
# Get time after rest
(ht2, gt2) = utils_test.get_time(session,
time_command,
time_filter_re,
time_format)
finally:
session.close()
# Report results
host_delta_total = ht2 - ht0
guest_delta_total = gt2 - gt0
drift_total = 100.0 * (host_delta_total - guest_delta_total) / host_delta
logging.info("Total host duration including rest: %.2f", host_delta_total)
logging.info("Total guest duration including rest: %.2f", guest_delta_total)
logging.info("Total drift after rest: %.2f%%", drift_total)
# Fail the test if necessary
if abs(drift) > drift_threshold:
raise error.TestFail("Time drift too large: %.2f%%" % drift)
if abs(drift_total) > drift_threshold_after_rest:
raise error.TestFail("Time drift too large after rest period: %.2f%%"
% drift_total)
|
#! /usr/bin/env python2
# -*- coding: utf-8 -*-
from __future__ import (unicode_literals, absolute_import, division)
import os as os_module
import xbmc
from lib.constants import *
userdatafolder = os_module.path.join(xbmc.translatePath("special://profile").decode("utf-8"), "addon_data", addonid, "test data")
libpath = os_module.path.join(userdatafolder, "Library")
|
from mybottle import Bottle, run, ServerAdapter, get, post, request
import KalutServer.conf as myconf
class SSLWSGIRefServer(ServerAdapter):
def run(self, handler, quiet=False):
from wsgiref.simple_server import make_server, WSGIRequestHandler
import ssl
if quiet:
class QuietHandler(WSGIRequestHandler):
                def log_request(*args, **kw): pass
self.options['handler_class'] = QuietHandler
        srv = make_server(self.host, self.port, handler, **self.options)
        srv.socket = ssl.wrap_socket(
srv.socket,
certfile=myconf.certfile, # path to chain file
keyfile=myconf.keyfile, # path to RSA private key
server_side=True)
srv.serve_forever()
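# A minimal usage sketch, assuming 'mybottle' mirrors bottle's run() contract
# (run() accepts a ServerAdapter instance) and that myconf supplies the
# certificate and key paths used above:
#
#   app = Bottle()
#   run(app, server=SSLWSGIRefServer(host='0.0.0.0', port=8443))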
|
from django.conf.urls import patterns, url
from publicaciones import views
urlpatterns = patterns('',
url(r'^$', views.index, name='index'),
    url(r'^(?P<articulo_titulo>[\W\w]+)/$', views.ver_articulo, name='ver_articulo'),
)
"""
@package mi.dataset.driver.optaa_dj.cspp
@file mi-dataset/mi/dataset/driver/optaa_dj/cspp/optaa_dj_cspp_telemetered_driver.py
@author Joe Padula
@brief Telemetered driver for the optaa_dj_cspp instrument
Release notes:
Initial Release
"""
__author__ = 'jpadula'
from mi.dataset.dataset_driver import SimpleDatasetDriver
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.parser.cspp_base import \
DATA_PARTICLE_CLASS_KEY, \
METADATA_PARTICLE_CLASS_KEY
from mi.dataset.parser.optaa_dj_cspp import \
    OptaaDjCsppParser, \
OptaaDjCsppMetadataTelemeteredDataParticle, \
OptaaDjCsppInstrumentTelemeteredDataParticle
from mi.core.versioning import version
@version("15.6.1")
def parse(basePythonCodePath, sourceFilePath, particleDataHdlrObj):
"""
This is the method called by Uframe
:param basePythonCodePath This is the file system location of mi-dataset
    :param sourceFilePath This is the full path and filename of the file to be parsed
:param particleDataHdlrObj Java Object to consume the output of the parser
:return particleDataHdlrObj
"""
with open(sourceFilePath, 'rU') as stream_handle:
# create an instance of the concrete driver class defined below
driver = OptaaDjCsppTelemeteredDriver(basePythonCodePath, stream_handle, particleDataHdlrObj)
driver.processFileStream()
return particleDataHdlrObj
class OptaaDjCsppTelemeteredDriver(SimpleDatasetDriver):
"""
The optaa_dj_cspp telemetered driver class extends the SimpleDatasetDriver.
"""
def _build_parser(self, stream_handle):
parser_config = {
DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.optaa_dj_cspp',
DataSetDriverConfigKeys.PARTICLE_CLASS: None,
DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
METADATA_PARTICLE_CLASS_KEY: OptaaDjCsppMetadataTelemeteredDataParticle,
DATA_PARTICLE_CLASS_KEY: OptaaDjCsppInstrumentTelemeteredDataParticle
}
}
parser = OptaaDjCsppParser(parser_config,
stream_handle,
self._exception_callback)
return parser
|
#!/usr/bin/env python
#
# Copyright (c) 2014 Hamilton Kibbe <ham@hamiltonkib.be>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIN | D, EXPRESS
# OR IMPLIED, INCLUDING | BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
""" PyAbleton
A library for creating and editing Ableton Live instrument/effect presets in Python.
"""
__author__ = 'ham@hamiltonkib.be'
__version__ = '1.0'
import presets
|
from pymongo import MongoClient
import schedule
import time
##############
## This script will be deployed in bluemix with --no-route set to true
##############
con = MongoClient("mongodb://abcd:qwerty@ds111798.mlab.com:11798/have_a_seat")
db = con.have_a_seat
#Bookings is {customerName:"", customerEmail: "", customerPhone: "", Slot: ""}
db.Exploration.delete_many({})
db.Exploitation.delete_many({})
for i in range(4): # Finding for all slots
    counts = {} # renamed from 'dict' (shadowed the builtin); reset per slot so counts don't leak across slots
    cursor = db.Bookings.find() # re-query each slot: a pymongo cursor is exhausted after one full pass
    for c in cursor:
        if c['Slot'] == i and c['customerEmail'] not in counts:
            counts[c['customerEmail']] = 1
        elif c['Slot'] == i and c['customerEmail'] in counts:
            counts[c['customerEmail']] += 1
    tuples_list = sorted(counts.items(), key=lambda x: x[1], reverse=True)
print tuples_list
print 'Completed for slot ', i
db.Exploitation.insert({'Slot': i, 'customerEmail': tuples_list[0][0],
'customerName': db.Bookings.find_one({'customerEmail': tuples_list[0][0]})['customerName']})
db.Exploration.insert({'Slot': i, 'customerEmail': tuples_list[len(tuples_list) - 1][0], 'customerName':
db.Bookings.find_one({'customerEmail': tuples_list[len(tuples_list) - 1][0]})['customerName']})
|
print 28433 * 2**7830457 + 1
from collections import Counter
def unalk_coeff(l):
'''Source: https://ww2.amstat.org/publications/jse/v15n2/kader.html'''
n = len(l)
freq = Counter(l)
freqsum = 0
    for key, count in freq.items(): # loop variable renamed so it no longer rebinds the Counter
        p = count / n
        freqsum += p**2
unalk_coeff = 1 - freqsum
return unalk_coeff
def IQV(l, k):
'''k = number of categories
a value can take
Source: http://sjam.selcuk.edu.tr/sjam/article/view/291'''
IQV = (k / (k - 1)) * unalk_coeff(l)
return IQV
def IQV_var(l, k):
'''k = number of categories
a value can take
Source: https://www.youtube.com/watch?v=oQCqaS1ICwk'''
freq = Counter(l)
freqsum = 0
    for cat, count in freq.items(): # renamed: the original loop variable shadowed the parameter k
        freqsum += count
    p2sum = 0
    for cat, count in freq.items():
        p2sum += ((count / freqsum) * 100)**2
    IQV = (k * (100**2 - p2sum)) / ((100**2) * (k - 1))
return IQV
def simpsons_d(l):
freq = Counter(l)
n = 0
for k, v in freq.items():
n += v
s = 0
for k, v in freq.items():
        s += v * (v - 1)
    d = s / (n * (n - 1)) # parentheses matter: Simpson's D divides by n*(n-1), not n*n - 1
return 1 - d
# TEST, Source: https://ww2.amstat.org/publications/jse/v15n2/kader.html
# print(unalk_coeff(['A', 'A', 'A', 'A', 'A', 'A', 'A', 'B', 'B', 'B']))
# print(unalk_coeff(['A', 'A', 'A', 'A', 'A', 'B', 'B', 'B', 'B', 'B']))
# print(unalk_coeff(['A', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B']))
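# Worked example, hand-computed from the definitions above (Python 3 division
# assumed): a 7/3 split gives 1 - (0.7**2 + 0.3**2) = 0.42, a 5/5 split gives
# 0.5, and with k = 2 categories IQV scales the 5/5 case up to the maximum 1.0.
if __name__ == '__main__':
    assert abs(unalk_coeff(['A'] * 7 + ['B'] * 3) - 0.42) < 1e-9
    assert abs(unalk_coeff(['A'] * 5 + ['B'] * 5) - 0.50) < 1e-9
    assert abs(IQV(['A'] * 5 + ['B'] * 5, 2) - 1.0) < 1e-9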
from flask_admin.contrib.sqla.typefmt import DEFAULT_FORMATTERS as BASE_FORMATTERS
import json
from jinja2 import Markup
from wtforms.widgets import html_params
from geoalchemy2.shape import to_shape
from geoalchemy2.elements import WKBElement
from sqlalchemy import func
from flask import current_app
def geom_formatter(view, value):
params = html_params(**{
"data-role": "leaflet",
"disabled": "disabled",
"data-width": 100,
"data-height": 70,
"data-geometry-type": to_shape(value).geom_type,
"data-zoom": 15,
})
    if value.srid == -1:  # compare by value; 'is' on an int literal is unreliable
        geojson = current_app.extensions['sqlalchemy'].db.session.scalar(func.ST_AsGeoJson(value))
    else:
        geojson = current_app.extensions['sqlalchemy'].db.session.scalar(func.ST_AsGeoJson(value.ST_Transform(4326)))
return Markup('<textarea %s>%s</textarea>' % (params, geojson))
DEFAULT_FORMATTERS = BASE_FORMATTERS.copy()
DEFAULT_FORMATTERS[WKBElement] = geom_formatter
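# A minimal sketch of wiring the formatter into a view ('GeoModelView' is a
# hypothetical name; column_type_formatters is the standard flask-admin
# ModelView hook for per-type rendering):
#
#   from flask_admin.contrib.sqla import ModelView
#
#   class GeoModelView(ModelView):
#       column_type_formatters = DEFAULT_FORMATTERS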
|
print "!!! begin SVGCanvas.transform", a, b, c, d, e, f
tr = self.currGroup.getAttribute("transform")
t = 'matrix(%f, %f, %f, %f, %f, %f)' % (a,b,c,d,e,f)
if (a, b, c, d, e, f) != (1, 0, 0, 1, 0, 0):
self.currGroup.setAttribute("transform", "%s %s" % (tr, t))
def translate(self, x, y):
# probably never used
print "!!! begin SVGCanvas.translate"
return
tr = self.currGroup.getAttribute("transform")
t = 'translate(%f, %f)' % (x, y)
self.currGroup.setAttribute("transform", "%s %s" % (tr, t))
def scale(self, x, y):
# probably never used
print "!!! begin SVGCanvas.scale"
return
tr = self.groups[-1].getAttribute("transform")
t = 'scale(%f, %f)' % (x, y)
self.currGroup.setAttribute("transform", "%s %s" % (tr, t))
### paths ###
def moveTo(self, x, y):
self.path = self.path + 'M %f %f ' % (x, y)
def lineTo(self, x, y):
self.path = self.path + 'L %f %f ' % (x, y)
def curveTo(self, x1, y1, x2, y2, x3, y3):
self.path = self.path + 'C %f %f %f %f %f %f ' % (x1, y1, x2, y2, x3, y3)
def closePath(self):
self.path = self.path + 'Z '
def saveState(self):
pass
def restoreState(self):
pass
class _SVGRenderer(Renderer):
"""This draws onto an SVG document.
"""
def __init__(self):
self._tracker = StateTracker()
self.verbose = 0
def drawNode(self, node):
"""This is the | recursive method called for each node in the tree.
"""
if self.verbose: print "### begin _SVGRenderer.drawNode(%r)" % node
self._canvas.comment('begin node %s'%`node`)
color = self._canvas._color
style = self._canvas.style.copy()
        if not (isinstance(node, Path) and node.isClipPath):
pass # self._canvas.saveState()
#apply state changes
deltas = getStateDelta(node)
self._tracker.push(deltas)
self.applyStateChanges(deltas, {})
#draw the object, or recurse
self.drawNodeDispatcher(node)
rDeltas = self._tracker.pop()
if not (isinstance(node, Path) and node.isClipPath):
pass #self._canvas.restoreState()
self._canvas.comment('end node %s'%`node`)
self._canvas._color = color
#restore things we might have lost (without actually doing anything).
for k, v in rDeltas.items():
if self._restores.has_key(k):
setattr(self._canvas,self._restores[k],v)
self._canvas.style = style
if self.verbose: print "### end _SVGRenderer.drawNode(%r)" % node
_restores = {'strokeColor':'_strokeColor','strokeWidth': '_lineWidth','strokeLineCap':'_lineCap',
'strokeLineJoin':'_lineJoin','fillColor':'_fillColor','fontName':'_font',
'fontSize':'_fontSize'}
def _get_link_info_dict(self, obj):
#We do not want None or False as the link, even if it is the
#attribute's value - use the empty string instead.
url = getattr(obj, "hrefURL", "") or ""
title = getattr(obj, "hrefTitle", "") or ""
if url :
#Is it valid to have a link with no href? The XML requires
#the xlink:href to be present, but you might just want a
#tool tip shown (via the xlink:title attribute). Note that
#giving an href of "" is equivalent to "the current page"
#(a relative link saying go nowhere).
return {"xlink:href":url, "xlink:title":title, "target":"_top"}
#Currently of all the mainstream browsers I have tested, only Safari/webkit
#will show SVG images embedded in HTML using a simple <img src="..." /> tag.
#However, the links don't work (Safari 3.2.1 on the Mac).
#
#Therefore I use the following, which also works for Firefox, Opera, and
#IE 6.0 with Adobe SVG Viewer 6 beta:
#<object data="..." type="image/svg+xml" width="430" height="150" class="img">
#
#Once displayed, Firefox and Safari treat the SVG like a frame, and
#by default clicking on links acts "in frame" and replaces the image.
#Opera does what I expect, and replaces the whole page with the link.
#
#Therefore I use target="_top" to force the links to replace the whole page.
#This now works as expected on Safari 3.2.1, Firefox 3.0.6, Opera 9.20.
#Perhaps the target attribute should be an option, perhaps defaulting to
#"_top" as used here?
else :
return None
def drawGroup(self, group):
if self.verbose: print "### begin _SVGRenderer.drawGroup"
currGroup = self._canvas.startGroup()
a, b, c, d, e, f = self._tracker.getState()['transform']
for childNode in group.getContents():
if isinstance(childNode, UserNode):
node2 = childNode.provideNode()
else:
node2 = childNode
self.drawNode(node2)
self._canvas.transform(a, b, c, d, e, f)
self._canvas.endGroup(currGroup)
if self.verbose: print "### end _SVGRenderer.drawGroup"
def drawRect(self, rect):
link_info = self._get_link_info_dict(rect)
if rect.rx == rect.ry == 0:
#plain old rectangle
self._canvas.rect(
rect.x, rect.y,
rect.x+rect.width, rect.y+rect.height, link_info=link_info)
else:
#cheat and assume ry = rx; better to generalize
#pdfgen roundRect function. TODO
self._canvas.roundRect(
rect.x, rect.y,
rect.x+rect.width, rect.y+rect.height,
rect.rx, rect.ry,
link_info=link_info)
def drawString(self, stringObj):
if self._canvas._fillColor:
S = self._tracker.getState()
text_anchor, x, y, text = S['textAnchor'], stringObj.x, stringObj.y, stringObj.text
if not text_anchor in ('start', 'inherited'):
font, fontSize = S['fontName'], S['fontSize']
textLen = stringWidth(text, font,fontSize)
if text_anchor=='end':
x -= textLen
elif text_anchor=='middle':
x -= textLen/2
elif text_anchor=='numeric':
x -= numericXShift(text_anchor,text,textLen,font,fontSize)
else:
raise ValueError, 'bad value for text_anchor ' + str(text_anchor)
self._canvas.drawString(text,x,y,link_info=self._get_link_info_dict(stringObj))
def drawLine(self, line):
if self._canvas._strokeColor:
self._canvas.line(line.x1, line.y1, line.x2, line.y2)
def drawCircle(self, circle):
self._canvas.circle( circle.cx, circle.cy, circle.r, link_info=self._get_link_info_dict(circle))
def drawWedge(self, wedge):
centerx, centery, radius, startangledegrees, endangledegrees = \
wedge.centerx, wedge.centery, wedge.radius, wedge.startangledegrees, wedge.endangledegrees
yradius = wedge.yradius or wedge.radius
(x1, y1) = (centerx-radius, centery-yradius)
(x2, y2) = (centerx+radius, centery+yradius)
extent = endangledegrees - startangledegrees
self._canvas.drawArc(x1, y1, x2, y2, startangledegrees, extent, fromcenter=1)
def drawPolyLine(self, p):
if self._canvas._strokeColor:
self._canvas.polyLine(_pointsFromList(p.points))
def drawEllipse(self, ellipse):
#need to convert to pdfgen's bounding box representation
x1 = ellipse.cx - ellipse.rx
x2 = ellipse.cx + ellipse.rx
y1 = ellipse.cy - ellipse.ry
y2 = ellipse.cy + ellipse.ry
self._canvas.ellipse(x1,y1,x2,y2, link_info=self._get_link_info_dict(ellipse))
def drawPolygon(self, p):
self._canvas.polygon(_pointsFromList(p.points), closed=1, link_info=self._get_link_info_dict(p))
|
import struct
from coinpy.lib.serialization.common.serializer import Serializer
from coinpy.lib.serialization.exceptions import MissingDataException
class VarintSerializer(Serializer):
def __init__(self, desc=""):
self.desc = desc
def serialize(self, value):
if (value < 0xfd):
return (struct.pack("<B", value))
if (value <= 0xffff):
return ("\xfd" + struct.pack("<H", value))
if (value <= 0xffffffff):
return ("\xfe" + struct.pack("<I", value))
return ("\xff" + struct.pack("<Q", value))
def get_size(self, value):
if (value < 0xfd):
return (1)
if (value <= 0xffff):
return (3)
if (value <= 0xffffffff):
return (5)
return (9)
def deserialize(self, data, cursor=0):
if (len(data) - cursor < 1):
raise MissingDataException("Decoding error: not enough data for varint")
prefix = struct.unpack_from("<B", data, cursor)[0]
cursor += 1
if (prefix < 0xFD):
return (prefix, cursor)
if (len(data) - cursor < {0xFD: 2, 0xFE: 4, 0xFF: 8}[prefix]):
            raise MissingDataException("Decoding error: not enough data for varint of type : %d" % (prefix))
if (prefix == 0xFD):
return (struct.unpack_from("<H", data, cursor)[0], cursor + 2)
if (prefix == 0xFE):
return (struct.unpack_from("<I", data, cursor)[0], cursor + 4)
return (struct.unpack_from("<Q", data, cursor)[0], cursor + 8)
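# A round-trip sketch of the encoding above (Python 2 str-as-bytes, matching
# the serializer's string literals): values below 0xfd fit in a single byte,
# while the 0xfd/0xfe/0xff prefixes announce 2-, 4- and 8-byte little-endian
# payloads.
if __name__ == '__main__':
    s = VarintSerializer()
    for value in (0x10, 0x1234, 0x12345678, 0x123456789a):
        encoded = s.serialize(value)
        assert len(encoded) == s.get_size(value)
        decoded, cursor = s.deserialize(encoded)
        assert decoded == value and cursor == len(encoded)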
recvdContentChecksum = sha.sha(Contents).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
# data is corrupt do not proceed further
        LOG_ACTION.log(LogType.Error, 'Content received did not match checksum with sha1, exiting Set')
return [-1]
(ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = Set(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
return retval
def Test_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
recvdContentChecksum = md5.md5(Contents).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
LOG_ACTION.log(LogType.Info, 'Content received did not match checksum with md5, trying with sha1')
# validate with sha1
recvdContentChecksum = sha.sha(Contents).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
# data is corrupt do not proceed further
LOG_ACTION.log(LogType.Error, 'Content received did not match checksum with sha1, exiting Set')
return [0]
(ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = Test(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
return retval
def Get_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
arg_names = list(locals().keys())
(ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = 0
retval = Get(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
ConfigType = protocol.MI_String(ConfigType)
ConfigID = protocol.MI_String(ConfigID)
Ensure = protocol.MI_String(Ensure)
Contents = protocol.MI_String(Contents)
ContentChecksum = protocol.MI_String(ContentChecksum)
retd = {}
ld = locals()
for k in arg_names:
retd[k] = ld[k]
return retval, retd
############################################################
# Begin user defined DSC functions
############################################################
def SetShowMof(a):
global show_mof
show_mof = a
def ShowMof(op, ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
if not show_mof:
return
mof = ''
mof += op + ' nxOMSAgentNPMConfig MyNPMConfig \n'
mof += '{\n'
mof += ' ConfigType = "' + ConfigType + '"\n'
mof += ' ConfigID = "' + ConfigID + '"\n'
mof += ' Contents = "' + Contents + '"\n'
mof += ' Ensure = "' + Ensure + '"\n'
mof += ' ContentChecksum = "' + ContentChecksum + '"\n'
mof += '}\n'
f = open('./test_mofs.log', 'a')
Print(mof, file=f)
LG().Log(LogType.Info, mof)
f.close()
def Set(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
ShowMof('SET', ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = 0
if ConfigType != 'UpdatedAgentConfig':
LOG_ACTION.log(LogType.Error, 'Config type did not match, exiting set')
return [-1]
if Ensure == 'Absent':
if os.path.exists(AGENT_RESOURCE_VERSION_PATH):
LG().Log(LogType.Info, 'Ensure is absent, but resource is present, purging')
success = PurgeSolution()
if not success:
retval = -1
return [retval]
if TestConfigUpdate(Contents) != 0:
retval = SetConfigUpdate(Contents)
version = TestResourceVersion()
if version != 0:
retval = SetFilesUpdate(version)
return [retval]
def Test(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
ShowMof('TEST', ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = 0
if not os.path.exists(AGENT_SCRIPT_PATH):
LG().Log(LogType.Error, 'npmd set cap script does not exist, exiting test')
return [retval]
if ConfigType != 'UpdatedAgentConfig':
LOG_ACTION.log(LogType.Error, 'Config type did not match, exiting test')
return [retval]
if Ensure == 'Absent':
if os.path.exists(AGENT_RESOURCE_VERSION_PATH):
LG().Log(LogType.Info, 'Ensure is absent, resource is present on the agent, set will purge')
retval = -1
return [retval]
if TestResourceVersion() != 0 or TestConfigUpdate(Contents) != 0:
retval = -1
return [retval]
def Get(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
retval = 0
ShowMof('GET', ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
return [retval]
def Print(s, file=sys.stdout):
file.write(s + '\n')
# Compare resource version in DSC and agent machine
# Returns
# 0 if version is same
# dsc version number if there is a mismatch or agent config not present
def TestResourceVersion():
retval = 0
dscVersion = ReadFile(DSC_RESOURCE_VERSION_PATH)
if not os.path.exists(AGENT_RESOURCE_VERSION_PATH):
#npmd agent is not present, copy binaries
retval = dscVersion
else:
agentVersion = ReadFile(AGENT_RESOURCE_VERSION_PATH)
if agentVersion != dscVersion:
#version mismatch, copy binaries
retval = dscVersion
return retval
def TestConfigUpdate(Contents):
retval = 0
destFileFullPath = CONFIG_PATH.__add__(DEST_FILE_NAME)
if not os.path.exists(CONFIG_PATH):
LOG_ACTION.log(LogType.Error, 'CONFIG_PATH does not exist')
retval = 0
elif not os.path.exists(destFileFullPath):
# Configuration does not exist, fail
retval = -1
else:
origConfigData = ReadFile(destFileFullPath)
#compare
if origConfigData is None or origConfigData != Contents:
retval = -1
return retval
def SetConfigUpdate(Contents):
destFileFullPath = CONFIG_PATH.__add__(DEST_FILE_NAME)
# Update config after checking if directory exists
if not os.path.exists(CONFIG_PATH):
LOG_ACTION.log(LogType.Error, 'CONFIG_PATH does not exist')
retval = -1
else:
retval = WriteFile(destFileFullPath, Contents)
if retval == 0 and os.path.exists(AGENT_RESOURCE_VERSION_PATH): #notify server only if plugin is present
LG().Log(LogType.Info, 'Updated the file, going to notify server')
NotifyServer(Commands.Config)
return retval
def SetFilesUpdate(newVersion):
retval = UpdateAgentBinary(newVersion)
retval &= UpdatePluginFiles()
if retval:
return 0
return -1
def UpdateAgentBinary(newVersion):
retval = True
arch = platform.architecture()
src = ''
if arch is not None and arch[0] == X64:
src = RESOURCE_MODULE_PATH.__add__(DSC_X64_AGENT_PATH)
retval &= DeleteAllFiles(src, AGENT_BINARY_PATH)
retval &= CopyAllFiles(src, AGENT_BINARY_PATH)
else:
src = RESOURCE_MODULE_PATH.__add__(DSC_X86_AGENT_PATH)
retval &= DeleteAllFiles(src, AGENT_BINARY_PATH)
retval &= CopyAllFiles(src, AGENT_BINARY_PATH)
LOG_ACTION.log(LogType.Error, 'npmd agent binary do not support 32-bit.')
#Update version number after deleting and copying new agent files
if retval == True:
WriteFile(AGENT_RESOURCE_VERSION_PATH, newVersion)
# set capabilities to binary
        src_files = os.listdir(src)
        full_file_name = None
        for file_name in src_files:
            if AGENT_BINARY_NAME in file_name:
                full_file_name = os.path.join(AGENT_BINARY_PATH, file_name)
                break
        if full_file_name is not None: # guard: previously raised NameError when no agent binary matched
            NPM_ACTION.binary_setcap(full_file_name)
# Notify ruby plugin
#retval &= NotifyServer(Commands.RestartNPM)
return retval
def UpdatePluginFiles():
retval = True
#replace files
retval &= DeleteAllFiles(RESOURCE_MODULE_PATH.__add__(DSC_PLUGIN_PATH), PLUGIN_PATH)
retval &= DeleteAllFiles(RESOURCE_MODULE_PATH.__add__(DSC_PLUGIN_CONF_PATH), PLUGIN_CONF_PATH)
retval &= CopyAllFiles(RESOURCE_MODULE_PATH.__add__(DSC_PLUGIN_PATH), PLUGIN_PATH)
retval &= CopyAllFiles(RESOURCE_MODULE_PATH.__add__(DSC_PLUGIN_CONF_PATH), PLUGIN_CONF_PATH)
# restart oms agent
    retval &= OMS_ACTION.restart_oms_agent()
    return retval
rst_prolog = \
'''\
.. role:: bash(code)
:language: bash
.. role:: python(code)
:language: python
'''
rst_prolog = _dedent(rst_prolog)
nitpicky = True
# FIXME: encapsulate this in a Sphinx extension. make ``rfc_uri_tmpl`` a
# Sphinx config setting
rfc_uri_tmpl = 'https://tools.ietf.org/html/rfc{}.html'
def rfc_role(role, rawtext, text, lineno, inliner, options={}, content=[]):
_rst.roles.set_classes(options)
rfcrefpattern = r'(?:(?P<displaytext>[^<]*)'\
r' <)?(?P<refname>[^>]*)(?(displaytext)>|)'
match = _re.match(rfcrefpattern, _rst.roles.utils.unescape(text))
if match:
rfcnum, anchorsep, anchor = match.group('refname').partition('#')
try:
rfcnum = int(rfcnum)
if rfcnum <= 0:
raise ValueError
except ValueError:
message = \
inliner\
.reporter\
.error('invalid RFC number {!r}; expected a positive integer'
.format(rfcnum),
line=lineno)
problem = inliner.problematic(rawtext, rawtext, message)
return [problem], [message]
uri = rfc_uri_tmpl.format(rfcnum)
if anchor:
uri += anchorsep + anchor
displaytext = match.group('displaytext')
if displaytext:
refnode = _rst.nodes.reference(rawtext, displaytext, refuri=uri,
**options)
else:
displaytext = 'RFC {}'.format(rfcnum)
if anchor:
displaytext += ' ' + anchor.replace('-', ' ')
strongnode = _rst.nodes.strong(rawtext, displaytext)
refnode = _rst.nodes.reference('', '', strongnode, refuri=uri,
**options)
return [refnode], []
else:
message = \
inliner\
.reporter\
.error('invalid RFC reference {!r}'.format(text), line=lineno)
problem = inliner.problematic(rawtext, rawtext, message)
return [problem], [message]
_rst.roles.register_local_role('rfc', rfc_role)
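# Usage sketch in reST source (hypothetical references), given the role
# registered above:
#
#   :rfc:`2616`                  -> strong link reading "RFC 2616"
#   :rfc:`2616#section-14.9`     -> anchor link reading "RFC 2616 section 14.9"
#   :rfc:`the HTTP spec <2616>`  -> plain link with custom display text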
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = '{}-doc'.format(project)
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', '{}.tex'.format(project), u'{} documentation'.format(project),
author, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [('index', project.lower(), u'{} documentation'.format(project),
[author], 1)]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [('index', project, u'{} documentation'.format(project),
author, project, description, 'Miscellaneous')]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
import os
import sys
import string
filenames = os.listdir(os.getcwd())
for file in filenames:
if os.path.splitext(file)[1] == ".o" or os.path.splitext(file)[1] == ".elf" :
print "objdumparm.exe -D "+file
os.system("C:/WindRiver/gnu/4.1.2-vxworks-6.8/x86- | win32/bin/objdumparm.exe -D "+file +" > " +file + ". | txt")
os.system("pause")
|
"""
WSGI config for cloudlynt project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cloudlynt.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
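# A minimal pass-through middleware sketch (hypothetical class, following the
# commented pattern above): wrap ``application`` and delegate every request.
#
#   class PassthroughMiddleware(object):
#       def __init__(self, application):
#           self.application = application
#
#       def __call__(self, environ, start_response):
#           # inspect or modify ``environ`` here, then delegate to Django
#           return self.application(environ, start_response)
#
#   application = PassthroughMiddleware(application)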
# stupid 2.7.2 by peterm, patch by Smack
# http://robotgame.org/viewrobot/5715
import random
import math
import rg
def around(l):
return rg.locs_around(l)
def around2(l):
return [(l[0]+2, l[1]), (l[0]+1, l[1]+1), (l[0], l[1]+2), (l[0]-1, l[1]+1),
(l[0]-2, l[1]), (l[0]-1, l[1]-1), (l[0], l[1]-2), (l[0]+1, l[1]-1)]
def diag(l1, l2):
if rg.wdist(l1, l2) == 2:
if abs(l1[0] - l2[0]) == 1:
return True
return False
def infront(l1, l2):
if rg.wdist(l1, l2) == 2:
if diag(l1, l2):
return False
else:
return True
return False
def mid(l1, l2):
return (int((l1[0]+l2[0]) / 2), int((l1[1]+l2[1]) / 2))
def sign(x):
if x > 0:
return 1
elif x == 0:
return 0
else:
return -1
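# Worked examples for the geometry helpers above (illustrative values only):
#   around2((5, 5))          -> the 8 cells at walk-distance 2 from (5, 5)
#   diag((5, 5), (6, 6))     -> True, a diagonal cell at distance 2
#   infront((5, 5), (7, 5))  -> True, a straight-line cell at distance 2
#   mid((5, 5), (7, 5))      -> (6, 5), the square between the two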
class Robot:
def act(self, game):
robots = game['robots']
##print self.location, "starts thinking"
def isenemy(l):
if robots.get(l) != None:
if robots[l]['player_id'] != self.player_id:
return True
return False
def isteammate(l):
if robots.get(l) != None:
if robots[l]['player_id'] == self.player_id:
return True
return False
def isempty(l):
if ('normal' in rg.loc_types(l)) and not ('obstacle' in rg.loc_types(l)):
if robots.get(l) == None:
return True
return False
def isspawn(l):
if 'spawn' in rg.loc_types(l):
return True
return False
# scan the area around
enemies = []
for loc in around(self.location):
if isenemy(loc):
enemies.append(loc)
moveable = []
moveable_safe = []
for loc in around(self.location):
if isempty(loc):
moveable.append(loc)
if isempty(loc) and not isspawn(loc):
moveable_safe.append(loc)
def guard():
return ['guard']
def suicide():
return ['suicide']
def canflee():
return len(moveable) > 0
def flee():
if len(moveable_safe) > 0:
return ['move', random.choice(moveable_safe)]
if len(moveable) > 0:
return ['move', random.choice(moveable)]
return guard()
def canattack():
return len(enemies) > 0
def attack():
r = enemies[0]
for loc in enemies:
if robots[loc]['hp'] > robots[r]['hp']:
r = loc
return ['attack', r]
def panic():
if canflee():
return flee()
elif canattack():
return attack()
else:
return guard()
def imove(to):
f = self.location
d = (to[0]-f[0], to[1]-f[1])
di = (sign(d[0]), sign(d[1]))
good = []
if di[0]*di[1] != 0:
good.append((di[0], 0))
good.append((0, di[1]))
else:
good.append(di)
for dmove in good:
loc = (f[0]+dmove[0], f[1]+dmove[1])
if isempty(loc):
return ['move', loc]
return flee()
##print "There are", len(enemies), "enemies close"
if len(enemies) > 1:
            # will we die next turn if we don't move?
if self.hp <= len(enemies)*10:
# it's ok to suicide if you take someone else with you
for loc in enemies:
if robots[loc]['hp'] <= 15:
##print "Suicide!"
                        pass  # return suicide()
##print "Too many enemies around, panic!"
return panic()
elif len(enemies) == 1:
if self.hp <= 10:
                if robots[enemies[0]]['hp'] > 15:
##print "Enemy will kill me, panic!"
return panic()
elif robots[enemies[0]]['hp'] <= 10:
##print "I will kill enemy, attack!"
return attack()
#else:
# # might tweak this
# ##print "I'm too low on health, suicide!"
# return suicide()
else:
if robots[enemies[0]]['hp'] <= 10:
if self.hp <= 15:
# avoid suiciders
##print "Avoiding suicider, panic!"
return panic()
else:
##print "Attack!"
return attack()
# if we're at spawn, get out
if isspawn(self.location):
##print "I'm on spawn, panic!"
return panic()
closehelp = None
prediction = None
# are there enemies in 2 squares?
for loc in around2(self.location):
if isenemy(loc):
##print "Enemy in 2 squares:", loc
# try to help teammates
for loc2 in around(loc):
if isteammate(loc2):
##print "And a teammate close to him:", loc2
closehelp = imove(loc)
# predict and attack
if infront(loc, self.location):
prediction = ['attack', mid(loc, self.location)]
elif rg.wdist(rg.toward(loc, rg.CENTER_POINT), self.location) == 1:
prediction = ['attack', rg.toward(loc, rg.CENTER_POINT)]
else:
prediction = ['attack', (self.location[0], loc[1])]
if closehelp != None:
##print "Help teammate fight:", closehelp
return closehelp
if prediction != None:
##print "Predict:", prediction
return prediction
# move randomly
##print "Can't decide, panic!"
return panic()
        elements=root.findall(".//"+self.ELEMENT_VARIABLE)
gads=[]
for e in elements:
gads.append(self.__getVariableDesc(e))
return gads
def getColumnDesc(self, columnName):
pass
def getGlobalAttributeDesc(self, attributeName):
element=self.__getGlobalAttributeElement(attributeName)
return self.__getGlobalAttributeDesc(element)
def getGlobalAttributeStrategyDesc(self, attributeName):
element=self.__getGlobalAttributeStrategyElement(attributeName)
className=element.find(self.ELEMENT_CLASS_NAME).text
return GlobalAttributeStrategyDesc(className)
def getHeaderStrategyDesc(self):
element=self.__getHeaderStrategyElement()
className=element.find(self.ELEMENT_CLASS_NAME).text
return HeaderStrategyDesc(className)
def getVariableAttributeDesc(self, variableName):
pass
def getVariableAttributeStrategyDesc(self, variableName):
pass
    def getVariableDesc(self, variableName):
        # mirror getGlobalAttributeDesc: look the element up by name and
        # build the description (including its strategy) via the helper
        element=self.__getVariableElement(variableName)
        return self.__getVariableDesc(element)
def __getGlobalAttributeDesc(self, element):
name=element.find(self.ELEMENT_NAME).text
dataType=element.find(self.ELEMENT_DATA_TYPE).text
child=element.find(self.ELEMENT_GLOBAL_ATTRIBUTE_STRATEGY)
className=child.find(self.ELEMENT_CLASS_NAME).text
strategyDesc=GlobalAttributeStrategyDesc(className)
return GlobalAttributeDesc(name, dataType, strategyDesc)
def __getGlobalAttributeElement(self, attributeName):
root = self.tree.getroot()
elements=root.findall(".//"+self.ELEMENT_GLOBAL_ATTRIBUTE)
element=None
for e in elements:
if e.find(self.ELEMENT_NAME).text == attributeName:
element=e
break
if element is None:
raise Exception(self.ELEMENT_GLOBAL_ATTRIBUTE+" element with name '"+attributeName+
"' not found in file '"+self.xmlFile+"'.")
return element
def __getGlobalAttributeStrategyElement(self, attributeName):
globalAttributeElement=self.__getGlobalAttributeElement(attributeName)
element=globalAttributeElement.find(self.ELEMENT_GLOBAL_ATTRIBUTE_STRATEGY)
if element is None:
raise Exception(self.ELEMENT_GLOBAL_ATTRIBUTE_STRATEGY+" element with name '"+attributeName+
"' not found in file '"+self.xmlFile+"'.")
return element
def __getVariableAttributeDesc(self, element):
name=element.find(self.ELEMENT_VARIABLE_NAME).text
dataType=element.find(self.ELEMENT_DATA_TYPE).text
child=element.find(self.ELEMENT_VARIABLE_ATTRIBUTE_STRATEGY)
className=child.find(self.ELEMENT_CLASS_NAME).text
strategyDesc=VariableAttributeStrategyDesc(className)
return VariableAttributeDesc(name, dataType, "attributes", strategyDesc)
def __getVariableDesc(self, element):
name=element.find(self.ELEMENT_NAME).text
child=element.find(self.ELEMENT_VARIABLE_STRATEGY)
className=child.find(self.ELEMENT_CLASS_NAME).text
strategyDesc=VariableStrategyDesc(className)
return VariableDesc(name, strategyDesc)
def __getHeaderStrategyElement(self):
root = self.tree.getroot()
elements=root.findall(".//"+self.ELEMENT_HEADER_STRATEGY)
if len(elements) == 0:
            raise Exception(self.ELEMENT_HEADER_STRATEGY+" element "+
                            "not found in file '"+self.xmlFile+"'.")
return elements[0]
def __eq__(self, other):
if self.xmlFile != other.xmlFile:
return False
return True
class ColumnDesc:
def __init__ (self, columnName, index, dataType):
self.columnName=columnName
self.index=index
self.dataType=dataType
def getColumnName(self):
return self.columnName
def getDataType(self):
return self.dataType
def getIndex(self):
return self.index
def __eq__(self, other):
if self.columnName != other.columnName:
return False
if self.index != other.index:
return False
if self.dataType != other.dataType:
return False
return True
class GlobalAttributeDesc:
def __init__ (self, attributeName, attributeType, globalAttributeStrategyDesc):
self.attributeName=attributeName
self.attributeType=attributeType
self.globalAttributeStrategyDesc=globalAttributeStrategyDesc
def getAttributeName(self):
return self.attributeName
def getAttributeType(self):
return self.attributeType
def getGlobalAttributeStrategyDesc(self):
return self.globalAttributeStrategyDesc
def __eq__(self, other):
if self.attributeName != other.attributeName:
return False
if self.attributeType != other.attributeType:
return False
if self.globalAttributeStrategyDesc != other.globalAttributeStrategyDesc:
return False
return True
#A base class for strategy descriptions.
class StrategyDesc(object):
#Hold the name of the strategy class to be loaded.
def __init__ (self, strategyClassName):
self.strategyClassName=strategyClassName
def getStrategyClassName(self):
return self.strategyClassName
def __eq__(self, other):
if self.strategyClassName != other.strategyClassName:
return False
return True
class GlobalAttributeStrategyDesc(StrategyDesc):
def __init__ (self, strategyClassName):
super().__init__(strategyClassName)
def getStrategyClassName(self):
return self.strategyClassName
#Return the value parsed from the header of the given global attribute
def parse (self, attributeName, header):
#Instantiate the strategy class by name.
c=Util().getClass(self.strategyClassName)
return c.parse(attributeName, header)
class HeaderStrategyDesc(StrategyDesc):
def __init__ (self, strategyClassName):
super().__init__(strategyClassName)
def getStrategyClassName(self):
return self.strategyClassName
#Return the header parsed from the file.
def parse (self, file):
c=Util().getClass(self.strategyClassName)
return c.parse(file)
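# Usage sketch (hypothetical strategy path): the descriptions hold only a
# dotted class name and resolve the implementation lazily via Util().getClass,
# so configuration files can select strategies as plain strings.
#
#   desc = HeaderStrategyDesc("mypkg.strategies.CsvHeaderStrategy")
#   header = desc.parse(open("data.csv"))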
class VariableAttributeDesc:
def __init__ (self, variableName, variableType, attributes, variableAttributeStrategyDesc):
        self.variableName=variableName
        self.variableType=variableType
        self.attributes=attributes
self.variableAttributeStrategyDesc=variableAttributeStrategyDesc
def getVariableName(self):
return self.variableName
def getVariableType(self):
return self.variableType
def getAttributes(self):
return self.attributes
def getVariableAttributeStrategyDesc(self):
return self.variableAttributeStrategyDesc
def __eq__(self, other):
if self.variableName != other.variableName:
return False
if self.variableType != other.variableType:
return False
if self.attributes != other.attributes:
return False
return True
#A strategy for parsing variable attributes
class VariableAttributeStrategyDesc:
def __init__ (self, strategyClassName):
self.strategyClassName=strategyClassName
#Parse the variable attributes from the header
def parse (self, variableName, header):
#Return the variable attribute
return Util().getClass(self.strategyClassName).parse(variableName, header)
class VariableDesc:
def __init__ (self, variableName, variableStrategyDesc):
self.variableName=variableName
        self.variableStrategyDesc=variableStrategyDesc
"""phial's custom errors."""
class ArgumentValidationError(Exception):
"""Excep | tion indicating argum | ent validation has failed."""
pass
class ArgumentTypeValidationError(ArgumentValidationError):
"""Exception indicating argument type validation has failed."""
pass
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from troveclient import base
class Flavor(base.Resource):
"""
A Flavor is an Instance type, specifying among other things, RAM size.
"""
def __repr__(self):
return "<Flavor: %s>" % self.name
class Flavors(base.ManagerWithFind):
"""
Manage :class:`Flavor` resources.
"""
resource_class = Flavor
def __repr__(self):
return "<Flavors Manager at %s>" % id(self)
def _list(self, url, response_key):
resp, body = self.api.client.get(url)
if not body:
raise Exception("Call to " + url + " did not return a body.")
return [self.resource_class(self, res) for res in body[response_key]]
def list(self):
"""
Get a list of all flavors.
:rtype: list of :class:`Flavor`.
"""
        return self._list("/flavors", "flavors")
def get(self, flavor):
"""
Get a specific flavor.
:rtype: :class:`Flavor`
"""
        return self._get("/flavors/%s" % base.getid(flavor),
"flavor")
wx.MessageBox('{} is down'.format(cb.GetValue()),
APPNAME,
style=wx.OK | wx.CENTRE | wx.ICON_ERROR)
def OnClickFullScreen(self, evt):
geometry = wx.Display().GetGeometry()
self._input['x'].SetValue(str(geometry[0]))
self._input['y'].SetValue(str(geometry[1]))
self._input['w'].SetValue(str(geometry[2]))
self._input['h'].SetValue(str(geometry[3]))
def OnClickFullArea(self, evt):
logging.debug('Event: {}'.format(evt.GetId()))
if evt.GetId() == self._input_rb_fullscreen.GetId():
self.OnClickFullScreen(evt)
else:
self.OnClickSelectionArea(evt)
def sync(func):
def wrapper(*args, **kv):
self = args[0]
self._lock.acquire()
try:
return func(*args, **kv)
finally:
self._lock.release()
return wrapper
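# The decorator above serializes calls per instance: it takes the first
# positional argument (``self``), acquires its ``_lock`` around the wrapped
# call, and always releases it. A minimal sketch of a class using it
# (hypothetical, mirroring CoreEventHandler below):
#
#   class Counter(object):
#       def __init__(self):
#           self._lock = Lock()
#           self.value = 0
#
#       @sync
#       def bump(self):
#           self.value += 1   # safe to call from several threads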
class Core(Thread):
def __init__(self, args, extra_args):
Thread.__init__(self)
self._args = args
self._extra_args = extra_args
self._threads = []
self._event_handler = CoreEventHandler()
if CrossPlatform.get().is_linux():
signal.signal(signal.SIGCHLD, self.signal_handler)
signal.signal(signal.SIGTERM, self.signal_handler)
def is_streaming(self):
if hasattr(self, '_stream_server') and self._stream_server is not None:
return True
return False
def stream_server_start(self, *args, **kargs):
if self.is_streaming():
return
logging.info('StreamServer start: {}'.format(kargs))
self._stream_server = StreamServer(kargs, lambda data:
self.handler('server', data))
self._stream_server.start()
def stream_server_stop(self):
if hasattr(self, '_stream_server') and self._stream_server is not None:
self._stream_server.stop()
self._stream_server = None
def playme(self, remote_ip, remote_port, service):
def myip(remote_ip):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect((remote_ip, 0))
return s.getsockname()[0]
def xbmc():
stream_url = self._stream_server.url.format(ip=myip(remote_ip))
url = 'http://{}:{}/xbmcCmds/xbmcHttp?command=PlayFile({})'.format(
remote_ip, remote_port, stream_url)
req = urllib2.Request(url)
logging.info('url = {}'.format(url))
response = urllib2.urlopen(req, None, 5)
result = response.read()
logging.info('result: {}'.format(result))
def desktop_mirror():
stream_url = self._stream_server.url.format(ip=myip(remote_ip))
data_as_json = json.dumps({'method': 'Player.Open',
'id': 1, 'jsonrpc': '2.0',
'params': {'item': {'file': stream_url}}}
)
            url = 'http://{}:{}/jsonrpc'.format(remote_ip, remote_port)
logging.info('url = {}'.format(url))
logging.info(' json = {}'.format(data_as_json))
req = urllib2.Request(url, data_as_json,
                                  {'Content-Type': 'application/json'})
response = urllib2.urlopen(req, None, 5)
result = response.read()
logging.info('result: {}'.format(result))
result = json.loads(result)
#switch back to json with pretty format
logging.debug(json.dumps(result, indent=4))
#logging.info('Got streaming url: {}'.
# format(self._stream_server.url))
#if service == '_desktop-mirror._tcp':
# desktop_mirror()
#else:
# xbmc()
desktop_mirror()
@property
def targets(self):
if not hasattr(self, '_avahi_browse'):
return dict()
return self._avahi_browse.targets
@property
def hosts(self):
if not hasattr(self, '_avahi_browse'):
return dict()
return self._avahi_browse.hosts
def run(self):
self._avahi_browse = AvahiService(lambda data:
self.handler('avahi', data))
        self._stream_receiver = StreamReceiver(lambda data:
                                               self.handler('srx', data))
        self._threads.append(self._avahi_browse)
        self._threads.append(self._stream_receiver)
for thread in self._threads:
thread.start()
for thread in self._threads:
thread.join()
def stop(self):
for thread in self._threads:
logging.debug('Stopping thread - {}'.format(thread.name))
thread.stop()
self.stream_server_stop()
def launch_selection_area_process(self):
SelectionArea(lambda data:
self.handler('selection', data)).start()
def register_listener(self, ui_window):
self._event_handler.register_listener(ui_window)
def on_event_relay(self, event_name, data):
self._event_handler.on_event_relay(event_name, data)
def on_event_stream_ready(self, event_name, data):
self._event_handler.on_event_stream_ready(event_name, data)
def handler(self, obj_id, data):
self._event_handler.handler(obj_id, data)
def signal_handler(self, signum, frame):
logging.info('signal: ' + str(signum))
if signal.SIGTERM == signum:
self.send_form_destroy()
try:
if CrossPlatform.get().is_linux():
if signal.SIGCHLD == signum:
os.waitpid(-1, os.WNOHANG)
except OSError:
pass
class CoreEventHandler(object):
def __init__(self):
self._lock = Lock()
self._listener = []
def register_listener(self, ui_window):
if ui_window not in self._listener:
self._listener.append(ui_window)
def on_event_relay(self, event_name, data):
evt = SomeNewEvent(attr1=event_name, attr2=data)
for listener in self._listener:
wx.PostEvent(listener, evt)
def on_event_stream_ready(self, event_name, data):
self.on_event_relay(event_name, data)
@sync
def handler(self, obj_id, data):
dispatch_map = {'avahi': self.on_event_relay,
'selection': self.on_event_relay,
'server': self.on_event_stream_ready,
'srx': self.on_event_relay}
if obj_id in dispatch_map:
dispatch_map[obj_id](obj_id, data)
return
        logging.error('event not processed: ' + obj_id)
class SelectionAreaExternalProgram(Thread):
def __init__(self, callback):
Thread.__init__(self)
self._callback = callback
def run(self):
if os.path.isfile('lib/areachooser.py') and \
os.access('lib/areachooser.py', os.X_OK):
execution = 'lib/areachooser.py'
else:
execution = 'areachooser.py'
cmd = Command(execution + ' "%x %y %w %h"', True, True).run()
line = cmd.stdout.split()
self._callback(line[0:4])
class SelectionArea(object):
def __init__(self, callback):
#Thread.__init__(self)
self._callback = callback
def run(self):
frame = FrmAreaChooser(None, -1, 'Live Area', self._callback)
frame.Show(True)
frame.SetTransparent(100)
frame.Center()
def start(self):
self.run()
class MyArgumentParser(object):
"""Command-line argument parser
"""
def __init__(self):
"""Create parser object
"""
description = ('IBS command line interface. '
'')
epilog = ('')
parser = ArgumentParser(description=description, epilog=epilog)
log_levels = ['notset', 'debug', 'info',
'warning', 'error', 'critical']
parser.add_argument('--log-level', dest='log_level_str',
                            default='info', choices=log_levels)
            if self.patch_exists(name):
raise StackException, 'Patch "%s" already exists' % name
# TODO: move this out of the stgit.stack module, it is really
# for higher level commands to handle the user interaction
def sign(msg):
return add_sign_line(msg, sign_str,
committer_name or git.committer().name,
committer_email or git.committer().email)
if not message and can_edit:
descr = edit_file(
self, sign(''),
'Please enter the description for the patch above.',
show_patch)
else:
descr = sign(message)
head = git.get_head()
if name == None:
name = make_patch_name(descr, self.patch_exists)
patch = self.get_patch(name)
patch.create()
patch.set_description(descr)
        patch.set_authname(author_name)
patch.set_authemail(author_email)
patch.set_authdate(author_date)
patch.set_commname(committer_name)
patch.set_commemail(committer_email)
if before_existing:
insert_string(self.__applied_file, patch.get_name())
        elif unapplied:
patches = [patch.get_name()] + self.get_unapplied()
write_strings(self.__unapplied_file, patches)
set_head = False
else:
append_string(self.__applied_file, patch.get_name())
set_head = True
if commit:
if top:
top_commit = git.get_commit(top)
else:
bottom = head
top_commit = git.get_commit(head)
# create a commit for the patch (may be empty if top == bottom);
# only commit on top of the current branch
assert(unapplied or bottom == head)
commit_id = git.commit(message = descr, parents = [bottom],
cache_update = False,
tree_id = top_commit.get_tree(),
allowempty = True, set_head = set_head,
author_name = author_name,
author_email = author_email,
author_date = author_date,
committer_name = committer_name,
committer_email = committer_email)
# set the patch top to the new commit
patch.set_top(commit_id)
else:
patch.set_top(top)
self.log_patch(patch, 'new')
return patch
def delete_patch(self, name, keep_log = False):
"""Deletes a patch
"""
self.__patch_name_valid(name)
patch = self.get_patch(name)
if self.__patch_is_current(patch):
self.pop_patch(name)
elif self.patch_applied(name):
raise StackException, 'Cannot remove an applied patch, "%s", ' \
'which is not current' % name
elif not name in self.get_unapplied():
raise StackException, 'Unknown patch "%s"' % name
# save the commit id to a trash file
write_string(os.path.join(self.__trash_dir, name), patch.get_top())
patch.delete(keep_log = keep_log)
unapplied = self.get_unapplied()
unapplied.remove(name)
write_strings(self.__unapplied_file, unapplied)
def forward_patches(self, names):
"""Try to fast-forward an array of patches.
On return, patches in names[0:returned_value] have been pushed on the
stack. Apply the rest with push_patch
"""
unapplied = self.get_unapplied()
forwarded = 0
top = git.get_head()
for name in names:
assert(name in unapplied)
patch = self.get_patch(name)
head = top
bottom = patch.get_bottom()
top = patch.get_top()
# top != bottom always since we have a commit for each patch
if head == bottom:
# reset the backup information. No logging since the
# patch hasn't changed
patch.set_top(top, backup = True)
else:
head_tree = git.get_commit(head).get_tree()
bottom_tree = git.get_commit(bottom).get_tree()
if head_tree == bottom_tree:
# We must just reparent this patch and create a new commit
# for it
descr = patch.get_description()
author_name = patch.get_authname()
author_email = patch.get_authemail()
author_date = patch.get_authdate()
committer_name = patch.get_commname()
committer_email = patch.get_commemail()
top_tree = git.get_commit(top).get_tree()
top = git.commit(message = descr, parents = [head],
cache_update = False,
tree_id = top_tree,
allowempty = True,
author_name = author_name,
author_email = author_email,
author_date = author_date,
committer_name = committer_name,
committer_email = committer_email)
patch.set_top(top, backup = True)
self.log_patch(patch, 'push(f)')
else:
top = head
# stop the fast-forwarding, must do a real merge
break
forwarded+=1
unapplied.remove(name)
if forwarded == 0:
return 0
git.switch(top)
append_strings(self.__applied_file, names[0:forwarded])
write_strings(self.__unapplied_file, unapplied)
return forwarded
def merged_patches(self, names):
"""Test which patches were merged upstream by reverse-applying
them in reverse order. The function returns the list of
patches detected to have been applied. The state of the tree
is restored to the original one
"""
patches = [self.get_patch(name) for name in names]
patches.reverse()
merged = []
for p in patches:
if git.apply_diff(p.get_top(), p.get_bottom()):
merged.append(p.get_name())
merged.reverse()
git.reset()
return merged
def push_empty_patch(self, name):
"""Pushes an empty patch on the stack
"""
unapplied = self.get_unapplied()
assert(name in unapplied)
# patch = self.get_patch(name)
head = git.get_head()
append_string(self.__applied_file, name)
unapplied.remove(name)
write_strings(self.__unapplied_file, unapplied)
self.refresh_patch(bottom = head, cache_update = False, log = 'push(m)')
def push_patch(self, name):
"""Pushes a patch on the stack
"""
unapplied = self.get_unapplied()
assert(name in unapplied)
patch = self.get_patch(name)
head = git.get_head()
bottom = patch.get_bottom()
top = patch.get_top()
# top != bottom always since we have a commit for each patch
if head == bottom:
# A fast-forward push. Just reset the backup
# information. No need for logging
patch.set_top(top, backup = True)
git.switch(top)
append_string(self.__applied_file, name)
unapplied.remove(name)
write_strings(self.__unapplied_file, unapplied)
return False
        # Need to create a new commit and merge in the old patch
ex = None
modified = False
# Try the fast applying first. If this fails, fall back to the
# three-way merge
if not git.apply_diff(bottom, top):
            # if git.apply_diff() fails, the patch requires a diff3 merge
        saver,
checkpoint_dir=checkpoint_dir,
checkpoint_filename_with_path=checkpoint_filename_with_path,
wait_for_checkpoint=wait_for_checkpoint,
max_wait_secs=max_wait_secs,
config=config)
if not is_loaded_from_checkpoint:
if init_op is None and not init_fn and self._local_init_op is None:
raise RuntimeError("Model is not initialized and no init_op or "
"init_fn or local_init_op was given")
if init_op is not None:
sess.run(init_op, feed_dict=init_feed_dict)
if init_fn:
init_fn(sess)
local_init_success, msg = self._try_run_local_init_op(sess)
if not local_init_success:
raise RuntimeError(
"Init operations did not make model ready for local_init. "
"Init op: %s, init fn: %s, error: %s" % (_maybe_name(init_op),
init_fn,
msg))
is_ready, msg = self._model_ready(sess)
if not is_ready:
raise RuntimeError(
"Init operations did not make model ready. "
"Init op: %s, init fn: %s, local_init_op: %s, error: %s" %
(_maybe_name(init_op), init_fn, self._local_init_op, msg))
return sess
def recover_session(self,
master,
saver=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None,
wait_for_checkpoint=False,
max_wait_secs=7200,
config=None):
"""Creates a `Session`, recovering if possible.
Creates a new session on 'master'. If the session is not initialized
and can be recovered from a checkpoint, recover it.
Args:
master: `String` representation of the TensorFlow master to use.
saver: A `Saver` object used to restore a model.
checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
dir will be used to restore.
checkpoint_filename_with_path: Full file name path to the checkpoint file.
wait_for_checkpoint: Whether to wait for checkpoint to become available.
max_wait_secs: Maximum time to wait for checkpoints to become available.
config: Optional `ConfigProto` proto used to configure the session.
Returns:
A pair (sess, initialized) where 'initialized' is `True` if
the session could be recovered and initialized, `False` otherwise.
Raises:
ValueError: If both checkpoint_dir and checkpoint_filename_with_path are
set.
"""
sess, is_loaded_from_checkpoint = self._restore_checkpoint(
master,
saver,
checkpoint_dir=checkpoint_dir,
checkpoint_filename_with_path=checkpoint_filename_with_path,
wait_for_checkpoint=wait_for_checkpoint,
max_wait_secs=max_wait_secs,
config=config)
# Always try to run local_init_op
local_init_success, msg = self._try_run_local_init_op(sess)
if not is_loaded_from_checkpoint:
# Do not need to run checks for readiness
return sess, False
restoring_file = checkpoint_dir or checkpoint_filename_with_path
if not local_init_success:
logging.info(
"Restoring model from %s did not make model ready for local init:"
" %s", restoring_file, msg)
return sess, False
is_ready, msg = self._model_ready(sess)
if not is_ready:
logging.info("Restoring model from %s did not make model ready: %s",
restoring_file, msg)
return sess, False
logging.info("Restored model from %s", restoring_file)
return sess, is_loaded_from_checkpoint
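  # A minimal recovery sketch (hypothetical names, assuming the usual
  # SessionManager workflow): try a checkpoint first, fall back to explicit
  # initialization when nothing could be restored.
  #
  #   sm = SessionManager(ready_op=ready_op)
  #   sess, initialized = sm.recover_session(master, saver=saver,
  #                                          checkpoint_dir="/tmp/train")
  #   if not initialized:
  #       sess.run(init_op)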
def wait_for_session(self, master, config=None, max_wait_secs=float("Inf")):
"""Creates a new `Session` and waits for model to be ready.
Creates a new `Session` on 'master'. Waits for the model to be
initialized or recovered from a checkpoint. It's expected that
another thread or process will make the model ready, and that this
is intended to be used by threads/processes that participate in a
distributed training configuration where a different thread/process
is responsible for initializing or recovering the model being trained.
NB: The amount of time this method waits for the session is bounded
by max_wait_secs. By default, this function will wait indefinitely.
Args:
      master: `String` representation of the TensorFlow master to use.
config: Optional ConfigProto proto used to configure the session.
max_wait_secs: Maximum time to wait for the session to become available.
Returns:
A `Session`. May be None if the operation exceeds the timeout
specified by config.operation_timeout_in_ms.
Raises:
tf.DeadlineExceededError: if the session is not available after
max_wait_secs.
"""
self._target = master
if max_wait_secs is None:
max_wait_secs = float("Inf")
timer = _CountDownTimer(max_wait_secs)
while True:
sess = session.Session(self._target, graph=self._graph, config=config)
not_ready_msg = None
not_ready_local_msg = None
local_init_success, not_ready_local_msg = self._try_run_local_init_op(
sess)
if local_init_success:
# Successful if local_init_op is None, or ready_for_local_init_op passes
is_ready, not_ready_msg = self._model_ready(sess)
if is_ready:
return sess
self._safe_close(sess)
# Do we have enough time left to try again?
remaining_ms_after_wait = (
timer.secs_remaining() - self._recovery_wait_secs)
if remaining_ms_after_wait < 0:
raise errors.DeadlineExceededError(
None, None,
"Session was not ready after waiting %d secs." % (max_wait_secs,))
logging.info("Waiting for model to be ready. "
"Ready_for_local_init_op: %s, ready: %s",
not_ready_local_msg, not_ready_msg)
time.sleep(self._recovery_wait_secs)
def _safe_close(self, sess):
"""Closes a session without raising an exception.
Just like sess.close() but ignores exceptions.
Args:
sess: A `Session`.
"""
# pylint: disable=broad-except
try:
sess.close()
except Exception:
# Intentionally not logging to avoid user complaints that
# they get cryptic errors. We really do not care that Close
# fails.
pass
# pylint: enable=broad-except
def _model_ready(self, sess):
"""Checks if the model is ready or not.
Args:
sess: A `Session`.
Returns:
A tuple (is_ready, msg), where is_ready is True if ready and False
otherwise, and msg is `None` if the model is ready, a `String` with the
reason why it is not ready otherwise.
"""
return _ready(self._ready_op, sess, "Model not ready")
def _model_ready_for_local_init(self, sess):
"""Checks if the model is ready to run local_init_op.
Args:
sess: A `Session`.
Returns:
A tuple (is_ready, msg), where is_ready is True if ready to run
local_init_op and False otherwise, and msg is `None` if the model is
ready to run local_init_op, a `String` with the reason why it is not ready
otherwise.
"""
return _ready(self._ready_for_local_init_op, sess,
"Model not ready for local init")
def _try_run_local_init_op(self, sess):
"""Tries to run _local_init_op, if not None, and is ready for local init.
Args:
sess: A `Session`.
Returns:
A tuple (is_successful, msg), where is_successful is True if
_local_init_op is None, or we ran _local_init_op, and False otherwise;
and msg is a `String` with the reason why the model was not ready to run
local init.
"""
if self._local_init_op is not None:
is_ready_for_local_init, msg = self._model_ready_for_local_init(sess)
if is_ready_for_local_init:
logging.info("Running local_init_op.")
sess.run(self._local_init_op, feed_dict=self._local_init_feed_dict,
                 options=self._local_init_run_options)
{
"name": "rmap1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"red1": {
"dest_link": {
"r1-link1": {
"route_maps": [
{
"name": "rmap1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "100",
| "vrf": "RED_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"red1": {
"dest_link": {
| "r1-link2": {
"route_maps": [
{
"name": "rmap1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"red1": {
"dest_link": {
"r1-link2": {
"route_maps": [
{
"name": "rmap1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "BLUE_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"blue1": {
"dest_link": {
"r1-link1": {
"route_maps": [
{
"name": "rmap1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"blue1": {
"dest_link": {
"r1-link1": {
"route_maps": [
{
"name": "rmap1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "BLUE_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"blue1": {
"dest_link": {
"r1-link2": {
"route_maps": [
{
"name": "rmap1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"blue1": {
"dest_link": {
"r1-link2": {
"route_maps": [
{
"name": "rmap1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
]
},
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"All the prefixes advertised from RED_1 and BLUE_1 should carry"
" attributes set by outbound route-maps within specific vrfs. "
"Router R1 should be able to match and permit/deny those "
"prefixes based on received attributes. Please use below "
"commands to verify."
)
input_dict = {
"community": "1:1 1:2 1:3 1:4 1:5",
}
for addr_type in ADDR_TYPES:
vrf = "RED_A"
routes = [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]]
result = verify_bgp_community(tgen, addr_type, "r1", routes, input_dict, vrf)
assert result is True, "Test case {} : Failed \n Error: {}".format(
tc_name, result
)
for addr_type in ADDR_TYPES:
vrf = "RED_B"
        routes = [NETWORK2_1[addr_type]]
        except Exception:
fs_enc = 'ascii'
if fs_enc == 'ascii':
raise RuntimeError('Click will abort further execution '
'because Python 3 was configured to use '
'ASCII as encoding for the environment. '
'Either switch to Python 2 or consult '
'http://click.pocoo.org/python3/ '
'for mitigation steps.')
else:
_check_for_unicode_literals()
if args is None:
args = sys.argv[1:]
else:
args = list(args)
if prog_name is None:
prog_name = make_str(os.path.basename(
sys.argv and sys.argv[0] or __file__))
# Hook for the Bash completion. This only activates if the Bash
# completion is actually enabled, otherwise this is quite a fast
# noop.
_bashcomplete(self, prog_name, complete_var)
try:
try:
with self.make_context(prog_name, args, **extra) as ctx:
rv = self.invoke(ctx)
if not standalone_mode:
return rv
ctx.exit()
except (EOFError, KeyboardInterrupt):
echo(file=sys.stderr)
raise Abort()
except ClickException as e:
if not standalone_mode:
raise
e.show()
sys.exit(e.exit_code)
except Abort:
if not standalone_mode:
raise
echo('Aborted!', file=sys.stderr)
sys.exit(1)
def __call__(self, *args, **kwargs):
"""Alias for :meth:`main`."""
return self.main(*args, **kwargs)
class Command(BaseCommand):
"""Commands are the basic building block of command line interfaces in
Click. A basic command handles command line parsing and might dispatch
more parsing to commands nested below it.
.. versionchanged:: 2.0
Added the `context_settings` parameter.
:param name: the name of the command to use unless a group overrides it.
:param context_settings: an optional dictionary with defaults that are
passed to the context object.
:param callback: the callback to invoke. This is optional.
:param params: the parameters to register with this command. This can
be either :class:`Option` or :class:`Argument` objects.
:param help: the help string to use for this command.
:param epilog: like the help string but it's printed at the end of the
help page after everything else.
:param short_help: the short help to use for this command. This is
shown on the command listing of the parent command.
:param add_help_option: by default each command registers a ``--help``
option. This can be disabled by this parameter.
"""
def __init__(self, name, context_settings=None, callback=None,
params=None, help=None, epilog=None, short_help=None,
options_metavar='[OPTIONS]', add_help_option=True):
BaseCommand.__init__(self, name, context_settings)
#: the callback to execute when the command fires. This might be
#: `None` in which case nothing happens.
self.callback = callback
#: the list of parameters for this command in the order they
#: should show up in the help page and execute. Eager parameters
#: will automatically be handled before non eager ones.
self.params = params or []
self.help = help
self.epilog = epilog
self.options_metavar = options_metavar
if short_help is None and help:
short_help = make_default_short_help(help)
self.short_help = short_help
self.add_help_option = add_help_option
def get_usage(self, ctx):
formatter = ctx.make_formatter()
self.format_usage(ctx, formatter)
return formatter.getvalue().rstrip('\n')
def get_params(self, ctx):
rv = self.params
help_option = self.get_help_option(ctx)
if help_option is not None:
rv = rv + [help_option]
return rv
def format_usage(self, ctx, formatter):
"""Writes the usage line into the formatter."""
pieces = self.collect_usage_pieces(ctx)
formatter.write_usage(ctx.command_path, ' '.join(pieces))
def collect_usage_pieces(self, ctx):
"""Returns all the pieces that go into the usage line and returns
it as a list of strings.
"""
rv = [self.options_metavar]
for param in self.get_params(ctx):
rv.extend(param.get_usage_pieces(ctx))
return rv
def get_help_option_names(self, ctx):
"""Returns the names for the help option."""
all_names = set(ctx.help_option_names)
for param in self.params:
all_names.difference_update(param.opts)
all_names.difference_update(param.secondary_opts)
return all_names
def get_help_option(self, ctx):
"""Returns the help option object."""
help_options = self.get_help_option_names(ctx)
if not help_options or not self.add_help_option:
return
def show_help(ctx, param, value):
if value and not ctx.resilient_parsing:
echo(ctx.get_help(), color=ctx.color)
ctx.exit()
return Option(help_options, is_flag=True,
is_eager=True, expose_value=False,
callback=show_help,
help='Show this message and exit.')
def make_parser(self, ctx):
"""Creates the underlying option parser for this command."""
parser = OptionParser(ctx)
parser.allow_interspersed_args = ctx.allow_interspersed_args
parser.ignore_unknown_options = ctx.ignore_unknown_options
for param in self.get_params(ctx):
param.add_to_parser(parser, ctx)
return parser
def get_help(self, ctx):
"""Formats the help into a string and returns it. This creates a
formatter and will call into the following formatting methods:
"""
formatter = ctx.make_formatter()
self.format_help(ctx, formatter)
return formatter.getvalue().rstrip('\n')
def format_help(self, ctx, formatter):
"""Writes the help into the formatter if it exists.
This calls into the following methods:
- :meth:`format_usage`
- :meth:`format_help_text`
- :meth:`format_options`
- :meth:`format_epilog`
"""
self.format_usage(ctx, formatter)
self.format_help_text(ctx, formatter)
self.format_options(ctx, formatter)
self.format_epilog(ctx, formatter)
def format_help_text(self, ctx, formatter):
"""Writes the help text to the formatter if it exists."""
if self.help:
formatter.write_paragraph()
with formatter.indentation():
formatter.write_text(self.help)
def format_options(self, ctx, formatter):
"""Writes all the options into the formatter if they exist."""
opts = []
        for param in self.get_params(ctx):
rv = param.get_help_record(ctx)
if rv is not None:
opts.append(rv)
if opts:
with formatter.section('Options'):
formatter.write_dl(opts)
    def format_epilog(self, ctx, formatter):
"""Writes the epilog into the formatter if it exists."""
if self.epilog:
formatter.write_paragraph()
with formatter.indentation():
formatter.write_text(self.epilog)
def parse_args(self, ctx, args):
parser = self.make_parser(ctx)
opts, args, param_order = parser.parse_args(args=args)
for param in iter_params_for_processing(
param_order, self.get_params(ctx)):
            value, args = param.handle_parse_result(ctx, opts, args)
job_execution = prepared_job_params['job_execution']
job_params = self._get_oozie_job_params(hdfs_user,
path_to_workflow,
oozie_params,
use_hbase_lib)
client = self.get_client()
oozie_job_id = client.add_job(x.create_hadoop_xml(job_params),
job_execution)
job_execution = conductor.job_execution_get(ctx, job_execution.id)
if job_execution.info['status'] == edp.JOB_STATUS_TOBEKILLED:
return (None, edp.JOB_STATUS_KILLED, None)
conductor.job_execution_update(
context.ctx(), job_execution.id,
{'info': {'status': edp.JOB_STATUS_READYTORUN},
'engine_job_id': oozie_job_id})
client.run_job(job_execution, oozie_job_id)
try:
status = client.get_job_info(job_execution, oozie_job_id)['status']
except Exception:
status = None
return (oozie_job_id, status, None)
def run_scheduled_job(self, job_execution):
prepared_job_params = self._prepare_run_job(job_execution)
oozie_server = prepared_job_params['oozie_server']
wf_dir = prepared_job_params['wf_dir']
hdfs_user = prepared_job_params['hdfs_user']
oozie_params = prepared_job_params['oozie_params']
use_hbase_lib = prepared_job_params['use_hbase_lib']
ctx = prepared_job_params['context']
job_execution = prepared_job_params['job_execution']
coord_configs = {"jobTracker": "${jobTracker}",
"nameNode": "${nameNode}"}
coord_xml = self._create_coordinator_xml(coord_configs)
self._upload_coordinator_file(oozie_server, wf_dir, coord_xml,
hdfs_user)
job_params = self._get_oozie_job_params(
hdfs_user, None, oozie_params, use_hbase_lib,
job_execution.job_configs.job_execution_info, wf_dir,
"scheduled")
client = self.get_client()
oozie_job_id = client.add_job(x.create_hadoop_xml(job_params),
job_execution)
job_execution = conductor.job_execution_get(ctx, job_execution.id)
if job_execution.info['status'] == edp.JOB_STATUS_TOBEKILLED:
return (None, edp.JOB_STATUS_KILLED, None)
try:
status = client.get_job_status(job_execution,
oozie_job_id)['status']
except Exception:
status = None
return (oozie_job_id, status, None)
@abc.abstractmethod
def get_hdfs_user(self):
pass
@abc.abstractmethod
def create_hdfs_dir(self, remote, dir_name):
pass
@abc.abstractmethod
def get_oozie_server_uri(self, cluster):
pass
@abc.abstractmethod
def get_oozie_server(self, cluster):
pass
@abc.abstractmethod
def get_name_node_uri(self, cluster):
pass
@abc.abstractmethod
def get_resource_manager_uri(self, cluster):
pass
def validate_job_execution(self, cluster, job, data):
# Shell job type requires no specific fields
if job.type == edp.JOB_TYPE_SHELL:
return
# All other types except Java require input and output
# objects and Java require main class
if job.type == edp.JOB_TYPE_JAVA:
j.check_main_class_present(data, job)
else:
j.check_data_sources(data, job)
job_type, subtype = edp.split_job_type(job.type)
if job_type == edp.JOB_TYPE_MAPREDUCE and (
subtype == edp.JOB_SUBTYPE_STREAMING):
j.check_streaming_present(data, job)
@staticmethod
    def get_possible_job_config(job_type):
return workflow_factory.get_possible_job_config(job_type)
@staticmethod
def get_supported_job_types():
return [edp.JOB_TYPE_HIVE,
edp.JOB_TYPE_JAVA,
edp.JOB_TYPE_MAPREDUCE,
edp.JOB_TYPE_MAPREDUCE_STREAMING,
edp.JOB_TYPE_PIG,
edp.JOB_TYPE_SHELL]
def _prepare_job_binaries(self, job_binaries, r):
        for jb in job_binaries:
jb_manager.JOB_BINARIES.get_job_binary_by_url(jb.url). \
prepare_cluster(jb, remote=r)
def _upload_job_files_to_hdfs(self, where, job_dir, job, configs,
proxy_configs=None):
mains = list(job.mains) if job.mains else []
libs = list(job.libs) if job.libs else []
builtin_libs = edp.get_builtin_binaries(job, configs)
uploaded_paths = []
hdfs_user = self.get_hdfs_user()
job_dir_suffix = 'lib' if job.type != edp.JOB_TYPE_SHELL else ''
lib_dir = os.path.join(job_dir, job_dir_suffix)
with remote.get_remote(where) as r:
job_binaries = mains + libs
self._prepare_job_binaries(job_binaries, r)
# upload mains
uploaded_paths.extend(self._upload_job_binaries(r, mains,
proxy_configs,
hdfs_user,
job_dir))
# upload libs
if len(libs) and job_dir_suffix:
# HDFS 2.2.0 fails to put file if the lib dir does not exist
self.create_hdfs_dir(r, lib_dir)
uploaded_paths.extend(self._upload_job_binaries(r, libs,
proxy_configs,
hdfs_user,
lib_dir))
            # upload builtin_libs
for lib in builtin_libs:
h.put_file_to_hdfs(r, lib['raw'], lib['name'], lib_dir,
hdfs_user)
                uploaded_paths.append(lib_dir + '/' + lib['name'])
return uploaded_paths
def _upload_job_binaries(self, r, job_binaries, proxy_configs,
hdfs_user, job_dir):
uploaded_paths = []
for jb in job_binaries:
path = jb_manager.JOB_BINARIES. \
get_job_binary_by_url(jb.url). \
copy_binary_to_cluster(jb, proxy_configs=proxy_configs,
remote=r, context=context.ctx())
h.copy_from_local(r, path, job_dir, hdfs_user)
uploaded_paths.append(path)
return uploaded_paths
def _create_hdfs_workflow_dir(self, where, job):
constructed_dir = '/user/%s/' % self.get_hdfs_user()
constructed_dir = self._add_postfix(constructed_dir)
constructed_dir += '%s/%s' % (job.name, uuidutils.generate_uuid())
with remote.get_remote(where) as r:
self.create_hdfs_dir(r, constructed_dir)
return constructed_dir
def _create_coordinator_xml(self, coord_configs, config_filter=None,
appname='coord'):
doc = xml.Document()
# Create the <coordinator-app> base element
coord = doc.createElement('coordinator-app')
coord.attributes['name'] = appname
coord.attributes['start'] = "${start}"
coord.attributes['end'] = "${end}"
coord.attributes['frequency'] = "${frequency}"
coord.attributes['timezone'] = 'UTC'
coord.attributes['xmlns'] = 'uri:oozie:coordinator:0.2'
doc.appendChild(coord)
action = doc.createElement('action')
workflow = doc.createElement('workflow')
coord.appendChild(action)
action.appendChild(workflow)
x.add_text_element_to_tag(doc, "workflow", 'app-path',
"${workflowAppUri}")
configuration = doc.createElement('configuration')
workflow.appendChild(configuration)
default_configs = []
if config_filter is not None:
            default_configs = [cfg['name'] for cfg in
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt'), encoding='utf-8') as f:
CHANGES = f.read()
setup(
name='sloth',
version='0.1',
description='',
long_description=README,
license='AGPLv3',
# TODO: add author info
#author='',
    #author_email='',
url='https://bitbucket.org/pride/sloth/',
# TODO: add keywords
#keywords='',
install_requires = ['python-dateutil', 'arrow'],
classifiers = [
"License :: OSI Approved :: GNU Affero General Public License v3"
"Operating System :: MacOS :: MacOS X",
"Operatin | g System :: Microsoft :: Windows",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
packages=find_packages(include=['sloth']),
include_package_data=True,
zip_safe=False,
entry_points="""\
[console_scripts]
sloth-game = sloth.start:run
""",
)
#!/usr/bin/python
import unittest
from biosignalformat import *
class TestBaseObjects(unittest.TestCase):
def test_MinimalExperiment(self):
provider = XArchiveProvider("experiment001.7z")
#provider = ZipArchiveProvider("experiment001.zip")
experiment = Experiment({
"name": "Exp!",
"description": "blah!"
})
experiment.setArchiver(provider)
experiment.write()
with self.assertRaises(Exception):
experiment.remove(provider)
metadata = experiment.readMetadata(provider)
self.assertEqual(metadata["name"], "Exp!")
self.assertEqual(metadata["description"], "blah!")
def test_MinimalStructure7z(self):
provider = XArchiveProvider("experiment002B.7z")
experiment = Experiment({
"name": "Exp!",
"description": "blah!"
})
experiment.setArchiver(provider)
subject = Subject({
"name": "Subject001",
"description": "description-subject!"
})
experiment.addSubject(subject)
session = Session({
"name": "Subject001-Session001",
"description": "description-subject-session!"
})
subject.addSession(session)
channel = Channel({
"name": "AF8"
})
session.addChannel(channel)
#channelDataset.rawData = [1e-1222, 2.344, 3.14159265358979323846264338327950288419716939937510582097494459230781640629]
channel.setData([c/1e-12 for c in range(500000)])
experiment.write()
metadata = experiment.readMetadata()
self.assertEqual(metadata["name"], "Exp!")
self.assertEqual(metadata["description"], "blah!")
def test_MinimalStructureZip(self):
provider = ZipArchiveProvider("experiment002.zip")
experiment = Experiment({
"name": "Exp!",
"description": "blah!"
})
experiment.setArchiver(provider)
subject = Subject({
"name": "Subject001",
"description": "description-subject!"
        })
experiment.addSubject(subject)
session = Session({
"name": "Subject001-Session001",
"description": "description-subject-session!"
})
subject.addSession(session)
channel = Channel({
"name": "AF8"
})
session.addChannel(channel)
#channelDataset.rawData = [1e-1222, 2.344, 3.14159265358979323846264338327950288419716939937510582097494459230781640629]
        channel.setData([c/1e-12 for c in range(500000)])
experiment.write()
metadata = experiment.readMetadata()
self.assertEqual(metadata["name"], "Exp!")
self.assertEqual(metadata["description"], "blah!")
class TestPlugins(unittest.TestCase):
def test_plugins(self):
from biosignalformat.external import sample
self.assertEqual(sample.ConstantVariable, 12)
class TestConverters(unittest.TestCase):
def test_single_edf(self):
from biosignalformat.external import base_converter
#importer = base_converter.EDFImporter("ExampleEDF.edf", SevenZipArchiveProvider("ExampleEDFAscii.bif.7z"))
importer = base_converter.EDFImporter("ExampleEDF.edf", XArchiveProvider("ExampleEDFAscii.bif.zip"))
importer.convert()
def atest_multiple_edf(self):
from biosignalformat.external import base_converter
importer = base_converter.EDFImporter("ExampleEDF.edf", XZipArchiveProvider("ExampleMultipleEDFAscii.bif.7z"))
#importer = base_converter.EDFImporter("ExampleEDF.edf", ZipArchiveProvider("ExampleMultipleEDFAscii.bif.zip"))
importer.convert()
importer2 = base_converter.EDFImporter("ExampleEDF2.edf", experiment=importer.experiment, subject=importer.subject)
importer2.convert()
importer3 = base_converter.EDFImporter("ExampleEDF2.edf", experiment=importer.experiment)
importer3.convert()
def test_single_bdf(self):
from biosignalformat.external import base_converter
#importer = base_converter.BDFImporter("ExampleBDF.bdf", SevenZipArchiveProvider("ExampleBDFAscii.bif.7z"))
importer = base_converter.BDFImporter("ExampleBDF.bdf", XArchiveProvider("ExampleBDFAscii.bif.zip"))
importer.convert()
def test_multiple_bdf(self):
from biosignalformat.external import base_converter
#importer = base_converter.EDFImporter("ExampleBDF.bdf", SevenZipArchiveProvider("ExampleMultipleBDFAscii.bif.7z"))
importer = base_converter.EDFImporter("ExampleBDF.bdf", XArchiveProvider("ExampleMultipleBDFAscii-3.bif.zip"))
importer.convert()
importer2 = base_converter.EDFImporter("ExampleBDF.bdf", experiment=importer.experiment, subject=importer.subject)
importer2.convert()
importer3 = base_converter.EDFImporter("ExampleBDF.bdf", experiment=importer.experiment)
importer3.convert()
def test_all():
test_loader = unittest.TestLoader()
#unittest.TextTestRunner(verbosity=2).run(test_loader.loadTestsFromTestCase(TestBaseObjects))
#unittest.TextTestRunner(verbosity=2).run(test_loader.loadTestsFromTestCase(TestPlugins))
unittest.TextTestRunner(verbosity=2).run(test_loader.loadTestsFromTestCase(TestConverters))
        value, comparison = utils.get_attribute(obj, 'foo__' + constants.COMPARISON_ISNULL)
assert value is None
assert comparison == constants.COMPARISON_ISNULL, comparison
def test_get_attribute_returns_nested_object_value(self):
obj = MagicMock(child=MagicMock(foo='test'))
value, comparison = utils.get_attribute(obj, 'child__foo__' + constants.COMPARISON_IEXACT)
assert value == 'test'
assert comparison == constants.COMPARISON_IEXACT
def test_get_attribute_returns_default_value_when_object_is_none(self):
obj = None
default_value = ''
value, comparison = utils.get_attribute(obj, 'foo', default_value)
assert value == default_value
assert comparison is None
def test_get_attribute_with_date(self):
obj = MagicMock(foo=date(2017, 12, 31))
value, comparison = utils.get_attribute(
obj, 'foo__' + constants.COMPARISON_YEAR + '__' + constants.COMPARISON_GT
)
assert value == date(2017, 12, 31)
assert comparison == (constants.COMPARISON_YEAR, constants.COMPARISON_GT)
def test_get_attribute_returns_tuple_with_exact_as_default_comparison(self):
obj = MagicMock(foo=datetime(2017, 1, 1))
value, comparison = utils.get_attribute(obj, 'foo__' + constants.COMPARISON_YEAR)
assert value == datetime(2017, 1, 1)
assert comparison == (constants.COMPARISON_YEAR, constants.COMPARISON_EXACT)
def test_validate_date_or_datetime_raises_value_error(self):
with self.assertRaisesRegexp(ValueError, r'13 is incorrect value for month'):
utils.validate_date_or_datetime(13, constants.COMPARISON_MONTH)
def test_is_match_equality_check_when_comparison_none(self):
result = utils.is_match(1, 1)
assert result is True
result = utils.is_match('a', 'a')
assert result is True
result = utils.is_match(1, '1')
assert result is False
def test_is_match_case_sensitive_equality_check(self):
result = utils.is_match('a', 'A', constants.COMPARISON_EXACT)
assert result is False
result = utils.is_match('a', 'a', constants.COMPARISON_EXACT)
assert result is True
def test_is_match_case_insensitive_equality_check(self):
result = utils.is_match('a', 'A', constants.COMPARISON_IEXACT)
assert result is True
result = utils.is_match('a', 'a', constants.COMPARISON_IEXACT)
assert result is True
def test_is_match_case_sensitive_contains_check(self):
result = utils.is_match('abc', 'A', constants.COMPARISON_CONTAINS)
assert result is False
result = utils.is_match('abc', 'a', constants.COMPARISON_CONTAINS)
assert result is True
def test_is_match_case_insensitive_contains_check(self):
result = utils.is_match('abc', 'A', constants.COMPARISON_ICONTAINS)
assert result is True
result = utils.is_match('abc', 'a', constants.COMPARISON_ICONTAINS)
assert result is True
def test_is_match_startswith_check(self):
result = utils.is_match('abc', 'a', constants.COMPARISON_STARTSWITH)
assert result is True
result = utils.is_match('abc', 'A', constants.COMPARISON_STARTSWITH)
assert result is False
def test_is_match_istartswith_check(self):
result = utils.is_match('abc', 'a', constants.COMPARISON_ISTARTSWITH)
assert result is True
result = utils.is_match('abc', 'A', constants.COMPARISON_ISTARTSWITH)
assert result is True
def test_is_match_endswith_check(self):
result = utils.is_match('abc', 'c', constants.COMPARISON_ENDSWITH)
assert result is True
result = utils.is_match('abc', 'C', constants.COMPARISON_ENDSWITH)
assert result is False
def test_is_match_iendswith_check(self):
result = utils.is_match('abc', 'c', constants.COMPARISON_IENDSWITH)
assert result is True
result = utils.is_match('abc', 'C', constants.COMPARISON_IENDSWITH)
assert result is True
def test_is_match_greater_than_value_check(self):
result = utils.is_match(5, 3, constants.COMPARISON_GT)
assert result is True
result = utils.is_match(3, 5, constants.COMPARISON_GT)
assert result is False
def test_is_match_greater_than_equal_to_value_check(self):
result = utils.is_match(5, 3, constants.COMPARISON_GTE)
assert result is True
result = utils.is_match(5, 5, constants.COMPARISON_GTE)
assert result is True
result = utils.is_match(3, 5, constants.COMPARISON_GTE)
assert result is False
def test_is_match_less_than_value_check(self):
result = utils.is_match(1, 2, constants.COMPARISON_LT)
assert result is True
result = utils.is_match(2, 2, constants.COMPARISON_LT)
assert result is False
def test_is_match_less_than_equal_to_value_check(self):
result = utils.is_match(1, 2, constants.COMPARISON_LTE)
assert result is True
result = utils.is_match(1, 1, constants.COMPARISON_LTE)
assert result is True
result = utils.is_match(2, 1, constants.COMPARISON_LTE)
assert result is False
def test_is_match_isnull_check(self):
result = utils.is_match(1, True, constants.COMPARISON_ISNULL)
assert result is False
result = utils.is_match(1, False, constants.COMPARISON_ISNULL)
assert result is True
result = utils.is_match(None, True, constants.COMPARISON_ISNULL)
assert result is True
result = utils.is_match(None, False, constants.COMPARISON_ISNULL)
assert result is False
result = utils.is_match(None, 1, constants.COMPARISON_ISNULL)
assert result is True
def test_is_match_in_value_check(self):
result = utils.is_match(2, [1, 3], constants.COMPARISON_IN)
assert result is False
result = utils.is_match(1, [1, 3], constants.COMPARISON_IN)
assert result is True
@patch('django_mock_queries.utils.get_attribute')
@patch('django_mock_queries.utils.is_match', MagicMock(return_value=True))
    def test_matches_includes_object_in_results_when_match(self, get_attr_mock):
source = [
MagicMock(foo=1),
MagicMock(foo=2),
]
get_attr_mock.return_value = None, None
        results = utils.matches(*source, foo__gt=0)
for x in source:
assert x in results
@patch('django_mock_queries.utils.get_attribute')
@patch('django_mock_queries.utils.is_match', MagicMock(return_value=False))
def test_matches_excludes_object_from_results_when_not_match(self, get_attr_mock):
source = [
MagicMock(foo=1),
MagicMock(foo=2),
]
get_attr_mock.return_value = None, None
results = utils.matches(*source, foo__gt=5)
for x in source:
assert x not in results
def test_is_match_regex(self):
result = utils.is_match('Monty Python 1234', r'M\w+\sPython\s\d+', constants.COMPARISON_REGEX)
assert result is True
result = utils.is_match('Monty Python 1234', r'm\w+\spython\s\d+', constants.COMPARISON_REGEX)
assert result is False
result = utils.is_match('Monty Python 1234', r'm\w+Holy Grail\s\d+', constants.COMPARISON_REGEX)
assert result is False
def test_is_match_iregex(self):
result = utils.is_match('Monty Python 1234', r'M\w+\sPython\s\d+', constants.COMPARISON_IREGEX)
assert result is True
result = utils.is_match('Monty Python 1234', r'm\w+\spython\s\d+', constants.COMPARISON_IREGEX)
assert result is True
result = utils.is_match('Monty Python 1234', r'm\w+Holy Grail\s\d+', constants.COMPARISON_IREGEX)
assert result is False
def test_is_match_processes_datetime_field(self):
result = utils.is_match(datetime(2017, 1, 1, 2, 3, 4), 1, (constants.COMPARISON_HOUR, constants.COMPARISON_LT))
assert result is False
def test_is_match_processes_date_field(self):
        result = utils.is_match(date(2017, 1, 1), 2016, (constants.COMPARISON_YEAR, constants.COMPARISON_GT))
        assert result is True
import talkey
from gtts import gTTS
import vlc
import time
import wave
import contextlib
import os
class Speaker:
def __init__(self):
        self.engine = talkey.Talkey()
def say(self, text_to_say):
self.engine.say(text_to_say)
def google_say(self, text_to_say, fname="1.mp3"):
tts = gTTS(text=text_to_say, lang="en")
tts.save(fname)
        self.player = vlc.MediaPlayer(fname)
        self.player.play()
        # Wait for playback to start and finish before cleaning up the
        # temporary file; play() returns immediately.
        time.sleep(0.5)
        while self.player.is_playing():
            time.sleep(0.1)
        self.player.stop()
        os.remove(fname)
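A minimal usage sketch for the Speaker class above (not from the source): it assumes talkey, gTTS and python-vlc are installed and that a working audio output device is available.

if __name__ == "__main__":
    speaker = Speaker()
    speaker.say("Hello from the offline engine")  # local TTS via talkey
    speaker.google_say("Hello from Google TTS")   # synthesized by gTTS, played via VLC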
# This file is part of cloud-init. See LICENSE file for license information.
"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestSshKeysGenerate(base.CloudTestCase):
"""Test ssh keys module."""
# TODO: Check cloud-init-output for the correct keys being generated
def test_dsa_public(self):
"""Test dsa public key not generated."""
out = self.get_data_file('dsa_public')
self.assertEqual('', out)
def test_dsa_private(self):
"""Test dsa private key not generated."""
out = self.get_data_file('dsa_private')
self.assertEqual('', out)
def test_rsa_public(self):
"""Test rsa public key not generated."""
out = self.get_data_file('rsa_public')
self.assertEqual('', out)
def test_rsa_private(self):
"""Test rsa public key not generated."""
out = self.get_data_file('rsa_private')
self.assertEqual('', out)
def test_ecdsa_public(self):
"""Test ecdsa public key generated."""
out = self.get_data_file('ecdsa_public')
self.assertIsNotNone(out)
def test_ecdsa_private(self):
"""Test ecdsa public key generated."""
out = self.get_data_file('ecdsa_private')
self.assertIsNotNone(out)
def test_ed25519_public(self):
"""Test ed25519 public key generated."""
out = self.get_data_file('ed25519_public')
        self.assertIsNotNone(out)
def test_ed25519_private(self):
"""Test ed25519 public key generated."""
out = self.get_data_file('ed25519_private')
self.assertIsNotNone(out)
# vi: ts=4 expandtab
models.fields.AutoField', [], {'primary_key': 'True'}),
'low': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'name': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stats_by_amount'", 'to': u"orm['tx_tecreports.Report']"}),
'total': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'tx_tecreports.contributionsbydate': {
'Meta': {'ordering': "['date']", 'object_name': 'ContributionsByDate'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'date': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stats_by_date'", 'to': u"orm['tx_tecreports.Report']"}),
'total': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'tx_tecreports.contributionsbystate': {
'Meta': {'ordering': "['-amount']", 'object_name': 'ContributionsByState'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stats_by_state'", 'to': u"orm['tx_tecreports.Report']"}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'total': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'tx_tecreports.contributionsbyzipcode': {
'Meta': {'ordering': "['-amount']", 'object_name': 'ContributionsByZipcode'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stats_by_zipcode'", 'to': u"orm['tx_tecreports.Report']"}),
'total': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'tx_tecreports.contributor': {
'Meta': {'object_name': 'Contributor'},
'address_1': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'address_2': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'city': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'first_name': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_entity': ('django.db.models.fields.BooleanField', [], {}),
'is_individual': ('django.db.models.fields.BooleanField', [], {}),
'last_name': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'state': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'suffix': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'title': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'type_of': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'contributors'", 'to': u"orm['tx_tecreports.ContributorType']"}),
'zipcode': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'zipcode_short': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True'})
},
u'tx_tecreports.contributortype': {
'Meta': {'object_name': 'ContributorType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'})
},
        u'tx_tecreports.employer': {
'Meta': {'object_name': 'Employer'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'})
},
u'tx_tecreports.filer': {
'Meta': {'object_name': 'Filer'},
'filer_id': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'}),
'filer_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'filers'", 'to': u"orm['tx_tecreports.FilerType']"}),
'first_name': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'name_prefix': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'name_suffix': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'nickname': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'})
},
u'tx_tecreports.filertype': {
'Meta': {'object_name': 'FilerType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'})
},
u'tx_tecreports.filing': {
'Meta': {'object_name': 'Filing'},
'filer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'filings'", 'to': u"orm['tx_tecreports.Filer']"}),
'filing_method': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'filings'", 'to': u"orm['tx_tecreports.FilingMethod']"}),
'is_correction': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'report_due': ('django.db.models.fields.DateField', [], {}),
'report_filed': ('django.db.models.fields.DateField', [], {}),
'report_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250', 'primary_key': 'True'}),
'report_type': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'tx_tecreports.filingmethod': {
'Meta': {'object_name': 'FilingMethod'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'method': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'tx_tecreports.filingtype': {
'Meta': {'object_name': 'FilingType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'})
},
u'tx_tecreports.receipt': {
'Meta': {'ordering': "['date']", 'object_name': 'Receipt'},
'amount': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'contributor': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'receipts'", 'to': u"orm['tx_tecreports.Contributor']"}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Integrate Sphinx documentation pages."""
class StadisticRouter(object):
"""A router to control all database operations on models in
the stadistic application"""
def db_for_read(self, model, **hints):
"Point all operations on myapp models to 'other'"
if model._meta.app_label == 'stadistic':
return 'nonrel'
return 'default'
def db_for_write(self, model, **hints):
"Point all operations on stadistic models to 'other'" |
if model._meta.app_label == 'stadistic':
return 'nonrel'
return 'default'
def allow_relation(self, obj1, obj2, **hints):
"Deny an | y relation if a model in stadistic is involved"
if obj1._meta.app_label == 'stadistic' or obj2._meta.app_label == 'stadistic':
return True
return True
def allow_syncdb(self, db, model):
"Make sure the stadistic app only appears on the 'nonrel' db"
if db == 'nonrel':
return model._meta.app_label == 'stadistic'
elif model._meta.app_label == 'stadistic':
return False
        return True
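A registration sketch for the router above (the module path, backends and database names are assumptions, not from the source):

# settings.py (sketch): both aliases referenced by the router must exist.
DATABASES = {
    'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'default.db'},
    'nonrel': {'ENGINE': 'django_mongodb_engine', 'NAME': 'stats'},  # any non-relational backend
}
DATABASE_ROUTERS = ['myproject.routers.StadisticRouter']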
)]
def init(self):
self.event_map = {'allDownloadsProcessed': "all_downloads_processed",
'packageDeleted' : "package_deleted" }
self.queue = ArchiveQueue(self, "Queue")
self.failed = ArchiveQueue(self, "Failed")
self.interval = 60
self.extracting = False
self.last_package = False
self.extractors = []
self.passwords = []
self.repair = False
def activate(self):
for p in ("UnRar", "SevenZip", "UnZip"):
try:
module = self.pyload.pluginManager.loadModule("internal", p)
klass = getattr(module, p)
if klass.find():
self.extractors.append(klass)
if klass.REPAIR:
self.repair = self.get_config('repair')
except OSError, e:
if e.errno == 2:
self.log_warning(_("No %s installed") % p)
else:
self.log_warning(_("Could not activate: %s") % p, e)
if self.pyload.debug:
traceback.print_exc()
except Exception, e:
self.log_warning(_("Could not activate: %s") % p, e)
if self.pyload.debug:
traceback.print_exc()
if self.extractors:
self.log_debug(*["Found %s %s" % (Extractor.__name__, Extractor.VERSION) for Extractor in self.extractors])
self.extract_queued() #: Resume unfinished extractions
else:
self.log_info(_("No Extract plugins activated"))
@threaded
def extract_queued(self, thread):
        if self.extracting: #@NOTE: doing the check here for safety (called by coreReady)
return
self.extracting = True
packages = self.queue.get()
while packages:
if self.last_package: #: Called from allDownloadsProcessed
self.last_package = False
if self.extract(packages, thread): #@NOTE: check only if all gone fine, no failed reporting for now
self.manager.dispatchEvent("all_archives_extracted")
self.manager.dispatchEvent("all_archives_processed")
else:
if self.extract(packages, thread): #@NOTE: check only if all gone fine, no failed reporting for now
pass
packages = self.queue.get() #: Check for packages added during extraction
self.extracting = False
#: Deprecated method, use `extract_package` instead
@Expose
def extractPackage(self, *args, **kwargs):
"""
See `extract_package`
"""
return self.extract_package(*args, **kwargs)
@Expose
def extract_package(self, *ids):
"""
Extract packages with given id
"""
for id in ids:
self.queue.add(id)
if not self.get_config('waitall') and not self.extracting:
self.extract_queued()
def package_deleted(self, pid):
self.queue.remove(pid)
def package_finished(self, pypack):
self.queue.add(pypack.id)
if not self.get_config('waitall') and not self.extracting:
self.extract_queued()
def all_downloads_processed(self):
self.last_package = True
if self.get_config('waitall') and not self.extracting:
self.extract_queued()
@Expose
def extract(self, ids, thread=None): #@TODO: Use pypack, not pid to improve method usability
if not ids:
return False
processed = []
extracted = []
failed = []
toList = lambda string: string.replace(' ', '').replace(',', '|').replace(';', '|').split('|')
destination = self.get_config('destination')
subfolder = self.get_config('subfolder')
fullpath = self.get_config('fullpath')
overwrite = self.get_config('overwrite')
renice = self.get_config('renice')
recursive = self.get_config('recursive')
delete = self.get_config('delete')
keepbroken = self.get_config('keepbroken')
extensions = [x.lstrip('.').lower() for x in toList(self.get_config('extensions'))]
excludefiles = toList(self.get_config('excludefiles'))
if extensions:
self.log_debug("Use for extensions: %s" % "|.".join(extensions))
#: Reload from txt file
self.reload_passwords()
download_folder = self.pyload.config.get("general", "download_folder")
#: Iterate packages -> extractors -> targets
for pid in ids:
pypack = self.pyload.files.getPackage(pid)
if not pypack:
self.queue.remove(pid)
continue
self.log_info(_("Check package: %s") % pypack.name)
#: Determine output folder
out = fs_join(download_folder, pypack.folder, destination, "") #: Force trailing slash
if subfolder:
out = fs_join(out, pypack.folder)
if not exists(out):
os.makedirs(out)
matched = False
success = True
files_ids = dict((pylink['name'], ((fs_join(download_folder, pypack.folder, pylink['name'])), pylink['id'], out)) for pylink \
in sorted(pypack.getChildren().values(), key=lambda k: k['name'])).values() #: Remove duplicates
#: Check as long there are unseen files
while files_ids:
new_files_ids = []
if extensions:
files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \
if filter(lambda ext: fname.lower().endswith(ext), extensions)]
for Extractor in self.extractors:
targets = Extractor.get_targets(files_ids)
if targets:
self.log_debug("Targets for %s: %s" % (Extractor.__name__, targets))
matched = True
for fname, fid, fout in targets:
name = os.path.basename(fname)
if not exists(fname):
self.log_debug(name, "File not found")
continue
self.log_info(name, _("Extract to: %s") % fout)
try:
pyfile = self.pyload.files.getFile(fid)
archive = Extractor(self,
fname,
fout,
fullpath,
overwrite,
excludefiles,
renice,
                                                delete,
keepbroken,
fid)
                                thread.addActive(pyfile)
archive.init()
try:
new_files = self._extract(pyfile, archive, pypack.password)
finally:
pyfile.setProgress(100)
thread.finishFile(pyfile)
except Exception, e:
self.log_error(name, e)
success = False
continue
#: Remove processed file and related multiparts from list
files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \
if fname not in archive.get_delete_files()]
self.log_debug("Extracted files: %s" % new_files)
for file in new_files:
self.set_permissions(file)
for filename in new_files:
e', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'askbot.favoritequestion': {
'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})
},
'askbot.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'askbot.question': {
'Meta': {'object_name': 'Question', 'db_table': "u'question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'answer_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'to': "orm['auth.User']"}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'closed_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'favorite_questions'", 'through': "'FavoriteQuestion'", 'to': "orm['auth.User']"}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_questions'", 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'last_active_in_questions'", 'to': "orm['auth.User']"}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'questions'", 'to': "orm['askbot.Tag']"}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.questionrevision': {
'Meta': {'object_name': 'QuestionRevision', 'db_table': "u'question_revision'"},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questionrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Question']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'askbot.questionview': {
'Meta': {'object_name': 'QuestionView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Question']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
},
'askbot.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']", 'null': 'True', 'blank': 'True'}),
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
from cerbero.utils import shell
def checkout(url, dest):
'''
Checkout a url to a given destination
@param url: url to checkout
@type url: string
@param dest: path where to do the checkout
    @type dest: string
'''
shell.call('svn co %s %s' % (url, dest))
def update(repo, revision='HEAD'):
'''
    Update a repository to a given revision
    @param repo: repository path
    @type repo: str
@param revision: the revision to checkout
@type revision: str
'''
shell.call('svn up -r %s' % revision, repo)
def checkout_file(url, out_path):
'''
Checkout a single file to out_path
@param url: file URL
@type url: str
@param out_path: output path
    @type out_path: str
'''
shell.call('svn export --force %s %s' % (url, out_path))
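A usage sketch for the three helpers above (URL and paths are placeholders; a reachable SVN server and an svn client on PATH are assumed):

checkout('http://svn.example.org/repo/trunk', '/tmp/work')                # fresh working copy
update('/tmp/work', revision='1234')                                      # move the copy to a revision
checkout_file('http://svn.example.org/repo/trunk/README', '/tmp/README')  # export a single file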
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
from SoftLayer.CLI import template
from SoftLayer.CLI import virt
from SoftLayer import utils
import click
@click.command(epilog="See 'sl vs create-options' for valid options")
@click.option('--domain', '-D', help="Domain portion of the FQDN")
@click.option('--hostname', '-H', help="Host portion of the FQDN")
@click.option('--image',
help="Image GUID. See: 'sl image list' for reference")
@click.option('--cpu', '-c', help="Number of CPU cores", type=click.INT)
@click.option('--memory', '-m', help="Memory in mebibytes", type=virt.MEM_TYPE)
@click.option('--os', '-o',
help="OS install code. Tip: you can specify <OS>_LATEST")
@click.option('--billing',
type=click.Choice(['hourly', 'monthly']),
default='hourly',
help="""Billing rate""")
@click.option('--datacenter', '-d', help="Datacenter shortname")
@click.option('--dedicated/--public',
is_flag=True,
help="Create a dedicated Virtual Server (Private Node)")
@click.option('--san',
is_flag=True,
help="Use SAN storage instead of local disk.")
@click.option('--test',
is_flag=True,
help="Do not actually create the virtual server")
@click.option('--export',
type=click.Path(writable=True, resolve_path=True),
help="Exports options to a template file")
@click.option('--userfile', '-F',
help="Read userdata from file",
type=click.Path(exists=True, readable=True, resolve_path=True))
@click.option('--postinstall', '-i', help="Post-install script to download")
@click.option('--key', '-k',
multiple=True,
help="SSH keys to add to the root user")
@click.option('--disk', multiple=True, help="Disk sizes")
@click.option('--private',
is_flag=True,
help="Forces the VS to only have access the private network")
@click.option('--like',
is_flag=True,
help="Use the configuration from an existing VS")
@click.option('--network', '-n', help="Network port speed in Mbps")
@click.option('--tag', '-g', multiple=True, help="Tags to add to the instance")
@click.option('--template', '-t',
help="A template file that defaults the command-line options",
type=click.Path(exists=True, readable=True, resolve_path=True))
@click.option('--userdata', '-u', help="User defined metadata string")
@click.option('--vlan-public',
help="The ID of the public VLAN on which you want the virtual "
"server placed",
type=click.INT)
@click.option('--vlan-private',
help="The ID of the private VLAN on which you want the virtual "
"server placed",
type=click.INT)
@click.option('--wait',
type=click.INT,
help="Wait until VS is finished provisioning for up to X "
"seconds before returning")
@environment.pass_env
def cli(env, **args):
"""Order/create virtual servers."""
template.update_with_template_args(args, list_args=['disk', 'key'])
vsi = SoftLayer.VSManager(env.client)
    _update_with_like_args(env, args)
_validate_args(args)
# Do not create a virtual server with test or export
do_create = not (args['export'] or args['test'])
table = formatting.Table(['Item', 'cost'])
table.align['Item'] = 'r'
table.align['cost'] = 'r'
data = _parse_create_args(env.client, args)
output = []
if args.get('test'):
result = vsi.verify_create_instance(**data)
total_monthly = 0.0
total_hourly = 0.0
table = formatting.Table(['Item', 'cost'])
table.align['Item'] = 'r'
table.align['cost'] = 'r'
for price in result['prices']:
total_monthly += float(price.get('recurringFee', 0.0))
total_hourly += float(price.get('hourlyRecurringFee', 0.0))
if args.get('billing') == 'hourly':
rate = "%.2f" % float(price['hourlyRecurringFee'])
elif args.get('billing') == 'monthly':
rate = "%.2f" % float(price['recurringFee'])
table.add_row([price['item']['description'], rate])
total = 0
if args.get('billing') == 'hourly':
total = total_hourly
elif args.get('billing') == 'monthly':
total = total_monthly
        billing_rate = 'monthly'
        if args.get('billing') == 'hourly':
            billing_rate = 'hourly'
table.add_row(['Total %s cost' % billing_rate, "%.2f" % total])
output.append(table)
output.append(formatting.FormattedItem(
None,
            ' -- ! Prices reflected here are retail; they do not include '
            'account level discounts and are not guaranteed.'))
if args['export']:
export_file = args.pop('export')
template.export_to_template(export_file, args,
exclude=['wait', 'test'])
return 'Successfully exported options to a template file.'
if do_create:
if env.skip_confirmations or formatting.confirm(
"This action will incur charges on your account. Continue?"):
result = vsi.create_instance(**data)
table = formatting.KeyValueTable(['name', 'value'])
table.align['name'] = 'r'
table.align['value'] = 'l'
table.add_row(['id', result['id']])
table.add_row(['created', result['createDate']])
table.add_row(['guid', result['globalIdentifier']])
output.append(table)
if args.get('wait'):
ready = vsi.wait_for_ready(
result['id'], int(args.get('wait') or 1))
table.add_row(['ready', ready])
else:
raise exceptions.CLIAbort('Aborting virtual server order.')
return output
def _validate_args(args):
"""Raises an ArgumentError if the given arguments are not valid."""
if all([args['userdata'], args['userfile']]):
raise exceptions.ArgumentError(
'[-u | --userdata] not allowed with [-F | --userfile]')
image_args = [args['os'], args['image']]
if all(image_args):
raise exceptions.ArgumentError(
'[-o | --os] not allowed with [--image]')
if not any(image_args):
raise exceptions.ArgumentError(
'One of [--os | --image] is required')
def _update_with_like_args(env, args):
"""Update arguments with options taken from a currently running VS.
    :param env: CLI environment
:param dict args: CLI arguments
"""
if args['like']:
vsi = SoftLayer.VSManager(env.client)
vs_id = helpers.resolve_id(vsi.resolve_ids, args.pop('like'), 'VS')
like_details = vsi.get_instance(vs_id)
like_args = {
'hostname': like_details['hostname'],
'domain': like_details['domain'],
'cpu': like_details['maxCpu'],
'memory': like_details['maxMemory'],
'hourly': like_details['hourlyBillingFlag'],
'datacenter': like_details['datacenter']['name'],
'network': like_details['networkComponents'][0]['maxSpeed'],
'user-data': like_details['userData'] or None,
'postinstall': like_details.get('postInstallScriptUri'),
'dedicated': like_details['dedicatedAccountHostOnlyFlag'],
'private': like_details['privateNetworkOnlyFlag'],
}
tag_refs = like_details.get('tagReferences', None)
if tag_refs is not None and len(tag_refs) > 0:
like_args['tag'] = [t['tag']['name'] for t in tag_refs]
# Handle mutually exclusive options
like_image = utils.lookup(like_details,
'blockDeviceTemplateGroup',
'globalIdentifier')
like_os = utils.lookup(like_details,
'operatingSystem',
from icalendar import Calendar, vCalAddress, vText
import icalendar
from datetime import timedelta
from django.template import RequestContext
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.core.urlresolvers import reverse
from django.core.exceptions import ObjectDoesNotExist
#from django.contrib.syndication.views import feed
from django.utils import feedgenerator
from django.template.loader import render_to_string
from django.http import HttpResponse
from evesch.org.models import Organization
from evesch.event.models import Event
from evesch.core.feed.feeds import OrgFeed
from evesch.euser.models import eUser, get_current_user
def org_rss(request,org_short_name,org_feed_hash):
try:
"""
"""
host = request.META['HTTP_HOST']
current_org, message = Organization.objects.get_current_org(org_short_name)
if message:
return HttpResponseRedirect(reverse('org_orgs_list'))
if not org_feed_hash == current_org.org_feed_hash:
return HttpResponseRedirect(reverse('org_org_view', kwargs={'org_short_name':current_org.org_short_name}))
events = current_org.event_set.all().order_by('-event_date')
orgfeed = feedgenerator.Rss201rev2Feed(title=current_org.org_name,
link="http://%s%s" % (host, reverse('event_events_list',kwargs={'org_short_name':current_org.org_short_name,})),
description=current_org.org_desc, language='en',
)
for event in events:
orgfeed.add_item(
title=event.event_name,
link="http://%s%s" % (host, reverse('event_event_view', kwargs={'org_short_name':current_org.org_short_name,'event_hash':event.event_hash})),
description="Event on: %s -- Description: %s" % (event.event_date.strftime('%d %b %Y'), event.event_desc),
categories=(event.event_type,),
author_name=event.event_creator_name,
pubdate=event.event_created_date)
response = HttpResponse()
response['Content-Type'] = 'application/rss+xml'
response.write(orgfeed.writeString('UTF-8'))
#template_name = "error.html"
return response
except ObjectDoesNotExist:
context = {'error':"Organization does not exist",}
template_name = "error.html"
return render_to_response(template_name,context,context_instance=RequestContext(request))
def org_ics(request,org_short_name,org_feed_hash):
host = request.META['HTTP_HOST']
current_org, message = Organization.objects.get_current_org(org_short_name)
if message:
return HttpResponseRedirect(reverse('org_orgs_list'))
if not org_feed_hash == current_org.org_feed_hash:
return HttpResponseRedirect(reverse('org_org_view', kwargs={'org_short_name':current_org.org_short_name}))
events = current_org.event_set.all().order_by('-event_date')
orgical = Calendar()
orgical['summary'] = "Calendar for organization %s" % (current_org.org_name)
orgical.add('prodid', '-//Evesch//NONSGML v1.0//EN')
orgical.add('version', '2.0')
for event in events:
cal_event = icalendar.Event()
cal_event.add('summary', event.event_name)
cal_event.add('dtstart', event.event_date)
cal_event.add('description', event.event_desc)
cal_event.add('categories',event.event_type)
cal_event.add('duration',timedelta(hours=1))
cal_event.add('url',"http://%s%s" % (host, reverse('event_event_view',kwargs={'org_short_name':current_org.org_short_name,'event_hash':event.event_hash,})))
if event.event_creator_name.email:
organizer_n = event.event_creator_name.email
else:
organizer_n = "%s %s" % (event.event_creator_name.first_name, event.event_creator_name.last_name)
organizer = vCalAddress('MAILTO:' + organizer_n)
organizer.params['cn'] = vText("%s %s" % (event.event_creator_name.first_name, event.event_creator_name.last_name))
organizer.params['role'] = vText('CREATOR')
cal_event.add('organizer', organizer, encode=0)
orgical.add_component(cal_event)
template_name = "core/message.html"
context = {}
response = HttpResponse()
response['Content-Type'] = 'text/calendar'
response.write(orgical.to_ical())
#template_name = "error.html"
return response
def user_rss(request,username,user_feed_hash):
try:
""" """
host = request.META['HTTP_HOST']
current_user, message = get_current_user(username)
if message:
return HttpResponseRedirect(reverse('home'))
if not user_feed_hash == current_user.user_feed_hash:
return HttpResponseRedirect(reverse('euser_user_view', kwargs={'username':current_user.username}))
user_events = Event.objects.filter(attendee__in=current_user.attendee_set.all()).order_by('-event_date')
orgfeed = feedgenerator.Rss201rev2Feed(title=current_user.username,
link="http://%s%s" % (host, reverse('euser_user_view', kwargs={'username':current_user.username})) ,
description=current_user.about, language='en',
)
for event in user_events:
orgfeed.add_item(
title=event.event_name,
link="http://%s%s" % (host, reverse('event_event_view', kwargs={'org_short_name':event.event_org.org_short_name,'event_hash':event.event_hash})),
description="Event on: %s -- Description: %s" % (event.event_date.strftime('%d %b %Y'), event.event_desc),
categories=(event.event_type,),
author_name=event.event_creator_name,
pubdate=event.event_created_date)
response = HttpResponse()
response['Content-Type'] = 'application/rss+xml'
response.write(orgfeed.writeString('UTF-8'))
#template_name = "error.html"
return response
except ObjectDoesNotExist:
context = {'error':"Organization does not exist",}
template_name = "error.html"
return render_to_response(template_name,context,context_instance=RequestContext(request))
def user_ics(request,username,user_feed_hash):
host = request.META['HTTP_HOST']
current_user, message = get_current_user(username)
if message:
return HttpResponseRedirect(reverse('home'))
#user_events = Event.objects.all()
if not user_feed_hash == current_user.user_feed_hash:
return HttpResponseRedirect(reverse('euser_user_view', kwargs={'username':current_user.username}))
user_events = Event.objects.filter(attendee__in=current_user.attendee_set.all()).order_by('-event_date')
userical = Calendar()
userical['summary'] = "Calendar for user %s" % (current_user.username)
userical.add('prodid', '-//Evesch//NONSGML v1.0//EN')
userical.add('version', '2.0')
for event in user_events:
        cal_event = icalendar.Event()
        cal_event.add('summary', event.event_name)
cal_event.add('dtstart', event.event_date)
cal_event.add('description', event.event_desc)
cal_event.add('categories',event.event_type)
cal_event.add('duration',timedelta(hours=1))
cal_event.add('url',"http://" + host + reverse('event_event_view',kwargs={'org_short_name':event.event_org.org_short_name,'event_hash':event.event_hash,}))
if event.event_creator_name.email:
organizer_n = event.event_creator_name.email
else:
organizer_n = "%s %s" % (event.event_creator_name.first_name, event.event_creator_name.last_name)
organizer = vCalAddress('MAILTO:' + organizer_n)
organizer.params['cn'] = vText("%s %s" % (event.event_creator_name.first_name, event.event_creator_name.last_name))
organizer.params['role'] = vText('CREATOR')
cal_event.add('organizer', organizer, encode=0)
userical.add_component(cal_event)
template_name = "core/message.html"
context = {}
response = HttpResponse()
response['Content-Type'] = 'text/calendar'
    response.write(userical.as_string())
    return response
"""SCons.Tool.sgiar
Tool-specific initialization for SGI ar (library archive). If CC
exists, static libraries should be built with it, so the prelinker has
a chance to resolve C++ template instantiations.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/sgiar.py 4369 2009/09/19 15:58:29 scons"
import SCons.Defaults
import SCons.Tool
import SCons.Util
def generate(env):
"""Add Builders and construction variables for ar to an Environment."""
SCons.Tool.createStaticLibBuilder(env)
    if env.Detect('CC'):
env['AR'] = 'CC'
env['ARFLAGS'] = SCons.Util.CLVar('-ar')
env['ARCOM'] = '$AR $ARFLAGS -o $TARGET $SOURCES'
else:
env['AR'] = 'ar'
env['ARFLAGS'] = SCons.Util.CLVar('r')
env['ARCOM'] = '$AR $ARFLAGS $TARGET $SOURCES'
env['SHLINK'] = '$LINK'
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared')
env['SHLINKCOM'] = '$SHLINK $SHLINKFLAGS -o $TARGET $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
env['LIBPREFIX'] = 'lib'
env['LIBSUFFIX'] = '.a'
def exists(env):
return env.Detect('CC') or env.Detect('ar')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
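A construction sketch for the tool above, written as it would appear in an SConstruct file (SCons provides Environment there; the source names are placeholders):

# SConstruct (sketch): request the sgiar tool explicitly.
env = Environment(tools=['sgiar'])
# With a CC compiler detected this archives via 'CC -ar', giving the
# prelinker a chance to resolve C++ template instantiations; otherwise
# it falls back to plain 'ar r'.
env.StaticLibrary('mylib', ['a.c', 'b.c'])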
import unittest
from tow.dockerfile import Dockerfile
class DockerfileTest(unittest.TestCase):
def test_parse_spaced_envs(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ["ENV test 1"]
envs = d.envs()
self.assertEqual(envs, {"test": "1"})
def test_parse_many_envs(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ["ENV test 1", "ENV test2=2", "ENV test3 3"]
envs = d.envs()
self.assertEqual(envs, {"test": "1", "test2": "2", "test3": "3"})
def test_parse_multiline(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['ENV myName="John Doe" myDog=Rex\\ The\\ Dog \\',
'myCat=fluffy']
envs = d.envs()
self.assertEqual(envs, {"myName": "John Doe",
"myDog": "Rex\\ The\\ Dog", "myCat": "fluffy"})
def test_add_copy(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ["FROM ubuntu"]
mapping = ("/tets1", "/test2")
d.add_copy([mapping])
self.assertListEqual(d._Dockerfile__dockerfile, ["FROM ubuntu",
"# TOW COPY BLOCK FROM MAPPING FILE START",
"COPY %s %s" % mapping,
"# TOW COPY BLOCK FROM MAPPING FILE END"])
def test_add_copy_after_from(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ["FROM ubuntu", "ENTRYPOINT [/bin/sh]"]
mapping = ("/tets1", "/test2")
d.add_copy([mapping])
self.assertListEqual(d._Dockerfile__dockerfile, ["FROM ubuntu",
"# TOW COPY BLOCK FROM MAPPING FILE START",
"COPY %s %s" % mapping,
"# TOW COPY BLOCK FROM MAPPING FILE END",
"ENTRYPOINT [/bin/sh]"])
def test_add_copy_after_maintainer(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ["FROM ubuntu", "MAINTAINER test","ENTRYPOINT [/bin/sh]"]
mapping = ("/tets1", "/test2")
d.add_copy([mapping])
self.assertListEqual(d._Dockerfile__dockerfile, ["FROM ubuntu",
"MAINTAINER test",
"# TOW COPY BLOCK FROM MAPPING FILE START",
"COPY %s %s" % mapping,
"# TOW COPY BLOCK FROM MAPPING FILE END",
"ENTRYPOINT [/bin/sh]"])
def test_find_entrypoint_or_cmd(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['FROM ubuntu', 'ENTRYPOINT ["/bin/sh"]', 'CMD ["-c"]']
self.assertEqual(d.find_entrypoint_or_cmd(), (["/bin/sh"], ["-c"]))
def test_find_entrypoint_or_cmd_shell_style(self):
d = Dockerfile("Dockerfile | ")
d._Dockerfile__dockerfile = ['FROM ubuntu', 'ENTRYPOINT /bin/sh', 'CMD ["-c"]']
self.assertEqual(d.find_entrypoint_or_cmd(), (["/bin/sh"], ["-c"]))
def test_find_entrypoint_or_cmd_cmd_only(self):
        d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['FROM ubuntu', 'CMD ["/bin/sh", "-c", "-x"]']
self.assertEqual(d.find_entrypoint_or_cmd(), (None, ["/bin/sh", "-c", "-x"]))
def test_find_entrypoint_or_cmd_entrypoint_only(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['FROM ubuntu', 'ENTRYPOINT ["/bin/sh"]']
self.assertEqual(d.find_entrypoint_or_cmd(), (["/bin/sh"], None))
def test_find_entrypoint_or_cmd_none(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['FROM ubuntu']
self.assertEqual(d.find_entrypoint_or_cmd(), (None, None))
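A short sketch of the public API these tests exercise (it assumes Dockerfile() parses the file at the given path, which the tests bypass by setting the private attribute directly):

from tow.dockerfile import Dockerfile

d = Dockerfile("Dockerfile")                       # path assumed to exist
print(d.envs())                                    # e.g. {'test': '1'}
print(d.find_entrypoint_or_cmd())                  # (entrypoint or None, cmd or None)
d.add_copy([("/src/app.conf", "/etc/app.conf")])   # injects a COPY block after FROM/MAINTAINER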
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[2].getbalance(), bal+Decimal('1.20000000')) #node2 has both keys of the 2of2 ms addr., tx should affect the balance
# 2of3 test from different nodes
bal = self.nodes[2].getbalance()
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr3 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[1].getaddressinfo(addr1)
addr2Obj = self.nodes[2].getaddressinfo(addr2)
addr3Obj = self.nodes[2].getaddressinfo(addr3)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])['address']
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
decTx = self.nodes[0].gettransaction(txId)
rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
#THIS IS AN INCOMPLETE FEATURE
        #NODE2 HAS TWO OF THREE KEYS AND THE FUNDS SHOULD BE SPENDABLE AND COUNT AT BALANCE CALCULATION
assert_equal(self.nodes[2].getbalance(), bal) #for now, assume the funds of a 2of3 multisig tx are not marked as spendable
txDetails = self.nodes[0].gettransaction(txId, True)
rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
vout = False
for outpoint in rawTx['vout']:
if outpoint['value'] == Decimal('2.20000000'):
vout = outpoint
break
bal = self.nodes[0].getbalance()
inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "amount" : vout['value']}]
outputs = { self.nodes[0].getnewaddress() : 2.19 }
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet(rawTx, inputs)
assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. sign the tx
rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs)
assert_equal(rawTxSigned['complete'], True) #node2 can sign the tx compl., own two of three keys
self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx
# 2of2 test for combining transactions
bal = self.nodes[2].getbalance()
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[1].getaddressinfo(addr1)
addr2Obj = self.nodes[2].getaddressinfo(addr2)
self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
mSigObjValid = self.nodes[2].getaddressinfo(mSigObj)
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
decTx = self.nodes[0].gettransaction(txId)
rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[2].getbalance(), bal) # the funds of a 2of2 multisig tx should not be marked as spendable
txDetails = self.nodes[0].gettransaction(txId, True)
rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex'])
vout = False
for outpoint in rawTx2['vout']:
if outpoint['value'] == Decimal('2.20000000'):
vout = outpoint
break
bal = self.nodes[0].getbalance()
inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "redeemScript" : mSigObjValid['hex'], "amount" : vout['value']}]
outputs = { self.nodes[0].getnewaddress() : 2.19 }
rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(rawTx2, inputs)
self.log.debug(rawTxPartialSigned1)
assert_equal(rawTxPartialSigned1['complete'], False) #node1 only has one key, can't comp. sign the tx
rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(rawTx2, inputs)
self.log.debug(rawTxPartialSigned2)
assert_equal(rawTxPartialSigned2['complete'], False) #node2 only has one key, can't comp. sign the tx
rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']])
self.log.debug(rawTxComb)
self.nodes[2].sendrawtransaction(rawTxComb)
rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
        assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx
# decoderawtransaction tests
        # witness transaction
encrawtx = "010000000001010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f50500000000000102616100000000"
decrawtx = self.nodes[0].decoderawtransaction(encrawtx, True) # decode as witness transaction
assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))
assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False) # force decode as non-witness transaction
# non-witness transaction
encrawtx = "01000000010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f505000000000000000000"
decrawtx = self.nodes[0].decoderawtransaction(encrawtx, False) # decode as non-witness transaction
assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))
# getrawtransaction tests
# 1. valid parameters - only supply txid
txHash = rawTx["hash"]
assert_equal(self.nodes[0].getrawtransaction(txHash), rawTxSigned['hex'])
# 2. valid parameters - supply txid and 0 for non-verbose
assert_equal(self.nodes[0].getrawtransaction(txHash, 0), rawTxSigned['hex'])
# 3. valid parameters - supply txid and False for non-verbose
assert_equal(self.nodes[0].getrawtransaction(txHash, False), rawTxSigned['hex'])
# 4. valid parameters - supply txid and 1 for verbose.
# We only check the "hex" field of the output so we don't need to update this test every time the output format changes.
assert_equal(self.nodes[0].getrawtransaction(txHash, 1)["hex"], rawTxSigned['hex'])
# 5. valid parameters - supply txid and True for non-verbose
assert_equal(self.nodes[0].getrawtransaction(txHash, True)["hex"], rawTxSigned['hex'])
# 6. invalid parameters - supply txid and string "Flase"
assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, "Flase")
# 7. invalid parameters - supply txid and empty array
assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, [])
# 8. invalid parameters - supply txid and empty dict
assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, {})
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 1000}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
decrawtx= self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['vin'][0]['sequence'], 1000)
# 9. invalid parameters - sequence number out of range
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : -1}]
outputs = { self.nodes[0].getnewaddress() : 1 }
        assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)
from inspect import getmembers
from django.shortcuts import render
from utilities import get_wps_service_engine, list_wps_service_engines, abstract_is_link
def home(request):
"""
Home page for Tethys WPS tool. Lists all the WPS services that are linked.
"""
wps_services = list_wps_service_engines()
context = {'wps_services': wps_services}
return render(request, 'tethys_wps/home.html', context)
def service(request, service):
"""
    View that lists the processes for a given service.
"""
wps = get_wps_service_engine(service)
context = {'wps': wps,
'service': service}
return render(request, 'tethys_wps/service.html', context)
def process(request, service, identifier):
"""
View that displays a detailed description for a WPS process.
"""
wps = get_wps_service_engine(service)
wps_process = wps.describeprocess(identifier)
    context = {'process': wps_process,
'service': service,
'is_link': abstract_is_link(wps_process)}
return render(request, 'tethys_wps/process.html', context)
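A URL-wiring sketch for the three views above, in the Django 1.x style the file already uses (regexes, names and the import path are assumptions):

# urls.py (sketch)
from django.conf.urls import url
from tethys_wps import views  # module path assumed

urlpatterns = [
    url(r'^$', views.home, name='home'),
    url(r'^service/(?P<service>[\w-]+)/$', views.service, name='service'),
    url(r'^service/(?P<service>[\w-]+)/process/(?P<identifier>[\w.-]+)/$',
        views.process, name='process'),
]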
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsComposerEffects.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = '(C) 2012 by Dr. Horst Düster / Dr. Marco Hugentobler'
__date__ = '20/08/2012'
__copyright__ = 'Copyright 2012, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
import qgis
from PyQt4.QtCore import QFileInfo
from PyQt4.QtXml import QDomDocument
from PyQt4.QtGui import (QPainter, QColor)
from qgis.core import (QgsComposerShape,
QgsRectangle,
QgsComposition,
QgsMapRenderer
)
from utilities import (unitTestDataPath,
getQgisTestApp,
TestCase,
unittest,
expectedFailure
)
from qgscompositionchecker import QgsCompositionChecker
QGISAPP, CANVAS, IFACE, PARENT = getQgisTestApp()
TEST_DATA_DIR = unitTestDataPath()
class TestQgsComposerEffects(TestCase):
def __init__(self, methodName):
"""Run once on class initialisation."""
unittest.TestCase.__init__(self, methodName)
# create composition
self.mMapRenderer = QgsMapRenderer()
self.mComposition = QgsComposition(self.mMapRenderer)
self.mComposition.setPaperSize(297, 210)
self.mComposerRect1 = QgsComposerShape(20, 20, 150, 100, self.mComposition)
        self.mComposerRect1.setShapeType(QgsComposerShape.Rectangle)
self.mComposerRect1.setBackgroundColor(QColor.fromRgb(255, 150, 0))
self.mComposition.addComposerShape(self.mComposerRect1)
        self.mComposerRect2 = QgsComposerShape(50, 50, 150, 100, self.mComposition)
self.mComposerRect2.setShapeType(QgsComposerShape.Rectangle)
self.mComposerRect2.setBackgroundColor(QColor.fromRgb(0, 100, 150))
self.mComposition.addComposerShape(self.mComposerRect2)
def testBlendModes(self):
"""Test that blend modes work for composer items."""
self.mComposerRect2.setBlendMode(QPainter.CompositionMode_Multiply)
checker = QgsCompositionChecker('composereffects_blend', self.mComposition)
myTestResult, myMessage = checker.testComposition()
self.mComposerRect2.setBlendMode(QPainter.CompositionMode_SourceOver)
assert myTestResult == True, myMessage
def testTransparency(self):
"""Test that transparency works for composer items."""
        self.mComposerRect2.setTransparency(50)
checker = QgsCompositionChecker('composereffects_transparency', self.mComposition)
myTestResult, myMessage = checker.testComposition()
        self.mComposerRect2.setTransparency(100)
assert myTestResult == True, myMessage
if __name__ == '__main__':
unittest.main()
import traceback
class EnsureExceptionHandledGuard:
"""Helper for ensuring that Future's exceptions were handled.
This solves a nasty problem with Futures and Tasks that have an
exception set: if nobody asks for the exception, the exception is
never logged. This violates the Zen of Python: 'Errors should
never pass silently. Unless explicitly silenced.'
However, we don't want to log the exception as soon as
set_exception() is called: if the calling code is written
properly, it will get the exception and handle it properly. But
we *do* want to log it if result() or exception() was never called
-- otherwise developers waste a lot of time wondering why their
buggy code fails silently.
An earlier attempt added a __del__() method to the Future class
itself, but this backfired because the presence of __del__()
prevents garbage collection from breaking cycles. A way out of
this catch-22 is to avoid having a __del__() method on the Future
class itself, but instead to have a reference to a helper object
    with a __del__() method that logs the traceback, where we ensure
    that the helper object doesn't participate in cycles, and only the
    Future has a reference to it.
    The helper object is added when set_exception() is called.  When
    the Future is collected, and the helper is present, the helper
    object is also collected, and its __del__() method will log the
    traceback.  When the Future's result() or exception() method is
    called (and a helper object is present), it removes the helper
    object, after calling its clear() method to prevent it from
    logging.
    One downside is that we do a fair amount of work to extract the
    traceback from the exception, even when it is never logged.  It
    would seem cheaper to just store the exception object, but that
    references the traceback, which references stack frames, which may
    reference the Future, which references the EnsureExceptionHandledGuard,
    and then the EnsureExceptionHandledGuard would be included in a cycle,
    which is what we're trying to avoid!  As an optimization, we don't
    immediately format the exception; we only do the work when
    activate() is called, a call that is delayed until after all the
    Future's callbacks have run.  Usually a Future has at least one
    callback (typically set by 'yield from'), and usually that
    callback extracts the exception, thereby removing the need to
    format it at all.
PS. I don't claim credit for this solution. I first heard of it
in a discussion about closing files when they are collected.
"""
__slots__ = ['exc', 'tb', 'hndl', 'cls']
def __init__(self, exc, handler):
self.exc = exc
self.hndl = handler
self.cls = type(exc)
self.tb = None
def activate(self):
exc = self.exc
if exc is not None:
self.exc = None
self.tb = traceback.format_exception(exc.__class__, exc,
exc.__traceback__)
def clear(self):
self.exc = None
self.tb = None
def __del__(self):
if self.tb:
self.hndl(self.cls, self.tb)
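# A minimal usage sketch (added for illustration; not part of the original
# module). The handler function and the exception are hypothetical; it shows
# the intended lifecycle: attach on set_exception(), activate() after the
# callbacks run, and rely on __del__ to log if nobody consumed the exception.
if __name__ == '__main__':
    def log_unhandled(exc_class, tb_lines):
        print('Future exception was never retrieved: %s' % exc_class.__name__)
        print(''.join(tb_lines))
    guard = EnsureExceptionHandledGuard(ValueError('boom'), log_unhandled)
    guard.activate()  # formats the traceback now that the "callbacks" have run
    # guard.clear() here would suppress logging, as result()/exception() do.
    del guard         # __del__ fires and hands the traceback to the handler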
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'exampleLoaderTemplate.ui'
#
# Created: Sat Dec 17 23:46:27 2011
# by: PyQt4 UI code generator 4.8.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(762, 302)
self.gridLayout = QtGui.QGridLayout(Form)
        self.gridLayout.setMargin(0)
        self.gridLayout.setSpacing(0)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.splitter = QtGui.QSplitter(Form)
self.splitter.setOrientation(QtCore.Qt.Horizontal)
self.splitter.setObjectName(_fromUtf8("splitter"))
self.layoutWidget = QtGui.QWidget(self.splitter)
self.layoutWidget.setObjectName(_fromUtf8("layoutWidget"))
self.verticalLayout = QtGui.QVBoxLayout(self.layoutWidget)
self.verticalLayout.setMargin(0)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.exampleTree = QtGui.QTreeWidget(self.layoutWidget)
self.exampleTree.setObjectName(_fromUtf8("exampleTree"))
self.exampleTree.headerItem().setText(0, _fromUtf8("1"))
self.exampleTree.header().setVisible(False)
self.verticalLayout.addWidget(self.exampleTree)
self.loadBtn = QtGui.QPushButton(self.layoutWidget)
self.loadBtn.setObjectName(_fromUtf8("loadBtn"))
self.verticalLayout.addWidget(self.loadBtn)
self.codeView = QtGui.QTextBrowser(self.splitter)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Monospace"))
font.setPointSize(10)
self.codeView.setFont(font)
self.codeView.setObjectName(_fromUtf8("codeView"))
self.gridLayout.addWidget(self.splitter, 0, 0, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
self.loadBtn.setText(QtGui.QApplication.translate("Form", "Load Example", None, QtGui.QApplication.UnicodeUTF8))
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-22 11:18
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('cotacao', '0003_auto_20170312_2049'),
]
operations = [
migrations.RemoveField(
model_name='item',
name='pedido',
),
migrations.AddField(
model_name='pedido',
            name='itens',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='itens', to='cotacao.Item'),
),
]
|
'''Python sys.excepthook hook to generate apport crash dumps.'''
# Copyright (c) 2006 - 2009 Canonical Ltd.
# Authors: Robert Collins <robert@ubuntu.com>
# Martin Pitt <martin.pitt@ubuntu.com>
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version. See http://www.gnu.org/copyleft/gpl.html for
# the full text of the license.
import os
import sys
CONFIG = '/etc/default/apport'
def enabled():
'''Return whether Apport should generate crash reports.'''
# This doesn't use apport.packaging.enabled() because it is too heavyweight
# See LP: #528355
import re
try:
with open(CONFIG) as f:
conf = f.read()
        return re.search(r'^\s*enabled\s*=\s*0\s*$', conf, re.M) is None
except IOError:
# if the file does not exist, assume it's enabled
return True
def apport_excepthook(exc_type, exc_obj, exc_tb):
'''Catch an uncaught exception and make a traceback.'''
# create and save a problem report. Note that exceptions in this code
# are bad, and we probably need a per-thread reentrancy guard to
# prevent that happening. However, on Ubuntu there should never be
# a reason for an exception here, other than [say] a read only var
# or some such. So what we do is use a try - finally to ensure that
# the original excepthook is invoked, and until we get bug reports
# ignore the other issues.
# import locally here so that there is no routine overhead on python
# startup time - only when a traceback occurs will this trigger.
try:
# ignore 'safe' exit types.
if exc_type in (KeyboardInterrupt, ):
return
# do not do anything if apport was disabled
if not enabled():
return
try:
from cStringIO import StringIO
StringIO # pyflakes
except ImportError:
from io import StringIO
import re, traceback
from apport.fileutils import likely_packaged, get_recent_crashes
# apport will look up the package from the executable path.
try:
binary = os.path.realpath(os.path.join(os.getcwd(), sys.argv[0]))
except (TypeError, AttributeError, IndexError):
# the module has mutated sys.argv, plan B
try:
binary = os.readlink('/proc/%i/exe' % os.getpid())
except OSError:
return
# for interactive python sessions, sys.argv[0] == ''; catch that and
# other irregularities
if not os.access(binary, os.X_OK) or not os.path.isfile(binary):
return
# filter out binaries in user accessible paths
if not likely_packaged(binary):
return
import apport.report
pr = apport.report.Report()
# special handling of dbus-python exceptions
if hasattr(exc_obj, 'get_dbus_name'):
if exc_obj.get_dbus_name() == 'org.freedesktop.DBus.Error.NoReply':
                # NoReply is a useless crash, we do not even get the method it
# was trying to call; needs actual crash from D-BUS backend (LP #914220)
return
if exc_obj.get_dbus_name() == 'org.freedesktop.DBus.Error.ServiceUnknown':
dbus_service_unknown_analysis(exc_obj, pr)
# append a basic traceback. In future we may want to include
# additional data such as the local variables, loaded modules etc.
tb_file = StringIO()
traceback.print_exception(exc_type, exc_obj, exc_tb, file=tb_file)
pr['Traceback'] = tb_file.getvalue().strip()
pr.add_proc_info(extraenv=['PYTHONPATH', 'PYTHONHOME'])
pr.add_user_info()
# override the ExecutablePath with the script that was actually running
pr['ExecutablePath'] = binary
if 'ExecutableTimestamp' in pr:
pr['ExecutableTimestamp'] = str(int(os.stat(binary).st_mtime))
try:
pr['PythonArgs'] = '%r' % sys.argv
except AttributeError:
pass
if pr.check_ignored():
return
mangled_program = re.sub('/', '_', binary)
# get the uid for now, user name later
user = os.getuid()
pr_filename = '%s/%s.%i.crash' % (os.environ.get(
'APPORT_REPORT_DIR', '/var/crash'), mangled_program, user)
crash_counter = 0
if os.path.exists(pr_filename):
if apport.fileutils.seen_report(pr_filename):
# flood protection
with open(pr_filename, 'rb') as f:
crash_counter = get_recent_crashes(f) + 1
if crash_counter > 1:
return
# remove the old file, so that we can create the new one with
# os.O_CREAT|os.O_EXCL
os.unlink(pr_filename)
else:
# don't clobber existing report
return
if crash_counter:
            pr['CrashCounter'] = str(crash_counter)
with os.fdopen(os.open(pr_filename,
                               os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o640), 'wb') as f:
pr.write(f)
finally:
# resume original processing to get the default behaviour,
# but do not trigger an AttributeError on interpreter shutdown.
if sys:
sys.__excepthook__(exc_type, exc_obj, exc_tb)
def dbus_service_unknown_analysis(exc_obj, report):
from glob import glob
import subprocess, re
try:
from configparser import ConfigParser, NoSectionError, NoOptionError
(ConfigParser, NoSectionError, NoOptionError) # pyflakes
except ImportError:
# Python 2
from ConfigParser import ConfigParser, NoSectionError, NoOptionError
# determine D-BUS name
    m = re.search(r'name\s+(\S+)\s+was not provided by any .service',
exc_obj.get_dbus_message())
if not m:
if sys.stderr:
sys.stderr.write('Error: cannot parse D-BUS name from exception: '
+ exc_obj.get_dbus_message())
return
dbus_name = m.group(1)
# determine .service file and Exec name for the D-BUS name
services = [] # tuples of (service file, exe name, running)
for f in glob('/usr/share/dbus-1/*services/*.service'):
cp = ConfigParser(interpolation=None)
cp.read(f, encoding='UTF-8')
try:
if cp.get('D-BUS Service', 'Name') == dbus_name:
exe = cp.get('D-BUS Service', 'Exec')
running = (subprocess.call(['pidof', '-sx', exe], stdout=subprocess.PIPE) == 0)
services.append((f, exe, running))
except (NoSectionError, NoOptionError):
if sys.stderr:
sys.stderr.write('Invalid D-BUS .service file %s: %s' % (
f, exc_obj.get_dbus_message()))
continue
if not services:
report['DbusErrorAnalysis'] = 'no service file providing ' + dbus_name
else:
report['DbusErrorAnalysis'] = 'provided by'
for (service, exe, running) in services:
report['DbusErrorAnalysis'] += ' %s (%s is %srunning)' % (
service, exe, ('' if running else 'not '))
def install():
'''Install the python apport hook.'''
sys.excepthook = apport_excepthook
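# Illustration (added; not part of the original module): on Ubuntu this hook
# is installed at interpreter startup, roughly as follows. The module name
# matches how apport ships it, but treat this as a sketch:
#
#     import apport_python_hook
#     apport_python_hook.install()   # replaces sys.excepthook
#
# From then on, an uncaught exception in a packaged script is written to
# /var/crash (or $APPORT_REPORT_DIR) as a .crash report.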
|
from fabric.api import run
from fabric.decorators import with_settings
from fabric.colors import green, yellow
|
from deployer.tasks.requirements import install_requirements
@with_settings(warn_only=True)
def setup_virtualenv(python_version='', app_name='', app_dir='', repo_url=''):
print(green("Setting up virtualenv on {}".format(app_dir)))
    print(green('Creating virtualenv'))
if run("pyenv virtualenv {0} {1}-{0}".format(python_version, app_name)).failed:
print(yellow("Virtualenv already exists"))
install_requirements(app_name, python_version)
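# Illustrative invocation (added; values are placeholders) using Fabric's
# task:arg=value syntax:
#
#     fab setup_virtualenv:python_version=3.6.8,app_name=myapp,app_dir=/srv/myapp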
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Flicket - copyright Paul Bourne: evereux@gmail.com
import datetime
from flask import redirect, url_for, flash, g
from flask_babel import gettext
from flask_login import login_required
from . import flicket_bp
from application import app, db
from application.flicket.models.flicket_models import FlicketTicket, FlicketStatus
from application.flicket.scripts.email import FlicketMail
from application.flicket.scripts.flicket_functions import add_action
# view to release a ticket user has been assigned.
@flicket_bp.route(app.config['FLICKET'] + 'release/<int:ticket_id>/', methods=['GET', 'POST'])
@login_required
def release(ticket_id=False):
if ticket_id:
        ticket = FlicketTicket.query.filter_by(id=ticket_id).first()
        if ticket is None:
            flash(gettext('Ticket does not exist.'), category='warning')
            return redirect(url_for('flicket_bp.tickets'))
        # is the ticket assigned?
        if not ticket.assigned:
flash(gettext('Ticket has not been assigned'), category='warning')
return redirect(url_for('flicket_bp.ticket_view', ticket_id=ticket_id))
# check ticket is owned by user or user is admin
if (ticket.assigned.id != g.user.id) and (not g.user.is_admin):
            flash(gettext('You cannot release a ticket you are not working on.'), category='warning')
            return redirect(url_for('flicket_bp.ticket_view', ticket_id=ticket_id))
# set status to open
status = FlicketStatus.query.filter_by(status='Open').first()
ticket.current_status = status
ticket.last_updated = datetime.datetime.now()
user = ticket.assigned
ticket.assigned = None
        user.total_assigned -= 1
db.session.commit()
# add action record
add_action(ticket, 'release')
# send email to state ticket has been released.
f_mail = FlicketMail()
f_mail.release_ticket(ticket)
flash(gettext('You released ticket: %(value)s', value=ticket.id), category='success')
return redirect(url_for('flicket_bp.ticket_view', ticket_id=ticket.id))
return redirect(url_for('flicket_bp.tickets'))
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Optional, TYPE_CHECKING
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from msrest import Deserializer, Serializer
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
from ._configuration import PeeringManagementClientConfiguration
from .operations import PeeringManagementClientOperationsMixin
from .operations import LegacyPeeringsOperations
from .operations import Operations
from .operations import PeerAsnsOperations
from .operations import PeeringLocationsOperations
from .operations import PeeringsOperations
from .operations import PeeringServiceLocationsOperations
from .operations import PeeringServicePrefixesOperations
from .operations import PrefixesOperations
from .operations import PeeringServiceProvidersOperations
from .operations import PeeringServicesOperations
from .. import models
class PeeringManagementClient(PeeringManagementClientOperationsMixin):
"""Peering Client.
:ivar legacy_peerings: LegacyPeeringsOperations operations
:vartype legacy_peerings: azure.mgmt.peering.aio.operations.LegacyPeeringsOperations
:ivar operations: Operations operations
:vartype operations: azure.mgmt.peering.aio.operations.Operations
:ivar peer_asns: PeerAsnsOperations operations
:vartype peer_asns: azure.mgmt.peering.aio.operations.PeerAsnsOperations
:ivar peering_locations: PeeringLocationsOperations operations
:vartype peering_locations: azure.mgmt.peering.aio.operations.PeeringLocationsOperations
:ivar peerings: PeeringsOperations operations
    :vartype peerings: azure.mgmt.peering.aio.operations.PeeringsOperations
:ivar peering_service_locations: PeeringServiceLocationsOperations operations
:vartype peering_service_locations: azure.mgmt.peering.aio.operations.PeeringServiceLocationsOperations
:ivar peering_service_prefixes: PeeringServicePrefixesOperations operations
    :vartype peering_service_prefixes: azure.mgmt.peering.aio.operations.PeeringServicePrefixesOperations
:ivar prefixes: PrefixesOperations operations
:vartype prefixes: azure.mgmt.peering.aio.operations.PrefixesOperations
:ivar peering_service_providers: PeeringServiceProvidersOperations operations
:vartype peering_service_providers: azure.mgmt.peering.aio.operations.PeeringServiceProvidersOperations
:ivar peering_services: PeeringServicesOperations operations
:vartype peering_services: azure.mgmt.peering.aio.operations.PeeringServicesOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The Azure subscription ID.
:type subscription_id: str
:param str base_url: Service URL
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
base_url: Optional[str] = None,
**kwargs: Any
) -> None:
if not base_url:
base_url = 'https://management.azure.com'
self._config = PeeringManagementClientConfiguration(credential, subscription_id, **kwargs)
self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.legacy_peerings = LegacyPeeringsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.operations = Operations(
self._client, self._config, self._serialize, self._deserialize)
self.peer_asns = PeerAsnsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peering_locations = PeeringLocationsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peerings = PeeringsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peering_service_locations = PeeringServiceLocationsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peering_service_prefixes = PeeringServicePrefixesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.prefixes = PrefixesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peering_service_providers = PeeringServiceProvidersOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peering_services = PeeringServicesOperations(
self._client, self._config, self._serialize, self._deserialize)
async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse:
"""Runs the network request through the client's chained policies.
:param http_request: The network request you want to make. Required.
:type http_request: ~azure.core.pipeline.transport.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to True.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.pipeline.transport.AsyncHttpResponse
"""
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
http_request.url = self._client.format_url(http_request.url, **path_format_arguments)
stream = kwargs.pop("stream", True)
pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs)
return pipeline_response.http_response
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "PeeringManagementClient":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details) -> None:
await self._client.__aexit__(*exc_details)
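# A short usage sketch (added; not part of the generated file). The credential
# class and subscription ID are placeholders:
#
#     from azure.identity.aio import DefaultAzureCredential
#
#     async def main():
#         async with PeeringManagementClient(
#                 DefaultAzureCredential(), "<subscription-id>") as client:
#             async for location in client.peering_locations.list(kind="Direct"):
#                 print(location.name)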
|
#!python3
"""
This script downloads the favicons
Usage:
python3 update_alexa path/to/data.csv
"""
import os
import requests
favicon_path = os.path.join(os.path.dirname(__file__), "..", "icons")
def download_favicons(links):
for link in links:
netloc = link['netloc']
url = 'http://' + netloc
new_favicon_path = os.path.join(favicon_path, netloc + ".ico")
if not os.path.exists(new_favicon_path):
try:
                print(url)
                response = requests.get(
                    "https://realfavicongenerator.p.rapidapi.com/favicon/icon",
params={'platform': 'desktop', "site": url},
headers={'X-Mashape-Key': os.environ.get("mashape_key")}
)
            except requests.RequestException:
pass
else:
if response:
with open(new_favicon_path, 'wb') as f:
f.write(response.content)
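# A minimal usage sketch (added for illustration; the real entry point reads
# netlocs from the CSV named in the docstring). Requires the mashape_key
# environment variable used above:
if __name__ == '__main__':
    download_favicons([{'netloc': 'example.com'}])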
|
import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.daylighting import OutputControlIlluminanceMapStyle
log = logging.getLogger(__name__)
class TestOutputControlIlluminanceMapStyle(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
    def tearDown(self):
os.remove(self.path)
def test_create_outputcontrolilluminancemapstyle(self):
pyidf.validation_level = ValidationLevel.error
obj = OutputControlIlluminanceMapStyle()
# alpha
var_column_separator = "Comma"
obj.column_separator = var_column_separator
idf = IDF()
idf.add(obj)
idf.save(self.path, check=False)
with open(self.path, mode='r') as f:
for line in f:
log.debug(line.strip())
idf2 = IDF(self.path)
        self.assertEqual(idf2.outputcontrolilluminancemapstyles[0].column_separator, var_column_separator)
|
'''
Created on 24.03.2011
@author: michi
'''
from PyQt4.QtGui import QItemDelegate
from sqlalchemy import Table
from sqlalchemy.sql import Alias,Select
from ems import qt4
class ColumnSectionMapper(object):
def __init__(self,alchemySelect=None, parent=None):
self.__columnConfigs = []
self.__columnConfigIdByName = {}
        self.__alchemySelect = alchemySelect
self.__delegate = MapperDelegate(self,parent)
pass
def addColumn(self,columnName,translatedName=None, delegate=None):
if self.__columnConfigIdByName.has_key(columnName):
raise KeyError("Column %s already assigned" % columnName)
index = len(self.__columnConfigs)
self.__columnConfigs.append({'name':columnName,
                                     'translatedName':translatedName,
'delegate':delegate})
self.__columnConfigIdByName[columnName] = index
@property
def translatedColumnNames(self):
names = {}
for config in self.__columnConfigs:
names[config['name']] = config['translatedName']
return names
    def __extractTablesFromSelect(self,alchemySelect):
tableDict = {}
for fromCond in alchemySelect.locate_all_froms():
if isinstance(fromCond, Table):
tableDict[str(fromCond.name)] = fromCond
elif isinstance(fromCond,Alias):
if isinstance(fromCond.original,Table):
tableDict[str(fromCond.name)] = fromCond
return tableDict
def getDelegate(self):
return self.__delegate
def getColConfig(self, column):
if isinstance(column, int):
index = column
else:
index = self.__columnConfigIdByName[unicode(column)]
return self.__columnConfigs[index]
def getSelectColumns(self, alchemySelect=None):
if alchemySelect is None:
alchemySelect = self.__alchemySelect
if not isinstance(alchemySelect, Select):
raise TypeError("alchemySelect has to be instanceof sqlalchemy.select")
        tableDict = self.__extractTablesFromSelect(alchemySelect)
columnList = []
for config in self.__columnConfigs:
tableName,colName = config['name'].split('.')
if tableDict.has_key(tableName):
columnList.append(tableDict[tableName].c[colName])
return columnList
class MapperDelegate(QItemDelegate):
def __init__(self, mapper, parent=None):
super(MapperDelegate, self).__init__(parent)
self.__mapper = mapper
def getDelegate(self, index):
colName = index.data(qt4.ColumnNameRole).toString()
delegate = self.__mapper.getColConfig(colName)['delegate']
return delegate
def paint(self, painter, option, index):
delegate = self.getDelegate(index)
if delegate is not None:
delegate.paint(painter, option, index)
else:
QItemDelegate.paint(self, painter, option, index)
def createEditor(self, parent, option, index):
delegate = self.getDelegate(index)
if delegate is not None:
return delegate.createEditor(parent, option, index)
else:
return QItemDelegate.createEditor(self, parent, option,
index)
def setEditorData(self, editor, index):
delegate = self.getDelegate(index)
if delegate is not None:
delegate.setEditorData(editor, index)
else:
QItemDelegate.setEditorData(self, editor, index)
def setModelData(self, editor, model, index):
delegate = self.getDelegate(index)
if delegate is not None:
delegate.setModelData(editor, model, index)
else:
QItemDelegate.setModelData(self, editor, model, index)
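# A brief usage sketch (added; table and column names are hypothetical). The
# module targets Python 2 / PyQt4, matching the has_key/unicode calls above:
#
#     mapper = ColumnSectionMapper(alchemySelect=select([users]))
#     mapper.addColumn('users.name', translatedName=u'Name')
#     mapper.addColumn('users.email', translatedName=u'E-Mail')
#     tableView.setItemDelegate(mapper.getDelegate())
#     columns = mapper.getSelectColumns()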
|
'sahara_proxy_domain')
job, job_exec = u.create_job_exec(job_type, proxy=True)
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
'hadoop')
self.assertIn("""
<property>
<name>fs.swift.service.sahara.domain.name</name>
<value>sahara_proxy_domain</value>
</property>
<property>
<name>fs.swift.service.sahara.password</name>
<value>55555555-6666-7777-8888-999999999999</value>
</property>
<property>
<name>fs.swift.service.sahara.trust.id</name>
<value>0123456789abcdef0123456789abcdef</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>job_00000000-1111-2222-3333-4444444444444444</value>
</property>""", res)
def test_build_workflow_for_job_mapreduce(self):
self._build_workflow_common(edp.JOB_TYPE_MAPREDUCE)
self._build_workflow_common(edp.JOB_TYPE_MAPREDUCE, streaming=True)
self._build_workflow_common(edp.JOB_TYPE_MAPREDUCE, proxy=True)
self._build_workflow_common(edp.JOB_TYPE_MAPREDUCE, streaming=True,
proxy=True)
def test_build_workflow_for_job_java(self):
# If args include swift paths, user and password values
# will have to be supplied via configs instead of being
# lifted from input or output data sources
configs = {sw.HADOOP_SWIFT_USERNAME: 'admin',
sw.HADOOP_SWIFT_PASSWORD: 'admin1'}
configs = {
'configs': configs,
'args': ['swift://ex/i',
'output_path']
}
job, job_exec = u.create_job_exec(edp.JOB_TYPE_JAVA, configs)
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec)
self.assertIn("""
<configuration>
<property>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>
</configuration>
<main-class>%s</main-class>
<java-opts>%s</java-opts>
<arg>swift://ex.sahara/i</arg>
<arg>output_path</arg>""" % (_java_main_class, _java_opts), res)
# testing workflow creation with a proxy domain
self.override_config('use_domain_for_proxy_users', True)
self.override_config("proxy_user_domain_name", 'sahara_proxy_domain')
configs = {
'configs': {},
'args': ['swift://ex/i',
'output_path']
}
job, job_exec = u.create_job_exec(edp.JOB_TYPE_JAVA, configs,
proxy=True)
res = workflow_factory.get_workflow_xml(job, u.create_cluster(),
job_exec)
self.assertIn("""
<configuration>
<property>
<name>fs.swift.service.sahara.domain.name</name>
<value>sahara_proxy_domain</value>
</property>
<property>
<name>fs.swift.service.sahara.password</name>
<value>55555555-6666-7777-8888-999999999999</value>
</property>
<property>
<name>fs.swift.service.sahara.trust.id</name>
<value>0123456789abcdef0123456789abcdef</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>job_00000000-1111-2222-3333-4444444444444444</value>
</property>
</configuration>
<main-class>%s</main-class>
<java-opts>%s</java-opts>
<arg>swift://ex.sahara/i</arg>
<arg>output_path</arg>""" % (_java_main_class, _java_opts), res)
@mock.patch('sahara.conductor.API.job_binary_get')
def test_build_workflow_for_job_hive(self, job_binary):
job, job_exec = u.create_job_exec(edp.JOB_TYPE_HIVE, configs={})
job_binary.return_value = {"name": "script.q"}
input_data = u.create_data_source('swift://ex/i')
output_data = u.create_data_source('swift://ex/o')
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
'hadoop')
self.assertIn("""
<job-xml>/user/hadoop/conf/hive-site.xml</job-xml>
<configuration>
<property>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>
</configuration>
<script>script.q</script>
<param>INPUT=swift://ex.sahara/i</param>
<param>OUTPUT=swift://ex.sahara/o</param>""", res)
# testing workflow creation with a proxy domain
self.override_config('use_domain_for_proxy_users', True)
self.override_config("proxy_user_domain_name", 'sahara_proxy_domain')
job, job_exec = u.create_job_exec(edp.JOB_TYPE_HIVE, proxy=True)
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
'hadoop')
self.assertIn("""
<job-xml>/user/hadoop/conf/hive-site.xml</job-xml>
<configuration>
<property>
<name>fs.swift.service.sahara.domain.name</name>
<value>sahara_proxy_domain</value>
</property>
<property>
<name>fs.swift.service.sahara.password</name>
<value>55555555-6666-7777-8888-999999999999</value>
</property>
<property>
<name>fs.swift.service.sahara.trust.id</name>
<value>0123456789abcdef0123456789abcdef</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>job_00000000-1111-2222-3333-4444444444444444</value>
</property>
</configuration>
<script>script.q</script>
<param>INPUT=swift://ex.sahara/i</param>
<param>OUTPUT=swift://ex.sahara/o</param>""", res)
def test_update_job_dict(self):
w = workflow_factory.BaseFactory()
job_dict = {'configs': {'default1': 'value1',
'default2': 'value2'},
'params': {'param1': 'value1',
'param2': 'value2'},
'args': ['replace this', 'and this']}
edp_configs = {'edp.streaming.mapper': '/usr/bin/cat',
'edp.streaming.reducer': '/usr/bin/wc'}
configs = {'default2': 'changed'}
configs.update(edp_configs)
params = {'param1': 'changed'}
exec_job_dict = {'configs': configs,
'params': params,
'args': ['replaced']}
orig_exec_job_dict = copy.deepcopy(exec_job_dict)
        w.update_job_dict(job_dict, exec_job_dict)
self.assertEqual(job_dict,
{'edp_configs': edp_configs,
'configs': {'default1': 'value1',
'default2': 'changed'},
'params': {'param1': 'changed',
                                      'param2': 'value2'},
'args': ['replaced']})
self.assertEqual(orig_exec_job_dict, exec_job_dict)
def test_inject_swift_url_suffix(self):
w = workflow_factory.BaseFactory()
self.assertEqual(w.inject_swift_url_suffix("swift://ex/o"),
"swift://ex.sahara/o")
self.assertEqual(w.inject_swift_url_suffix("swift://ex.sahara/o"),
"swift://ex.sahara/o")
self.assertEqual(w.inject_swift_url_suffix("hdfs://my/path"),
"hdfs://my/path")
@mock.patch('sahara.conductor.API.job_execution_update')
@mock.patch('sahara.service.edp.job_manager._run_job')
def test_run_job_handles_exceptions(self, runjob, job_ex_upd):
        runjob.side_effect = ex.SwiftClientException("Unauthorized")
|
def __load():
    import imp, os, sys
ext = 'pygame/font.so'
for path in sys.path:
if not path.endswith('lib-dynload'):
continue
ext_path = os.path.join(path, ext)
if os.path.exists(ext_path):
            mod = imp.load_dynamic(__name__, ext_path)
break
else:
raise ImportError(repr(ext) + " not found")
__load()
del __load
|
import re
source = [
('assert', 0x00, False, 'vreg'),
('raise', 0x05, False, 'vreg'),
('constant', 0x10, True, 'constant'),
('list', 0x20, True, 'vreg*'),
('move', 0x30, False, 'vreg vreg'),
('call', 0x40, True, 'vreg vreg*'),
('not', 0x41, True, 'vreg'),
('contains', 0x42, True, 'vreg vreg'),
('callv', 0x45, True, 'vreg vreg vreg*'),
('isnull', 0x48, True, 'vreg'),
('return', 0x50, False, 'vreg'),
('yield', 0x51, False, 'vreg'),
('jump', 0x60, False, 'block'),
('cond', 0x70, False, 'vreg block block'),
('func', 0x80, True, 'function'),
('iter', 0xC0, True, 'vreg'),
#('next', 0xC1, True, 'vreg'),
#('iterstop', 0xC2, False, 'block'),
('next', 0xC3, True, 'vreg block'),
('getattr', 0xD0, True, 'vreg string'),
('setattr', 0xD1, True, 'vreg string vreg'),
('getitem', 0xD2, True, 'vreg vreg'),
('setitem', 0xD3, True, 'vreg vreg vreg'),
('getloc', 0xE0, True, 'index'),
('setloc', 0xE1, True, 'index vreg'),
('getupv', 0xE2, True, 'index index'),
    ('setupv', 0xE3, True, 'index index vreg'),
('getglob', 0xF0, True, 'string'),
('setglob', 0xF1, True, 'string vreg'),
('loglob', 0xFF, False, 'vreg'),
]
enc = {}
dec = {}
names = {}
for opname, opcode, has_result, form in source:
assert opcode not in dec, opcode
pattern = re.split(r"\s+", form.rstrip('*'))
if form.endswith('*'):
        variadic = pattern.pop()
else:
variadic = None
enc[opname] = opcode, has_result, pattern, variadic
dec[opcode] = opname, has_result, pattern, variadic
names[opcode] = opname
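# Quick self-check (added for illustration) of the lookup tables built above:
if __name__ == '__main__':
    opcode, has_result, pattern, variadic = enc['call']
    assert (opcode, has_result, pattern, variadic) == (0x40, True, ['vreg'], 'vreg')
    assert dec[0x60] == ('jump', False, ['block'], None)
    assert names[0xF0] == 'getglob'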
|
from __future__ import (absolute_import, division, print_function)
import unittest
from testhelpers import WorkspaceCreationHelper
class SpectrumInfoTest(unittest.TestCase):
_ws = None
def setUp(self):
if self.__class__._ws is None:
            self.__class__._ws = WorkspaceCreationHelper.create2DWorkspaceWithFullInstrument(2, 1, False) # no monitors
self.__class__._ws.getSpectrum(0).clearDetectorIDs()
def test_hasDetectors(self):
info = self._ws.spectrumInfo()
        self.assertEqual(info.hasDetectors(0), False)
        self.assertEqual(info.hasDetectors(1), True)
    def test_isMasked(self):
        info = self._ws.spectrumInfo()
        self.assertEqual(info.isMasked(1), False)
if __name__ == '__main__':
unittest.main()
|
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from ..models import MyUser, Profile
from ..utils import perform_reputation_check
class CreateUserSerializer(serializers.ModelSerializer):
password = serializers.CharField(
style={'input_type': 'password'}
)
class Meta:
model = MyUser
fields = ('email', 'password', 'first_name', 'last_name')
extra_kwargs = {'password': {'write_only': True}}
def create(self, validated_data):
user = MyUser.objects.create_user(
email=validated_data['email'],
first_name=validated_data['first_name'],
last_name=validated_data['last_name'],
password=validated_data['password']
)
return user
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = MyUser
fields = [
'id',
'email',
'first_name',
'last_name',
]
extra_kwargs = {'id': {'read_only': True}, 'email': {'read_only': True}}
def create(self, validated_data):
user = MyUser.objects.create_user(
email=validated_data['email'],
first_name=validated_data['first_name'],
last_name=validated_data['last_name'],
password=validated_data['password']
)
return user
def update(self, instance, validated_data):
instance.first_name = validated_data.get('first_name', instance.first_name)
instance.last_name = validated_data.get('last_name', instance.last_name)
instance.save()
return instance
class FollowSerializer(serializers.ModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='profiles:profile-detail')
full_name = serializers.SerializerMethodField()
class Meta:
model = Profile
fields = ['user_id', 'full_name', 'url']
def get_full_name(self, obj):
return obj.user.get_full_name()
class CreateProfileSerializer(serializers.ModelSerializer):
user = CreateUserSerializer()
class Meta:
model = Profile
fields = [
'user',
'follows'
]
def create(self, validated_data):
new_user = CreateUserSerializer().create(validated_data.pop('user'))
new_profile = Profile.objects.get(user_id=new_user.id)
new_profile.save()
return new_profile
class ProfileSerializer(serializers.ModelSerializer):
user = UserSerializer(read_only=True)
reputation = serializers.CharField(max_length=8, read_only=True)
follows = FollowSerializer(read_only=True, many=True)
url = serializers.HyperlinkedIdentityField(view_name='profiles:profile-detail')
questions_count = serializers.SerializerMethodField()
answers_count = serializers.SerializerMethodField()
followed_by = serializers.SerializerMethodField()
class Meta:
model = Profile
fields = [
'url',
'user',
'reputation',
'follows',
'questions_count',
'answers_count',
'followed_by'
]
def get_questions_count(self, obj):
return obj.user.questions.count()
def get_answers_count(self, obj):
return obj.user.answers.count()
def get_followed_by(self, obj):
return obj.profile_set.count()
class UpdateProfileSerializer(serializers.ModelSerializer):
user = UserSerializer()
class Meta:
model = Profile
fields = [
'user',
'reputation',
'follows',
]
def validate_follows(self, value):
if self.instance in value:
raise serializers.ValidationError(_('User cannot follow self'))
return value
def validate_reputation(self, value):
if value != perform_reputation_check(self.instance.user):
raise serializers.ValidationError(_('Selected reputation is not valid for this user'))
return value
def update(self, instance, validated_data):
UserSerializer().update(instance.user, validated_data.pop('user'))
instance.reputation = validated_data.get('reputation', instance.reputation)
        if validated_data.get('follows'):
instance.follows.add(*validated_data['follows'])
instance.save()
return instance
class AuthorSerializer(serializers.ModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='profiles:profile-detail')
full_name = serializers.SerializerMethodField()
class Meta:
model = MyUser
fields = [
'id',
'email',
'url',
'full_name',
]
def get_full_name(self, obj):
return obj.get_full_name()
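# A short usage sketch (added; the data and request object are hypothetical),
# showing the nested-update flow validated above:
#
#     serializer = UpdateProfileSerializer(
#         instance=profile,
#         data={'user': {'first_name': 'Ada', 'last_name': 'Lovelace'},
#               'reputation': '42', 'follows': []},
#         context={'request': request},
#     )
#     if serializer.is_valid():
#         serializer.save()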
|
# -*- coding: utf-8 -*-
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import login
import notifications.urls
import notifications.tests.views
urlpatterns = [
url(r'^login/$', login, name='login'), # needed for Django 1.6 tests
url(r'^admin/', include(admin.site.urls)),
url(r'^test_make/', notifications.tests.views.make_notification),
url(r'^test/', notifications.tests.views.live_tester),
    url(r'^', include(notifications.urls, namespace='notifications')),
]
|
y = lambda x: x["Number"])
self.FieldNameList = [x["Name"] for x in sortedFields] # *ordered*
class FITMessageGenerator:
def __init__(self):
self._types = {}
self._messageTemplates = {}
self._definitions = {}
self._result = []
        # All our convenience functions for preparing the field types to be packed.
def stringFormatter(input):
raise Exception("Not implemented")
def dateTimeFormatter(input):
# UINT32
# Seconds since UTC 00:00 Dec 31 1989. If <0x10000000 = system time
if input is None:
return struct.pack("<I", 0xFFFFFFFF)
delta = round((input - datetime(hour=0, minute=0, month=12, day=31, year=1989)).total_seconds())
return struct.pack("<I", delta)
def msecFormatter(input):
# UINT32
if input is None:
return struct.pack("<I", 0xFFFFFFFF)
return struct.pack("<I", round((input if type(input) is not timedelta else input.total_seconds()) * 1000))
def mmPerSecFormatter(input):
# UINT16
if input is None:
return struct.pack("<H", 0xFFFF)
return struct.pack("<H", round(input * 1000))
def cmFormatter(input):
# UINT32
if input is None:
                return struct.pack("<I", 0xFFFFFFFF)
return struct.pack("<I", round(input * 100))
def altitudeFormatter(input):
            # UINT16
if input is None:
return struct.pack("<H", 0xFFFF)
return struct.pack("<H", round((input + 500) * 5)) # Increments of 1/5, offset from -500m :S
def semicirclesFormatter(input):
# SINT32
if input is None:
return struct.pack("<i", 0x7FFFFFFF) # FIT-defined invalid value
return struct.pack("<i", round(input * (2 ** 31 / 180)))
def versionFormatter(input):
# UINT16
if input is None:
return struct.pack("<H", 0xFFFF)
return struct.pack("<H", round(input * 100))
def defType(name, *args, **kwargs):
aliases = [name] if type(name) is not list else name
# Cheap cheap cheap
for alias in aliases:
self._types[alias] = FITMessageDataType(alias, *args, **kwargs)
defType(["enum", "file"], 0x00, 1, "B", 0xFF)
defType("sint8", 0x01, 1, "b", 0x7F)
defType("uint8", 0x02, 1, "B", 0xFF)
defType("sint16", 0x83, 2, "h", 0x7FFF)
defType(["uint16", "manufacturer"], 0x84, 2, "H", 0xFFFF)
defType("sint32", 0x85, 4, "i", 0x7FFFFFFF)
defType("uint32", 0x86, 4, "I", 0xFFFFFFFF)
defType("string", 0x07, None, None, 0x0, formatter=stringFormatter)
defType("float32", 0x88, 4, "f", 0xFFFFFFFF)
defType("float64", 0x89, 8, "d", 0xFFFFFFFFFFFFFFFF)
defType("uint8z", 0x0A, 1, "B", 0x00)
defType("uint16z", 0x0B, 2, "H", 0x00)
defType("uint32z", 0x0C, 4, "I", 0x00)
defType("byte", 0x0D, 1, "B", 0xFF) # This isn't totally correct, docs say "an array of bytes"
# Not strictly FIT fields, but convenient.
defType("date_time", 0x86, 4, None, 0xFFFFFFFF, formatter=dateTimeFormatter)
defType("duration_msec", 0x86, 4, None, 0xFFFFFFFF, formatter=msecFormatter)
defType("distance_cm", 0x86, 4, None, 0xFFFFFFFF, formatter=cmFormatter)
defType("mmPerSec", 0x84, 2, None, 0xFFFF, formatter=mmPerSecFormatter)
defType("semicircles", 0x85, 4, None, 0x7FFFFFFF, formatter=semicirclesFormatter)
defType("altitude", 0x84, 2, None, 0xFFFF, formatter=altitudeFormatter)
defType("version", 0x84, 2, None, 0xFFFF, formatter=versionFormatter)
def defMsg(name, *args):
self._messageTemplates[name] = FITMessageTemplate(name, *args)
defMsg("file_id", 0,
0, "type", "file",
1, "manufacturer", "manufacturer",
2, "product", "uint16",
3, "serial_number", "uint32z",
4, "time_created", "date_time",
5, "number", "uint16")
defMsg("file_creator", 49,
0, "software_version", "uint16",
1, "hardware_version", "uint8")
defMsg("activity", 34,
253, "timestamp", "date_time",
1, "num_sessions", "uint16",
2, "type", "enum",
3, "event", "enum", # Required
4, "event_type", "enum",
5, "local_timestamp", "date_time")
defMsg("session", 18,
253, "timestamp", "date_time",
2, "start_time", "date_time", # Vs timestamp, which was whenever the record was "written"/end of the session
7, "total_elapsed_time", "duration_msec", # Including pauses
8, "total_timer_time", "duration_msec", # Excluding pauses
59, "total_moving_time", "duration_msec",
5, "sport", "enum",
6, "sub_sport", "enum",
0, "event", "enum",
1, "event_type", "enum",
9, "total_distance", "distance_cm",
11,"total_calories", "uint16",
14, "avg_speed", "mmPerSec",
15, "max_speed", "mmPerSec",
16, "avg_heart_rate", "uint8",
17, "max_heart_rate", "uint8",
18, "avg_cadence", "uint8",
19, "max_cadence", "uint8",
20, "avg_power", "uint16",
21, "max_power", "uint16",
22, "total_ascent", "uint16",
23, "total_descent", "uint16",
49, "avg_altitude", "altitude",
50, "max_altitude", "altitude",
71, "min_altitude", "altitude",
57, "avg_temperature", "sint8",
58, "max_temperature", "sint8")
defMsg("lap", 19,
253, "timestamp", "date_time",
0, "event", "enum",
1, "event_type", "enum",
25, "sport", "enum",
23, "intensity", "enum",
24, "lap_trigger", "enum",
2, "start_time", "date_time", # Vs timestamp, which was whenever the record was "written"/end of the session
7, "total_elapsed_time", "duration_msec", # Including pauses
8, "total_timer_time", "duration_msec", # Excluding pauses
52, "total_moving_time", "duration_msec",
9, "total_distance", "distance_cm",
11,"total_calories", "uint16",
13, "avg_speed", "mmPerSec",
14, "max_speed", "mmPerSec",
15, "avg_heart_rate", "uint8",
16, "max_heart_rate", "uint8",
17, "avg_cadence", "uint8", # FIT rolls run and bike cadence into one
18, "max_cadence", "uint8",
19, "avg_power", "uint16",
20, "max_power", "uint16",
21, "total_ascent", "uint16",
22, "total_descent", "uint16",
42, "avg_altitude", "altitude",
43, "max_altitude", "altitude",
62, "min_altitude", "altitude",
50, "avg_temperature", "sint8",
51, "max_temperature", "sint8"
)
defMsg("record", 20,
253, "timestamp", "date_time",
0, "position_lat", "semicircles",
1, "position_long", "semicircles",
2, "altitude", "altitude",
3, "heart_rate", "uint8",
4, "cadence", "uint8",
5, "distance", "distance_cm",
6, "speed", "mmPerSec",
7, "power", "uint16",
13, "temperature", "sint8",
33, "calories", "uint16",
)
defMsg("event", 21,
253, "timestamp", "date_time",
0, "event", "enum",
1, "event_type", "enum")
defMsg("device_info", 23,
253, "timestamp", "date_time",
0, "device_index", "uint8",
1, "device_type", "uint8",
2, "manufacturer", "manufacturer",
3, "serial_number", "uint32z",
4, "product", "uint16",
5, "software_version", "version"
)
def _write(self, contents):
self._result.append(contents)
def GetResult(self):
return b''.join(self._result)
def _defineMessage(self, local_no, global_message, field_names):
assert local_no < 16 and local_no >= 0
if set(field_names) - set(global_message.FieldNameList):
raise ValueError("Attempting to use undefined fields %s" % (set(field_names) - set(global_message.FieldNameList)))
messageHeader = 0b01000000
messageHeader = messageHeader | local_no
local_fields = {}
arch = 0 # Little-endian
global_no = global_message.Number
field_count = len(field_names)
pack_tuple = (messageHeader, 0, arch, global_no, field_count)
for field_name in global_message.FieldNameList:
if field_name in field_names:
field = global_message.Fields[field_name]
field_type = self._types[field["Type"]]
pack_tuple += (field["Number"], field_type.Size, field_type.TypeField)
local_fields[field_name] = field
self._definitions[local_no] = FITMessageTemplate(global_message.Name, local_no, local_fields)
self._write(struct.pack("<BBBHB" + ("BBB" * field_count), *pack_tuple))
return self._definitions[local_no]
def GenerateMessage(self, name, **kwargs):
globalDefn = self._messageTemplates[name]
# Create a subset of the global message's fields
localFieldNamesSet = set()
for fieldName in kwargs:
localFieldNamesSet.add(fieldName)
# I'll look at this later
compressTS = False
        # Are these fields covered by an
|
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
virtual_network_peering_name, # type: str
virtual_network_peering_parameters, # type: "_models.VirtualNetworkPeering"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualNetworkPeering"]
"""Creates or updates a peering in the specified virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param virtual_network_peering_name: The name of the peering.
:type virtual_network_peering_name: str
:param virtual_network_peering_parameters: Parameters supplied to the create or update virtual
network peering operation.
:type virtual_network_peering_parameters: ~azure.mgmt.network.v2017_10_01.models.VirtualNetworkPeering
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkPeering or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2017_10_01.models.VirtualNetworkPeering]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
virtual_network_peering_name=virtual_network_peering_name,
virtual_network_peering_parameters=virtual_network_peering_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
            'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
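# Usage sketch (not from the generated file; the network_client object below is
# an assumption): drive the long-running operation through the returned poller.
#
#   poller = network_client.virtual_network_peerings.begin_create_or_update(
#       "my-rg", "my-vnet", "my-peering", peering_params)
#   peering = poller.result()  # blocks until the LRO reaches a terminal state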
def list(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.VirtualNetworkPeeringListResult"]
"""Gets all virtual network peerings in a virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkPeeringListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2017_10_01.models.VirtualNetworkPeeringListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeeringListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkPeeringListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
|
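# Usage sketch for the pager returned by list() (network_client is an
# assumption): ItemPaged fetches further pages lazily as the loop advances.
#
#   for peering in network_client.virtual_network_peerings.list("my-rg", "my-vnet"):
#       print(peering.name)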
#remove deselect and select the text in the new tab
self.deselectText()
#display
texts_widget = Viewer.ListTexts(element, lvalued,
self.listeObjetsTextes, self)
#TODO sorting by date/score, filter
for sem, tri in texts_widget.sort():
txt = self.listeObjetsTextes[sem]
texts_widget.add(sem, txt.getResume())
texts_widget.corpus.itemSelectionChanged.connect(self.onSelectText)
QtCore.QObject.connect(texts_widget.corpus.action_sentences,
QtCore.SIGNAL("triggered()"), self.teste_wording)
texts_widget.anticorpus.itemSelectionChanged.connect(self.onSelectText)
#insert tab and give focus
self.del_tab_text_doubl(element)
index = self.SOT1.addTab(texts_widget, texts_widget.title)
self.SOT1.setCurrentIndex(index)
self.SOTs.setCurrentIndex(0)
self.SOT1.tabBar().setTabToolTip(index, texts_widget.title)
def del_tab_text_doubl(self, element):
"""delete text tab if exists"""
for i in range(1, self.SOT1.count()):
tab_element = re.sub(" \(\d*\)$", "", self.SOT1.tabText(i))
if (tab_element == element):
self.SOT1.removeTab(i)
def teste_wording(self):
#FIXME info must come from text list
if (self.lexicon_or_concepts() == "lexicon"):
sem, item = self.recup_element_lexicon()
elif (self.lexicon_or_concepts() == "concepts"):
sem, item = self.recup_element_concepts(lvl)
print "C1690", sem, item
score, item = Controller.sp_el(item)
#score, item = re.search("^(\d*) (.*)", item).group(1, 2)
#self.activity("%s double click" % (item))
print "C1691", score, item
if (int(score)):
ask = "$ph.+%s"%(item)
result = self.client.eval_var(ask)
if (not hasattr(self, "tab_sentences_index")):
#FIXME make it closable, only the sentences of the text selected
self.tab_sentences_index = self.SETs.addTab(self.tab_sentences,
self.tr("Sentences"))
for i in range(0, self.tab_sentences.count()):
if (self.tab_sentences.tabText(i) == item):
self.tab_sentences.removeTab(i)
show_sentences_widget = QtGui.QWidget()
show_sentences_box = QtGui.QVBoxLayout()
# take up all the available space
show_sentences_box.setContentsMargins(0,0,0,0)
show_sentences_box.setSpacing(0)
show_sentences_widget.setLayout(show_sentences_box)
index = self.tab_sentences.addTab(show_sentences_widget, item)
self.tab_sentences.setTabToolTip(index, item)
sentence_text = QtGui.QTextEdit()
show_sentences_box.addWidget(sentence_text)
sentence_text.append(result)
#give focus
self.tab_sentences.setCurrentIndex(index)
self.SOTs.setCurrentIndex(self.tab_sentences_index)
def explorer(self):
self.explorer_widget.liste.listw.clear()
self.motif = self.explorer_widget.saisie.text()
if (self.motif != ""):
types = [u"$search.pre", u"$search.suf", u"$search.rac"]
type_search = types[self.explorer_widget.select_fix.currentIndex()]
if (self.explorer_widget.sensitivity.isChecked()):
type_search = re.sub("search", "searchcs", type_search)
if (self.motif == "abracadabri"): self.explorer_widget.explo_result_count.setText("abracadabra!")
if (self.motif != "" and hasattr(self, "client")):
ask = self.client.creer_msg_search(type_search, self.motif, "[0:]")
result = self.client.eval(ask)
#print "C25712", ask, result
if (result != ''):
liste_result = re.split(", ", result)
self.activity(self.tr("Searching for {%s}: %d results")%(self.motif,
len(liste_result)))
self.explorer_widget.explo_result_count.setText("Found %d results"% len(liste_result))
self.PrgBar.perc(len(liste_result))
for i in range(len(liste_result)):
ask = self.client.creer_msg_search(type_search,
self.motif, "%d"%i, val=True)
r = self.client.eval(ask)
#print "C25713", ask, r
self.PrgBar.percAdd(1)
self.explorer_widget.liste.listw.addItem("%s %s"% (r,
liste_result[i]))
else:
#if nothing found with the pattern
self.activity(self.tr("Searching for {%s}: no result") % (self.motif))
self.explorer_widget.explo_result_count.setText("Nothing found")
def contexts_contents(self):
self.CTXs.cont.clear()
if (self.CTXs.l.currentItem()):
champ = self.CTXs.l.currentItem().text()
result = self.client.eval_var(u"$ctx.%s[0:]" % champ)
result = re.split("(?<!\\\), ", result)#negative lookbehind assertion
dic_CTX = {}
for r in result:
if r in dic_CTX.keys():
dic_CTX[r] = dic_CTX[r] + 1
else:
dic_CTX[r] = 1
for el in sorted(dic_CTX.items(), key=lambda (k, v): (-v, k)):
self.CTXs.cont.addItem(u"%d %s"%(el[1], re.sub("\\\,", ",", el[0])))
def maj_metadatas(self):
string_ctx = self.client.eval_var("$ctx")
#self.client.add_cache_var(sem_txt +".ctx."+field, val)
current = self.CTXs.l.currentItem()
self.CTXs.cont.clear()
if (current):
self.CTXs.l.setCurrentItem(current)
self.contexts_contents()
def to_clipboard(self, l):
clipboard = QtGui.QApplication.clipboard()
clipboard.setText("\n".join(l))
self.activity(u"%d elements copied to clipboard" % (len(l)))
def copy_lw(self, listw):
n = listw.count()
liste = []
if (n):
for row in range(n):
element = re.sub("^(\d{1,}) (.*)$", "\\2\t\\1",
listw.item(row).text(), 1) #swap the order for pasting into Excel
liste.append(element)
self.to_clipboard(liste)
def send_codex_ViewListeTextes(self):
Items = self.param_corpus.ViewListeTextes.selectedItems()
if (Items):
codex_w = codex_window(self)
codex_w.show()
l = []
for item in Items:
l.append(item.text())
codex_w.appendItems(l)
def launchPRC(self):
self.param_corpus.launchPRC_button.setEnabled(False)
#self.NETs.setTabEnabled(3, False)
PRC = self.param_corpus.nameCorpus.text()
if (os.name == 'nt'):
server_path = "server/prospero-II-serveur-64.exe"
else:
server_path = os.path.join(os.getcwd(), "server/prospero-server")
port = 60000
commande = '"%s" -e -d 1 -p %s -f "%s"' % (server_path, port, PRC)
local_server = subprocess.Popen(commande, shell=True)
#FIXME
#only connect when server is ready
time.sleep(5)
self.connect_server("localhost", port)
#FIXME
#kill the server when the gui is closed
atexit.register(local_server.terminate)
###FORMULES###
def recupFormules(self):
ask = "$gescdf.mesFormules0[0:]"
result = self.client.eval(ask)
print "C25713", ask, result
#TODO move to Viewer
class codex_window(QtGui.QWidget):
def __init__(self, parent=None):
super(codex_window, self).__init__(parent, QtCore.Qt.Window)
self.codex_dic = Controller.edit_codex()
if self.codex_dic.cherche_codex():
self.codex_dic.parse_codex_xml("codex.xml")
L = QtGui.QVBoxLayout()
self.setLayout(L)
H2 = Qt |
#
# Autogenerated by Thrift
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
from thrift.Thrift import *
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
try:
from thrift.protocol import fastbinary
except ImportError:
fastbinary = None
class Iface:
def ping(self, name):
"""
Parameters:
- name
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def ping(self, name):
"""
Parameters:
- name
"""
self.send_ping(name)
return self.recv_ping()
def send_ping(self, name):
self._oprot.writeMessageBegin('ping', TMessageType.CALL, self._seqid)
args = ping_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_ping(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = ping_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "ping failed: unknown result")
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["ping"] = Processor.process_ping
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_ping(self, seqid, iprot, oprot):
args = ping_args()
args.read(iprot)
iprot.readMessageEnd()
result = ping_result()
result.success = self._handler.ping(args.name)
oprot.writeMessageBegin("ping", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class ping_args:
"""
Attributes:
- name
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
)
def __init__(self, name=None,):
self.name = name
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ping_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ping_result:
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ping_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
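# Illustrative sketch (not part of the generated file; host and port are
# assumptions): wiring the generated Client to a server over Thrift's
# buffered binary protocol, in the same Python 2 style as the code above.
if __name__ == '__main__':
    from thrift.transport import TSocket
    transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 9090))
    protocol = TBinaryProtocol.TBinaryProtocol(transport)
    client = Client(protocol)
    transport.open()
    print client.ping('world')
    transport.close()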
|
self.request_id,
self.host_id,
self.region))
# Common error responses listed here
# http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html#RESTErrorResponses
class KnownResponseError(MinioError):
def __init__(self, response_error, **kwargs):
super(KnownResponseError, self).__init__(message=self.message, **kwargs)
self.response_error = response_error
class AccessDenied(KnownResponseError):
message = 'Access Denied'
class AccountProblem(KnownResponseError):
message = 'There is a problem with your account that prevents the ' \
'operation from completing successfully.'
class AmbiguousGrantByEmailAddress(KnownResponseError):
message = 'The email address you provided is associated with ' \
'more than one account.'
class BadDigest(KnownResponseError):
message = 'The Content-MD5 you specified did not match what we received.'
class BucketAlreadyExists(KnownResponseError):
message = 'The requested bucket name is not available. The ' \
'bucket namespace is shared by all users of the system. ' \
'Please select a different name and try again.'
class BucketAlreadyOwnedByYou(KnownResponseError):
message = 'Your previous request to create the named bucket ' \
'succeeded and you already own it.'
class BucketNotEmpty(KnownResponseError):
message = 'The bucket you tried to delete is not empty.'
class CredentialNotSupported(KnownResponseError):
message = 'This request does not support credentials.'
class CrossLocationLoggingProhibited(KnownResponseError):
message = 'Cross-location logging not allowed. Buckets in one ' \
'geographic location cannot log information to a bucket ' \
'in another location.'
class EntityTooSmall(KnownResponseError):
message = 'Your proposed upload is smaller than the minimum ' \
'allowed object size.'
class EntityTooLarge(KnownResponseError):
message = 'Your proposed upload exceeds the maximum allowed object size.'
class ExpiredToken(KnownResponseError):
message = 'The provided token has expired.'
class IllegalVersioningConfigurationException(KnownResponseError):
message = 'Indicates that the versioning configuration specified ' \
'in the request is invalid.'
class IncompleteBody(KnownResponseError):
message = 'You did not provide the number of bytes specified by the ' \
'Content-Length HTTP header'
class IncorrectNumberOfFilesInPostRequest(KnownResponseError):
message = 'POST requires exactly one file upload per request.'
class InlineDataTooLarge(KnownResponseError):
message = 'Inline data exceeds the maximum allowed size.'
class InternalError(KnownResponseError):
message = 'We encountered an internal error. Please try again.'
class InvalidAccessKeyId(KnownResponseError):
message = 'The access key Id you provided does not exist in our records.'
class InvalidAddressingHeader(KnownResponseError):
message = 'You must specify the Anonymous role.'
class InvalidArgument(KnownResponseError):
message = 'Invalid Argument'
class InvalidBucketName(KnownResponseError):
message = 'The specified bucket is not valid.'
class InvalidBucketState(KnownResponseError):
message = 'The request is not valid with the current state of the bucket.'
class InvalidDigest(KnownResponseError):
message = 'The Content-MD5 you specified is not valid.'
class InvalidEncryptionAlgorithmError(KnownResponseError):
message = 'The encryption request you specified is not valid. ' \
'The valid value is AES256.'
class InvalidLocationConstraint(KnownResponseError):
message = 'The specified location constraint is not valid.'
class InvalidObjectState(KnownResponseError):
message = 'The operation is not valid for the current state of the object.'
class InvalidPart(KnownResponseError):
message = 'One or more of the specified parts could not be found. ' \
'The part might not have been uploaded, or the specified ' \
'entity tag might not have matched the part\'s entity tag'
class InvalidPartOrder(KnownResponseError):
message = 'The list of parts was not in ascending order. Parts list ' \
'must be specified in order by part number.'
class InvalidPayer(KnownResponseError):
message = 'All access to this object has been disabled.'
class InvalidPolicyDocument(KnownResponseError):
message = 'The content of the form does not meet the conditions ' \
'specified in the policy document.'
class InvalidRange(KnownResponseError):
message = 'The requested range cannot be satisfied.'
class InvalidRequest(KnownResponseError):
message = 'Invalid Request'
class InvalidSecurity(KnownResponseError):
message = 'The provided security credentials are not valid.'
class InvalidSOAPRequest(KnownResponseError):
message = 'The SOAP request body is invalid.'
class InvalidStorageClass(KnownResponseError):
message = 'The storage class you specified is not valid.'
class InvalidTargetBucketForLogging(KnownResponseError):
message = 'The target bucket for logging does not exist, ' \
'is not owned by you, or does not have the appropriate ' \
'grants for the log-delivery group.'
class InvalidToken(KnownResponseError):
message = 'The provided token is malformed or otherwise invalid.'
class InvalidURI(KnownResponseError):
message = 'Couldn\'t parse the specified URI.'
class KeyTooLong(KnownResponseError):
message = 'Your key is too long.'
class MalformedACLError(KnownResponseError):
message = 'The XML you provided was not well-formed ' \
'or did not validate against our published schema.'
class MalformedPOSTRequest(KnownResponseError):
message = 'The body of your POST request is not ' \
'well-formed multipart/form-data.'
class MalformedXML(KnownResponseError):
message = 'This happens when the user sends malformed xml (xml that ' \
'doesn\'t conform to the published xsd) for the configuration.'
class MaxMessageLengthExceeded(KnownResponseError):
message = 'Your request was too big.'
class MaxPostPreDataLengthExceededError(KnownResponseError):
message = 'Your POST request fields preceding the ' \
'upload file were too large.'
class MetadataTooLarge(KnownResponseError):
message = 'Your metadata headers exceed the maximum allowed metadata size.'
class MethodNotAllowed(KnownResponseError):
message = 'The specified method is not allowed against this resource'
class MissingAttachment(KnownResponseError):
message = 'A SOAP attachment was expected, but none were found.'
class MissingContentLength(KnownResponseError):
message = 'You must provide the Content-Length HTTP header.'
class MissingRequestBodyError(KnownResponseError):
message = 'This happens when the user sends an empty xml document ' \
'as a request. The error message is, "Request body is empty."'
class MissingSecurityElement(KnownResponseError):
message = 'The SOAP 1.1 request is missing a security element.'
class MissingSecurityHeader(KnownResponseError):
message = 'Your request is missing a required header.'
class NoLoggingStatusForKey(KnownResponseError):
message = 'There is no such thing as a logging ' \
'status subresource for a key.'
class NoSuchBucket(KnownResponseError):
message = 'The specified bucket does not exist.'
class NoSuchKey(KnownResponseError):
message = 'The specified key does not exist.'
class NoSuchLifecycleConfiguration(KnownResponseError):
message = 'The lifecycle configuration does not exist.'
class NoSuchUpload(KnownResponseError):
message = 'The specified multipart upload does not exist. ' \
'The upload ID might be invalid, or the multipart ' \
'upload might have been aborted or completed.'
class NoSuchVersion(KnownResponseError):
message = 'Indicates that the version ID specified in the request does not match an existing version.'
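# Illustrative sketch (an assumption, not minio's actual dispatch): mapping an
# S3 error code parsed from a response onto the classes defined above.
_known_errors = {
    'NoSuchKey': NoSuchKey,
    'NoSuchBucket': NoSuchBucket,
    'AccessDenied': AccessDenied,
}

def raise_known_error(code, response_error):
    # Fall back to the generic InvalidRequest wrapper for unrecognized codes.
    raise _known_errors.get(code, InvalidRequest)(response_error)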
# coding: utf-8
from mongomock import MongoClient as MockMongoClient
from .base import *
# For tests, don't use KoBoCAT's DB
DATABASES = {
'default': dj_database_url.config(default='sqlite:///%s/db.sqlite3' % BASE_DIR),
}
|
DATABASE_ROUTERS = ['kpi.db_routers.TestingDatabaseRouter']
TESTING = True
# Decrease prod value to speed-up tests
SUBMISSION_LIST_LIMIT = 100
ENV = 'testing'
# Run all Celery tasks synchronously during testing
CELERY_TASK_ALWAYS_EAGER = True
MONGO_CONNECTION_URL = 'mongodb://fakehost/formhub_test'
MONGO_CONNECTION = MockMongoClient(
MONGO_CONNECTION_URL, j=True, tz_aware=True)
MONGO_DB = MONGO_CONNECTION['formhub_test']
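# Minimal sketch: mongomock mimics pymongo in memory, so tests can treat
# MONGO_DB like a live database (collection and field names are assumptions).
#
#   MONGO_DB.instances.insert_one({'_id': 1, 'status': 'submitted'})
#   assert MONGO_DB.instances.find_one({'_id': 1})['status'] == 'submitted'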
|
# -*- coding: utf-8 -*-
"""
Copyright (c) 2015, Philipp Klaus. All rights reserved.
License: GPLv3
"""
from distutils.core import setup
setup(name='netio230a',
version = '1.1.9',
description = 'Python package to control the Koukaam NETIO-230A',
long_description = 'Python software to access the Koukaam NETIO-230A and NETIO-230B: power distribution units / controllable power outlets with Ethernet interface',
author = 'Philipp Klaus',
author_email = 'philipp.l.klaus@web.de',
url = 'https://github.com/pklaus/netio230a',
license = 'GPL3+',
packages = ['netio230a'],
scripts = ['scripts/netio230a_cli', 'scripts/netio230a_discovery', 'scripts/netio230a_fakeserver'],
zip_safe = True,
platforms = 'any',
keywords = 'Netio230A Koukaam PDU',
classifiers = [
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
)
|
"""A proxy server enabling multiple wiring guide instances to interact with the same
SpiNNaker boards.
A very simple protocol is used between the client and server. Clients may send
the following new-line delimited commands to the server:
* ``VERSION,[versionstring]\n`` The server will disconnect any client with an
incompatible version number reported for ``[versionstring]``. Returns
``OK\n``.
* ``LED,[c],[f],[b],[lednum],[state]\n`` Turn on or off the specified LED. Note
that the LED remains switched on while *any* client wants it to be on.
Returns ``OK\n``.
* ``TARGET,[c],[f],[b],[link]\n`` Discover what link is at the other end of the
supplied link. Returns ``[c],[f],[b],[link]\n`` or ``None\n`` if no link is
connected. Note that links are represented by their number, not their name.
"""
import traceback
import socket
import select
from collections import defaultdict
import logging
from six import iteritems
from spinner.version import __version__
from spinner.topology import Direction
DEFAULT_PORT = 6512
class ProxyError(Exception):
"""Exception raised when the proxy cannot connect."""
pass
class ProxyServer(object):
"""A proxy server enabling multiple wiring guide instances to interact with
the same SpiNNaker boards.
"""
def __init__(self, bmp_controller, wiring_probe,
hostname="", port=DEFAULT_PORT):
self.bmp_controller = bmp_controller
self.wiring_probe = wiring_probe
# Open a TCP socket
self.server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server_sock.setsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR, 1)
self.server_sock.bind((hostname, port))
self.server_sock.listen(5)
self.client_socks = []
# A buffer for unprocessed data received from each client
self.client_buffer = {}
# For each LED, maintains a set of clients which have turned it on
self.led_setters = defaultdict(set)
def add_client(self, sock, addr):
"""Register a new client."""
logging.info("New connection {} from {}".format(sock, addr))
self.client_socks.append(sock)
# Create buffer for received data (and schedule its deletion upon
# disconnection)
self.client_buffer[sock] = b""
def remove_client(self, sock):
"""Disconnect and cleanup after a particular child."""
logging.info("Closing socket {}".format(sock))
# Remove buffer
self.client_buffer.pop(sock)
# Turn off any LEDs left on by the client
for (c, f, b, led), socks in iteritems(self.led_setters):
if sock in socks:
self.set_led(sock, c, f, b, led, False)
# Close socket
self.client_socks.remove(sock)
sock.close()
def set_led(self, sock, c, f, b, led, state):
"""Set the state of a diagnostic LED.
An LED is turned on if at least one client has turned it on. An LED is only
turned off if all clients which have turned the LED on have also turned it
off again.
"""
setters = self.led_setters[(c, f, b, led)]
cur_led_state = bool(setters)
if state:
setters.add(sock)
else:
setters.discard(sock)
new_led_state = bool(setters)
if cur_led_state != new_led_state:
self.bmp_controller.set_led(led, new_led_state, c, f, b)
def handle_version(self, sock, args):
"""Handle "VERSION" commands.
This command contains, as the argument, the SpiNNer version number of the
remote client. If the version of the client does not match the server, the
client is disconnected.
Arguments: vX.Y.Z
Returns: OK
"""
# Check for identical version
assert args.decode("asci | i") == __version__
sock.send(b"OK\n")
def handle_led(self, sock, args):
"""Handle "LED" commands.
Set the state of a diagnostic LED on a board.
Arguments: c,f,b,led,state
Returns: OK
"""
c, f, b, led, state = map(int, args.split(b","))
self.set_led(sock, c, f, b, led, state)
sock.send(b"OK\n")
def handle_target(self, sock, args):
"""Handle "TARGET" commands.
Determine what is at the other end of a given link.
Arguments: c,f,b,d
Returns: c,f,b,d or None
"""
c, f, b, d = map(int, args.split(b","))
target = self.wiring_probe.get_link_target(c, f, b, d)
if target is None:
sock.send(b"None\n")
else:
sock.send("{},{},{},{}\n".format(*map(int, target)).encode("ascii"))
def process_data(self, sock, data):
"""Process data received from a socket."""
# Prepend any previously unprocessed data
data = self.client_buffer[sock] + data
# Handle any received commands. If a command fails (or is invalid) the
# connection is dropped.
try:
while b"\n" in data:
line, _, data = data.partition(b"\n")
logging.debug("Handling command {} from {}".format(line, sock))
cmd, _, args = line.partition(b",")
# If an unrecognised command arrives, this lookup will fail and get
# caught by the exception handler, printing an error and disconnecting
# the client.
{
b"VERSION": self.handle_version,
b"LED": self.handle_led,
b"TARGET": self.handle_target,
}[cmd](sock, args)
except Exception as e:
logging.exception(
"Disconnected client {} due to bad command (above)".format(sock))
self.remove_client(sock)
return
# Retain any remaining unprocessed data
self.client_buffer[sock] = data
def main(self):
logging.info("Starting proxy server...")
try:
while True:
ready, _1, _2 = select.select([self.server_sock] + self.client_socks, [], [])
for sock in ready:
if sock is self.server_sock:
# New client connected!
self.add_client(*self.server_sock.accept())
else:
# Data arrived from a client
try:
data = sock.recv(1024)
except (IOError, OSError) as exc:
logging.error(
"Socket {} failed to receive: {}".format(sock, exc))
# Cause socket to get closed
data = b"" # pragma: no branch
if len(data) == 0:
# Connection closed
self.remove_client(sock)
else:
self.process_data(sock, data)
except KeyboardInterrupt:
# Disconnect all clients (also cleans up LED states, etc.)
for sock in self.client_socks:
self.remove_client(sock)
logging.info("Proxy server terminated cleanly.")
class ProxyClient(object):
"""A client for the ProxyServer object defined above.
This object implements a BMPController-compatible ``set_led`` method and
WiringProbe compatible ``get_link_target`` method and thus may be substituted
for the above when these functions are all that are required, e.g. for the
InteractiveWiringGuide.
"""
def __init__(self, hostname, port=DEFAULT_PORT):
"""Connect to a running ProxyServer."""
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.connect((hostname, port))
# A receive buffer
self.buf = b""
# Check for protocol version compatibility.
self.check_version()
def recvline(self):
"""Wait for a full line to be received from the server."""
while b"\n" not in self.buf:
data = self.sock.recv(1024)
self.buf += data
if len(data) == 0:
raise ProxyError("Remote server closed the connection.")
line, _, self.buf = self.buf.partition(b"\n")
return line
def check_version(self):
"""Check that the remote server has a compatible protocol version."""
self.sock.send("VERSION,{}\n".format(__version__).encode("ascii"))
if self.recvline() != b"OK":
raise ProxyError("Remote server has incompatible protocol version")
def set_led(self, led, state, c, f, b):
"""Set the state of an LED on the remote machine."""
self.sock.send("LED,{},{},{},{},{}\n".format(
c, f, b, led, int(state)).encode("ascii"))
if self.recvline() != b"OK":
raise ProxyError("Got unexpected response to LED command.")
def get_link_target(self, c, f, b, d):
"""Discover the other end of a specified link on a remote machine."""
self.sock.send("TARGET,{},{},{},{}\n".format(
c, f, b, int(d)).encode("ascii"))
response = self.recvline()
if response == b"None":
return None
else:
try:
c, f, b, d = map(int, response.split(b","))
return (c, f, b, Direction(d))
except ValueError:
raise ProxyError("Got unexpected response to TARGET command.")
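# Usage sketch (assumes a ProxyServer is reachable on localhost):
#
#   client = ProxyClient("localhost")
#   client.set_led(0, True, 0, 0, 0)           # turn LED 0 of board (0, 0, 0) on
#   print(client.get_link_target(0, 0, 0, 0))  # -> (c, f, b, Direction) or None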
from . import Renderer
from PIL import Image, ImageFont, ImageQt, ImageDraw
from PyQt5 import QtGui
'''
Renders a single line of text at a given position.
'''
class TextRenderer(Renderer):
MSFACTOR = 8
def __init__(self, gl, text, pos, size=64):
super().__init__(gl)
self.text = text
self.pos = pos
if size > 64:
self.MSFACTOR = 4
if size > 128:
self.sizeAdjust = size / 128
self.fSize = 128
else:
self.fSize = size
self.sizeAdjust = 1
self.callList = self.genSymbolCallList()
def genSymbolCallList(self):
genList = self.gl.glGenLists(1)
try:
font = ImageFont.truetype('resources/interface/Roboto.ttf', self.fSize * self.MSFACTOR)
except OSError:
print("Font not found, loading failsafe.")
font = ImageFont.truetype('arial.ttf', self.fSize * self.MSFACTOR)
# works on Windows; may still fail on Linux and OSX. Documentation unclear.
textSize = font.getsize(self.text)
border = 5
image = Image.new("RGBA", (textSize[0] + 2*border, textSize[1] + 2*border), None)
draw = ImageDraw.Draw(image)
draw.text((border, border), self.text, font=font, fill="white")
del draw
imgWidth = float(self.sizeAdjust * image.size[0] / self.MSFACTOR)
imgHeight = float(self.sizeAdjust * image.size[1] / self.MSFACTOR)
self.vertices = [0.0, self.fSize - imgHeight, 2.0,
0.0, float(self.fSize), 2.0,
imgWidth, float(self.fSize), 2.0,
imgWidth, self.fSize - imgHeight, 2.0]
self.texCoords = [0.0, 0.0, 2.0,
0.0, 1.0, 2.0,
1.0, 1.0, 2.0,
1.0, 0.0, 2.0]
self.texture = QtGui.QOpenGLTexture(ImageQt.ImageQt(image), True)
self.texture.setMinMagFilters(QtGui.QOpenGLTexture.LinearMipMapLinear, QtGui.QOpenGLTexture.Linear)
self.gl.glNewList(genList, self.gl.GL_COMPILE)
self.gl.glColor4f(1.0, 1.0, 1.0, 0.0)
self.gl.glMatrixMode(self.gl.GL_MODELVIEW)
self.gl.glPushMatrix()
self.gl.glTranslated(self.pos.x - self.sizeAdjust * (image.size[0] / (2 * self.MSFACTOR) - border), self.pos.y - image.size[1] / (2 * self.MSFACTOR), 0)
self.texture.bind()
self.gl.glEnableClientState(self.gl.GL_VERTEX_ARRAY)
self.gl.glEnableClientState(self.gl.GL_TEXTURE_COORD_ARRAY)
self.gl.glVertexPointer(3, self.gl.GL_FLOAT, 0, self.vertices)
self.gl.glTexCoordPointer(3, self.gl.GL_FLOAT, 0, self.texCoords)
self.gl.glEnable(self.gl.GL_TEXTURE_2D)
self.gl.glDrawArrays(self.gl.GL_QUADS, 0, 4)
self.gl.glDisable(self.gl.GL_TEXTURE_2D)
self.texture.release()
self.gl.glPopMatrix()
self.gl.glEndList()
return genList |
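# Usage sketch (assumptions: `gl` is the bound GL namespace the Renderer base
# class expects, and the position argument only needs .x/.y attributes):
#
#   from collections import namedtuple
#   Pos = namedtuple("Pos", "x y")
#   renderer = TextRenderer(gl, "Hello", Pos(100, 100), size=32)  # in initializeGL
#   gl.glCallList(renderer.callList)                              # once per paintGL frame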
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This module is a special module to define functions or other resources
which need to be imported outside of openstack_dashboard.api.nova
(like cinder.py) to avoid cyclic imports.
"""
from django.conf import settings
from glanceclient import exc as glance_exceptions
from novaclient import api_versions
from novaclient import client as nova_client
from horizon import exceptions as horizon_exceptions
from horizon.utils import memoized
from openstack_dashboard.api import base
from openstack_dashboard.api import glance
from openstack_dashboard.api import microversions
from openstack_dashboard.contrib.developer.profiler import api as profiler
# Supported compute versions
VERSIONS = base.APIVersionManager("compute", preferred_version=2)
VERSIONS.load_supported_version(1.1, {"client": nova_client, "version": 1.1})
VERSIONS.load_supported_version(2, {"client": nova_client, "version": 2})
INSECURE = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
CACERT = getattr(settings, 'OPENSTACK_SSL_CACERT', None)
class Server(base.APIResourceWrapper):
"""Simple wrapper around novaclient.server.Server.
Preserves the request info so image name can later be retrieved.
"""
_attrs = ['addresses', 'attrs', 'id', 'image', 'links', 'description',
'metadata', 'name', 'private_ip', 'public_ip', 'status', 'uuid',
'image_name', 'VirtualInterfaces', 'flavor', 'key_name', 'fault',
'tenant_id', 'user_id', 'created', 'locked',
'OS-EXT-STS:power_state', 'OS-EXT-STS:task_state',
'OS-EXT-SRV-ATTR:instance_name', 'OS-EXT-SRV-ATTR:host',
'OS-EXT-AZ:availability_zone', 'OS-DCF:diskConfig']
def __init__(self, apiresource, request):
super(Server, self).__init__(apiresource)
self.request = request
# TODO(gabriel): deprecate making a call to Glance as a fallback.
@property
def image_name(self):
if not self.image:
return None
elif hasattr(self.image, 'name'):
return self.image.name
elif 'name' in self.image:
return self.image['name']
else:
try:
image = glance.image_get(self.request, self.image['id'])
self.image['name'] = image.name
return image.name
except (glance_exceptions.ClientException,
horizon_exceptions.ServiceCatalogException):
self.image['name'] = None
return None
@property
def internal_name(self):
return getattr(self, 'OS-EXT-SRV-ATTR:instance_name', "")
@property
def availability_zone(self):
return getattr(self, 'OS-EXT-AZ:availability_zone', "")
@property
def host_server(self):
return getattr(self, 'OS-EXT-SRV-ATTR:host', '')
@memoized.memoized
def get_microversion(request, features):
client = novaclient(request)
min_ver, max_ver = api_versions._get_server_version_range(client)
return (microversions.get_microversion_for_features(
'nova', features, api_versions.APIVersion, min_ver, max_ver))
def get_auth_params_from_request(request):
"""Extracts properties needed by novaclient call from the request object.
These will be used to memoize the calls to novaclient.
"""
return (
request.user.username,
request.user.token.id,
request.user.tenant_id,
request.user.token.project.get('domain_id'),
base.url_for(request, 'compute'),
base.url_for(request, 'identity')
)
@memoized.memoized
def cached_novaclient(request, version=None):
(
username,
token_id,
project_id,
project_domain_id,
nova_url,
auth_url
) = get_auth_params_from_request(request)
if version is None:
version = VERSIONS.get_active_version()['version']
c = nova_client.Client(version,
username,
token_id,
project_id=project_id,
project_domain_id=project_domain_id,
auth_url=auth_url,
insecure=INSECURE,
cacert=CACERT,
http_log_debug=settings.DEBUG,
auth_token=token_id,
endpoint_override=nova_url)
return c
def novaclient(request, version=None):
if isinstance(version, api_versions.APIVersion):
version = version.get_string()
return cached_novaclient(request, version)
def get_novaclient_with_instance_desc(request):
microversion = get_microversion(request, "instance_description")
return novaclient(request, version=microversion)
@profiler.trace
def server_get(request, instance_id):
return Server(get_novaclient_with_instance_desc(request).servers.get(
instance_id), request)
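# Usage sketch (assumption: `request` is an authenticated Django request as
# supplied by Horizon middleware):
#
#   server = server_get(request, instance_id)
#   print(server.name, server.image_name, server.availability_zone)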
|
import doctest
from insights.parsers import ls_var_cache_pulp
from insights.parsers.ls_var_cache_pulp import LsVarCachePulp
from insights.tests import context_wrap
LS_VAR_CACHE_PULP = """
total 0
drwxrwxr-x. 5 48 1000 216 Jan 21 12:56 .
drwxr-xr-x. 10 0 0 121 Jan 20 13:57 ..
lrwxrwxrwx. 1 0 0 19 Jan 21 12:56 cache -> /var/lib/pulp/cache
drwxr-xr-x. 2 48 48 6 Jan 21 13:03 reserved_resource_worker-0@dhcp130-202.gsslab.pnq2.redhat.com
drwxr-xr-x. 2 48 48 6 Jan 21 02:03 reserved_resource_worker-1@dhcp130-202.gsslab.pnq2.redhat.com
drwxr-xr-x. 2 48 48 6 Jan 20 14:03 resource_manager@dhcp130-202.gsslab.pnq2.redhat.com
"""
def test_ls_var_cache_pulp():
ls_var_cache_pulp = LsVarCachePulp(context_wrap(LS_VAR_CACHE_PULP, path="insights_commands/ls_-lan_.var.cache.pulp"))
assert ls_var_cache_pulp.files_of('/var/cache/pulp') == ['cache']
cache_item = ls_var_cache_pulp.dir_entry('/var/cache/pulp', 'cache')
assert cache_item is not None
assert '/var/lib/pulp/' in cache_item['link']
def test_ls_var_cache_pulp_doc_examples():
env = {
'ls_var_cache_pulp': LsVarCachePulp(context_wrap(LS_VAR_CACHE_PULP, path="insights_commands/ls_-lan_.var.cache.pulp")),
}
failed, total = doctest.testmod(ls_var_cache_pulp, globs=env)
assert failed == 0
|
)
def set_terminated(self, status):
self.terminated = True
self.status = status
MockExperiment = namedtuple("MockExperiment", ["name", "experiment_id"])
class MockMlflowClient:
def __init__(self, tracking_uri=None, registry_uri=None):
self.tracking_uri = tracking_uri
self.registry_uri = registry_uri
self.experiments = [MockExperiment("existing_experiment", 0)]
self.runs = {0: []}
self.active_run = None
def set_tracking_uri(self, tracking_uri):
self.tracking_uri = tracking_uri
def get_experiment_by_name(self, name):
try:
index = self.experiment_names.index(name)
return self.experiments[index]
except ValueError:
return None
def get_experiment(self, experiment_id):
experiment_id = int(experiment_id)
try:
return self.experiments[experiment_id]
except IndexError:
return None
def create_experiment(self, name):
experiment_id = len(self.experiments)
self.experiments.append(MockExperiment(name, experiment_id))
self.runs[experiment_id] = []
return experiment_id
def create_run(self, experiment_id, tags=None):
experiment_runs = self.runs[experiment_id]
run_id = (experiment_id, len(experiment_runs))
run = MockRun(run_id=run_id, tags=tags)
experiment_runs.append(run)
return run
def start_run(self, experiment_id, run_name):
# Creates new run and sets it as active.
run = self.create_run(experiment_id)
self.active_run = run
def get_mock_run(self, run_id):
return self.runs[run_id[0]][run_id[1]]
def log_param(self, run_id, key, value):
run = self.get_mock_run(run_id)
run.log_param(key, value)
def log_metric(self, run_id, key, value, step):
run = self.get_mock_run(run_id)
run.log_metric(key, value)
def log_artifacts(self, run_id, local_dir):
run = self.get_mock_run(run_id)
run.log_artifact(local_dir)
def set_terminated(self, run_id, status):
run = self.get_mock_run(run_id)
run.set_terminated(status)
@property
def experiment_names(self):
return [e.name for e in self.experiments]
def clear_env_vars():
if "MLFLOW_EXPERIMENT_NAME" in os.environ:
del os.environ["MLFLOW_EXPERIMENT_NAME"]
if "MLFLOW_EXPERIMENT_ID" in os.environ:
del os.environ["MLFLOW_EXPERIMENT_ID"]
class MLflowTest(unittest.TestCase):
@patch("mlflow.tracking.MlflowClient", MockMlflowClient)
def testMlFlowLoggerCallbackConfig(self):
# Explicitly pass in all args.
logger = MLflowLoggerCallback(
tracking_uri="test1",
registry_uri="test2",
experiment_name="test_exp")
logger.setup()
self.assertEqual(logger.client.tracking_uri, "test1")
self.assertEqual(logger.client.registry_uri, "test2")
self.assertListEqual(logger.client.experiment_names,
["existing_experiment", "test_exp"])
self.assertEqual(logger.experiment_id, 1)
# Check if client recognizes already existing experiment.
logger = MLflowLoggerCallback(experiment_name="existing_experiment")
logger.setup()
self.assertListEqual(logger.client.experiment_names,
["existing_experiment"])
self.assertEqual(logger.experiment_id, 0)
# Pass in experiment name as env var.
clear_env_vars()
os.environ["MLFLOW_EXPERIMENT_NAME"] = "test_exp"
logger = MLflowLoggerCallback()
logger.setup()
self.assertListEqual(logger.client.experiment_names,
["existing_experiment", "test_exp"])
self.assertEqual(logger.experiment_id, 1)
# Pass in existing experiment name as env var.
clear_env_vars()
os.environ["MLFLOW_EXPERIMENT_NAME"] = "e | xisting_experiment"
logger = MLflowLoggerCallback()
logger.setup()
self.assertListEqual(logger.client.experiment_names,
["existing_experiment"])
self.assertEqual(logger.experiment_id, 0)
# Pass in existing experiment id as env var.
clear_env_vars()
os.environ["MLFLOW_EXPERIMENT_ID"] = "0"
logger = MLflowLoggerCallback()
logger.setup()
self.assertListEqual(logger.client.experiment_names,
["existing_experiment"])
self.assertEqual(logger.experiment_id, "0")
# Pass in non existing experiment id as env var.
clear_env_vars()
os.environ["MLFLOW_EXPERIMENT_ID"] = "500"
with self.assertRaises(ValueError):
logger = MLflowLoggerCallback()
logger.setup()
# Experiment name env var should take precedence over id env var.
clear_env_vars()
os.environ["MLFLOW_EXPERIMENT_NAME"] = "test_exp"
os.environ["MLFLOW_EXPERIMENT_ID"] = "0"
logger = MLflowLoggerCallback()
logger.setup()
self.assertListEqual(logger.client.experiment_names,
["existing_experiment", "test_exp"])
self.assertEqual(logger.experiment_id, 1)
@patch("mlflow.tracking.MlflowClient", MockMlflowClient)
def testMlFlowLoggerLogging(self):
clear_env_vars()
trial_config = {"par1": 4, "par2": 9.}
trial = MockTrial(trial_config, "trial1", 0, "artifact")
logger = MLflowLoggerCallback(
experiment_name="test1", save_artifact=True)
logger.setup()
# Check if run is created.
logger.on_trial_start(iteration=0, trials=[], trial=trial)
# New run should be created for this trial with correct tag.
mock_run = logger.client.runs[1][0]
self.assertDictEqual(mock_run.tags, {"trial_name": "trial1"})
self.assertTupleEqual(mock_run.run_id, (1, 0))
self.assertTupleEqual(logger._trial_runs[trial], mock_run.run_id)
# Params should be logged.
self.assertListEqual(mock_run.params, [{"par1": 4}, {"par2": 9}])
# When same trial is started again, new run should not be created.
logger.on_trial_start(iteration=0, trials=[], trial=trial)
self.assertEqual(len(logger.client.runs[1]), 1)
# Check metrics are logged properly.
result = {
"metric1": 0.8,
"metric2": 1,
"metric3": None,
"training_iteration": 0
}
logger.on_trial_result(0, [], trial, result)
mock_run = logger.client.runs[1][0]
# metric3 is not logged since it cannot be converted to float.
self.assertListEqual(mock_run.metrics, [{
"metric1": 0.8
}, {
"metric2": 1.0
}, {
"training_iteration": 0
}])
# Check that artifact is logged on termination.
logger.on_trial_complete(0, [], trial)
mock_run = logger.client.runs[1][0]
self.assertListEqual(mock_run.artifacts, ["artifact"])
self.assertTrue(mock_run.terminated)
self.assertEqual(mock_run.status, "FINISHED")
@patch("mlflow.tracking.MlflowClient", MockMlflowClient)
def testMlFlowLegacyLoggerConfig(self):
mlflow = MockMlflowClient()
with patch.dict("sys.modules", mlflow=mlflow):
clear_env_vars()
trial_config = {"par1": 4, "par2": 9.}
trial = MockTrial(trial_config, "trial1", 0, "artifact")
# No experiment_id is passed in config, should raise an error.
with self.assertRaises(ValueError):
logger = MLflowLogger(trial_config, "/tmp", trial)
trial_config.update({
"logger_config": {
"mlflow_tracking_uri": "test_tracking_uri",
"mlflow_experiment_id": 0
}
})
trial = MockTrial(trial_config, "trial2", 1, "artifact")
logger = MLflowLogger(trial_config, "/tmp", trial)
experiment_logger = logger._trial_experiment |
# __main__.py is used when a package is executed as a module, i.e.: `python -m pptx_downsizer`
if __name__ == '__main__':
from .pptx_downsizer import cli
cli()
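# Programmatic equivalent of `python -m pptx_downsizer` (a sketch; assumes the
# package is importable):
#
#   import runpy
#   runpy.run_module("pptx_downsizer", run_name="__main__")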
|