prefix
stringlengths
0
918k
middle
stringlengths
0
812k
suffix
stringlengths
0
962k
# Copyright (c) 2015, Simone Margaritelli <evilsocket at gmail dot com> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of ARM Inject nor the names of its contributors may be used # to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from pyadb.adb import ADB import sys if len(sys.argv) != 2: print "Usage: python %s <pid>" % sys.argv[0] quit() pid = int(sys.argv[1]) try: adb = ADB() print "@ Pushing files to /data/local/tmp ..." adb.sh( "rm -rf /data/local/tmp/injector /data/local/tmp/libhook.so" ) adb
.push( "libs/armeabi-v7a/injector", "/data/local/tmp/injector" ) adb.push( "libs/armeabi-v7a/libhook.so", "/data/local/tmp/libhook.so" ) adb.sh( "chmod 777 /data/local/tmp/injector" ) # we need to set selinux to permissive in order to make ptrace work adb.set_selinux_level( 0 ) adb.clear_log() print "@ Injection into PID %d starting ..." % pid adb.sudo( "/data/local/tmp/injector %d /data/local/tmp/libhoo
k.so" % pid ) adb.logcat("LIBHOOK") except KeyboardInterrupt: pass
# -*- coding: utf-8 -*- # Generated by Django 1.11.2 on 2017-11-30 15:02 from __future__ import unicode_literals from django.db import migrations, utils from django.db import transaction def copy_previous_pgrm_to_current_lunit(apps, model): base = apps.get_app_config('base') ProgramManager = base.get_model('programmanager') OfferYear = base.get_model('offeryear') previous_pgrm = ProgramMana
ger.objects.filter(offer_year__academic_year__year=2016)\
.select_related('offer_year__offer') for pgrm in previous_pgrm: new_offer_year = OfferYear.objects.filter(offer=pgrm.offer_year.offer, academic_year__year=2017).first() if new_offer_year: try: with transaction.atomic(): pgrm.pk = None pgrm.offer_year = new_offer_year pgrm.save() except utils.IntegrityError: print("Duplicated.") class Migration(migrations.Migration): dependencies = [ ('base', '0197_auto_20171130_0823'), ] operations = [ migrations.RunPython(copy_previous_pgrm_to_current_lunit), ]
# Copyright 2017 Cisco Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific la
nguage governing permissions and limitations # under the License. from django.utils.translation import ugettext_lazy as _ impo
rt horizon class SecurityGroups(horizon.Panel): name = _("Security Groups") slug = 'security_groups'
import os import pytest @pytest.mark.bashcomp(cmd="pkgconf") class TestPkgconf: @pytest.mark.complete("pkgconf ") def test_1(self, completion): assert completion @pytest.mark.complete("pkgconf -", require_cmd=True) def test_2(self, completion): assert completion @pytest.mark.complete( "pkgconf %s/bash-completion.pc --variable=" % os.getenv("ABS_TOP_BUILDDIR", "..
/.."), require_cmd=True, ) def test_variable(self, completion): assert "completionsdir" in comp
letion
_msg = "Experimental functionality. Behaviour might change/be removed later" if EXPERIMENTAL: msg = common_msg + ': ' + msg self.warning(msg, *args, **kwargs) else: msg = common_msg + " (use --experimental option to enable): " + msg raise EasyBuildError(msg, *args) def deprecated(self, msg, ver, max_ver=None, more_info=None, silent=False, *args, **kwargs): """ Print deprecation warning or raise an exception, depending on specified version(s) :param: msg: deprecation message :param ver: if max_ver is None: threshold for EasyBuild version to determine warning vs exception else: version to check against max_ver to determine warning vs exception :param max_ver: version threshold for warning vs exception (compared to 'ver') :param more_info: additional message with instructions where to get more information :param silent: stay silent (don't *print* deprecation warnings, only log them) """ # provide log_callback function that both logs a warning and prints to stderr def log_callback_warning_and_print(msg): """Log warning message, and also print it to stderr.""" self.warning(msg) print_warning(msg, silent=silent) kwargs['log_callback'] = log_callback_warning_and_print # always raise an EasyBuildError, nothing else kwargs['exception'] = EasyBuildError if max_ver is None: if more_info: msg += more_info else: msg += "; see %s for more information" % DEPRECATED_DOC_URL fancylogger.FancyLogger.deprecated(self, msg, str(CURRENT_VERSION), ver, *args, **kwargs) else: fancylogger.FancyLogger.deprecated(self, msg, ver, max_ver, *args, **kwargs) def nosupport(self, msg, ver): """Print error message for no longer supported behaviour, and raise an EasyBuildError.""" nosupport_msg = "NO LONGER SUPPORTED since v%s: %s; see %s for more information" raise EasyBuildError(nosupport_msg, ver, msg, DEPRECATED_DOC_URL) def error(self, msg, *args, **kwargs): """Print error message and raise an EasyBuildError.""" ebmsg = "EasyBuild crashed with an error %s: " % self.caller_info() 
fancylogger.FancyLogger.error(self, ebmsg + msg, *args, **kwargs) def devel(self, msg, *args, **kwargs): """Print development log message""" self.log(DEVEL_LOG_LEVEL, msg, *args, **kwargs) def exception(self, msg, *args): """Print exception message and raise EasyBuildError.""" # don't raise the exception from within error ebmsg = "EasyBuild encountered an exception %s: " % self.caller_info() fancylogger.FancyLogger.exception(self, ebmsg + msg, *args) # set format for logger LOGGING_FORMAT = EB_MSG_PREFIX + ' %(asctime)s %(filename)s:%(lineno)s %(levelname)s %(message)s' fancylogger.setLogFormat(LOGGING_FORMAT) # set the default LoggerClass to EasyBuildLog fancylogger.logging.setLoggerClass(EasyBuildLog) # you can't easily set another LoggerClass before fancylogger calls getLogger on import _init_fancylog = fancylogger.getLogger(fname=False) del _init_fancylog.manager.loggerDict[_init_fancylog.name] # we need to make sure there is a handler fancylogger.logToFile(filename=os.devnull, max_bytes=0) # EasyBuildLog _init_easybuildlog = fancylogger.getLogger(fname=False) def init_logging(logfile, logtostdout=False, silent=False, colorize=fancylogger.Colorize.AUTO, tmp_logdir=None): """Initialize logging.""" if logtostdout: fancylogger.logToScreen(enable=True, stdout=True, colorize=colorize) else: if logfile is None: # if logdir is specified but doesn't exist yet, create it first if tmp_logdir and not os.path.exists(tmp_logdir): try: os.makedirs(tmp_logdir) except (IOError, OSError) as err: raise EasyBuildError("Failed to create temporary log directory %s: %s", tmp_logdir, err) # mkstemp returns (fd,filename), fd is from os.open, not regular open! 
fd, logfile = tempfile.mkstemp(suffix='.log', prefix='easybuild-', dir=tmp_logdir) os.close(fd) fancylogger.logToFile(logfile, max_bytes=0) print_msg('temporary log file in case of crash %s' % (logfile), log=None, silent=silent) log = fancylogger.getLogger(fname=False) return log, logfile def log_start(log, eb_command_line, eb_tmpdir): """Log startup info.""" log.info(this_is_easybuild()) # log used command line log.info("Command line: %s", ' '.join(eb_command_line)) log.info("Using %s as temporary directory", eb_tmpdir) def stop_logging(logfile, logtostdout=False): """Stop logging.""" if logtostdout: fancylogger.logToScreen(enable=False, stdout=True) if logfile is not None: fancylogger.logToFile(logfile, enable=False) def print_msg(msg, *args, **kwargs): """ Print a message. :param log: logger instance to also message to :param silent: be silent (only log, don't print) :param prefix: include message prefix characters ('== ') :param newline: end message with newline :param stderr: print to stderr rather than stdout """ if args: msg = msg % args log = kwargs.pop('log', None) silent = kwargs.pop('silent', False) prefix = kwargs.pop('prefix', True) newline = kwargs.pop('newline', True) stderr = kwargs.pop('stderr', False) if kwargs: raise EasyBuildError("Unknown named arguments passed to print_msg: %s", kwargs) if log: log.info(msg) if not silent: if prefix: msg = ' '.join([EB_MSG_PREFIX, msg]) if newline: msg += '\n' if stderr: sys.stderr.write(msg) else: sys.stdout.write(msg) def dry_run_set_dirs(prefix, builddir, software_installdir, module_installdir): """ Initialize for printing dry run messages. Define DRY_RUN_*DIR constants, so they can be used in dry_run_msg to replace fake build/install dirs. :param prefix: prefix of fake build/install dirs, that can be stripped off when printing :param builddir: fake build dir :param software_installdir: fake software install directory :param module_installdir: fake module install directory """ global DRY_RUN_BUILD_DIR
DRY_RUN_BUILD_DIR = (re.compile(re.escape(builddir)), builddir[len(prefix):]) global DRY_RUN_MODULES_INSTALL_DIR DRY_RUN_MODULES_INSTALL_DIR = (re.compile(re.escape(module_installdir)), module_installdir[len(prefix):]) glo
bal DRY_RUN_SOFTWARE_INSTALL_DIR DRY_RUN_SOFTWARE_INSTALL_DIR = (re.compile(re.escape(software_installdir)), software_installdir[len(prefix):]) def dry_run_msg(msg, *args, **kwargs): """Print dry run message.""" # replace fake build/install dir in dry run message with original value if args: msg = msg % args silent = kwargs.pop('silent', False) if kwargs: raise EasyBuildError("Unknown named arguments passed to dry_run_msg: %s", kwargs) for dry_run_var in [DRY_RUN_BUILD_DIR, DRY_RUN_MODULES_INSTALL_DIR, DRY_RUN_SOFTWARE_INSTALL_DIR]: if dry_run_var is not None: msg = dry_run_var[0].sub(dry_run_var[1], msg) print_msg(msg, silent=silent, prefix=False) def dry_run_warning(msg, *args, **kwargs): """Print dry run message.""" if args: msg = msg % args silent = kwargs.pop('silent', False) if kwargs: raise EasyBuildError("Unknown named arguments passed to dry_run_warning: %s", kwargs) dry_run_msg("\n!!!\n!!! WARNING: %s\n!!!\n" % msg, silent=silent) def print_error(msg, *args, **kwargs): """ Print error message and exit EasyBuild """ if args: msg = msg % args log = kwargs.pop('log', None) exitCode = kwargs.pop('exitCode', 1) opt_parser = kwargs.pop('opt_parser', None) exit_on_error = kwargs.pop('exit_on_error', Tr
stance_cache is not None: instance_cache[instance.io_loop] = instance return instance def initialize(self, io_loop, defaults=None): self.io_loop = io_loop self.defaults = dict(HTTPRequest._DEFAULTS) if defaults is not None: self.defaults.update(defaults) self._closed = False def close(self): """Destroys this HTTP client, freeing any file descriptors used. This method is **not needed in normal use** due to the way that `AsyncHTTPClient` objects are transparently reused. ``close()`` is generally only necessary when either the `.IOLoop` is also being closed, or the ``force_instance=True`` argument was used when creating the `AsyncHTTPClient`. No other methods may be called on the `AsyncHTTPClient` after ``close()``. """ if self._closed: return self._closed = True if self._instance_cache is not None: if self._instance_cache.get(self.io_loop) is not self: raise RuntimeError("inconsistent AsyncHTTPClient cache") del self._instance_cache[self.io_loop] def fetch(self, request, callback=None, raise_error=True, **kwargs): """Executes a request, asynchronously returning an `HTTPResponse`. The request may be either a string URL or an `HTTPRequest` object. If it is a string, we construct an `HTTPRequest` using any additional kwargs: ``HTTPRequest(request, **kwargs)`` This method returns a `.Future` whose result is an `HTTPResponse`. By default, the ``Future`` will raise an `HTTPError` if the request returned a non-200 response code (other errors may also be raised if the server could not be contacted). Instead, if ``raise_error`` is set to False, the response will always be returned regardless of the response code. If a ``callback`` is given, it will be invoked with the `HTTPResponse`. In the callback interface, `HTTPError` is not automatically raised. Instead, you must check the response's ``error`` attribute or call its `~HTTPResponse.rethrow` method. 
""" if self._closed: raise RuntimeError("fetch() called on closed AsyncHTTPClient") if not isinstance(request, HTTPRequest): request = HTTPRequest(url=request, **kwargs) else: if kwargs: raise ValueError("kwargs can't be used if request is an HTTPRequest object") # We may modify this (to add Host, Accept-Encoding, etc), # so make sure we don't modify the caller's object. This is also
# where normal dicts get converted to HTTPHeaders objects. request.headers = httputil.HTTPHeaders(request.headers) request = _RequestProxy(request, self.defaults) future = TracebackFuture() if callback is not None: callback = stack_context.wrap(callback) def handle_future(future): exc = future.exceptio
n() if isinstance(exc, HTTPError) and exc.response is not None: response = exc.response elif exc is not None: response = HTTPResponse( request, 599, error=exc, request_time=time.time() - request.start_time) else: response = future.result() self.io_loop.add_callback(callback, response) future.add_done_callback(handle_future) def handle_response(response): if raise_error and response.error: future.set_exception(response.error) else: future.set_result(response) self.fetch_impl(request, handle_response) return future def fetch_impl(self, request, callback): raise NotImplementedError() @classmethod def configure(cls, impl, **kwargs): """Configures the `AsyncHTTPClient` subclass to use. ``AsyncHTTPClient()`` actually creates an instance of a subclass. This method may be called with either a class object or the fully-qualified name of such a class (or ``None`` to use the default, ``SimpleAsyncHTTPClient``) If additional keyword arguments are given, they will be passed to the constructor of each subclass instance created. The keyword argument ``max_clients`` determines the maximum number of simultaneous `~AsyncHTTPClient.fetch()` operations that can execute in parallel on each `.IOLoop`. Additional arguments may be supported depending on the implementation class in use. Example:: AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient") """ super(AsyncHTTPClient, cls).configure(impl, **kwargs) class HTTPRequest(object): """HTTP client request object.""" # Default values for HTTPRequest parameters. # Merged with the values on the request object by AsyncHTTPClient # implementations. 
_DEFAULTS = dict( connect_timeout=20.0, request_timeout=20.0, follow_redirects=True, max_redirects=5, decompress_response=True, proxy_password='', allow_nonstandard_methods=False, validate_cert=True) def __init__(self, url, method="GET", headers=None, body=None, auth_username=None, auth_password=None, auth_mode=None, connect_timeout=None, request_timeout=None, if_modified_since=None, follow_redirects=None, max_redirects=None, user_agent=None, use_gzip=None, network_interface=None, streaming_callback=None, header_callback=None, prepare_curl_callback=None, proxy_host=None, proxy_port=None, proxy_username=None, proxy_password=None, proxy_auth_mode=None, allow_nonstandard_methods=None, validate_cert=True, ca_certs=None, allow_ipv6=None, client_key=None, client_cert=None, body_producer=None, expect_100_continue=False, decompress_response=None, ssl_options=None): r"""All parameters except ``url`` are optional. :arg string url: URL to fetch :arg string method: HTTP method, e.g. "GET" or "POST" :arg headers: Additional HTTP headers to pass on the request :type headers: `~tornado.httputil.HTTPHeaders` or `dict` :arg body: HTTP request body as a string (byte or unicode; if unicode the utf-8 encoding will be used) :arg body_producer: Callable used for lazy/asynchronous request bodies. It is called with one argument, a ``write`` function, and should return a `.Future`. It should call the write function with new data as it becomes available. The write function returns a `.Future` which can be used for flow control. Only one of ``body`` and ``body_producer`` may be specified. ``body_producer`` is not supported on ``curl_httpclient``. When using ``body_producer`` it is recommended to pass a ``Content-Length`` in the headers as otherwise chunked encoding will be used, and many servers do not support chunked encoding on requests. 
New in Tornado 4.0 :arg string auth_username: Username for HTTP authentication :arg string auth_password: Password for HTTP authentication :arg string auth_mode: Authentication mode; default is "basic". Allowed values are implementation-defined; ``curl_httpclient`` supports "basic" and "digest"; ``simple_httpclient`` only supports "basic" :arg float connect_timeout: Timeout for initial connection in seconds, default 20 seconds :arg float request_timeout: Timeout for entire request in seconds, default 20 seconds :arg if_modified_since: Timestamp for ``If-Modified-Since`` header :type if_modifi
# -*- coding: utf-8 -*- # Copyright 2018 Lorenzo Battistini - Agile Business Group # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl). { "name": "Causali pagamento per ritenute d'acconto", "version": "10.0.1.0.0", "development_status": "Beta", "category": "Hidden",
"website": "https://github.com/OCA/l10n-italy", "author": "Agile Business Group, Odoo Community Association (OCA)", "license": "LGPL-3", "application": False, "installable": True, "depends": [ "l10n_it_withholding_tax", "l10n_it_causali_pagamento", ], "data": [ "views/
withholding_tax.xml", ], 'auto_install': True, }
(filename, 'w+') as file_obj: file_obj.write('random') file_obj.flush() actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', []) assert actual['format'] == 1 plugin_release_found = False for manifest_entry in actual['files']: assert manifest_entry['name'] != 'namespace-collection-0.0.0.tar.gz' if manifest_entry['name'] == 'plugins/namespace-collection-0.0.0.tar.gz': plugin_release_found = True assert plugin_release_found expected_msgs = [ "Skipping '%s/galaxy.yml' for collection build" % to_text(input_dir), "Skipping '%s' for collection build" % to_text(release_file) ] assert mock_display.call_count == 2 assert mock_display.mock_calls[0][1][0] in expected_msgs assert mock_display.mock_calls[1][1][0] in expected_msgs def test_build_ignore_patterns(collection_input, monkeypatch): input_dir = collection_input[0] mock_display = MagicMock() monkeypatch.setattr(Display, 'vvv', mock_display) actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', ['*.md', 'plugins/action', 'playbooks/*.j2']) assert actual['format'] == 1 expected_missing = [ 'README.md', 'docs/My Collection.md', 'plugins/action', 'playbooks/templates/test.conf.j2', 'playbooks/templates/subfolder/test.conf.j2', ] # Files or dirs that are close to a match but are not, make sure they are present expected_present = [ 'docs', 'roles/common/templates/test.conf.j2', 'roles/common/templates/subfolder/test.conf.j2', ] actual_files = [e['name'] for e in actual['files']] for m in expected_missing: assert m not in actual_files for p in expected_present: assert p in actual_files expected_msgs = [ "Skipping '%s/galaxy.yml' for collection build" % to_text(input_dir), "Skipping '%s/README.md' for collection build" % to_text(input_dir), "Skipping '%s/docs/My Collection.md' for collection build" % to_text(input_dir), "Skipping '%s/plugins/action' for collection build" % to_text(input_dir), "Skipping '%s/playbooks/templates/test.conf.j2' for collection 
build" % to_text(input_dir), "Skipping '%s/playbooks/templates/subfolder/test.conf.j2' for collection build" % to_text(input_dir), ] assert mock_display.call_count == len(expected_msgs) assert mock_display.mock_calls[0][1][0] in expected_msgs assert mock_display.mock_calls[1][1][0] in expected_msgs assert mock_display.mock_calls[2][1][0] in expected_msgs assert mock_display.mock_calls[3][1][0] in expected_msgs assert mock_display.mock_calls[4][1][0] in expected_msgs assert mock_display.mock_calls[5][1][0] in expected_msgs def test_build_ignore_symlink_target_outside_collection(collection_input, monkeypatch): input_dir, outside_dir = collection_input mock_display = MagicMock() monkeypatch.setattr(Display, 'warning', mock_display) link_path = os.path.join(input_dir, 'plugins', 'connection') os.symlink(outside_dir,
link_path) actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', []) for manifest_entry in actual['files']: assert manifest_entry['name'] != 'plugins/connection' assert mock_display.call_count == 1 assert mock_display.mock_calls[0][1][0] == "Skipping '%s' as it is a symbolic link to a directory outside " \ "the collection" % to_text(link_path) def test_build_copy_symlink_target_insi
de_collection(collection_input): input_dir = collection_input[0] os.makedirs(os.path.join(input_dir, 'playbooks', 'roles')) roles_link = os.path.join(input_dir, 'playbooks', 'roles', 'linked') roles_target = os.path.join(input_dir, 'roles', 'linked') roles_target_tasks = os.path.join(roles_target, 'tasks') os.makedirs(roles_target_tasks) with open(os.path.join(roles_target_tasks, 'main.yml'), 'w+') as tasks_main: tasks_main.write("---\n- hosts: localhost\n tasks:\n - ping:") tasks_main.flush() os.symlink(roles_target, roles_link) actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', []) linked_entries = [e for e in actual['files'] if e['name'].startswith('playbooks/roles/linked')] assert len(linked_entries) == 1 assert linked_entries[0]['name'] == 'playbooks/roles/linked' assert linked_entries[0]['ftype'] == 'dir' def test_build_with_symlink_inside_collection(collection_input): input_dir, output_dir = collection_input os.makedirs(os.path.join(input_dir, 'playbooks', 'roles')) roles_link = os.path.join(input_dir, 'playbooks', 'roles', 'linked') file_link = os.path.join(input_dir, 'docs', 'README.md') roles_target = os.path.join(input_dir, 'roles', 'linked') roles_target_tasks = os.path.join(roles_target, 'tasks') os.makedirs(roles_target_tasks) with open(os.path.join(roles_target_tasks, 'main.yml'), 'w+') as tasks_main: tasks_main.write("---\n- hosts: localhost\n tasks:\n - ping:") tasks_main.flush() os.symlink(roles_target, roles_link) os.symlink(os.path.join(input_dir, 'README.md'), file_link) collection.build_collection(to_text(input_dir, errors='surrogate_or_strict'), to_text(output_dir, errors='surrogate_or_strict'), False) output_artifact = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz') assert tarfile.is_tarfile(output_artifact) with tarfile.open(output_artifact, mode='r') as actual: members = actual.getmembers() linked_folder = next(m for m in members if m.path == 'playbooks/roles/linked') assert 
linked_folder.type == tarfile.SYMTYPE assert linked_folder.linkname == '../../roles/linked' linked_file = next(m for m in members if m.path == 'docs/README.md') assert linked_file.type == tarfile.SYMTYPE assert linked_file.linkname == '../README.md' linked_file_obj = actual.extractfile(linked_file.name) actual_file = secure_hash_s(linked_file_obj.read()) linked_file_obj.close() assert actual_file == '63444bfc766154e1bc7557ef6280de20d03fcd81' def test_publish_no_wait(galaxy_server, collection_artifact, monkeypatch): mock_display = MagicMock() monkeypatch.setattr(Display, 'display', mock_display) artifact_path, mock_open = collection_artifact fake_import_uri = 'https://galaxy.server.com/api/v2/import/1234' mock_publish = MagicMock() mock_publish.return_value = fake_import_uri monkeypatch.setattr(galaxy_server, 'publish_collection', mock_publish) collection.publish_collection(artifact_path, galaxy_server, False, 0) assert mock_publish.call_count == 1 assert mock_publish.mock_calls[0][1][0] == artifact_path assert mock_display.call_count == 1 assert mock_display.mock_calls[0][1][0] == \ "Collection has been pushed to the Galaxy server %s %s, not waiting until import has completed due to " \ "--no-wait being set. 
Import task results can be found at %s" % (galaxy_server.name, galaxy_server.api_server, fake_import_uri) def test_publish_with_wait(galaxy_server, collection_artifact, monkeypatch): mock_display = MagicMock() monkeypatch.setattr(Display, 'display', mock_display) artifact_path, mock_open = collection_artifact fake_import_uri = 'https://galaxy.server.com/api/v2/import/1234' mock_publish = MagicMock() mock_publish.return_value = fake_import_uri monkeypatch.setattr(galaxy_server, 'publish_collection', mock_publish) mock_wait = MagicMock() monkeypatch.setattr(galaxy_server, 'wait_import_task', mock_wait) collection.publish_collection(artifact_path, galaxy_server, True, 0) assert mock_publish.call_count == 1 assert mock_publish.mock_calls[0][1][0] == artifact_path assert moc
#!/usr/bin/env python # Tag Highlighter: # Author: A. S. Budden <abudden _at_ gmail _dot_ com> # Copyright: Copyright (C) 2009-2011 A. S. Budden # Permission is hereby granted to use and distribute this code, # with or without modifications, provided that this copyright # notice is copied with it. Like anything else that's free, # the TagHighlight plugin is provided *as is* and comes with no # warranty of any kind, either expressed or implied. By using # this plugin, you agree that in no event will the copyright # holder be liable for any damages resulting from the use # of this software. # --------------------------------------------------------------------- from __future__ import print_function import time import re # Used for timing a function; from http://www.daniweb.com/code/snippet368.html # decora
tor: put @print_timing before a function to time it. def print_timing(func): def wrapper(*arg): t1 = time.time() res = func(*arg)
t2 = time.time() print('{name} took {time:0.3f} ms'.format(name=func.__name__, time=(t2-t1)*1000.0)) return res return wrapper class TagHighlightOptionDict(dict): """Customised version of a dictionary that allows access by attribute.""" def __getattr__(self, name): return self[name] def __getitem__(self, name): if name not in self: from .options import AllOptions for option in AllOptions.keys(): if option == name: return AllOptions[option]['Default'] return super(TagHighlightOptionDict, self).__getitem__(name) def __setattr__(self, name, value): self[name] = value class SetDict(dict): """Customised version of a dictionary that auto-creates non-existent keys as sets.""" def __getitem__(self, key): if key not in self: self[key] = set() return super(SetDict, self).__getitem__(key) def __setitem__(self, key, value): if isinstance(value, set): super(SetDict, self).__setitem__(key, value) else: super(SetDict, self).__setitem__(key, set([value])) class DictDict(dict): """Customised version of a dictionary that auto-creates non-existent keys as SetDicts.""" def __getitem__(self, key): if key not in self: self[key] = SetDict() return super(DictDict, self).__getitem__(key) def __setitem__(self, key, value): if isinstance(value, SetDict): super(DictDict, self).__setitem__(key, value) else: raise NotImplementedError def GenerateValidKeywordRange(iskeyword): # Generally obeys Vim's iskeyword setting, but # only allows characters in ascii range ValidKeywordSets = iskeyword.split(',') rangeMatcher = re.compile('^(?P<from>(?:\d+|\S))-(?P<to>(?:\d+|\S))$') falseRangeMatcher = re.compile('^^(?P<from>(?:\d+|\S))-(?P<to>(?:\d+|\S))$') validList = [] for valid in ValidKeywordSets: m = rangeMatcher.match(valid) fm = falseRangeMatcher.match(valid) if valid == '@': for ch in [chr(i) for i in range(0,128)]: if ch.isalpha(): validList.append(ch) elif m is not None: # We have a range of ascii values if m.group('from').isdigit(): rangeFrom = int(m.group('from')) else: rangeFrom = 
ord(m.group('from')) if m.group('to').isdigit(): rangeTo = int(m.group('to')) else: rangeTo = ord(m.group('to')) validRange = list(range(rangeFrom, rangeTo+1)) # Restrict to ASCII validRange = [i for i in validRange if i < 128] for ch in [chr(i) for i in validRange]: validList.append(ch) elif fm is not None: # We have a range of ascii values: remove them! if fm.group('from').isdigit(): rangeFrom = int(fm.group('from')) else: rangeFrom = ord(fm.group('from')) if fm.group('to').isdigit(): rangeTo = int(fm.group('to')) else: rangeTo = ord(fm.group('to')) validRange = range(rangeFrom, rangeTo+1) for ch in [chr(i) for i in validRange]: for i in range(validList.count(ch)): validList.remove(ch) elif len(valid) == 1: # Just a char if ord(valid) < 128: validList.append(valid) else: raise ValueError('Unrecognised iskeyword part: ' + valid) return validList def IsValidKeyword(keyword, iskeyword): for char in keyword: if not char in iskeyword: return False return True if __name__ == "__main__": import pprint test_obj = SetDict() # Should be able to add an item to the list pprint.pprint(test_obj) test_obj['MyIndex'].add('Hello') test_obj['SetList'] = ['This', 'Is', 'A', 'List'] test_obj['SetString'] = 'This is a string' # These should all be lists: pprint.pprint(test_obj)
sser General Public License version 2 as published by the # Free Software Foundation; # # openWNS is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################### import openwns import openwns.logger from openwns.pyconfig import attrsetter import openwns.interface class NeedsFilename(openwns.interface.Interface): @openwns.interface.abstractmethod def setFilename(self, filename): pass class MeasurementSource(object): def __init
__(self): object.__init__(self) self.observers = [] def addObserver(self, probeBus): self.observers.append(probeBus) return probeBus class ProbeBus(MeasurementSource): def __init__(self): MeasurementSource.__init__(self) def observe(self, probeBus): probeBus.addObserver(self) return probeBus class ProbeBusRegistry(object): def __init__(self): super(ProbeBusRegistry, self).__init__() self.measurementSource
s = {} self.logger = openwns.logger.Logger("WNS", "ProbeBusRegistry", True) def getMeasurementSource(self, probeBusID): if not self.measurementSources.has_key(probeBusID): self.measurementSources[probeBusID] = MeasurementSource() return self.measurementSources[probeBusID] def removeMeasurementSource(self, probeBusID): self.measurementSources.pop(probeBusID) def getMeasurementSources(self): return self.measurementSources class PassThroughProbeBus(ProbeBus): """ The PassThroughProbeBus always accepts and always forwards. """ nameInFactory = "PassThroughProbeBus" def __init__(self): ProbeBus.__init__(self) class SettlingTimeGuardProbeBus(ProbeBus): """ The SettlingTimeGuardProbeBus only accepts if the global settling time (transient phase) has elapsed""" nameInFactory = "SettlingTimeGuardProbeBus" def __init__(self, settlingTime): ProbeBus.__init__(self) self.settlingTime = settlingTime class LoggingProbeBus(ProbeBus): """ The LoggingProbeBus always accepts and logs the message to the logging subsystem. """ nameInFactory = "LoggingProbeBus" def __init__(self, probeName='', parentLogger=None): ProbeBus.__init__(self) if len(probeName) > 0: probeName = '.' + probeName self.logger = openwns.logger.Logger("WNS", "LoggingProbeBus"+probeName, True, parentLogger) class PythonProbeBus(ProbeBus): """ Use the PythonProbeBus to do all your probing work in python. 
Specify what to do in accepts, onMeasurement, output from within your configuration file.""" nameInFactory = "PythonProbeBus" def _dummyOnMeasurement(timestamp, value, reg): pass def _dummyOutput(): pass def __init__(self, acceptsFunction, onMeasurementFunction = _dummyOnMeasurement, outputFunction = _dummyOutput): ProbeBus.__init__(self) self.accepts = acceptsFunction self.onMeasurement = onMeasurementFunction self.output = outputFunction self.reportErrors = True class TimeWindowProbeBus(ProbeBus): """ Only accepts for a certain time window given by start and end time""" nameInFactory = "TimeWindowProbeBus" def __init__(self, start, end): ProbeBus.__init__(self) self.start = start self.end = end class TimeSeriesProbeBus(ProbeBus): """ The LogEval ProbeBus always accepts and logs the values into a file. """ nameInFactory = "TimeSeriesProbeBus" outputFilename = None format = None timePrecision = None valuePrecision = None name = None description = None contextKeys = None def __init__(self, outputFilename, format, timePrecision, valuePrecision, name, desc, contextKeys): ProbeBus.__init__(self) self.outputFilename = outputFilename self.format = format self.timePrecision = timePrecision self.valuePrecision = valuePrecision self.name = name self.description = desc self.contextKeys = contextKeys class ContextFilterProbeBus(ProbeBus): nameInFactory = "ContextFilterProbeBus" idName = None idValues = None def __init__(self, _idName, _idValues, _outputName = None): ProbeBus.__init__(self) self.idName = _idName self.idValues = _idValues class ConstantContextProvider(object): __plugin__ = "wns.ProbeBus.ConstantContextProvider" """ Name in the static factory """ key = None """ The name of the context """ value = None """ A constant integer value """ def __init__(self, key, value): super(ConstantContextProvider, self).__init__() self.key = key self.value = value class StatEvalProbeBus(ProbeBus): nameInFactory = "StatEvalProbeBus" statEval = None appendFlag = None def 
__init__(self, outputFilename, statEvalConfig): ProbeBus.__init__(self) self.outputFilename = outputFilename self.statEval = statEvalConfig if (statEvalConfig.appendFlag == None): self.appendFlag = False else: self.appendFlag = statEvalConfig.appendFlag class TabPar: """ Helper Class to configure the TableProbeBus. Configure one of these for each dimension of your table. Parameters: idName: the name in the IDregistry/Context under which the value for this axis should be searched minimum: min value of the axis maximum: max value of the axis resolution: number of equidistant intervals into which the range from min to max will be divided. Note that the maximum value will be counted into the last interval """ idName = None minimum = None maximum = None resolution = None def __init__(self, idName, minimum, maximum, resolution): self.idName = idName self.minimum = minimum self.maximum = maximum self.resolution = resolution class TableProbeBus(ProbeBus): """ The TableProbeBus consumes measurement values and sorts them into n-dimensional tables of statistical evaluation objects. 
Parameters: axisParams: list of TabPar objecst, one for each dimension of the desired table outputFilename: base name of the output files produced by the TableProbeBus evals: list of strings with the requested statistics, possible values are: 'mean', 'variance', 'relativeVariance', 'coeffOfVariation', 'M2', 'M3', 'Z3', 'skewness', 'deviation', 'relativeDeviation', 'trials', 'min', 'max' formats: list of strings with the requested output formats, possible values are: 'HumanReadable', 'PythonReadable', 'MatlabReadable', 'MatlabReadableSparse' """ nameInFactory = "TableProbeBus" axisParams = None outputFilename = None evals = None formats = None def __init__(self, axisParams, outputFilename, evals = ['mean'], formats = ['HumanReadable']): ProbeBus.__init__(self) self.axisParams = axisParams self.outputFilename = outputFilename self.evals = list(set(evals)) # filter out potential duplicates self.formats = list(set(formats)) # filter out potential duplicates class TextProbeBus(ProbeBus): """ Wrapper for a ProbeText StatEval """ nameInFactory = "TextProbeBus" k
Also the histogram. If 2 it also plots the linear correlation between the median and the std_dev. full_output : {False,True}, optional If true it returns mean, std_dev, median, if false just the mean. Returns ------- If full_out is true it returns the sum, mean, std_dev, median. If false only the mean. """ if arr.ndim == 2: if xy is not None: x, y = xy
circle = get_circle(arr, radius, output_values=True, cy=y, cx=x) else: circle = get_circle(arr, radius, output_values=True) maxi = circle.max() mean = circle.mean() std_dev = circle.std
() median = np.median(circle) if arr.ndim == 3: n = arr.shape[0] mean = np.empty(n) std_dev = np.empty(n) median = np.empty(n) maxi = np.empty(n) values_circle = [] for i in range(n): if xy is not None: x, y = xy circle = get_circle(arr[i], radius, output_values=True, cy=y, cx=x) else: circle = get_circle(arr[i], radius, output_values=True) values_circle.append(circle) maxi[i] = circle.max() mean[i] = circle.mean() std_dev[i] = circle.std() median[i] = np.median(circle) values_circle = np.array(values_circle).flatten() if plot==1 or plot==2: plt.figure('Image crop (first slice)', figsize=(12,3)) if xy is not None: x, y = xy temp = get_circle(arr[0], radius, cy=y, cx=x) else: temp = get_circle(arr[0], radius) ax1 = plt.subplot(1, 2, 1) ax1.imshow(arr[0], origin = 'lower', interpolation="nearest", cmap = plt.get_cmap('gray'), alpha=0.8) ax1.imshow(temp, origin = 'lower', interpolation="nearest", cmap = plt.get_cmap('CMRmap'), alpha=0.6) plt.axis('on') ax2 = plt.subplot(1, 2, 2) ax2.hist(values_circle, bins=int(np.sqrt(values_circle.shape[0])), alpha=0.5, histtype='stepfilled', label='Histogram') ax2.legend() ax2.tick_params(axis='x', labelsize=8) fig = plt.figure('Stats in annulus', figsize=(12, 6)) fig.subplots_adjust(hspace=0.15) ax1 = plt.subplot(3, 1, 1) std_of_means = np.std(mean) median_of_means = np.median(mean) lab = 'mean (median={:.1f}, stddev={:.1f})'.format(median_of_means, std_of_means) ax1.axhline(median_of_means, alpha=0.5, color='gray', lw=2, ls='--') ax1.plot(mean, '.-', label=lab, lw = 0.8, alpha=0.6, marker='o', color='b') ax1.legend(loc='best', fancybox=True).get_frame().set_alpha(0.5) ax1.grid(True) plt.setp(ax1.get_xticklabels(), visible=False) ax2 = plt.subplot(3, 1, 2, sharex=ax1) ax2.plot(std_dev, '.-', label='std_dev', lw = 0.8, alpha=0.6, marker='o', color='r') ax2.legend(loc='best', fancybox=True).get_frame().set_alpha(0.5) ax2.grid(True) plt.setp(ax2.get_xticklabels(), visible=False) ax3 = plt.subplot(3, 1, 3, sharex=ax1) ax3.plot(maxi, 
'.-', label='max', lw=0.8, alpha=0.6, marker='o', color='g') ax3.legend(loc='best', fancybox=True).get_frame().set_alpha(0.5) ax3.grid(True) if plot==2: plt.figure('Std_dev - mean in annulus', figsize=(4, 4)) plt.scatter(std_dev, mean, alpha=0.6) m, b = np.polyfit(std_dev, mean, 1) corr, _ = scipy.stats.pearsonr(mean, std_dev) plt.plot(std_dev, m*std_dev + b, '-', label=corr, alpha=0.6) plt.xlabel('Mean') plt.ylabel('Standard deviation') plt.legend() if full_output: return mean, std_dev, median, maxi else: return mean def cube_stats_annulus(array, inner_radius, size, plot=None, full_out=False): """Calculates statistics in a centered annulus on a 2D or 3D array and plots the variation of the mean, median and standard deviation as a functions of time. Parameters ---------- array : array_like Input array. inner_radius : int Annulus inner radius. size : int How many pixels in radial direction contains the annulus. plot : None,1,2, optional If 1 or True it plots the mean, std_dev and max. Also the histogram. If 2 it also plots the linear correlation between the median and the std_dev. full_out : {False,True}, optional If true it returns mean, std_dev, median, if false just the mean. Returns ------- If full_out is true it returns mean, std_dev, median, if false only the mean. 
""" if array.ndim==2: arr = array.copy() annulus = get_annulus(arr, inner_radius, size, output_values=True) mean = annulus.mean() std_dev = annulus.std() median = np.median(annulus) maxi = annulus.max() if array.ndim==3: n = array.shape[0] mean = np.empty(n) std_dev = np.empty(n) median = np.empty(n) maxi = np.empty(n) for i in range(n): arr = array[i].copy() annulus = get_annulus(arr, inner_radius, size, output_values=True) mean[i] = annulus.mean() std_dev[i] = annulus.std() median[i] = np.median(annulus) maxi[i] = annulus.max() if plot==1 or plot==2: plt.figure('Image crop (first slice)', figsize=(12,3)) temp = get_annulus_cube(array, inner_radius, size) ax1 = plt.subplot(1, 2, 1) ax1.imshow(array[0], origin = 'lower', interpolation="nearest", cmap = plt.get_cmap('gray'), alpha=0.8) ax1.imshow(temp[0], origin = 'lower', interpolation="nearest", cmap = plt.get_cmap('CMRmap'), alpha=0.6) plt.axis('on') ax2 = plt.subplot(1, 2, 2) values = temp[np.where(temp>0)] ax2.hist(values.ravel(), bins=int(np.sqrt(values.shape[0])), alpha=0.5, histtype='stepfilled', label='Histogram') ax2.legend() ax2.tick_params(axis='x', labelsize=8) fig = plt.figure('Stats in annulus', figsize=(12, 6)) fig.subplots_adjust(hspace=0.15) ax1 = plt.subplot(3, 1, 1) std_of_means = np.std(mean) median_of_means = np.median(mean) lab = 'mean (median={:.1f}, stddev={:.1f})'.format(median_of_means, std_of_means) ax1.axhline(median_of_means, alpha=0.5, color='gray', lw=2, ls='--') ax1.plot(mean, '.-', label=lab, lw = 0.8, alpha=0.6, marker='o', color='b') ax1.legend(loc='best', fancybox=True).get_frame().set_alpha(0.5) ax1.grid(True) plt.setp(ax1.get_xticklabels(), visible=False) ax2 = plt.subplot(3, 1, 2, sharex=ax1) ax2.plot(std_dev, '.-', label='std_dev', lw = 0.8, alpha=0.6, marker='o', color='r') ax2.legend(loc='best', fancybox=True).get_frame().set_alpha(0.5) ax2.grid(True) plt.setp(ax2.get_xticklabels(), visible=False) ax3 = plt.subplot(3, 1, 3, sharex=ax1) ax3.plot(maxi, '.-', label='max', 
lw=0.8, alpha=0.6, marker='o', color='g') ax3.legend(loc='best', fancybox=True).get_frame().set_alpha(0.5) ax3.grid(True)
#!/usr/bin/env python
from requests.exceptions import HTTPError
from datetime import datetime
import ystockquote
from m2x.client import M2XClient


def post_stock_price(symbol, apikey, devicename):
    '''
    Retrieve the stock price for the given ticker symbol ("T" for AT&T)
    and post it in the correct M2X data stream.
    '''
    api = M2XClient(key=apikey)

    # Locate the named device; if no exact-name match exists, create it.
    candidates = [dev for dev in api.devices(q=devicename) if dev.name == devicename]
    try:
        target = candidates[0]
    except IndexError:
        target = api.create_device(name=devicename,
                                   description="Stockreport Example Device",
                                   visibility="private")

    # Fetch the per-symbol stream; create and label it on first use.
    try:
        quote_stream = target.stream(symbol)
    except HTTPError:
        quote_stream = target.create_stream(symbol)
        target.update_stream(symbol, unit={'label': 'Dollars', 'symbol': '$'})

    # Timestamp the sample and push the current quote to the stream.
    sampled_at = datetime.now()
    price = ystockquote.get_price(symbol).encode('utf-8')
    quote_stream.add_value(price, sampled_at)
IsSo = text_type("".join(perluniprops.chars("IsSo")))
IsAlpha = text_type(
    "".join(perluniprops.chars("IsAlpha")) + "".join(VIRAMAS) + "".join(NUKTAS)
)
IsLower = text_type("".join(perluniprops.chars("IsLower")))

# Remove ASCII junk.
DEDUPLICATE_SPACE = r"\s+", r" "
ASCII_JUNK = r"[\000-\037]", r""

# Neurotic Perl heading space, multi-space and trailing space chomp.
# These regexes are kept for reference purposes and shouldn't be used!!
MID_STRIP = r" +", r" "  # Use DEDUPLICATE_SPACE instead.
LEFT_STRIP = r"^ ", r""  # Uses text.lstrip() instead.
RIGHT_STRIP = r" $", r""  # Uses text.rstrip() instead.

# Pad all "other" special characters not in IsAlnum.
PAD_NOT_ISALNUM = r"([^{}\s\.'\`\,\-])".format(IsAlnum), r" \1 "

# Splits all hyphens (regardless of circumstances), e.g.
# 'foo-bar' -> 'foo @-@ bar'
AGGRESSIVE_HYPHEN_SPLIT = (
    r"([{alphanum}])\-(?=[{alphanum}])".format(alphanum=IsAlnum),
    r"\1 @-@ ",
)

# Make multi-dots stay together.
# FIX(review): the substitution strings must be raw literals so that "\1"
# reaches re.sub as a group backreference; in a plain string "\1" is the
# control character \x01 and would be inserted verbatim into the output
# (compare AGGRESSIVE_HYPHEN_SPLIT above, which already uses r"\1 ...").
REPLACE_DOT_WITH_LITERALSTRING_1 = r"\.([\.]+)", r" DOTMULTI\1"
REPLACE_DOT_WITH_LITERALSTRING_2 = r"DOTMULTI\.([^\.])", r"DOTDOTMULTI \1"
REPLACE_DOT_WITH_LITERALSTRING_3 = r"DOTMULTI\.", "DOTDOTMULTI"

# Separate out "," except if within numbers (5,300)
# e.g.  A,B,C,D,E > A , B,C , D,E
# First application uses up B so rule can't see B,C
# two-step version here may create extra spaces but these are removed later
# will also space digit,letter or letter,digit forms (redundant with next section)
COMMA_SEPARATE_1 = r"([^{}])[,]".format(IsN), r"\1 , "
COMMA_SEPARATE_2 = r"[,]([^{}])".format(IsN), r" , \1"
COMMA_SEPARATE_3 = r"([{}])[,]$".format(IsN), r"\1 , "

# Attempt to get correct directional quotes.
DIRECTIONAL_QUOTE_1 = r"^``", r"`` "
DIRECTIONAL_QUOTE_2 = r'^"', r"`` "
DIRECTIONAL_QUOTE_3 = r"^`([^`])", r"` \1"
DIRECTIONAL_QUOTE_4 = r"^'", r"` "
DIRECTIONAL_QUOTE_5 = r'([ ([{<])"', r"\1 `` "
DIRECTIONAL_QUOTE_6 = r"([ ([{<])``", r"\1 `` "
DIRECTIONAL_QUOTE_7 = r"([ ([{<])`([^`])", r"\1 ` \2"
DIRECTIONAL_QUOTE_8 = r"([ ([{<])'", r"\1 ` "

# Replace ... with _ELLIPSIS_
REPLACE_ELLIPSIS = r"\.\.\.", r" _ELLIPSIS_ "
# Restore _ELLIPSIS_ with ...
RESTORE_ELLIPSIS = r"_ELLIPSIS_", r"\.\.\."

# Pad , with tailing space except if within numbers, e.g. 5,300
COMMA_1 = r"([^{numbers}])[,]([^{numbers}])".format(numbers=IsN), r"\1 , \2"
COMMA_2 = r"([{numbers}])[,]([^{numbers}])".format(numbers=IsN), r"\1 , \2"
COMMA_3 = r"([^{numbers}])[,]([{numbers}])".format(numbers=IsN), r"\1 , \2"

# Pad unicode symbols with spaces.
SYMBOLS = r"([;:@#\$%&{}{}])".format(IsSc, IsSo), r" \1 "

# Separate out intra-token slashes. PTB tokenization doesn't do this, so
# the tokens should be merged prior to parsing with a PTB-trained parser.
# e.g. "and/or" -> "and @/@ or"
# FIX(review): the replacement previously used Perl's "$1 ... $2" syntax,
# which Python's re.sub inserts literally; \1/\2 backreferences and a
# literal @/@ produce the documented "and/or" -> "and @/@ or" mapping.
INTRATOKEN_SLASHES = (
    r"([{alphanum}])\/([{alphanum}])".format(alphanum=IsAlnum),
    r"\1 @/@ \2",
)

# Splits final period at end of string.
FINAL_PERIOD = r"""([^.])([.])([\]\)}>"']*) ?$""", r"\1 \2\3"
# Pad all question marks and exclamation marks with spaces.
PAD_QUESTION_EXCLAMATION_MARK = r"([?!])", r" \1 "

# Handles parentheses, brackets and converts them to PTB symbols.
PAD_PARENTHESIS = r"([\]\[\(\){}<>])", r" \1 "
CONVERT_PARENTHESIS_1 = r"\(", "-LRB-"
CONVERT_PARENTHESIS_2 = r"\)", "-RRB-"
CONVERT_PARENTHESIS_3 = r"\[", "-LSB-"
CONVERT_PARENTHESIS_4 = r"\]", "-RSB-"
CONVERT_PARENTHESIS_5 = r"\{", "-LCB-"
CONVERT_PARENTHESIS_6 = r"\}", "-RCB-"

# Pads double dashes with spaces.
PAD_DOUBLE_DASHES = r"--", " -- "

# Adds spaces to start and end of string to simplify further regexps.
PAD_START_OF_STR = r"^", " "
PAD_END_OF_STR = r"$", " "

# Converts double quotes to two single quotes and pad with spaces.
CONVERT_DOUBLE_TO_SINGLE_QUOTES = r'"', " '' "
# Handles single quote in possessives or close-single-quote.
HANDLES_SINGLE_QUOTES = r"([^'])' ", r"\1 ' "
# Pad apostrophe in possessive or close-single-quote.
APOSTROPHE = r"([^'])'", r"\1 ' "

# Prepend space on contraction apostrophe.
CONTRACTION_1 = r"'([sSmMdD]) ", r" '\1 "
CONTRACTION_2 = r"'ll ", r" 'll "
CONTRACTION_3 = r"'re ", r" 're "
CONTRACTION_4 = r"'ve ", r" 've "
CONTRACTION_5 = r"n't ", r" n't "
CONTRACTION_6 = r"'LL ", r" 'LL "
CONTRACTION_7 = r"'RE ", r" 'RE "
CONTRACTION_8 = r"'VE ", r" 'VE "
CONTRACTION_9 = r"N'T ", r" N'T "

# Informal Contractions.
CONTRACTION_10 = r" ([Cc])annot ", r" \1an not "
CONTRACTION_11 = r" ([Dd])'ye ", r" \1' ye "
CONTRACTION_12 = r" ([Gg])imme ", r" \1im me "
CONTRACTION_13 = r" ([Gg])onna ", r" \1on na "
CONTRACTION_14 = r" ([Gg])otta ", r" \1ot ta "
CONTRACTION_15 = r" ([Ll])emme ", r" \1em me "
CONTRACTION_16 = r" ([Mm])ore'n ", r" \1ore 'n "
CONTRACTION_17 = r" '([Tt])is ", r" '\1 is " CONTRACTION_18 = r" '([Tt])was ", r" '\1 was " CONTRACTION_19 = r" ([Ww])anna ", r" \1an na " # Clean out extra spaces CLEAN_EXTRA_SPACE_1 = r" *", r" " CLEAN_EXTRA_SPACE_2 = r"^ *", r"" CLEAN_EXTRA_SPACE_3 = r" *$", r"" # Neurotic Perl regexes to escape special characters. ESCAPE_AMPERSAND = r"&"
, r"&amp;" ESCAPE_PIPE = r"\|", r"&#124;" ESCAPE_LEFT_ANGLE_BRACKET = r"<", r"&lt;" ESCAPE_RIGHT_ANGLE_BRACKET = r">", r"&gt;" ESCAPE_SINGLE_QUOTE = r"\'", r"&apos;" ESCAPE_DOUBLE_QUOTE = r"\"", r"&quot;" ESCAPE_LEFT_SQUARE_BRACKET = r"\[", r"&#91;" ESCAPE_RIGHT_SQUARE_BRACKET = r"]", r"&#93;" EN_SPECIFIC_1 = r"([^{alpha}])[']([^{alpha}])".format(alpha=IsAlpha), r"\1 ' \2" EN_SPECIFIC_2 = ( r"([^{alpha}{isn}])[']([{alpha}])".format(alpha=IsAlpha, isn=IsN), r"\1 ' \2", ) EN_SPECIFIC_3 = r"([{alpha}])[']([^{alpha}])".format(alpha=IsAlpha), r"\1 ' \2" EN_SPECIFIC_4 = r"([{alpha}])[']([{alpha}])".format(alpha=IsAlpha), r"\1 '\2" EN_SPECIFIC_5 = r"([{isn}])[']([s])".format(isn=IsN), r"\1 '\2" ENGLISH_SPECIFIC_APOSTROPHE = [ EN_SPECIFIC_1, EN_SPECIFIC_2, EN_SPECIFIC_3, EN_SPECIFIC_4, EN_SPECIFIC_5, ] FR_IT_SPECIFIC_1 = r"([^{alpha}])[']([^{alpha}])".format(alpha=IsAlpha), r"\1 ' \2" FR_IT_SPECIFIC_2 = r"([^{alpha}])[']([{alpha}])".format(alpha=IsAlpha), r"\1 ' \2" FR_IT_SPECIFIC_3 = r"([{alpha}])[']([^{alpha}])".format(alpha=IsAlpha), r"\1 ' \2" FR_IT_SPECIFIC_4 = r"([{alpha}])[']([{alpha}])".format(alpha=IsAlpha), r"\1' \2" FR_IT_SPECIFIC_APOSTROPHE = [ FR_IT_SPECIFIC_1, FR_IT_SPECIFIC_2, FR_IT_SPECIFIC_3, FR_IT_SPECIFIC_4, ] NON_SPECIFIC_APOSTROPHE = r"\'", " ' " TRAILING_DOT_APOSTROPHE = r"\.' ?$", " . 
' " BASIC_PROTECTED_PATTERN_1 = r"<\/?\S+\/?>" BASIC_PROTECTED_PATTERN_2 = r'<\S+( [a-zA-Z0-9]+\="?[^"]")+ ?\/?>' BASIC_PROTECTED_PATTERN_3 = r"<\S+( [a-zA-Z0-9]+\='?[^']')+ ?\/?>" BASIC_PROTECTED_PATTERN_4 = r"[\w\-\_\.]+\@([\w\-\_]+\.)+[a-zA-Z]{2,}" BASIC_PROTECTED_PATTERN_5 = r"(http[s]?|ftp):\/\/[^:\/\s]+(\/\w+)*\/[\w\-\.]+" MOSES_PENN_REGEXES_1 = [ DEDUPLICATE_SPACE, ASCII_JUNK, DIRECTIONAL_QUOTE_1, DIRECTIONAL_QUOTE_2, DIRECTIONAL_QUOTE_3, DIRECTIONAL_QUOTE_4, DIRECTIONAL_QUOTE_5, DIRECTIONAL_QUOTE_6, DIRECTIONAL_QUOTE_7, DIRECTIONAL_QUOTE_8, REPLACE_ELLIPSIS, COMMA_1, COMMA_2, COMMA_3, SYMBOLS, INTRATOKEN_SLASHES, FINAL_PERIOD, PAD_QUESTION_EXCLAMATION_MARK, PAD_PARENTHESIS, CONVERT_PARENTHESIS_1, CONVERT_PARENTHESIS_2, CONVERT_PARENTHESIS_3, CONVERT_PARENTHESIS_4, CONVERT_PARENTHESIS_5, CONVERT_PARENTHESIS_6, PAD_DOUBLE_DASHES, PAD_START_OF_STR, PAD_END_OF_STR, CONVERT_DOUBLE_TO_SINGLE_QUOTES, HANDLES_SINGLE_QUOTES, APOSTROPHE, CONTRACTION_1, C
import matplotlib.pyplot as plt
import tensorflow as tf

from softmax_del import Softmax
from lib import mytool

'''
gildong = MnistClassifier()
gildong.learn(3, 100)  # epoch, partial_size
gildong.evaluate()  # for all test data
gildong.classify_random_image()  # classify a randomly selected image
#gildong.show_errors()
'''

class MnistClassifier (Softmax):
    """Softmax classifier for the MNIST digit dataset.

    Wraps the project's Softmax model with MNIST-specific loading,
    training, single-image classification and test-set evaluation.
    """
    db = None                 # MNIST dataset (train/test splits), set by learn()
    learning_epoch = None     # number of training epochs, e.g. 15
    size_of_segment = None    # mini-batch size, e.g. 100

    def load_mnist(self):
        """Load the MNIST dataset via the project helper."""
        return mytool.load_mnist()

    def learn(self, epoch, partial):
        """Train for `epoch` epochs using mini-batches of `partial` images."""
        self.learning_epoch = epoch
        self.size_of_segment = partial
        self.db = self.load_mnist()
        super().learn(self.db, self.learning_epoch, self.size_of_segment)

    def get_number_of_segment(self):
        """Number of mini-batches per epoch."""
        return int(self.db.train.num_examples / self.size_of_segment)  # 55,000 / 100

    def get_next_segment(self):
        return self.db.train.next_batch(self.size_of_segment)

    def get_image(self, index):
        # Get one and predict; the slice keeps the leading batch dimension.
        image = self.db.test.images[index:index+1]
        return image

    def get_label(self, index):
        label = self.db.test.labels[index:index+1]
        return label

    def get_class(self, index):
        """Return the class index of the one-hot test label at `index`."""
        label = self.db.test.labels[index:index+1]
        # tf.argmax: tf.arg_max is a deprecated alias removed in newer TF;
        # classify() below already used tf.argmax, so this also makes the
        # class internally consistent.
        return self.sess.run(tf.argmax(label, 1))

    def classify(self, an_image):
        """Return the predicted class for a single (1, 784) image."""
        category = self.sess.run(tf.argmax(self.hypothesis, 1),
                                 feed_dict={self.X: an_image})
        return category

    def classify_random_image(self):
        """Pick a random test image, print its label and prediction, show it."""
        index = mytool.get_random_int(self.db.test.num_examples)
        image = self.get_image(index)
        label = self.get_class(index)
        category = self.classify(image)
        print('Label', label)
        print('Classified', category)
        self.show_image(image)

    def show_image(self, image):
        plt.imshow(image.reshape(28, 28), cmap='Greys', interpolation='nearest')
        plt.show()

    # Evaluate accuracy on the test data.
    def evaluate(self):
        # Test model
        is_correct = tf.equal(tf.argmax(self.hypothesis, 1), tf.argmax(self.Y, 1))
        # Calculate accuracy
        accuracy = tf.reduce_mean(tf.cast(is_correct, tf.float32))
        # Test the model using test sets
        result = accuracy.eval(session=self.sess,
                               feed_dict={self.X: self.db.test.images, self.Y: self.db.test.labels})
        print("Recognition rate :", result)
# -*- coding: utf-8 -*-
# tools, 5 juin 2017
import string
import math
import random
import argparse
from argparse import RawTextHelpFormatter

# One-letter pattern codes understood by RandomPassword.
CONSO_MAJ = u"C"
CONSO_MIN = u"c"
VOWEL_MAJ = u"V"
VOWEL_MIN = u"v"
LETTER_MAJ = u"L"
LETTER_MIN = u"l"
NUMBER = u"n"
PONCTION = u"P"
PONCTION_SMA = u"p"

# Pattern code -> the set of characters it may draw from.
VALID_CHARSET = {
    CONSO_MAJ: u"BCDFGHJKLMNPQRSTVWXZ",
    CONSO_MIN: u"BCDFGHJKLMNPQRSTVWXZ".lower(),
    VOWEL_MAJ: u"AEIOUY",
    VOWEL_MIN: u"AEIOUY".lower(),
    LETTER_MAJ: string.ascii_uppercase,
    LETTER_MIN: string.ascii_lowercase,
    PONCTION: string.punctuation,
    PONCTION_SMA: u"!\"#$%&'*+,-.:;=?@_",
    NUMBER: string.digits,
}


class RandomPassword():
    """Generate random passwords following a pattern of charset codes."""

    def _pattern_validation(self, pattern):
        """Raise LookupError if pattern contains an unknown charset code."""
        for char in pattern:
            if char not in VALID_CHARSET:
                raise LookupError()

    def _get_charset_len(self, pattern):
        """Return the size of the union of the charsets used by pattern.

        Consonant/vowel classes are not counted when the covering letter
        class is also present, to avoid double-counting characters.
        """
        charset_len = 0
        if CONSO_MIN in pattern and LETTER_MIN not in pattern:
            charset_len += len(VALID_CHARSET[CONSO_MIN])
        if CONSO_MAJ in pattern and LETTER_MAJ not in pattern:
            charset_len += len(VALID_CHARSET[CONSO_MAJ])
        if VOWEL_MIN in pattern and LETTER_MIN not in pattern:
            charset_len += len(VALID_CHARSET[VOWEL_MIN])
        if VOWEL_MAJ in pattern and LETTER_MAJ not in pattern:
            charset_len += len(VALID_CHARSET[VOWEL_MAJ])
        if LETTER_MIN in pattern:
            charset_len += len(VALID_CHARSET[LETTER_MIN])
        if LETTER_MAJ in pattern:
            charset_len += len(VALID_CHARSET[LETTER_MAJ])
        if NUMBER in pattern:
            charset_len += len(VALID_CHARSET[NUMBER])
        if PONCTION in pattern:
            charset_len += len(VALID_CHARSET[PONCTION])
        if PONCTION_SMA in pattern:
            charset_len += len(VALID_CHARSET[PONCTION_SMA])
        return charset_len

    def _get_password(self, pattern):
        """Build one password from pattern using a CSPRNG.

        Best-effort behaviour kept from the original: an invalid pattern is
        reported and the unknown codes are simply skipped.
        """
        try:
            self._pattern_validation(pattern)
        except LookupError:
            print(u"Le format n'est pas valide!")
        password = u''
        for char in pattern:
            # Unknown codes were already reported above; skip them here.
            if char in VALID_CHARSET:
                password += random.SystemRandom().choice(VALID_CHARSET[char])
        return password

    def get_passwords(self, pattern, iterations):
        """Return a list of `iterations` passwords built from pattern."""
        return [self._get_password(pattern) for _ in range(iterations)]

    def get_entropy(self, pattern):
        """Return the pattern's entropy in bits, rounded to the nearest int."""
        try:
            self._pattern_validation(pattern)
        except LookupError:
            print(u"Le format n'est pas valide!")
        charset_len = self._get_charset_len(pattern)
        pattern_len = len(pattern)
        # len * log2(charset) instead of log2(charset ** len): mathematically
        # identical, but math.pow overflows for long patterns.
        entropy = pattern_len * math.log(charset_len, 2) if pattern_len else 0.0
        return round(entropy)

    def write_list(self, filepath, liste):
        """Write every entry of liste to filepath.

        Bug fix: the original iterated over the builtin `list` instead of
        the `liste` parameter, raising TypeError at runtime.
        """
        with open(filepath, u'w') as f:
            for entry in liste:
                f.write(entry)


def main():
    """CLI entry point: parse arguments, print and optionally save passwords."""
    parser = argparse.ArgumentParser(description=u"Génération de mots de passe aléatoires.",
                                     formatter_class=RawTextHelpFormatter)
    parser.add_argument(u'pattern', help=u"format du mot de passe: \n \
%s pour les CONSONNES \n \
%s pour les consonnes \n \
%s pour les VOYELLES \n \
%s pour les voyelles \n \
%s pour les LETTRES \n \
%s pour les lettres \n \
%s pour les nombres \n \
%s pour la ponctuation étendue \n \
%s pour la ponctuation" % (CONSO_MAJ, CONSO_MIN, VOWEL_MAJ, VOWEL_MIN,
                           LETTER_MAJ, LETTER_MIN, NUMBER, PONCTION, PONCTION_SMA))
    parser.add_argument(u'-n', u'--number', type=int, default=1,
                        help=u"nombre de mdp à générer")
    parser.add_argument(u'-o', u'--output', type=argparse.FileType('w'),
                        help=u"fichier de sortie")
    parser.add_argument(u'-e', u'--entropy', action=u"store_true",
                        help=u"affiche l'entropie des mdp générés en bits")
    args = parser.parse_args()  # [u'Cvcvnnnn', u'-en', u'35', u'-o', u'pass.txt']

    rand_p = RandomPassword()

    # ENTROPY
    if(args.entropy):
        print(u"L'entropie est de %s bits." % rand_p.get_entropy(args.pattern))

    # LIST PASSWORDS
    passwords = rand_p.get_passwords(args.pattern, iterations=args.number)
    print(u"\nListe des mots de passe:")
    for entry in passwords:
        print(u" - %s" % entry)

    # WRITE PASSWORDS
    if(args.output):
        with args.output as f:
            for entry in passwords:
                f.write(u'%s\n' % entry)


# Guarded so the module can be imported (e.g. for tests) without
# triggering argument parsing.
if __name__ == "__main__":
    main()
nless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import sys import unittest from pathlib import Path from typing import List, Dict, Union, Optional from ..components.modelbase import ModelBase class TestModel1(ModelBase): _serialized_names = { 'prop_1': 'prop1', 'prop_2': 'prop 2',
'prop_3': '@@', } def __init__(self, prop_0: str, prop_1: Optional[str] = None, prop_2: Union[int, str, bool] = '', prop_3: 'TestModel1' = None, prop_4: O
ptional[Dict[str, 'TestModel1']] = None, prop_5: Optional[Union['TestModel1', List['TestModel1']]] = None, ): #print(locals()) super().__init__(locals()) class StructureModelBaseTestCase(unittest.TestCase): def test_handle_type_check_for_simple_builtin(self): self.assertEqual(TestModel1(prop_0='value 0').prop_0, 'value 0') with self.assertRaises(TypeError): TestModel1(prop_0=1) with self.assertRaises(TypeError): TestModel1(prop_0=None) with self.assertRaises(TypeError): TestModel1(prop_0=TestModel1(prop_0='value 0')) def test_handle_type_check_for_optional_builtin(self): self.assertEqual(TestModel1(prop_0='', prop_1='value 1').prop_1, 'value 1') self.assertEqual(TestModel1(prop_0='', prop_1=None).prop_1, None) with self.assertRaises(TypeError): TestModel1(prop_0='', prop_1=1) with self.assertRaises(TypeError): TestModel1(prop_0='', prop_1=TestModel1(prop_0='', prop_1='value 1')) def test_handle_type_check_for_union_builtin(self): self.assertEqual(TestModel1(prop_0='', prop_2='value 2').prop_2, 'value 2') self.assertEqual(TestModel1(prop_0='', prop_2=22).prop_2, 22) self.assertEqual(TestModel1(prop_0='', prop_2=True).prop_2, True) with self.assertRaises(TypeError): TestModel1(prop_0='', prop_2=None) with self.assertRaises(TypeError): TestModel1(prop_0='', prop_2=22.22) with self.assertRaises(TypeError): TestModel1(prop_0='', prop_2=TestModel1(prop_0='', prop_2='value 2')) def test_handle_type_check_for_class(self): val3 = TestModel1(prop_0='value 0') self.assertEqual(TestModel1(prop_0='', prop_3=val3).prop_3, val3) with self.assertRaises(TypeError): TestModel1(prop_0='', prop_3=1) with self.assertRaises(TypeError): TestModel1(prop_0='', prop_3='value 3') with self.assertRaises(TypeError): TestModel1(prop_0='', prop_3=[val3]) def test_handle_type_check_for_dict_class(self): val4 = TestModel1(prop_0='value 0') self.assertEqual(TestModel1(prop_0='', prop_4={'key 4': val4}).prop_4['key 4'], val4) with self.assertRaises(TypeError): TestModel1(prop_0='', prop_4=1) with 
self.assertRaises(TypeError): TestModel1(prop_0='', prop_4='value 4') with self.assertRaises(TypeError): TestModel1(prop_0='', prop_4=[val4]) with self.assertRaises(TypeError): TestModel1(prop_0='', prop_4={42: val4}) with self.assertRaises(TypeError): TestModel1(prop_0='', prop_4={'key 4': [val4]}) def test_handle_type_check_for_union_dict_class(self): val5 = TestModel1(prop_0='value 0') self.assertEqual(TestModel1(prop_0='', prop_5=val5).prop_5, val5) self.assertEqual(TestModel1(prop_0='', prop_5=[val5]).prop_5[0], val5) self.assertEqual(TestModel1(prop_0='', prop_5=None).prop_5, None) with self.assertRaises(TypeError): TestModel1(prop_0='', prop_5=1) with self.assertRaises(TypeError): TestModel1(prop_0='', prop_5='value 5') with self.assertRaises(TypeError): TestModel1(prop_0='', prop_5={'key 5': 'value 5'}) with self.assertRaises(TypeError): TestModel1(prop_0='', prop_5={42: val5}) with self.assertRaises(TypeError): TestModel1(prop_0='', prop_5={'key 5': [val5]}) def test_handle_from_to_dict_for_simple_builtin(self): struct0 = {'prop_0': 'value 0'} obj0 = TestModel1.from_dict(struct0) self.assertEqual(obj0.prop_0, 'value 0') self.assertDictEqual(obj0.to_dict(), struct0) with self.assertRaises(AttributeError): #TypeError: TestModel1.from_dict(None) with self.assertRaises(AttributeError): #TypeError: TestModel1.from_dict('') with self.assertRaises(TypeError): TestModel1.from_dict({}) with self.assertRaises(TypeError): TestModel1.from_dict({'prop0': 'value 0'}) def test_handle_from_to_dict_for_optional_builtin(self): struct11 = {'prop_0': '', 'prop1': 'value 1'} obj11 = TestModel1.from_dict(struct11) self.assertEqual(obj11.prop_1, struct11['prop1']) self.assertDictEqual(obj11.to_dict(), struct11) struct12 = {'prop_0': '', 'prop1': None} obj12 = TestModel1.from_dict(struct12) self.assertEqual(obj12.prop_1, None) self.assertDictEqual(obj12.to_dict(), {'prop_0': ''}) with self.assertRaises(TypeError): TestModel1.from_dict({'prop_0': '', 'prop 1': ''}) with 
self.assertRaises(TypeError): TestModel1.from_dict({'prop_0': '', 'prop1': 1}) def test_handle_from_to_dict_for_union_builtin(self): struct21 = {'prop_0': '', 'prop 2': 'value 2'} obj21 = TestModel1.from_dict(struct21) self.assertEqual(obj21.prop_2, struct21['prop 2']) self.assertDictEqual(obj21.to_dict(), struct21) struct22 = {'prop_0': '', 'prop 2': 22} obj22 = TestModel1.from_dict(struct22) self.assertEqual(obj22.prop_2, struct22['prop 2']) self.assertDictEqual(obj22.to_dict(), struct22) struct23 = {'prop_0': '', 'prop 2': True} obj23 = TestModel1.from_dict(struct23) self.assertEqual(obj23.prop_2, struct23['prop 2']) self.assertDictEqual(obj23.to_dict(), struct23) with self.assertRaises(TypeError): TestModel1.from_dict({'prop_0': 'ZZZ', 'prop 2': None}) with self.assertRaises(TypeError): TestModel1.from_dict({'prop_0': '', 'prop 2': 22.22}) def test_handle_from_to_dict_for_class(self): val3 = TestModel1(prop_0='value 0') struct31 = {'prop_0': '', '@@': val3.to_dict()} #{'prop_0': '', '@@': TestModel1(prop_0='value 0')} is also valid for from_dict, but this cannot happen when parsing for real obj31 = TestModel1.from_dict(struct31) self.assertEqual(obj31.prop_3, val3) self.assertDictEqual(obj31.to_dict(), struct31) with self.assertRaises(TypeError): TestModel1.from_dict({'prop_0': '', '@@': 'value 3'}) def test_handle_from_to_dict_for_dict_class(self): val4 = TestModel1(prop_0='value 0') struct41 = {'prop_0': '', 'prop_4': {'val 4': val4.to_dict()}} obj41 = TestModel1.from_dict(struct41) self.assertEqual(obj41.prop_4['val 4'], val4) self.assertDictEqual(obj41.to_dict(), struct41) with self.assertRaises(TypeError): TestModel1.from_dict({'prop_0': '', 'prop_4': {44: val4.to_dict()}}) def test_handle_from_to_dict_for_union_dict_class(self): val5 = TestModel1(prop_0='value 0') struct51 = {'prop_0': '', 'prop_5': val5.to_dict()} obj51 = TestModel1.from_dict(struct51) self.assertEqual(obj51.prop_5, val5) self.assertDictEqual(obj51.to_dict(), struct51) struct52 = 
{'prop_0': '', 'prop_5': [val5.to_dict()]} obj52 = TestModel1.from_dict(struct52) self.assertListEqual(obj52.prop_5, [val5]) self.assertDictEqual(obj52.to_dict(), struct52) with self.asser
# URL configuration for the charts app.
#
# NOTE(review): uses the pre-Django-1.8 patterns() helper together with a
# string view path ('charts.views.charts'); both were deprecated in 1.8/1.9
# and removed in 1.10 -- confirm the project's Django version before upgrading.
from django.conf.urls import patterns, url

urlpatterns = patterns('',
    # Site root -> the charts view; reversible under the name 'charts'.
    url(r'^$', 'charts.views.charts', name='charts'),
)
# Place a single outbound call through the Plivo REST API (Python 2 script).
import plivo, plivoxml

# NOTE(review): account credentials are hard-coded in source; move them to
# the environment or a config file before sharing or publishing this script.
auth_id = "MAODU4MTK1MDC0NTBMMM"
auth_token = "MWVkNWNlZWFlYjRmYmViNDBiZDAwNjA0NjA5OTQz"

p = plivo.RestAPI(auth_id, auth_token)

params = {
    'to': '14153163136',  # The phone number to which the call has to be placed
    'from' : '1111111111',  # The phone number to be used as the caller id
    'answer_url' : "http://morning-ocean-4669.herokuapp.com/speak/",  # The URL invoked by Plivo when the outbound call is answered
    'answer_method' : "GET",  # The method used to call the answer_url
    # Example for an asynchronous request:
    #'callback_url' : "http://morning-ocean-4669.herokuapp.com/callback/",  # The URL notified when the API response is available and to which the response is sent.
    #'callback_method' : "GET"  # The method used to notify the callback_url.
}

# Make an outbound call
response = p.make_call(params)
print str(response)
# Root URLconf for the project (legacy `patterns()` style, pre-Django 1.10).
from django.conf.urls import patterns, include, url

# Uncomment the next two lines to enable the admin:
from django.contrib import admin
# admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'ssssss.views.home', name='home'),
    # url(r'^ssssss/', include('ssssss.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),
    # Site root -> contactsync home view, reversible as 'home'.
    url(r'^$', 'contactsync.views.home', name='home'),
)
# coding=utf-8
#------------------------------------------------------------------------------#
import os
import time

import fuse
import errno

from .item import RelFuseItem
from .static_dir import StaticDirectory

#------------------------------------------------------------------------------#
class MountRoot(RelFuseItem):
    """Root node of the relfs FUSE mount.

    Exposes a hidden ".relfs" backstage directory plus one directory
    entry per registered repository.
    """
    # --------------------------------------------------------------------------
    def __init__(self):
        RelFuseItem.__init__(self)
        self._mount_time = time.time()
        self._relfs_dir = StaticDirectory()
        self._repos_backstage = self._relfs_dir.add("repos", StaticDirectory())
        self._repos = dict()
    # --------------------------------------------------------------------------
    def add_repo_root(self, name, item):
        """Register *item* as the root of the repository called *name*."""
        self._repos[name] = item
    # --------------------------------------------------------------------------
    def repos_backstage(self):
        """Backstage directory holding per-repository metadata."""
        return self._repos_backstage
    # --------------------------------------------------------------------------
    def find_item(self, split_path):
        """Resolve *split_path* (list of path components) to an item.

        Returns None when nothing matches.
        """
        if not split_path or split_path == ["."]:
            return self
        head, tail = split_path[0], split_path[1:]
        if head == ".relfs":
            return self._relfs_dir.find_item(tail)
        try:
            # A missing repository name (KeyError) simply means "not found".
            return self._repos[head].find_item(tail)
        except KeyError:
            return None
    # --------------------------------------------------------------------------
    def readdir(self, fh):
        """Yield the root directory's entries."""
        for fixed_entry in ("..", ".", ".relfs"):
            yield fixed_entry
        for repo_name in self._repos:
            yield repo_name
    # --------------------------------------------------------------------------
    def _modify_time(self):
        # The root itself never changes after mounting.
        return self._mount_time
    # --------------------------------------------------------------------------
    def access(self, mode):
        """Grant execute (directory search) access; defer the rest to the base."""
        if mode & os.X_OK:
            return 0
        return RelFuseItem.access(self, mode)
    # --------------------------------------------------------------------------
    def _get_mode(self):
        # Directory bit plus r-x for owner and group: S_IFDIR | 0o550.
        return 0o40550
#------------------------------------------------------------------------------#
import os


def Dir_toStdName(path):
    """Return *path* normalized to end with exactly one trailing "/".

    Bug fix: the original tested ``path[-1] == "//"`` (impossible — a
    single character can never equal a two-character string) and raised
    IndexError on an empty path.  ``str.endswith`` handles both cases.
    """
    if not path.endswith("/"):
        path = path + "/"
    return path


def Dir_getFiles(path):
    """Recursively list all files below *path*.

    Returns paths relative to *path*, using "/" as the separator.
    """
    path = Dir_toStdName(path)
    allfiles = []
    for f in os.listdir(path):
        abs_path = path + f
        if os.path.isdir(abs_path):
            # Prefix each nested result with the subdirectory name.
            allfiles.extend(f + '/' + i for i in Dir_getFiles(abs_path))
        else:
            allfiles.append(f)
    return allfiles


class Dir:
    """Thin convenience wrapper around a directory path."""

    def __init__(self, dir_name):
        # Normalized directory path, always "/"-terminated.
        self.m_dir = Dir_toStdName(dir_name)

    def listDir(self):
        """Immediate entries (files and subdirectories)."""
        return os.listdir(self.m_dir)

    def listFiles(self):
        """All files below the directory, recursively."""
        return Dir_getFiles(self.m_dir)


if __name__ == "__main__":
    d = Dir("../../")
    # print(...) works as a statement in Python 2 and a call in Python 3.
    print(d.listFiles())
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-06-04 13:35
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    """Auto-generated migration: AlterField operations on marketplace models.

    Only field options (help_text, blank/null) are touched, so this is a
    schema-metadata change that is safe to apply to existing data.
    """

    dependencies = [
        ('marketplace', '0011_auto_20170526_1215'),
    ]

    operations = [
        # externalpackage.project_page: required URL to the package's home.
        migrations.AlterField(
            model_name='externalpackage',
            name='project_page',
            field=models.URLField(help_text='URL to the project page of the package, for example to the PyPi location'),
        ),
        # packageresource.resource: free-form text body (markdown rendered).
        migrations.AlterField(
            model_name='packageresource',
            name='resource',
            field=models.TextField(help_text='Markdown allowed'),
        ),
        # packageresource.url: optional link (blank and null allowed).
        migrations.AlterField(
            model_name='packageresource',
            name='url',
            field=models.URLField(blank=True, help_text='URL to resource (optional)', null=True),
        ),
    ]
# -*- coding: utf-8 -*-
# © 2015 Grupo ESOC Ingeniería de Servicios, S.L.U. - Jairo Llopis
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from openerp import api, models


class WebsiteMenu(models.Model):
    _inherit = "website.menu"

    @api.multi
    def get_parents(self, revert=False, include_self=False):
        """Collect this menu item's chain of ancestors.

        :param bool revert:
            When ``True``, the chain is returned ordered from the topmost
            parent down to the child instead of child-to-parent.

        :param bool include_self:
            When ``True``, the current menu item itself is included as the
            first element of the (unreverted) chain.

        :return list:
            Menu items ordered from child to parent, or a reversed
            iterator over them when ``revert=True``.
        """
        node = self if include_self else self.parent_id
        chain = []
        # Walk up until the parent chain runs out (empty recordset).
        while node:
            chain.append(node)
            node = node.parent_id
        if revert:
            return reversed(chain)
        return chain
# Batch-delete Microsoft 365 groups through the Graph API client.
from examples import acquire_token_by_username_password
from office365.graph_client import GraphClient

client = GraphClient(acquire_token_by_username_password)
# Fetch at most one group per run (top(1)).
groups = client.groups.get().top(1).execute_query()
for cur_grp in groups:
    # Queues the delete request on the client...
    cur_grp.delete_object()
# ...and this submits every queued deletion in a single batch request.
client.execute_batch()
import logging
import os.path
import shutil
import sys
import tempfile

import fiona


def test_options(tmpdir=None):
    """Test that setting CPL_DEBUG=ON works.

    When pytest supplies *tmpdir*, the log file lives there and pytest owns
    cleanup; when called without it, we create (and remove) our own temp dir.

    Bug fix: the original only assigned ``tempdir`` in the ``tmpdir is None``
    branch but then evaluated ``if tempdir and tmpdir is None`` — a NameError
    whenever *tmpdir* was provided.  ``tempdir`` is now always initialized.
    """
    tempdir = None  # set only when we create our own directory below
    if tmpdir is None:
        tempdir = tempfile.mkdtemp()
        logfile = os.path.join(tempdir, 'example.log')
    else:
        logfile = str(tmpdir.join('example.log'))

    logger = logging.getLogger('Fiona')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(logfile)
    fh.setLevel(logging.DEBUG)
    logger.addHandler(fh)

    try:
        with fiona.drivers(CPL_DEBUG=True):
            c = fiona.open("docs/data/test_uk.shp")
            c.close()
        # Close the log file deterministically instead of leaking the handle.
        with open(logfile) as f:
            log = f.read()
        assert "Option CPL_DEBUG" in log
    finally:
        # Detach the handler so repeated runs don't duplicate log output,
        # and remove our own temp dir (pytest cleans up its tmpdir itself).
        logger.removeHandler(fh)
        fh.close()
        if tempdir is not None:
            shutil.rmtree(tempdir)
# #START_LICENSE########################################################### # # # This file is part of the Environment for Tree Exploration program # (ETE). http://etetoolkit.org # # ETE is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ETE is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY # or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public # License for more details. # # You should have received a copy of the GNU General Public License # along with ETE. If not, see <http://www.gnu.org/licenses/>. # # # ABOUT THE ETE PACKAGE # ===================== # # ETE is distributed under the GPL copyleft license (2008-2015). # # If you make use of ETE in published work, please cite: # # Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon. # ETE: a python Environment for Tree Exploration. Jaime BMC # Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24 # # Note that extra references to the specific methods implemented in # the toolkit may be available in the documentation. # # More info at http://etetoolkit.org. Contact: huerta@embl.de # # # #END_LICENSE############################################################# # Note that the use of "from x import *" is safe here. Modules include # the __all__ variable. from warnings import warn try: import numpy except I
mportError, e: numpy = None #warn("Clustering module could not be loaded. Is numpy installed?") #warn(e) from ncbi_taxonomy import * from coretype.tree import * from coretype.seqgroup import * from phylo.phylotree import * from evol.evoltree import * from webplugin.webapp import * from phyloxml import Phyloxml, PhyloxmlTree from
nexml import Nexml, NexmlTree from evol import EvolTree from coretype.arraytable import * from clustering.clustertree import * try: from phylomedb.phylomeDB3 import * except ImportError, e: pass #warn("MySQLdb module could not be loaded") #warn(e) try: from treeview.svg_colors import * from treeview.main import * from treeview.faces import * from treeview import faces from treeview import layouts except ImportError, e: #print e pass #warn("Treeview module could not be loaded") #warn(e) try: from version import __version__, __installid__ except ImportError: __version__ = 'dev' __installid__ = None
INARY)" assert np.array_equal(lpf.hk[0].array,a1) assert np.array_equal(lpf.hk[1].array,a1) # bin read write test fname = os.path.join(out_dir, 'test.bin') u2d.write_bin((10, 10), fname, u2d.array) a3 = u2d.load_bin((10, 10), fname, u2d.dtype)[1] assert np.array_equal(a3, a1) # ascii read write test fname = os.path.join(out_dir, 'text.dat') u2d.write_txt((10, 10), fname, u2d.array) a4 = u2d.load_txt((10, 10), fname, u2d.dtype, "(FREE)") assert np.array_equal(a1, a4) # fixed format read/write with touching numbers - yuck! data = np.arange(100).reshape(10, 10) u2d_arange = Util2d(ml, (10, 10), np.float32, data, "test") u2d_arange.write_txt((10, 10), fname, u2d_arange.array, python_format=[7, "{0:10.4E}"]) a4a = u2d.load_txt((10, 10), fname, np.float32, "(7E10.6)") assert np.array_equal(u2d_arange.array, a4a) # test view vs copy with .array a5 = u2d.array a5 += 1 assert not np.array_equal(a5, u2d.array) # Util2d.__mul__() overload new_2d = u2d * 2 assert np.array_equal(new_2d.array, u2d.array * 2) # test the cnstnt application u2d.cnstnt = 2.0 a6 = u2d.array assert not np.array_equal(a1, a6) u2d.write_txt((10, 10), fname, u2d.array) a7 = u2d.load_txt((10, 10), fname, u2d.dtype, "(FREE)") assert np.array_equal(u2d.array, a7) return def stress_util2d(ml, nlay, nrow, ncol): dis = flopy.modflow.ModflowDis(ml, nlay=nlay, nrow=nrow, ncol=ncol) hk = np.ones((nlay, nrow, ncol)) vk = np.ones((nlay, nrow, ncol)) + 1.0 # save hk up one dir from model_ws fnames = [] for i, h in enumerate(hk): fname = os.path.join(out_dir, "test_{0}.ref".format(i)) fnames.append(fname) np.savetxt(fname, h, fmt="%15.6e", delimiter='') vk[i] = i + 1. lpf = flopy.modflow.ModflowLpf(ml, hk=fnames, vka=vk) # util2d binary check ml.lpf.vka[0].format.binary = True # util3d cnstnt propogation test ml.lpf.vka.cnstnt = 2.0 ml.write_input() # check that binary is being respect - it can't get no respect! 
vka_1 = ml.lpf.vka[0] a = vka_1.array vka_1_2 = vka_1 * 2.0 assert np.array_equal(a * 2.0,vka_1_2.array) if ml.external_path is not None: files = os.listdir(os.path.join(ml.model_ws, ml.external_path)) else: files = os.listdir(ml.model_ws) print("\n\nexternal files: " + ','.join(files) + '\n\n') ml1 = flopy.modflow.Modflow.load(ml.namefile, model_ws=ml.model_ws, verbose=True, forgive=False) print("testing load") assert ml1.load_fail == False # check that both binary and cnstnt are being respected through # out the write and load process. assert np.array_equal(ml1.lpf.vka.array, vk * 2.0) assert np.array_equal(ml1.lpf.vka.array, ml.lpf.vka.array) assert np.array_equal(ml1.lpf.hk.array, hk) assert np.array_equal(ml1.lpf.hk.array, ml.lpf.hk.array) print("change model_ws") ml.model_ws = out_dir ml.write_input() if ml.external_path is not None: files = os.listdir(os.path.join(ml.model_ws, ml.external_path)) else: files = os.listdir(ml.model_ws) print("\n\nexternal files: " + ','.join(files) + '\n\n') ml1 = flopy.modflow.Modflow.load(ml.namefile, model_ws=ml.model_ws, verbose=True, forgive=False) print("testing load") assert ml1.load_fail == False assert np.array_equal(ml1.lpf.vka.array, vk * 2.0) assert np.array_equal(ml1.lpf.hk.array, hk) # more binary testing ml.lpf.vka[0]._array[0,0] *= 3.0 ml.write_input() ml1 = flopy.modflow.Modflow.load(ml.namefile, model_ws=ml.model_ws, verbose=True, forgive=False) assert np.array_equal(ml.lpf.vka.array,ml1.lpf.vka.array) assert np.array_equal(ml.lpf.hk.array,ml1.lpf.hk.array) def stress_util2d_for_joe_the_file_king(ml, nlay, nrow, ncol): dis = flopy.modflow.ModflowDis(ml, nlay=nlay, nrow=nrow, ncol=ncol) hk = np.ones((nlay, nrow, ncol)) vk = np.ones((nlay, nrow, ncol)) + 1.0 # save hk up one dir from model_ws fnames = [] for i, h in enumerate(hk): fname = os.path.join("test_{0}.ref".format(i)) fnames.append(fname) np.savetxt(fname, h, fmt="%15.6e", delimiter='') vk[i] = i + 1. 
lpf = flopy.modflow.ModflowLpf(ml, hk=fnames, vka=vk) ml.lpf.vka[0].format.binary = True ml.lpf.vka.cnstnt = 2.0 ml.write_input() assert np.array_equal(ml.lpf.hk.array,hk) assert np.array_equal(ml.lpf.vka.array,vk * 2.0) ml1 = flopy.modflow.Modflow.load(ml.namefile, model_ws=ml.model_ws, verbose=True, forgive=False) print("testing load") assert ml1.load_fail == False assert np.array_equal(ml1.lpf.vka.array, vk * 2.0) assert np.array_equal(ml1.lpf.hk.array, hk) assert np.array_equal(ml1.lpf.vka.array, ml.lpf.vka.array) assert np.array_equal(ml1.lpf.hk.array, ml.lpf.hk.array) # more binary testing ml.lpf.vka[0]._array[0,0] *= 3.0 ml.write_input() ml1 = flopy.modflow.Modflow.load(ml.namefile, model_ws=ml.model_ws, verbose=True, forgive=False) assert np.array_equal(ml.lpf.vka.array,ml1.lpf.vka.array) assert np.array_equal(ml.lpf.hk.array,ml1.lpf.hk.array) def test_util2d_external_free(): model_ws = os.path.join(out_dir, "extra_temp") if os.path.exists(model_ws): shutil.rmtree(model_ws) os.mkdir(model_ws) ml = flopy.modflow.Modflow(model_ws=model_ws)
stress_util2d(ml, 1, 1, 1) stress_util2d(ml, 10, 1, 1) stress_util2d(ml, 1, 10, 1) stress_util2d(ml,
1, 1, 10) stress_util2d(ml, 10, 10, 1) stress_util2d(ml, 1, 10, 10) stress_util2d(ml, 10, 1, 10) stress_util2d(ml, 10, 10, 10) def test_util2d_external_free_nomodelws(): model_ws = os.path.join(out_dir) if os.path.exists(model_ws): shutil.rmtree(model_ws) os.mkdir(model_ws) base_dir = os.getcwd() os.chdir(out_dir) ml = flopy.modflow.Modflow() stress_util2d_for_joe_the_file_king(ml, 1, 1, 1) stress_util2d_for_joe_the_file_king(ml, 10, 1, 1) stress_util2d_for_joe_the_file_king(ml, 1, 10, 1) stress_util2d_for_joe_the_file_king(ml, 1, 1, 10) stress_util2d_for_joe_the_file_king(ml, 10, 10, 1) stress_util2d_for_joe_the_file_king(ml, 1, 10, 10) stress_util2d_for_joe_the_file_king(ml, 10, 1, 10) stress_util2d_for_joe_the_file_king(ml, 10, 10, 10) os.chdir(base_dir) def test_util2d_external_free_path(): model_ws = os.path.join(out_dir, "extra_temp") if os.path.exists(model_ws): shutil.rmtree(model_ws) os.mkdir(model_ws) ext_path = "ref" if os.path.exists(ext_path): shutil.rmtree(ext_path) ml = flopy.modflow.Modflow(model_ws=model_ws, external_path=ext_path) stress_util2d(ml, 1, 1, 1) stress_util2d(ml, 10, 1, 1) stress_util2d(ml, 1, 10, 1) stress_util2d(ml, 1, 1, 10) stress_util2d(ml, 10, 10, 1) stress_util2d(ml, 1, 10, 10) stress_util2d(ml, 10, 1, 10) stress_util2d(ml, 10, 10, 10) def test_util2d_external_free_path_nomodelws(): model_ws = os.path.join(out_dir) if os.path.exists(model_ws): shutil.rmtree(model_ws) os.mkdir(model_ws) ext_path = "ref" base_dir = os.getcwd() os.chdir(out_dir) if os.path.exists(ext_path): shutil.rmtree(ext_path) ml = flopy.modflow.Modflow(external_path=ext_path) stress_util2d_for_joe_the_file_king(ml, 1, 1, 1) stress_util2d_for_joe_the_file_king(ml, 10, 1, 1) stress_util2d_for_joe_the_file_king(ml, 1, 10, 1) stress_util2d_for_joe_the_file_king(ml, 1, 1, 10) stress_util2d_for_joe_the_file_king(ml, 10, 10, 1) stress_util2d_for_joe_the_file_king(ml, 1, 10, 10) stress_util2d_for_joe_the_file_king(ml, 10, 1, 10)
from odoo import fields, models


class SaleOrderLine(models.Model):
    """Extend sale order lines with a link to a manufacturing BoM."""

    _inherit = 'sale.order.line'

    # Reference to an mrp.bom record.  Declared read-only, so it is
    # expected to be populated by code rather than by the user.
    # NOTE(review): the writer of this field is not visible in this file —
    # confirm against the module's business logic.
    bom_id = fields.Many2one(
        comodel_name='mrp.bom',
        readonly=True
    )
from __future__ import absolute_import, division, print_function, unicode_literals

from gratipay.elsewhere import PlatformOAuth2
from gratipay.elsewhere._extractors import any_key, key
from gratipay.elsewhere._paginators import query_param_paginator


class Google(PlatformOAuth2):
    """Google+ platform integration (OAuth2)."""

    # Platform attributes
    name = 'google'
    display_name = 'Google'
    account_url = 'https://plus.google.com/{user_id}'
    optional_user_name = True

    # Auth attributes
    auth_url = 'https://accounts.google.com/o/oauth2/auth'
    access_token_url = 'https://accounts.google.com/o/oauth2/token'
    oauth_default_scope = ['https://www.googleapis.com/auth/userinfo.email',
                           'https://www.googleapis.com/auth/plus.login']

    # API attributes
    api_format = 'json'
    api_paginator = query_param_paginator('pageToken',
                                          next='nextPageToken',
                                          page='items',
                                          total='totalItems')
    api_url = 'https://www.googleapis.com/plus/v1'
    api_user_info_path = '/people/{user_id}'
    api_user_self_info_path = '/people/me'
    api_friends_path = '/people/{user_id}/people/visible'
    api_friends_limited = True

    # User info extractors
    x_user_id = key('id')
    x_display_name = key('displayName')
    x_email = any_key(('emails', 0), clean=lambda d: d.get('value'))
    x_avatar_url = key('image', clean=lambda d: d.get('url'))

    def x_user_name(self, extracted, info, *default):
        """Derive the vanity user name from the profile URL, if present."""
        profile_url = info.get('url', '')
        vanity_prefix = 'https://plus.google.com/+'
        if profile_url.startswith(vanity_prefix):
            # Strip the prefix (25 characters) to keep only the vanity name.
            return profile_url[len(vanity_prefix):]
        return None
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# CGI endpoint: renders a demo plot with planetoplot and serves a minimal
# HTML page embedding it (Python 2 script — uses print statements).
import cgi, cgitb
import sys
# Make the bundled planetoplot modules importable.
sys.path.insert(0, "../planetoplot/modules")
import ppplot
import ppclass
########################################
# Build a trivial 1D demo plot (y = 3x).
import numpy as np
xx = np.arange(25)
yy = 3.*xx
fig = ppplot.figuref(x=8,y=6)
pl = ppplot.plot1d()
pl.fig = fig # have to send to figure
pl.f = yy
pl.x = xx
pl.make()
######################################## more sophisticated example
## RETRIEVE DATA
#from ppclass import pp
#fifi = "/home/aspiga/soft/mcd_python/minimal_server/cgi-bin/wrfout_d01_2024-10-04_06z00z00_zabg"
#ff,xx,yy,zz,tt = pp(file=fifi,var="HGT",z=0,t=0).getfd()
#xx = pp(file=fifi,var="XLONG",z=0,t=0).getf()
#yy = pp(file=fifi,var="XLAT",z=0,t=0).getf()
#uu = pp(file=fifi,var="Um",z=0,t=0).getf()
#vv = pp(file=fifi,var="Vm",z=0,t=0).getf()
#
## PLOT
#pl = ppplot.plot2d()
#pl.fig = fig # have to send to figure
#pl.f = ff
#pl.x = xx
#pl.y = yy
#pl.vx = uu
#pl.vy = vv
#pl.legend = "yorgl"
#pl.marker = None
#pl.nyticks = 20
#pl.ylabel = "YAARGL"
#pl.proj = "laea"
#pl.make()
########################################
# Render the figure to a PNG that the HTML below references.
ppplot.sendagg(fig,filename='webapp.png', dpi=150)
# for debugging in web browser
cgitb.enable()
## Create instance of FieldStorage
#form = cgi.FieldStorage()
##### NOW WRITE THE HTML PAGE TO USER
print "Content-type:text/html;charset=utf-8\n"
print #Apache needs a space after content-type
header="""<html><head><title>Mars Climate Database: The Web Interface</title></head><body>"""
print header
print "THIS IS A TEST!"
print "<img src='../webapp.png'><br />"
bottom = "</body></html>"
print bottom
from __future__ import absolute_import

from sentry.testutils import AcceptanceTestCase


class AuthTest(AcceptanceTestCase):
    """Browser (Selenium) acceptance tests for the login flow."""

    def enter_auth(self, username, password):
        """Fill the login form with *username*/*password* and submit it."""
        # disable captcha as it makes these tests flakey (and requires waiting
        # on external resources)
        with self.settings(RECAPTCHA_PUBLIC_KEY=None):
            self.browser.get('/auth/login/')
            self.browser.find_element_by_id('id_username').send_keys(username)
            self.browser.find_element_by_id('id_password').send_keys(password)
            self.browser.find_element_by_xpath("//button[contains(text(), 'Login')]").click()

    def test_renders(self):
        # The empty login page renders; snapshot for visual regression.
        self.browser.get('/auth/login/')
        self.browser.snapshot(name='login')

    def test_no_credentials(self):
        # Submitting an empty form surfaces "required" validation state.
        self.enter_auth('', '')
        self.browser.snapshot(name='login fields required')

    def test_invalid_credentials(self):
        # Wrong credentials surface the invalid-login state.
        self.enter_auth('bad-username', 'bad-username')
        self.browser.snapshot(name='login fields invalid')

    def test_success(self):
        # A user with a known password logs in successfully.
        email = 'dummy@example.com'
        password = 'dummy'
        user = self.create_user(email=email)
        user.set_password(password)
        user.save()
        self.enter_auth(email, password)
        self.browser.snapshot(name='login success')
# -*- coding: utf-8 -*-

from pyload.plugin.internal.DeadHoster import DeadHoster


class BayfilesCom(DeadHoster):
    """Placeholder plugin for the defunct Bayfiles.com service.

    DeadHoster marks any matching link as permanently offline; no
    download logic remains here.
    """
    __name = "BayfilesCom"
    __type = "hoster"
    __version = "0.09"

    # Matches bayfiles.com / bayfiles.net file links, capturing the
    # three-part file identifier as the named group "ID".
    __pattern = r'https?://(?:www\.)?bayfiles\.(com|net)/file/(?P<ID>\w+/\w+/[^/]+)'
    __config = [] #@TODO: Remove in 0.4.10

    __description = """Bayfiles.com hoster plugin"""
    __license = "GPLv3"
    __authors = [("Walter Purcaro", "vuolter@gmail.com")]
""" sentry.utils.auth ~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import import six import logging from django.conf import settings from django.contrib.auth import login as _login from django.contrib.auth.backends import ModelBackend from django.core.urlresolvers import reverse, resolve from time import time from sentry.models import User, Authenticator logger = logging.getLogger('sentry.auth') _LOGIN_URL = None class AuthUserPasswordExpired(Exception): def __init__(self, user): self.user = user def _make_key_value(val): return val.strip().split('=', 1) def parse_auth_header(header): try: return dict(map(_make_key_value, header.split(' ', 1)[1].split(','))) except Exception: return {} def get_auth_providers(): return [ key for key, cfg_names in six.iteritems(settings.AUTH_PROVIDERS) if all(getattr(settings, c, None) for c in cfg_names) ] def get_pending_2fa_user(request): rv = request.session.get('_pending_2fa') if rv is None: return user_id, created_at = rv if created_at < time() - 60 * 5: return None try: return User.objects.get(pk=user_id) except User.DoesNotExist: pass def has_pending_2fa(request): return request.session.get('_pending_2fa') is not None def get_login_url(reset=False): global _LOGIN_URL if _LOGIN_URL is None or reset: # if LOGIN_URL resolves force login_required to it instead of our own # XXX: this must be done as late as possible to avoid idempotent requirements try: resolve(settings.LOGIN_URL) except Exception: _LOGIN_URL = settings.SENTRY_LOGIN_URL else: _LOGIN_URL = settings.LOGIN_URL if _LOGIN_URL is None: _LOGIN_URL = reverse('sentry-login') return _LOGIN_URL def initiate_login(r
equest, next_url=None): try: del request.session['_after_2fa'] except KeyError: pass try: del request.session['_pending_2fa'] except KeyError: pass if next_url: request.session['_next'] = next_url else: try: del request.session['_next'] except KeyError: pass def get_login
_redirect(request, default=None): if default is None: default = get_login_url() # If there is a pending 2fa authentication bound to the session then # we need to go to the 2fa dialog. if has_pending_2fa(request): return reverse('sentry-2fa-dialog') # If we have a different URL to go after the 2fa flow we want to go to # that now here. after_2fa = request.session.pop('_after_2fa', None) if after_2fa is not None: return after_2fa login_url = request.session.pop('_next', None) or default if login_url.startswith(('http://', 'https://')): login_url = default elif login_url.startswith(get_login_url()): login_url = default return login_url def find_users(username, with_valid_password=True, is_active=None): """ Return a list of users that match a username and falling back to email """ qs = User.objects if is_active is not None: qs = qs.filter(is_active=is_active) if with_valid_password: qs = qs.exclude(password='!') try: # First, assume username is an iexact match for username user = qs.get(username__iexact=username) return [user] except User.DoesNotExist: # If not, we can take a stab at guessing it's an email address if '@' in username: # email isn't guaranteed unique return list(qs.filter(email__iexact=username)) return [] def login(request, user, passed_2fa=False, after_2fa=None): """This logs a user in for the sesion and current request. If 2FA is enabled this method will start the 2FA flow and return False, otherwise it will return True. If `passed_2fa` is set to `True` then the 2FA flow is set to be finalized (user passed the flow). Optionally `after_2fa` can be set to a URL which will be used to override the regular session redirect target directly after the 2fa flow. 
""" has_2fa = Authenticator.objects.user_has_2fa(user) if has_2fa and not passed_2fa: request.session['_pending_2fa'] = [user.id, time()] if after_2fa is not None: request.session['_after_2fa'] = after_2fa return False request.session.pop('_pending_2fa', None) # Check for expired passwords here after we cleared the 2fa flow. # While this means that users will have to pass 2fa before they can # figure out that their passwords are expired this is still the more # reasonable behavior. # # We also rememebr _after_2fa here so that we can continue the flow if # someone does it in the same browser. if user.is_password_expired: raise AuthUserPasswordExpired(user) # If there is no authentication backend, just attach the first # one and hope it goes through. This apparently is a thing we # have been doing for a long time, just moved it to a more # reasonable place. if not hasattr(user, 'backend'): user.backend = settings.AUTHENTICATION_BACKENDS[0] _login(request, user) log_auth_success(request, user.username) return True def log_auth_success(request, username): logger.info('user.auth.success', extra={ 'ip_address': request.META['REMOTE_ADDR'], 'username': username, }) def log_auth_failure(request, username=None): logger.info('user.auth.fail', extra={ 'ip_address': request.META['REMOTE_ADDR'], 'username': username, }) class EmailAuthBackend(ModelBackend): """ Authenticate against django.contrib.auth.models.User. Supports authenticating via an email address or a username. """ def authenticate(self, username=None, password=None): users = find_users(username) if users: for user in users: try: if user.password and user.check_password(password): return user except ValueError: continue return None
# -*- coding: utf-8 -*-

from time import time

from module.network.RequestFactory import getURL
from module.plugins.Hook import Hook


class AndroidPhoneNotify(Hook):
    """Push pyLoad events to an Android phone via notifymyandroid.com."""
    __name__ = "AndroidPhoneNotify"
    __type__ = "hook"
    __version__ = "0.04"

    __config__ = [("apikey" , "str" , "API key" , "" ),
                  ("notifycaptcha" , "bool", "Notify captcha request" , True ),
                  ("notifypackage" , "bool", "Notify package finished" , True ),
                  ("notifyprocessed", "bool", "Notify processed packages status" , True ),
                  ("timeout" , "int" , "Timeout between captchas in seconds" , 5 ),
                  ("force" , "bool", "Send notifications if client is connected", False)]

    __description__ = """Send push notifications to your Android Phone using notifymyandroid.com"""
    __license__ = "GPLv3"
    __authors__ = [("Steven Kosyra", "steven.kosyra@gmail.com"),
                   ("Walter Purcaro", "vuolter@gmail.com")]


    event_list = ["allDownloadsProcessed"]


    def initPeriodical(self):  #@TODO: Remove in 0.4.10
        pass


    def setup(self):
        self.info = {}  #@TODO: Remove in 0.4.10
        # Timestamp of the last notification sent, used to rate-limit
        # captcha notifications (see "timeout" config option).
        self.last_notify = 0


    def newCaptchaTask(self, task):
        """Notify the phone when a captcha needs user input (rate-limited)."""
        if not self.getConfig("notifycaptcha"):
            return False

        # NOTE(review): this uses getConf while every other accessor here is
        # getConfig — confirm getConf is a valid alias in this pyLoad version.
        if time() - self.last_notify < self.getConf("timeout"):
            return False

        self.notify(_("Captcha"), _("New request waiting user input"))


    def packageFinished(self, pypack):
        """Notify when a single package finishes downloading."""
        if self.getConfig("notifypackage"):
            self.notify(_("Package finished"), pypack.name)


    def allDownloadsProcessed(self):
        """Summarize the queue outcome once every download was processed."""
        if not self.getConfig("notifyprocessed"):
            return False

        # A package with fewer done links than total links did not complete.
        if any(True for pdata in self.core.api.getQueue() if pdata.linksdone < pdata.linkstotal):
            self.notify(_("Package failed"), _("One or more packages was not completed successfully"))
        else:
            self.notify(_("All packages finished"))


    def notify(self, event, msg=""):
        """Send one push notification; silently skipped without an API key
        or when a client is connected and "force" is off."""
        apikey = self.getConfig("apikey")

        if not apikey:
            return False

        if self.core.isClientConnected() and not self.getConfig("force"):
            return False

        getURL("http://www.notifymyandroid.com/publicapi/notify",
               get={'apikey' : apikey,
                    'application': "pyLoad",
                    'event' : event,
                    'description': msg})

        self.last_notify = time()
# Package initializer: re-export the public API of every funcy submodule.
import sys
from .calc import *
from .colls import *
from .tree import *
from .decorators import *
from .funcolls import *
from .funcs import *
from .seqs import *
from .types import *
from .strings import *
from .flow import *
from .objects import *
from .namespaces import namespace
from .debug import *
from .primitives import *


# Setup __all__
# Aggregate each submodule's __all__ into the package-level __all__.
# `cat` comes from one of the star imports above (presumably seqs) —
# NOTE(review): confirm which submodule exports it.
modules = ('calc', 'colls', 'tree', 'decorators', 'funcolls', 'funcs', 'seqs',
           'types', 'strings', 'flow', 'objects', 'namespaces', 'debug', 'primitives')
__all__ = cat(sys.modules['funcy.' + m].__all__ for m in modules)


# Python 2 style zip() for Python 3
# On Python 3, shadow the builtin lazy zip with an eager, list-returning
# version so funcy users get Python-2-style behavior; export it only there.
from .cross import PY3
if PY3:
    _zip = zip

    def zip(*seqs):
        return list(_zip(*seqs))

    __all__.append('zip')
else:
    # On Python 2 the builtin already returns a list; keep it as-is.
    zip = zip
# NOTE(review): this chunk starts mid-file — the text below is the tail of a
# decorator line (`@extends('smsg_npc_message')`) whose beginning is outside
# this chunk. Python 2 module (print statements); relies on module globals
# (npc_owner, mapserv, plugins, status, admins, history, ...) defined above.
pc_message')
def npc_message(data):
    # Forward NPC dialogue to the controlling player.
    if not npc_owner:
        return
    npc = mapserv.beings_cache.findName(data.id)
    m = '[npc] {} : {}'.format(npc, data.message)
    whisper(npc_owner, m)


@extends('smsg_npc_choice')
def npc_choice(data):
    # Present an NPC multiple-choice menu to the controlling player.
    if not npc_owner:
        return
    choices = filter(lambda s: len(s.strip()) > 0, data.select.split(':'))
    whisper(npc_owner, '[npc][select] (use !input <number> to select)')
    for i, s in enumerate(choices):
        whisper(npc_owner, ' {}) {}'.format(i + 1, s))


@extends('smsg_npc_int_input')
@extends('smsg_npc_str_input')
def npc_input(data):
    # Prompt the controlling player for NPC numeric/string input.
    if not npc_owner:
        return
    t = 'number'
    if plugins.npc.input_type == 'str':
        t = 'string'
    whisper(npc_owner, '[npc][input] (use !input <{}>)'.format(t))


@extends('smsg_storage_status')
def storage_status(data):
    # Server opened the storage window: remember state and timestamp.
    print 'storage_status'
    global storage_is_open
    storage_is_open = True
    _times['storage'] = time.time()
    if npc_owner:
        whisper(npc_owner, '[storage]')


@extends('smsg_storage_items')
@extends('smsg_storage_equip')
def storage_items(data):
    # Dump the storage inventory listing to the controlling player.
    if not npc_owner:
        return
    ls = status.invlists2(max_length=255, source='storage')
    for l in ls:
        whisper(npc_owner, l)


@extends('smsg_storage_close')
def storage_close(data):
    # Server closed the storage window: reset state.
    print 'smsg_storage_close'
    global storage_is_open
    storage_is_open = False
    _times['storage'] = 0


def cmd_where(nick, message, is_whisper, match):
    # !where — report the bot's current map position.
    if not is_whisper:
        return
    msg = status.player_position()
    whisper(nick, msg)


def cmd_goto(nick, message, is_whisper, match):
    # !goto <x> <y> — walk to absolute coordinates.
    if not is_whisper:
        return
    try:
        x = int(match.group(1))
        y = int(match.group(2))
    except ValueError:
        return
    set_npc_owner(nick)
    plugins.autofollow.follow = ''
    mapserv.cmsg_player_change_dest(x, y)


def cmd_goclose(nick, message, is_whisper, match):
    # !left/!right/!up/!down — step one tile in the given direction.
    if not is_whisper:
        return
    x = mapserv.player_pos['x']
    y = mapserv.player_pos['y']
    if message.startswith('!left'):
        x -= 1
    elif message.startswith('!right'):
        x += 1
    elif message.startswith('!up'):
        y -= 1
    elif message.startswith('!down'):
        y += 1
    set_npc_owner(nick)
    plugins.autofollow.follow = ''
    mapserv.cmsg_player_change_dest(x, y)


def cmd_pickup(nick, message, is_whisper, match):
    # !pickup — pick up nearby floor items.
    if not is_whisper:
        return
    commands.pickup()


def cmd_drop(nick, message, is_whisper, match):
    # !drop <amount> <item_id> — drop items; non-admins limited to allowed_drops.
    if not is_whisper:
        return
    try:
        amount = int(match.group(1))
        item_id = int(match.group(2))
    except ValueError:
        return
    if nick not in admins:
        if item_id not in allowed_drops:
            return
    index = get_item_index(item_id)
    if index > 0:
        mapserv.cmsg_player_inventory_drop(index, amount)


def cmd_item_action(nick, message, is_whisper, match):
    # !equip/!unequip/!use <item_id> — act on an inventory item.
    if not is_whisper:
        return
    try:
        itemId = int(match.group(1))
    except ValueError:
        return
    index = get_item_index(itemId)
    if index <= 0:
        return
    if message.startswith('!equip'):
        mapserv.cmsg_player_equip(index)
    elif message.startswith('!unequip'):
        mapserv.cmsg_player_unequip(index)
    elif message.startswith('!use'):
        mapserv.cmsg_player_inventory_use(index, itemId)


def cmd_emote(nick, message, is_whisper, match):
    # !emote <id> — play an emote.
    if not is_whisper:
        return
    try:
        emote = int(match.group(1))
    except ValueError:
        return
    mapserv.cmsg_player_emote(emote)


def cmd_attack(nick, message, is_whisper, match):
    # !attack <id|name> — walk to and attack a being (by id, else nearest by name).
    if not is_whisper:
        return
    target_s = match.group(1)
    try:
        target = mapserv.beings_cache[int(target_s)]
    except (ValueError, KeyError):
        target = find_nearest_being(name=target_s,
                                    ignored_ids=walkto.unreachable_ids)
    if target is not None:
        set_npc_owner(nick)
        plugins.autofollow.follow = ''
        walkto.walkto_and_action(target, 'attack')


def cmd_say(nick, message, is_whisper, match):
    # !say <msg> — echo a message back to the requester.
    if not is_whisper:
        return
    msg = match.group(1)
    whisper(nick, msg)


def cmd_sit(nick, message, is_whisper, match):
    # !sit — stop following and sit down.
    if not is_whisper:
        return
    plugins.autofollow.follow = ''
    mapserv.cmsg_player_change_act(0, 2)


def cmd_turn(nick, message, is_whisper, match):
    # !turn <dir> — face a direction (argument starts after "!turn ").
    if not is_whisper:
        return
    commands.set_direction('', message[6:])


def cmd_follow(nick, message, is_whisper, match):
    # !follow — toggle following the requester.
    if not is_whisper:
        return
    if plugins.autofollow.follow == nick:
        plugins.autofollow.follow = ''
    else:
        set_npc_owner(nick)
        plugins.autofollow.follow = nick


def cmd_lvlup(nick, message, is_whisper, match):
    # !lvlup <stat|skill> — spend a point on a stat or skill.
    # Numeric ids are the server's stat/skill opcodes.
    if not is_whisper:
        return
    stat = match.group(1).lower()
    stats = {'str': 13, 'agi': 14, 'vit': 15, 'int': 16, 'dex': 17, 'luk': 18}
    skills = {'mallard': 45, 'brawling': 350, 'speed': 352,
              'astral': 354, 'raging': 355, 'resist': 353}
    if stat in stats:
        mapserv.cmsg_stat_update_request(stats[stat], 1)
    elif stat in skills:
        mapserv.cmsg_skill_levelup_request(skills[stat])


def cmd_invlist(nick, message, is_whisper, match):
    # !invlist — short inventory listing.
    if not is_whisper:
        return
    ls = status.invlists(50)
    for l in ls:
        whisper(nick, l)


def cmd_inventory(nick, message, is_whisper, match):
    # !inventory — full inventory listing.
    if not is_whisper:
        return
    ls = status.invlists2(255)
    for l in ls:
        whisper(nick, l)


def cmd_status(nick, message, is_whisper, match):
    # !status — one-line summary of all character stats.
    if not is_whisper:
        return
    all_stats = ('stats', 'hpmp', 'weight', 'points', 'zeny', 'attack', 'skills')
    sr = status.stats_repr(*all_stats)
    whisper(nick, ' | '.join(sr.values()))


def cmd_zeny(nick, message, is_whisper, match):
    # !zeny — report current money.
    if not is_whisper:
        return
    whisper(nick, 'I have {} GP'.format(mapserv.player_money))


def cmd_talk2npc(nick, message, is_whisper, match):
    # !talk2npc <id|name> — start a conversation with the nearest matching NPC.
    if not is_whisper:
        return
    npc_s = match.group(1)
    jobs = []
    name = ''
    try:
        jobs = [int(npc_s)]
    except ValueError:
        name = npc_s
    b = find_nearest_being(name=name, type='npc', allowed_jobs=jobs)
    if b is None:
        return
    set_npc_owner(nick)
    plugins.autofollow.follow = ''
    plugins.npc.npc_id = b.id
    mapserv.cmsg_npc_talk(b.id)


def cmd_input(nick, message, is_whisper, match):
    # !input <value> — answer a pending NPC prompt.
    if not is_whisper:
        return
    plugins.npc.cmd_npcinput('', match.group(1))


def cmd_close(nick, message, is_whisper, match):
    # !close — close storage if open, otherwise end the NPC dialog.
    if not is_whisper:
        return
    if storage_is_open:
        reset_storage()
    else:
        plugins.npc.cmd_npcclose()


def cmd_history(nick, message, is_whisper, match):
    # !history — replay the recorded command history.
    if not is_whisper:
        return
    for user, cmd in history:
        whisper(nick, '{} : {}'.format(user, cmd))


def cmd_store(nick, message, is_whisper, match):
    # !store <amount> <item_id> — move items from inventory into storage.
    if not is_whisper:
        return
    if not storage_is_open:
        return
    try:
        amount = int(match.group(1))
        item_id = int(match.group(2))
    except ValueError:
        return
    index = get_item_index(item_id)
    if index > 0:
        mapserv.cmsg_move_to_storage(index, amount)


def cmd_retrieve(nick, message, is_whisper, match):
    # !retrieve <amount> <item_id> — move items from storage into inventory.
    if not is_whisper:
        return
    if not storage_is_open:
        return
    try:
        amount = int(match.group(1))
        item_id = int(match.group(2))
    except ValueError:
        return
    index = get_storage_index(item_id)
    if index > 0:
        mapserv.cmsg_move_from_storage(index, amount)


def cmd_help(nick, message, is_whisper, match):
    # !help — point at the forum thread and source repository.
    if not is_whisper:
        return
    m = ('[@@https://forums.themanaworld.org/viewtopic.php?f=12&t=19673|Forum@@]'
         '[@@https://bitbucket.org/rumly111/manachat|Sources@@] '
         'Try !commands for list of commands')
    whisper(nick, m)


def cmd_commands(nick, message, is_whisper, match):
    # !commands — list available commands.
    # NOTE(review): source is truncated mid-statement below; the remainder of
    # this function lies outside this chunk.
    if not is_whisper:
        return
    c = []
    for cmd in manaboy_commands:
        if cmd.starts
        # NOTE(review): chunk begins mid-property — the `if` branch matching
        # this `else` (presumably choosing between OAuth URLs) is outside view.
        else:
            return figshare_settings.API_OAUTH_URL

    @property
    def has_auth(self):
        # True when user settings are linked and themselves authorized.
        return bool(self.user_settings and self.user_settings.has_auth)

    @property
    def complete(self):
        # Addon is fully configured once authorized and linked to content.
        return self.has_auth and self.figshare_id is not None

    @property
    def linked_content(self):
        # Summary of the linked figshare object (article or project).
        return {
            'id': self.figshare_id,
            'type': self.figshare_type,
            'name': self.figshare_title,
        }

    def authorize(self, user_settings, save=False):
        """Attach user credentials to this node and log the authorization."""
        self.user_settings = user_settings
        node = self.owner
        node.add_log(
            action='figshare_node_authorized',
            params={
                'project': node.parent_id,
                'node': node._id,
            },
            auth=Auth(user=user_settings.owner),
        )
        if save:
            self.save()

    def deauthorize(self, auth=None, add_log=True, save=False):
        """Remove user authorization from this node and log the event."""
        self.user_settings = None
        self.figshare_id = None
        self.figshare_type = None
        self.figshare_title = None
        if add_log:
            node = self.owner
            self.owner.add_log(
                action='figshare_node_deauthorized',
                params={
                    'project': node.parent_id,
                    'node': node._id,
                },
                auth=auth,
            )
        if save:
            self.save()

    def serialize_waterbutler_credentials(self):
        """Return OAuth credentials for waterbutler; raise if unauthorized."""
        if not self.has_auth:
            raise exceptions.AddonError('Cannot serialize credentials for unauthorized addon')
        return {
            'client_token': figshare_settings.CLIENT_ID,
            'client_secret': figshare_settings.CLIENT_SECRET,
            'owner_token': self.user_settings.oauth_access_token,
            'owner_secret': self.user_settings.oauth_access_token_secret,
        }

    def serialize_waterbutler_settings(self):
        """Return container settings for waterbutler; raise if unconfigured."""
        if not self.figshare_type or not self.figshare_id:
            raise exceptions.AddonError('Cannot serialize settings for unconfigured addon')
        return {
            'container_type': self.figshare_type,
            'container_id': str(self.figshare_id),
        }

    def create_waterbutler_log(self, auth, action, metadata):
        """Record a file add/update/remove event in the node's log.

        NOTE(review): for actions other than FILE_ADDED/FILE_UPDATED/
        FILE_REMOVED, `name` and `urls` are never bound and the add_log call
        below would raise NameError — presumably callers only pass these
        three actions; confirm.
        """
        if action in [NodeLog.FILE_ADDED, NodeLog.FILE_UPDATED]:
            name = metadata['name']
            url = self.owner.web_url_for('addon_view_or_download_file', provider='figshare', path=metadata['path'])
            urls = {
                'view': url,
                'download': url + '?action=download'
            }
        elif action == NodeLog.FILE_REMOVED:
            name = metadata['path']
            urls = {}
        self.owner.add_log(
            'figshare_{0}'.format(action),
            auth=auth,
            params={
                'project': self.owner.parent_id,
                'node': self.owner._id,
                'path': name,
                'urls': urls,
                'figshare': {
                    'id': self.figshare_id,
                    'type': self.figshare_type,
                },
            },
        )

    def delete(self, save=False):
        # Deauthorize silently (no log entry) when the addon is deleted.
        super(AddonFigShareNodeSettings, self).delete(save=False)
        self.deauthorize(add_log=False, save=save)

    def update_fields(self, fields, node, auth):
        """Update linked-content fields and log when anything changed."""
        updated = False
        if fields.get('id'):
            updated = updated or (fields['id'] != self.figshare_id)
            self.figshare_id = fields['id']
        if fields.get('name'):
            updated = updated or (fields['name'] != self.figshare_title)
            self.figshare_title = fields['name']
        if fields.get('type'):
            updated = updated or (fields['type'] != self.figshare_type)
            self.figshare_type = fields['type']
        self.save()
        if updated:
            node.add_log(
                action='figshare_content_linked',
                params={
                    'project': node.parent_id,
                    'node': node._id,
                    'figshare': {
                        'type': self.figshare_type,
                        'id': self.figshare_id,
                        'title': self.figshare_title,
                    },
                },
                auth=auth,
            )

    def to_json(self, user):
        """Serialize node settings for the settings page."""
        ret = super(AddonFigShareNodeSettings, self).to_json(user)
        figshare_user = user.get_addon('figshare')
        ret.update({
            'figshare_id': self.figshare_id or '',
            'figshare_type': self.figshare_type or '',
            'figshare_title': self.figshare_title or '',
            'node_has_auth': self.has_auth,
            'user_has_auth': bool(figshare_user) and figshare_user.has_auth,
            'figshare_options': [],
            'is_registration': self.owner.is_registration,
        })
        if self.has_auth:
            ret.update({
                'authorized_user': self.user_settings.owner.fullname,
                'owner_url': self.user_settings.owner.url,
                'is_owner': user == self.user_settings.owner
            })
        return ret

    #############
    # Callbacks #
    #############

    def before_page_load(self, node, user):
        """
        :param Node node:
        :param User user:
        :return str: Alert message
        """
        if not self.figshare_id:
            return []
        figshare = node.get_addon('figshare')
        # Quit if no user authorization
        node_permissions = 'public' if node.is_public else 'private'
        if figshare.figshare_type == 'project':
            if node_permissions == 'private':
                message = messages.BEFORE_PAGE_LOAD_PRIVATE_NODE_MIXED_FS.format(category=node.project_or_component, project_id=figshare.figshare_id)
                return [message]
            else:
                message = messages.BEFORE_PAGE_LOAD_PUBLIC_NODE_MIXED_FS.format(category=node.project_or_component, project_id=figshare.figshare_id)
        connect = Figshare.from_settings(self.user_settings)
        article_is_public = connect.article_is_public(self.figshare_id)
        article_permissions = 'public' if article_is_public else 'private'
        if article_permissions != node_permissions:
            message = messages.BEFORE_PAGE_LOAD_PERM_MISMATCH.format(
                category=node.project_or_component,
                node_perm=node_permissions,
                figshare_perm=article_permissions,
                figshare_id=self.figshare_id,
            )
            if article_permissions == 'private' and node_permissions == 'public':
                message += messages.BEFORE_PAGE_LOAD_PUBLIC_NODE_PRIVATE_FS
            # No HTML snippets, so escape message all at once
            return [markupsafe.escape(message)]

    def before_remove_contributor(self, node, removed):
        """
        :param Node node:
        :param User removed:
        :return str: Alert message
        """
        if self.user_settings and self.user_settings.owner == removed:
            return messages.BEFORE_REMOVE_CONTRIBUTOR.format(
                category=node.project_or_component,
                user=removed.fullname,
            )

    def after_remove_contributor(self, node, removed, auth=None):
        """
        :param Node node:
        :param User removed:
        :return str: Alert message
        """
        if self.user_settings and self.user_settings.owner == removed:
            # Delete OAuth tokens
            self.user_settings = None
            self.save()
            message = (
                u'Because the FigShare add-on for {category} "{title}" was authenticated '
                u'by {user}, authentication information has been deleted.'
            ).format(
                category=markupsafe.escape(node.category_display),
                title=markupsafe.escape(node.title),
                user=markupsafe.escape(removed.fullname)
            )
            if not auth or auth.user != removed:
                url = node.web_url_for('node_setting')
                # NOTE(review): source truncated mid-statement below; the rest
                # of this method lies outside this chunk.
                messag
from pposgd_mpi.common.mpi_running_mean_std import RunningMeanStd
import pposgd_mpi.common.tf_util as U
import tensorflow as tf
import gym
from pposgd_mpi.common.distributions import make_pdtype


class CnnPolicy(object):
    """Convolutional actor-critic policy for image observations (TF1 graph
    style). Builds a shared CNN trunk with a policy ("logits") head and a
    value ("value") head.
    """
    # No recurrent state — state_in/state_out below are empty.
    recurrent = False

    def __init__(self, name, ob_space, ac_space, kind='large'):
        # Build all variables under a dedicated variable scope so they can be
        # retrieved later via get_variables()/get_trainable_variables().
        with tf.variable_scope(name):
            self._init(ob_space, ac_space, kind)
            self.scope = tf.get_variable_scope().name

    def _init(self, ob_space, ac_space, kind):
        """Construct the graph.

        kind: 'small' (A3C-paper trunk) or 'large' (Nature-DQN trunk).
        Raises NotImplementedError for any other value.
        """
        assert isinstance(ob_space, gym.spaces.Box)

        self.pdtype = pdtype = make_pdtype(ac_space)
        sequence_length = None  # variable batch dimension

        ob = U.get_placeholder(name="obs", dtype=tf.float32, shape=[sequence_length] + list(ob_space.shape))

        # Pixel observations scaled to [0, 1] — assumes uint8-range input.
        x = ob / 255.0
        if kind == 'small':  # from A3C paper
            x = tf.nn.relu(U.conv2d(x, 16, "l1", [8, 8], [4, 4], pad="VALID"))
            x = tf.nn.relu(U.conv2d(x, 32, "l2", [4, 4], [2, 2], pad="VALID"))
            x = U.flattenallbut0(x)
            x = tf.nn.relu(U.dense(x, 256, 'lin', U.normc_initializer(1.0)))
        elif kind == 'large':  # Nature DQN
            x = tf.nn.relu(U.conv2d(x, 32, "l1", [8, 8], [4, 4], pad="VALID"))
            x = tf.nn.relu(U.conv2d(x, 64, "l2", [4, 4], [2, 2], pad="VALID"))
            x = tf.nn.relu(U.conv2d(x, 64, "l3", [3, 3], [1, 1], pad="VALID"))
            x = U.flattenallbut0(x)
            x = tf.nn.relu(U.dense(x, 512, 'lin', U.normc_initializer(1.0)))
        else:
            raise NotImplementedError

        # Policy head (small init keeps initial policy near-uniform) and
        # scalar value head sharing the trunk `x`.
        logits = U.dense(x, pdtype.param_shape()[0], "logits", U.normc_initializer(0.01))
        self.pd = pdtype.pdfromflat(logits)
        self.vpred = U.dense(x, 1, "value", U.normc_initializer(1.0))[:,0]

        self.state_in = []
        self.state_out = []

        # `stochastic` placeholder is declared but the sampled action is always
        # stochastic here (see XXX) — deterministic mode is not wired up.
        stochastic = tf.placeholder(dtype=tf.bool, shape=())
        ac = self.pd.sample() # XXX
        self._act = U.function([stochastic, ob], [ac, self.vpred])

    def act(self, stochastic, ob):
        """Sample an action and value estimate for a single observation."""
        ac1, vpred1 = self._act(stochastic, ob[None])
        return ac1[0], vpred1[0]

    def get_variables(self):
        # All variables created under this policy's scope.
        return tf.get_collection(tf.GraphKeys.VARIABLES, self.scope)

    def get_trainable_variables(self):
        # Trainable subset only.
        return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, self.scope)

    def get_initial_state(self):
        # No recurrent state.
        return []
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to manage elasticsearch shield role
(c) 2016, Thierno IB. BARRY @barryib
Sponsored by Polyconseil http://polyconseil.fr.

This file is part of Ansible

Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
import os

ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'version': '1.0'}

DOCUMENTATION = '''
---
module: kibana_plugin
short_description: Manage Kibana plugins
description:
    - Manages Kibana plugins.
version_added: "2.2"
author: Thierno IB. BARRY (@barryib)
options:
    name:
        description:
            - Name of the plugin to install
        required: True
    state:
        description:
            - Desired state of a plugin.
        required: False
        choices: ["present", "absent"]
        default: present
    url:
        description:
            - Set exact URL to download the plugin from.
              For local file, prefix its absolute path with file://
        required: False
        default: None
    timeout:
        description:
            - "Timeout setting: 30s, 1m, 1h..."
        required: False
        default: 1m
    plugin_bin:
        description:
            - Location of the plugin binary
        required: False
        default: /opt/kibana/bin/kibana
    plugin_dir:
        description:
            - Your configured plugin directory specified in Kibana
        required: False
        default: /opt/kibana/installedPlugins/
    version:
        description:
            - Version of the plugin to be installed.
              If plugin exists with previous version, it will NOT be updated if C(force) is not set to yes
        required: False
        default: None
    force:
        description:
            - Delete and re-install the plugin. Can be useful for plugins update
        required: False
        choices: ["yes", "no"]
        default: no
'''

EXAMPLES = '''
- name: Install Elasticsearch head plugin
  kibana_plugin:
    state: present
    name: elasticsearch/marvel

- name: Install specific version of a plugin
  kibana_plugin:
    state: present
    name: elasticsearch/marvel
    version: '2.3.3'

- name: Uninstall Elasticsearch head plugin
  kibana_plugin:
    state: absent
    name: elasticsearch/marvel
'''

RETURN = '''
cmd:
    description: the launched command during plugin mangement (install / remove)
    returned: success
    type: string
name:
    description: the plugin name to install or remove
    returned: success
    type: string
url:
    description: the url from where the plugin is installed from
    returned: success
    type: string
timeout:
    description: the timout for plugin download
    returned: success
    type: string
stdout:
    description: the command stdout
    returned: success
    type: string
stderr:
    description: the command stderr
    returned: success
    type: string
state:
    description: the state for the managed plugin
    returned: success
    type: string
'''

# Maps the module's `state` option to the kibana CLI flag.
PACKAGE_STATE_MAP = dict(
    present="--install",
    absent="--remove"
)


def parse_plugin_repo(string):
    """Extract the bare plugin name from a "user/plugin" spec and strip any
    elasticsearch-/es- prefix."""
    elements = string.split("/")
    # We first consider the simplest form: pluginname
    repo = elements[0]
    # We consider the form: username/pluginname
    if len(elements) > 1:
        repo = elements[1]
    # remove elasticsearch- prefix
    # remove es- prefix
    # NOTE: the loop variable deliberately shadows the `string` parameter,
    # which is no longer needed at this point.
    for string in ("elasticsearch-", "es-"):
        if repo.startswith(string):
            return repo[len(string):]
    return repo


def is_plugin_present(plugin_dir, working_dir):
    """Return True if the plugin directory already exists under Kibana's
    installed-plugins directory."""
    return os.path.isdir(os.path.join(working_dir, plugin_dir))


def parse_error(string):
    """Return the text after "reason: " in CLI output, or the whole string
    when no reason marker is found."""
    reason = "reason: "
    try:
        return string[string.index(reason) + len(reason):].strip()
    except ValueError:
        return string


def install_plugin(module, plugin_bin, plugin_name, url, timeout):
    """Run `kibana plugin --install`; fail the module on a non-zero exit.

    Returns (changed, cmd, stdout, stderr).
    """
    cmd_args = [plugin_bin, "plugin", PACKAGE_STATE_MAP["present"], plugin_name]
    if url:
        cmd_args.append("--url %s" % url)
    if timeout:
        cmd_args.append("--timeout %s" % timeout)
    cmd = " ".join(cmd_args)
    if module.check_mode:
        return True, cmd, "check mode", ""
    rc, out, err = module.run_command(cmd)
    if rc != 0:
        reason = parse_error(out)
        module.fail_json(msg=reason)
    return True, cmd, out, err


def remove_plugin(module, plugin_bin, plugin_name):
    """Run `kibana plugin --remove`; fail the module on a non-zero exit.

    Returns (changed, cmd, stdout, stderr).
    """
    cmd_args = [plugin_bin, "plugin", PACKAGE_STATE_MAP["absent"], plugin_name]
    cmd = " ".join(cmd_args)
    if module.check_mode:
        return True, cmd, "check mode", ""
    rc, out, err = module.run_command(cmd)
    if rc != 0:
        reason = parse_error(out)
        module.fail_json(msg=reason)
    return True, cmd, out, err


def main():
    """Module entry point: parse parameters and converge plugin state."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True),
            state=dict(default="present", choices=PACKAGE_STATE_MAP.keys()),
            url=dict(default=None),
            timeout=dict(default="1m"),
            plugin_bin=dict(default="/opt/kibana/bin/kibana", type="path"),
            plugin_dir=dict(default="/opt/kibana/installedPlugins/", type="path"),
            version=dict(default=None),
            force=dict(default="no", type="bool")
        ),
        supports_check_mode=True,
    )

    name = module.params["name"]
    state = module.params["state"]
    url = module.params["url"]
    timeout = module.params["timeout"]
    plugin_bin = module.params["plugin_bin"]
    plugin_dir = module.params["plugin_dir"]
    version = module.params["version"]
    force = module.params["force"]

    present = is_plugin_present(parse_plugin_repo(name), plugin_dir)

    # skip if the state is correct
    if (present and state == "present" and not force) or (state == "absent" and not present and not force):
        module.exit_json(changed=False, name=name, state=state)

    # Append the version suffix expected by the kibana plugin CLI.
    if (version):
        name = name + '/' + version

    if state == "present":
        if force:
            remove_plugin(module, plugin_bin, name)
        changed, cmd, out, err = install_plugin(module, plugin_bin, name, url, timeout)
    elif state == "absent":
        changed, cmd, out, err = remove_plugin(module, plugin_bin, name)

    module.exit_json(changed=changed, cmd=cmd, name=name, state=state, url=url, timeout=timeout, stdout=out, stderr=err)


# Wildcard import at the bottom is the historical Ansible module convention.
from ansible.module_utils.basic import *

if __name__ == '__main__':
    main()
# coding: utf-8
# @ 2015 Valentin CHEMIERE @ Akretion
# ©2016 @author Mourad EL HADJ MIMOUNE <mourad.elhadj.mimoune@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).

import logging
from base64 import b64decode
import hashlib
from .common import TestConnection, ContextualStringIO
from .mock_server import server_mock_ftp
from .mock_server import MultiResponse
from openerp.exceptions import UserError

_logger = logging.getLogger(__name__)


class TestFtpConnection(TestConnection):
    """Exercise FTP import/export tasks against a mocked FTP server."""

    def setUp(self):
        super(TestFtpConnection, self).setUp()
        # In-memory file served by the FTP mock as the remote file content.
        self.test_file_ftp = ContextualStringIO()
        self.test_file_ftp.write('import ftp')
        self.test_file_ftp.seek(0)

    def test_00_ftp_import(self):
        """Importing a remote file creates a matching attachment."""
        self.task = self.env.ref('external_file_location.ftp_import_task')
        with server_mock_ftp(
                {'open': self.test_file_ftp,
                 'listdir': ['test-import-ftp.txt']}):
            self.task.run_import()
        search_file = self.env['ir.attachment.metadata'].search(
            [('name', '=', 'test-import-ftp.txt')])
        self.assertEqual(len(search_file), 1)
        self.assertEqual(b64decode(search_file[0].datas), 'import ftp')

    def test_01_ftp_export(self):
        """Exporting an attachment writes its content to the FTP server."""
        self.task = self.env.ref('external_file_location.ftp_export_task')
        self.ftp_attachment = self.env.ref(
            'external_file_location.ir_attachment_export_file_ftp')
        with server_mock_ftp(
                {'setcontents': ''}) as FakeFTP:
            self.task.run_export()
            # FakeFTP records each mocked call as a dict; inspect the last one.
            if FakeFTP:
                self.assertEqual('setcontents', FakeFTP[-1]['method'])
                self.assertEqual('done', self.ftp_attachment.state)
                self.assertEqual(
                    '/home/user/test/ftp_test_export.txt',
                    FakeFTP[-1]['args'][0])
                self.assertEqual(
                    'test ftp file export',
                    FakeFTP[-1]['kwargs']['data'])

    def test_02_ftp_import_md5(self):
        """Import with md5 check succeeds when the checksum file matches."""
        md5_file = ContextualStringIO()
        md5_file.write(hashlib.md5('import ftp').hexdigest())
        md5_file.seek(0)
        task = self.env.ref('external_file_location.ftp_import_task')
        task.md5_check = True
        # MultiResponse serves the md5 sidecar on the 2nd call (key 1) and the
        # data file on the 1st (key 0).
        with server_mock_ftp(
                {'open': MultiResponse({
                    1: md5_file,
                    0: self.test_file_ftp}),
                 'listdir': [task.filename]}) as Fakeftp:
            task.run_import()
            search_file = self.env['ir.attachment.metadata'].search(
                (('name', '=', task.filename),))
            self.assertEqual(len(search_file), 1)
            self.assertEqual(b64decode(search_file[0].datas), 'import ftp')
            self.assertEqual('open', Fakeftp[-1]['method'])
            self.assertEqual(hashlib.md5('import ftp').hexdigest(),
                             search_file.external_hash)

    def test_03_ftp_import_md5_corrupt_file(self):
        """Import with md5 check raises UserError on checksum mismatch."""
        md5_file = ContextualStringIO()
        md5_file.write(hashlib.md5('import test ftp corrupted').hexdigest())
        md5_file.seek(0)
        task = self.env.ref('external_file_location.ftp_import_task')
        task.md5_check = True
        with server_mock_ftp(
                {'open': MultiResponse({
                    1: md5_file,
                    0: self.test_file_ftp}),
                 'listdir': [task.filename]}):
            with self.assertRaises(UserError):
                task.run_import()
from __future__ import unicode_literals

from django.db import models
from jsonfield import JSONField

# (code, human label) pairs for Event.severity.
SEVERITY_CHOICES = [('c', "critical"), ('w', "warning"), ('i', "info")]


class Event(models.Model):
    """A geolocated, timestamped event with a severity level."""

    description = models.TextField(max_length=500, default='')
    # `('severity')` is just a parenthesized string — the positional
    # verbose_name, not a tuple.
    severity = models.CharField(('severity'),
                                choices=SEVERITY_CHOICES,
                                default='i',
                                max_length=1)
    # Unix epoch seconds — NOTE(review): presumably; confirm with callers.
    timestamp = models.IntegerField()
    lat = models.FloatField(default=0)
    # NOTE: field name shadows the Python 2 builtin `long`; renaming would
    # require a schema migration, so it is kept as-is.
    long = models.FloatField(default=0)
    # Free-form JSON payload; empty string used as the "no data" default.
    extra = JSONField(null=True, default="")


class Media(models.Model):
    """A media file (e.g. photo/clip) optionally attached to an Event."""

    description = models.TextField()
    # Short format/extension code, e.g. a 3-letter suffix.
    format = models.CharField(blank=False, max_length=3)
    timestamp = models.IntegerField()
    filename = models.CharField(max_length=255)
    filesize = models.BigIntegerField()
    # Deleting the Event cascades to its media rows.
    event = models.ForeignKey(
        Event,
        null=True,
        related_name='media',
        on_delete=models.CASCADE,
    )

    class Meta:
        ordering = ('timestamp',)
# # Copyright 2016-2017 Red Hat, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You shoul
d have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # # Refer to the README and COPYING files for ful
l details of the license # from __future__ import absolute_import from __future__ import division import collections import logging import threading import six from vdsm.common import compat from vdsm.common import concurrent from vdsm.config import config try: from hawkular import metrics except ImportError as e: raise compat.Unsupported(str(e)) _running = False _queue = collections.deque(maxlen=config.getint('metrics', 'queue_size')) _cond = threading.Condition(threading.Lock()) _STOP = object() def start(address): global _running if _running: raise RuntimeError('trying to start reporter while running') logging.info("Starting hawkular reporter") concurrent.thread(_run, name='hawkular', args=(address,)).start() _running = True def stop(): logging.info("Stopping hawkular reporter") with _cond: _queue.clear() _queue.append(_STOP) _cond.notify() def send(report): metrics_list = [_get_gauge_metric(name, value) for name, value in six.iteritems(report)] _queue.append(metrics_list) with _cond: _cond.notify() def _get_gauge_metric(name, value): return metrics.create_metric(metrics.MetricType.Gauge, name, metrics.create_datapoint(float(value))) def _run(address): global _running client = metrics.HawkularMetricsClient(tenant_id="oVirt", host=address) while True: with _cond: while not _queue: _cond.wait() while _queue: items = _queue.popleft() if items is _STOP: break client.put(items) _running = False
        # NOTE(review): chunk begins mid-method — this is the tail of the
        # class's close() method; its `def` line is outside this chunk.
        self._write_event = None
        if self._connect_event:
            self._connect_event.delete()
            self._connect_event = None
        if self._sock:
            self._sock.close()
            self._sock = None
        # Flush any pending data to the read callbacks as appropriate. Do this
        # manually as there is a chance for the following race condition to occur:
        # pending data read by cb
        # callback reads 1.1 messages, re-buffers .1 msg back
        # callback disconnects from socket based on message, calling close()
        # we get back to this code and find there's still data in the input buffer
        # and the read cb hasn't been cleared. ruh roh.
        #if self._parent_read_cb and self._read_buf.tell()>0:
        if self._parent_read_cb and len(self._read_buf)>0:
            cb = self._parent_read_cb
            self._parent_read_cb = None
            self._error_msg = "error processing remaining socket input buffer"
            self._protected_cb( cb, self )
        # Only mark as closed after socket is really closed, we've flushed buffered
        # input, and we're calling back to close handlers.
        self._closed = True
        if self._parent_close_cb:
            self._parent_close_cb( self )
        if self._pending_read_cb_event:
            self._pending_read_cb_event.delete()
            self._pending_read_cb_event = None
        if self._inactive_event:
            self._inactive_event.delete()
            self._inactive_event = None
        # Delete references to callbacks to help garbage collection
        self._parent_accept_cb = None
        self._parent_read_cb = None
        self._parent_error_cb = None
        self._parent_close_cb = None
        self._parent_output_empty_cb = None
        # Clear buffers
        self._write_buf = None
        self._read_buf = None

    def accept(self):
        """
        No-op as we no longer perform blocking accept calls.
        """
        pass

    def _set_read_cb(self, cb):
        """
        Set the read callback. If there's data in the output buffer, immediately
        setup a call.
        """
        self._parent_read_cb = cb
        #if self._read_buf.tell()>0 and self._parent_read_cb!=None and self._pending_read_cb_event==None:
        if len(self._read_buf)>0 and self._parent_read_cb!=None and self._pending_read_cb_event==None:
            # Schedule an immediate (0-second) timeout so the new callback is
            # invoked from the event loop rather than re-entrantly here.
            self._pending_read_cb_event = \
                event.timeout( 0, self._protected_cb, self._parent_read_timer_cb )

    # Allow someone to change the various callbacks.
    # (write-only properties: the callbacks can be assigned but not read back)
    read_cb = property( fset=_set_read_cb )
    accept_cb = property( fset=lambda self,func: setattr(self, '_parent_accept_cb', func ) )
    close_cb = property( fset=lambda self,func: setattr(self, '_parent_close_cb', func ) )
    error_cb = property( fset=lambda self,func: setattr(self, '_parent_error_cb', func ) )
    output_empty_cb = property( fset=lambda self,func: setattr(self, '_parent_output_empty_cb',func) )

    def bind(self, *args):
        """
        Bind the socket.
        """
        if self._debug:
            self._logger.debug( "binding to %s", str(args) )
        self._sock.bind( *args )
        self._peername = "%s:%d"%self.getsockname()
        # Start watching for incoming connections on the bound socket.
        self._accept_event = event.read( self, self._protected_cb, self._accept_cb )

    def connect(self, *args, **kwargs):
        '''
        Connect to the socket. If currently non-blocking, will return immediately
        and call close_cb when the timeout is reached. If timeout_at is a float,
        will wait until that time and then call the close_cb. Otherwise, it will
        set timeout_at as time()+timeout, where timeout is a float argument or
        the current timeout value of the socket. The check interval for successful
        connection on a non-blocking socket is 100ms.

        IMPORTANT: If you want the socket to timeout at all in non-blocking mode,
        you *must* pass in either a relative timout in seconds, or an absolute
        value in timeout_at. Otherwise, the socket will forever try to connect.

        Passes *args on to socket.connect_ex, and **kwargs are used for local
        control of `timeout` and `timeout_at`.
        '''
        timeout_at = kwargs.get('timeout_at')
        timeout = kwargs.get('timeout')
        if not isinstance(timeout_at, float):
            if not isinstance(timeout,(int,long,float)):
                timeout = self._sock.gettimeout()
            if timeout>0:
                timeout_at = time.time()+timeout
        # immediate_raise=True: a synchronous first attempt should surface
        # errors to the caller instead of routing them to error_cb.
        self._connect_cb(timeout_at, *args, immediate_raise=True)

    def _connect_cb(self, timeout_at, *args, **kwargs):
        '''
        Local support for synch and asynch connect. Required because
        `event.timeout` doesn't support kwargs. They are spec'd though so that
        we can branch how exceptions are handled.
        '''
        err = self._sock.connect_ex( *args )
        if not err:
            # Connected: register read/write events and drop the retry timer.
            self._peername = "%s:%d"%self._sock.getpeername()
            self._read_event = event.read( self._sock, self._protected_cb, self._read_cb )
            self._write_event = event.write( self._sock, self._protected_cb, self._write_cb )
            if self._connect_event:
                self._connect_event.delete()
                self._connect_event = None
        elif err in (errno.EINPROGRESS,errno.EALREADY):
            # Only track timeout if we're about to re-schedule. Should only receive
            # these on a non-blocking socket.
            if isinstance(timeout_at,float) and time.time()>timeout_at:
                self._error_msg = 'timeout connecting to %s'%str(args)
                self.close()
                return
            if self._connect_event:
                self._connect_event.delete()
            # Checking every 100ms seems to be a reasonable amount of frequency. If
            # requested this too can be configurable.
            self._connect_event = event.timeout(0.1, self._connect_cb, timeout_at, *args)
        else:
            # Hard failure: either raise to the caller (synchronous path) or
            # forward to the error callback (asynchronous retry path).
            if self._connect_event:
                self._connect_event.delete()
            self._error_msg = os.strerror(err)
            serr = socket.error( err, self._error_msg )
            if kwargs.get('immediate_raise'):
                raise serr
            else:
                self._handle_error( serr )

    def set_inactive_timeout(self, t):
        """
        Set the inactivity timeout. If is None or 0, there is no activity timeout.
        If t>0 then socket will automatically close if there has been no activity
        after t seconds (float supported). Will raise TypeError if <t> is invalid.
        """
        if t==None or t==0:
            if self._inactive_event:
                self._inactive_event.delete()
                self._inactive_event = None
            self._inactive_timeout = 0
        elif isinstance(t,(int,long,float)):
            if self._inactive_event:
                self._inactive_event.delete()
            self._inactive_event = event.timeout( t, self._inactive_cb )
            self._inactive_timeout = t
        else:
            raise TypeError( "invalid timeout %s"%(str(t)) )

    ### Private support methods

    def _handle_error(self, exc):
        '''
        Gracefully handle errors: forward to the registered error callback if
        any, otherwise log (or print) the failure.
        '''
        if self._parent_error_cb:
            if self._error_msg!=None:
                self._parent_error_cb( self, self._error_msg, exc )
            else:
                self._parent_error_cb( self, "unknown error", exc )
        else:
            if self._error_msg!=None:
                msg = "unhandled error %s"%(self._error_msg)
            else:
                msg = "unhandled unknown error"
            if self._logger:
                self._logger.error( msg, exc_info=True )
            else:
                traceback.print_exc()

    def _protected_cb(self, cb, *args, **kwargs):
        """
        Wrap any callback from libevent so that we can be sure that exceptions
        are handled and errors forwarded to error_cb.
        """
        rval = None
        try:
            rval = cb(*args, **kwargs)
        except Exception, e:
            self._handle_error( e )
        self._error_msg = None
        return rval

    def _accept_cb(self):
        """
        Accept callback from libevent.
        """
        self._error_msg = "error accepting new socket"
        (conn, addr) = self._sock.accept()
        if self._debug:
            self._logger.debug("accepted connection from %s"%(str(addr)))
        # Wrap the accepted connection in its own EventSocket, inheriting the
        # listener's callbacks and settings.
        evsock = EventSocket( read_cb=self._parent_read_cb,
            error_cb=self._parent_error_cb,
            close_cb=self._parent_close_cb,
            sock=conn,
            debug=self._debug, logger=self._logger,
            max_read_buffer=self._max_read_buffer )
        if self._parent_accept_cb:
            # NOTE(review): source truncated mid-comment below; the rest of
            # this method lies outside this chunk.
            # 31 march 09 aaron - We can't call accept callback asynchronously in the
            # event that the s
"""Utility functions for handling and fetching repo archives in zip format.""" from __future__ import absolute_import import os import tempfile from zipfile import ZipFile import requests tr
y: # BadZipfile was renamed to BadZipFile in Python 3.2. from zipfile import BadZipFile except ImportError: from zipfile import BadZipfile as BadZipFile from cookiecutter.exceptions import InvalidZipRepository from cookiecutter.prompt import read_repo_password from cookiecutter.utils import make_sure_path_exists, prompt_and_delete def unzip(zip_uri, is_url, clone_to_dir='.', no_input=False, password=None): """Download and unpack a zipfile at a given U
RI. This will download the zipfile to the cookiecutter repository, and unpack into a temporary directory. :param zip_uri: The URI for the zipfile. :param is_url: Is the zip URI a URL or a file? :param clone_to_dir: The cookiecutter repository directory to put the archive into. :param no_input: Suppress any prompts :param password: The password to use when unpacking the repository. """ # Ensure that clone_to_dir exists clone_to_dir = os.path.expanduser(clone_to_dir) make_sure_path_exists(clone_to_dir) if is_url: # Build the name of the cached zipfile, # and prompt to delete if it already exists. identifier = zip_uri.rsplit('/', 1)[1] zip_path = os.path.join(clone_to_dir, identifier) if os.path.exists(zip_path): download = prompt_and_delete(zip_path, no_input=no_input) else: download = True if download: # (Re) download the zipfile r = requests.get(zip_uri, stream=True) with open(zip_path, 'wb') as f: for chunk in r.iter_content(chunk_size=1024): if chunk: # filter out keep-alive new chunks f.write(chunk) else: # Just use the local zipfile as-is. zip_path = os.path.abspath(zip_uri) # Now unpack the repository. The zipfile will be unpacked # into a temporary directory try: zip_file = ZipFile(zip_path) if len(zip_file.namelist()) == 0: raise InvalidZipRepository( 'Zip repository {} is empty'.format(zip_uri) ) # The first record in the zipfile should be the directory entry for # the archive. If it isn't a directory, there's a problem. 
first_filename = zip_file.namelist()[0] if not first_filename.endswith('/'): raise InvalidZipRepository( 'Zip repository {} does not include ' 'a top-level directory'.format(zip_uri) ) # Construct the final target directory project_name = first_filename[:-1] unzip_base = tempfile.mkdtemp() unzip_path = os.path.join(unzip_base, project_name) # Extract the zip file into the temporary directory try: zip_file.extractall(path=unzip_base) except RuntimeError: # File is password protected; try to get a password from the # environment; if that doesn't work, ask the user. if password is not None: try: zip_file.extractall( path=unzip_base, pwd=password.encode('utf-8') ) except RuntimeError: raise InvalidZipRepository( 'Invalid password provided for protected repository' ) elif no_input: raise InvalidZipRepository( 'Unable to unlock password protected repository' ) else: retry = 0 while retry is not None: try: password = read_repo_password('Repo password') zip_file.extractall( path=unzip_base, pwd=password.encode('utf-8') ) retry = None except RuntimeError: retry += 1 if retry == 3: raise InvalidZipRepository( 'Invalid password provided ' 'for protected repository' ) except BadZipFile: raise InvalidZipRepository( 'Zip repository {} is not a valid zip archive:'.format(zip_uri) ) return unzip_path
# -*- coding: utf-8 -*-

# String identifiers for the DNS entity types used by the Cerebrum DNS
# module. These names are used as discriminators when looking up and
# storing DNS records, so their exact values form part of the stored data
# and must not change.

import cereconf

__version__ = "1.5"

# IPv4 and IPv6 address entities.
IP_NUMBER = 'IPNumber'
IPv6_NUMBER = 'IPv6Number'
# The owner (name) side of a DNS record.
DNS_OWNER='DnsOwner'
# Reverse (PTR-style) mapping for an IPv4 address.
REV_IP_NUMBER = 'IPNumber_rev'
# Forward address records (IPv4 A and IPv6 AAAA).
A_RECORD = 'ARecord'
AAAA_RECORD = 'AAAARecord'
# Host metadata (HINFO-style information).
HOST_INFO = 'HostInfo'
# Mail-exchanger set.
MX_SET = 'MXSet'
# SRV records: the target host and the owning service name.
SRV_TARGET = "SRV_target"
SRV_OWNER = "SRV_owner"
# Catch-all for record types without a dedicated identifier (e.g. TXT).
GENERAL_DNS_RECORD = "GeneralDnsRecord"
# CNAME records: the alias (owner) and what it points at (target).
CNAME_OWNER = "Cname_owner"
CNAME_TARGET = "Cname_target"

# TODO: This value should not be hardcoded here. Didn't put it in
# cereconf as the zone support for dns_owner should be here "real soon
# now"
ZONE='uio.no'
#!/usr/bin/env python3
# -*- mode: python -*-

# pbench-base.py: shim that loads the pbench server configuration, exports
# its attributes as environment variables, and then exec()s the requested
# legacy shell script so the bash side sees a consistent configuration.

import os, sys

# This file is an executable shim, not an importable module: refuse to be
# imported so the exec() at the bottom can never fire accidentally.
if __name__ != '__main__':
    sys.exit(1)

from argparse import ArgumentParser

_NAME_ = "pbench-base.py"

parser = ArgumentParser(_NAME_)
parser.add_argument(
    "-C", "--config", dest="cfg_name", help="Specify config file")
# Fall back to the CONFIG environment variable when -C is not given.
parser.set_defaults(cfg_name = os.environ.get("CONFIG"))
parser.add_argument('prog', metavar='PROG', type=str, nargs=1,
                    help='the program name of the caller')
parser.add_argument('args', metavar='args', type=str, nargs='*',
                    help='program arguments')
# parse_known_args: unrecognized options are passed through to the wrapped
# script rather than rejected here.
parsed, _ = parser.parse_known_args()

_prog = os.path.basename(parsed.prog[0])
_dir = os.path.dirname(parsed.prog[0])

if not parsed.cfg_name:
    # pbench-base.py is not always invoked with -C or --config or the CONFIG
    # environment variable set. Since we really need access to the config
    # file to operate, and we know the relative location of that config file,
    # we check to see if that exists before declaring a problem.
    config_name = os.path.join(os.path.dirname(_dir), "lib", "config",
                               "pbench-server.cfg")
    if not os.path.exists(config_name):
        print("{}: No config file specified: set CONFIG env variable or use"
              " --config <file> on the command line".format(_prog),
              file=sys.stderr)
        sys.exit(1)
else:
    config_name = parsed.cfg_name

# Export all the expected pbench config file attributes for the
# existing shell scripts. This maintains the single-source-of-
# truth for those definitions in the PbenchConfig class, but
# still accessible to all pbench bash shell scripts.
from pbench import PbenchConfig, BadConfig

try:
    config = PbenchConfig(config_name)
except BadConfig as e:
    print("{}: {}".format(_prog, e), file=sys.stderr)
    sys.exit(1)

# Exclude the "files" and "conf" attributes from being exported
vars = sorted([ key for key in config.__dict__.keys() \
        if key not in ('files', 'conf', 'timestamp', '_unittests', 'get') ])
for att in vars:
    try:
        # NOTE(review): os.environ requires string values; this presumably
        # relies on PbenchConfig exposing only string attributes — confirm.
        os.environ[att] = getattr(config, att)
    except AttributeError:
        print("{}: Missing internal pbench attribute, \"{}\", in"
              " configuration".format(_prog, att), file=sys.stderr)
        sys.exit(1)

# Signal unit-test mode to the shell scripts.
if config._unittests:
    os.environ['_PBENCH_SERVER_TEST'] = "1"

# Replace this process with the corresponding shell script, passing the
# remaining command-line arguments through unchanged.
cmd = "{}.sh".format(sys.argv[1])
args = [ cmd ] + sys.argv[2:]
os.execv(cmd, args)
i) + (10 *i*np.pi) ) / ( 1.0 + self.timepoints * self.timepoints ) +np.sin( self.timepoints * 0.2* 2*np.pi) self.data[i,:] = np.sin(self.timepoints * (2 * i+1) * 2* np.pi) self.data_4_vbo = np.zeros((n,2), dtype=np.float32).flatten() #self.data_vbo[:,0] = self.timepoints #self.data_vbo[:,1] = self.data[-1,:] print"done calc" self.data_4_vbo_tp = self.data_4_vbo[0:-1:2] self.data_4_vbo_sig = self.data_4_vbo[1::2] self.data_4_vbo_sig[:] = self.data[0,:] self.data_4_vbo_tp[:] = self.timepoints # graph[i].x = x; # graph[i].y = sin(x * 10.0) / (1.0 + x * x); #my $data_4_vbo = pdl( zeroes(2,$data->dim(-1) ) )->float(); #my $data_4_vbo_timepoints = $data_4_vbo->slice("(0),:"); #my $data_4_vbo_signal = $data_4_vbo->slice("(1),:"); # $data_4_vbo_timepoints .= $datax; #$self->xdata(); #my $data_vbo = $data_4_vbo->flat; def set_window(self,l,r,b,t): glMatrixMode(GL_PROJECTION) glLoadIdentity() gluOrtho2D(l,r,b,t) def set_viewport(self,l,r,b,t): glViewport(l,b,r-l,t-b) def OnMouseLeftDown(self, event): self.mouse_down = True self.prev_x = event.GetX() self.prev_y = event.GetY() print"MLD" print self.prev_x print self.prev_y def OnMouseLeftUp(self, event): self.mouse_down = False print"MLU" # Canvas Proxy Methods def GetGLExtents(self): """Get the extents of the OpenGL canvas.""" return self.GetClientSize() #def SwapBuffers(self): # """Swap the OpenGL buffers.""" # #self.canvas.SwapBuffers() # self.SwapBuffers() # # wxPython Window Handlers #def processEraseBackgroundEvent(self, event): # """Process the erase background event.""" # pass # Do nothing, to avoid flashing on MSWin def OnSize(self, event): """Process the resize event.""" if self.GetContext(): # Make sure the frame is shown before calling SetCurrent. #self.Show() self.SetCurrent() size = self.GetGLExtents() self.OnReshape(size.width, size.height) self.Refresh(False) event.Skip() def OnPaint(self, event): """Process the drawing event.""" self.SetCurrent() # This is a 'perfect' time to initialize OpenGL ... 
only if we need to if not self.GLinitialized: self.OnInitGL() self.GLinitialized = True self.OnDraw() event.Skip() # # GLFrame OpenGL Event Handlers def OnInitGL(self): """Initialize OpenGL for use in the window.""" glClearColor(1, 1, 1, 1) def OnReshape(self, width, height): """Reshape the OpenGL viewport based on the dimensions of the window.""" self.set_viewport(0,width,0, height) def OnDraw(self, *args, **kwargs): "Draw the window." if self.is_on_draw: return self.is_on_draw = True #self.vbo = VertexBuffer(self.data_vbo) size = self.GetGLExtents() #--- reshape glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT) glClearColor(1.0,1.0,1.0,0.0) glLineWidth(2) # self.set_viewport(0,width,0, height) #glMatrixMode(GL_MODELVIEW) #glLoadIdentity() xmin=self.timepoints[0] xmax=self.timepoints[-1] #---start sub plots w0 = 10 w1 = size.width-10 h0 = 0 dh = int( size.height / self.data.shape[0] ); h1 = dh ymin=-1.0 ymax=1.0 dpos = ymin + (ymax - ymin) / 2.0 glColor3f(0.0,0.0,1.0) glLineWidth(2) glColor3f(0,0,1) self.data_4_vbo_sig[:] = self.data[0,:] self.vbo.data = self.data_4_vbo print self.vbo.data_buffer_size self.vbo.vbo_init() for idx in range( self.n_channels ): # glColor3f(0.0,0.0,1.0) self.set_viewport(w0,w1,h0,h1) #ymin = self.data[idx,:].min() #ymax = self.data [idx,:].max() #dpos = ymin + (ymax - ymin) / 2.0 self.set_window(xmin,xmax,ymin,ymax ) #--- draw zero line # glLineWidth(1) # glColor3f
(0,0,0) # glColor3f(0.4,0.4,0.4) # glBegin(GL_LINES) # glVertex2f(xmin,0.0) # glVertex2f(xmax,0.0) # glEnd() # glBegin(GL_LINES) # glVertex2f(xmin,dpos) # glVertex2f(xmax,dpos) # glEnd(); #glRasterPos2f(xmin,dpos) #--- plot signal # glLineWidth(2) # glColor3f(0,0,1)
#--- create OGL verts buffer # glDisableClientState(GL_VERTEX_ARRAY) # self.data_vbo[:,0] = self.timepoints self.data_4_vbo_sig[:] = self.data[idx,:] self.vbo.data = self.data_4_vbo self.vbo.vbo_update() # self.vbo.data = self.data_vbo self.vbo.vbo_draw() h0 += dh h1 += dh + 1 # glBufferSubDataARB_p(GL_ARRAY_BUFFER_ARB,0,$ogl_array); glFlush(); self.SwapBuffers() self.is_on_draw=False self.vbo.vbo_reset() class JuMEG_TSV_MainFrame(wx.Frame): """JuMEG TSV wxProject MainFrame.""" def __init__(self, parent,title="JuMEG TSV",id=wx.ID_ANY, pos=wx.DefaultPosition,size=wx.DefaultSize,style=wx.DEFAULT_FRAME_STYLE,name="MainWindow"): super(JuMEG_TSV_MainFrame, self).__init__(parent,id, title, pos, size, style, name) #--- Options Plot/Time/Channels self._ID_OPT_PLOT = 10111 self._ID_OPT_TIME = 10112 self._ID_OPT_CHANNELS = 10113 #--- init wx body self.wx_init_main_menu() #self.wx_init_toolbar() self.wx_init_statusbar() #--- init wx panels # self.wx_init_panels() # Create the splitter window. # splitter = wx.SplitterWindow(self, style=wx.NO_3D|wx.SP_3D) # splitter.SetMinimumPaneSize(1) # Add the Widget Panel # self.Panel = DemoPanel(self) # self.OGLFrame = GLFrame(self, -1, 'GL Window') self.OGLplot = JuMEGPlot2D(self) #--- Sizer = wx.BoxSizer(wx.VERTICAL) Sizer.Add(self.OGLplot, 1, wx.EXPAND|wx.ALL, 5) #Sizer.Add(MsgBtn, 0, wx.ALIGN_CENTER|wx.ALL, 5) self.SetSizerAndFit(Sizer) self.Bind(wx.EVT_LEFT_DOWN,self.click_OnMouseLeftDown) #,self.Id) # self.Fit() #--- init stuff I/O #--- click on stuff def click_on_open(self,event=None): print"click_on_open" def click_on_save(self,event=None): print"click_on_save" def click_on_clear(self,event=None): print"click_on_clear" def click_on_exit(self,event=None): print"click_on_exit" self.Close() def click_on_plot(self,event=None): print"click_on_plot" def click_on_time(self,event=None): print"click_on_time" def click_on_channels(self,event=None): print"click_on_channels" def click_on_about(self,event=None): print"click_on_about" #--- 
display # self.Show(True) def wx_init_main_menu(self): _menubar = wx.MenuBar() #--- File I/O _menu_file = wx.Menu() __id=_menu_file.Append(wx.ID_OPEN, '&Open') self.Bind(wx.EVT_MENU,self.click_on_open,__id ) __id=_menu_file.Append(wx.ID_SAVE, '&Save') self.Bind(wx.EVT_MENU,self.click_on_save,__id) _menu_file.AppendSeparator() __idx=_menu_file.Append(wx.ID_CLEAR,'&Clear') self.Bind(wx.EVT_MENU,self.click_on_clear,__id) _menu_file.AppendSeparator() __id=_menu_file.Append(wx.ID_EXIT, '&Exit') self.Bind(wx.EVT_MENU,self.click_on_exit,__id) _menubar.Append(_menu_file, '&File') #--- Options _menu_opt = wx.Menu() __id=_menu_opt.Append(self._ID_OPT_PLOT, '&Plot') self.Bind(wx.EVT_MENU,self.click_on_plot,__id) __id=_menu_opt
# -*- coding: utf-8 -*-

# CouchDB design-document synchronisation for the auction service: defines
# the views, a validate_doc_update function and a replication filter, and
# pushes them into the database at startup.

from couchdb.design import ViewDefinition
from couchdb.http import HTTPError
from time import sleep
from random import randint
import os


def add_index_options(doc):
    # Enable local sequence numbers on the design document so view rows can
    # expose the document's local _seq.
    doc['options'] = {'local_seq': True}


# View backed by an on-disk JS file shipped next to this module.
start_date_chronograph = ViewDefinition(
    'chronograph', 'start_date',
    open(os.path.join(os.path.dirname(os.path.abspath(__file__)),
                      'design_files/start_date.js')).read()
)


def sync_design_chronograph(db):
    """Push the chronograph design document into *db*, removing stale views."""
    views = [start_date_chronograph]
    ViewDefinition.sync_many(db, views, remove_missing=True,
                             callback=add_index_options)


# Index auctions by their end timestamp (falls back to the first stage's
# start when endDate is absent).
endDate_view = ViewDefinition('auctions', 'by_endDate', '''
function(doc) {
    var end = new Date(doc.endDate||doc.stages[0].start).getTime()
    emit(end, null);
}
''')

# Index auctions by the start timestamp of their first stage.
startDate_view = ViewDefinition('auctions', 'by_startDate', '''
function(doc) {
    var start = new Date(doc.stages[0].start).getTime()
    emit(start, null);
}
''')

# Select auctions sitting on their next-to-last stage (pre-announcement).
PreAnnounce_view = ViewDefinition('auctions', 'PreAnnounce', '''
function(doc) {
    if ((doc.stages.length - 2) == doc.current_stage){
        emit(null, null);
    }
}
''')


def sync_design(db):
    """Sync auction views plus the validator and startDate filter into *db*.

    Retries on HTTPError (typically an update conflict) with a random
    back-off, re-reading the design document on each attempt.
    """
    views = [endDate_view, startDate_view, PreAnnounce_view]
    for view in views:
        view.sync(db)
    while True:
        design = db.get('_design/auctions')
        if not design:
            design = {'_id': '_design/auctions'}
        # Only admins may write documents.
        validate_doc_update = '''
        function(newDoc, oldDoc, userCtx, secObj) {
            if (userCtx.roles.indexOf('_admin') !== -1) {
                return true;
            } else {
                throw({forbidden: 'Only valid user may change docs.'});
            }
        }
        '''
        # Replication filter: pass only auctions whose first stage starts in
        # the future ('2000' is a sentinel past date for malformed docs).
        start_date_filter = '''function(doc, req) {
            var now = new Date();
            var start = new Date(((doc.stages||[])[0]||{}).start || '2000');
            if (start > now){
                return true;
            }
            return false;
        }
        '''
        # Write only when something actually changed, to avoid needless
        # design-document revisions (each one invalidates view indexes).
        if 'validate_doc_update' not in design or \
                validate_doc_update != design['validate_doc_update'] or \
                start_date_filter != design.get('filters', {}).get('by_startDate'):
            design['validate_doc_update'] = validate_doc_update
            design['filters'] = design.get('filters', {})
            design['filters']['by_startDate'] = start_date_filter
            try:
                return db.save(design)
            except HTTPError:
                # Conflict with a concurrent writer: jittered sleep
                # (0-2 seconds), then loop and re-read the document.
                sleep(randint(0, 2000) / 1000.0)
        else:
            return
lation_obj = self.pool[relation] get_args.setdefault('kanban', "") kanban = get_args.pop('kanban') kanban_url = "?%s&kanban=" % werkzeug.url_encode(get_args) pages = {} for col in kanban.split(","): if col: col = col.split("-") pages[int(col[0])] = int(col[1]) objects = [] for group in model_obj.read_group(cr, uid, domain, ["id", column], groupby=column): obj = {} # browse column relation_id = group[column][0] obj['column_id'] = relation_obj.browse(cr, uid, relation_id) obj['kanban_url'] = kanban_url for k, v in pages.items(): if k != relation_id: obj['kanban_url'] += "%s-%s" % (k, v) # pager number = model_obj.search(cr, uid, group['__domain'], count=True) obj['page_count'] = int(math.ceil(float(number) / step)) obj['page'] = pages.get(relation_id) or 1 if obj['page'] > obj['page_count']: obj['page'] = obj['page_count'] offset = (obj['page']-1) * step obj['page_start'] = max(obj['page'] - int(math.floor((scope-1)/2)), 1) obj['page_end'] = min(obj['page_start'] + (scope-1), obj['page_count']) # view data obj['domain'] = group['__domain'] obj['model'] = model obj['step'] = step obj['orderby'] = orderby # browse objects object_ids = model_obj.search(cr, uid, group['__domain'], limit=step, offset=offset, order=orderby)
obj['object_ids'] = model_obj.browse(cr, uid, object_ids) objects.append(obj)
values = { 'objects': objects, 'range': range, 'template': template, } return request.website._render("website.kanban_contain", values) def kanban_col(self, cr, uid, ids, model, domain, page, template, step, orderby, context=None): html = "" model_obj = self.pool[model] domain = safe_eval(domain) step = int(step) offset = (int(page)-1) * step object_ids = model_obj.search(cr, uid, domain, limit=step, offset=offset, order=orderby) object_ids = model_obj.browse(cr, uid, object_ids) for object_id in object_ids: html += request.website._render(template, {'object_id': object_id}) return html def _image_placeholder(self, response): # file_open may return a StringIO. StringIO can be closed but are # not context managers in Python 2 though that is fixed in 3 with contextlib.closing(openerp.tools.misc.file_open( os.path.join('web', 'static', 'src', 'img', 'placeholder.png'), mode='rb')) as f: response.data = f.read() return response.make_conditional(request.httprequest) def _image(self, cr, uid, model, id, field, response, max_width=maxint, max_height=maxint, cache=None, context=None): """ Fetches the requested field and ensures it does not go above (max_width, max_height), resizing it if necessary. Resizing is bypassed if the object provides a $field_big, which will be interpreted as a pre-resized version of the base field. If the record is not found or does not have the requested field, returns a placeholder image via :meth:`~._image_placeholder`. Sets and checks conditional response parameters: * :mailheader:`ETag` is always set (and checked) * :mailheader:`Last-Modified is set iif the record has a concurrency field (``__last_update``) The requested field is assumed to be base64-encoded image data in all cases. 
""" Model = self.pool[model] id = int(id) ids = Model.search(cr, uid, [('id', '=', id)], context=context) if not ids and 'website_published' in Model._fields: ids = Model.search(cr, openerp.SUPERUSER_ID, [('id', '=', id), ('website_published', '=', True)], context=context) if not ids: return self._image_placeholder(response) concurrency = '__last_update' [record] = Model.read(cr, openerp.SUPERUSER_ID, [id], [concurrency, field], context=context) if concurrency in record: server_format = openerp.tools.misc.DEFAULT_SERVER_DATETIME_FORMAT try: response.last_modified = datetime.datetime.strptime( record[concurrency], server_format + '.%f') except ValueError: # just in case we have a timestamp without microseconds response.last_modified = datetime.datetime.strptime( record[concurrency], server_format) # Field does not exist on model or field set to False if not record.get(field): # FIXME: maybe a field which does not exist should be a 404? return self._image_placeholder(response) response.set_etag(hashlib.sha1(record[field]).hexdigest()) response.make_conditional(request.httprequest) if cache: response.cache_control.max_age = cache response.expires = int(time.time() + cache) # conditional request match if response.status_code == 304: return response data = record[field].decode('base64') image = Image.open(cStringIO.StringIO(data)) response.mimetype = Image.MIME[image.format] filename = '%s_%s.%s' % (model.replace('.', '_'), id, str(image.format).lower()) response.headers['Content-Disposition'] = 'inline; filename="%s"' % filename if (not max_width) and (not max_height): response.data = data return response w, h = image.size max_w = int(max_width) if max_width else maxint max_h = int(max_height) if max_height else maxint if w < max_w and h < max_h: response.data = data else: size = (max_w, max_h) img = image_resize_and_sharpen(image, size, preserve_aspect_ratio=True) image_save_for_web(img, response.stream, format=image.format) # invalidate content-length computed by 
make_conditional as # writing to response.stream does not do it (as of werkzeug 0.9.3) del response.headers['Content-Length'] return response def image_url(self, cr, uid, record, field, size=None, context=None): """Returns a local url that points to the image field of a given browse record.""" model = record._name id = '%s_%s' % (record.id, hashlib.sha1(record.sudo().write_date).hexdigest()[0:7]) size = '' if size is None else '/%s' % size return '/website/image/%s/%s/%s%s' % (model, id, field, size) class website_menu(osv.osv): _name = "website.menu" _description = "Website Menu" _columns = { 'name': fields.char('Menu', required=True, translate=True), 'url': fields.char('Url'), 'new_window': fields.boolean('New Window'), 'sequence': fields.integer('Sequence'), # TODO: support multiwebsite once done for ir.ui.views 'website_id': fields.many2one('website', 'Website'), 'parent_id': fields.many2one('website.menu', 'Parent Menu', select=True, ondelete="cascade"), 'child_id': fields.one2many('website.menu', 'parent_id', string='Child Menus'), 'parent_left': fields.integer('Parent Left', select=True), 'parent_right': fields.integer('Parent Right', select=True), } def __defaults_sequence(self, cr, uid, context): menu = self.search_read(cr, uid, [(1,"=",1)], ["sequence"], limit=1, order="sequence DESC", context=context) return menu and menu[0]["sequence"] or 0 _defaults = { 'url': '', 'sequence': __defaults_sequence, 'new_window': False, } _parent_store = True _pa
import collections

import requests

# Lightweight record for one search hit from the movie service.
MovieResult = collections.namedtuple(
    'MovieResult',
    "imdb_code,title,duration,director,year,rating,imdb_score,keywords,genres")


def find_movies(search_text):
    """Search the TalkPython movie service and return matching movies.

    :param search_text: Free-text search term; must be non-empty.
    :return: List of MovieResult, sorted newest-first by release year.
    :raises ValueError: If search_text is empty or whitespace-only.
    :raises requests.HTTPError: If the service responds with an error status.
    """
    if not search_text or not search_text.strip():
        raise ValueError("Search text is required")

    # This URL changed since the recording to support SSL.
    # TODO(review): the comment says SSL, but the scheme is still http;
    # confirm whether the endpoint should be https.
    url = 'http://movieservice.talkpython.fm/api/search/{}'.format(search_text)

    resp = requests.get(url)
    resp.raise_for_status()

    movie_data = resp.json()
    # Fix: 'hits' may be absent or null in the payload; the original
    # indexed it directly and would raise TypeError in the comprehension.
    movies_list = movie_data.get('hits') or []
    movies = [MovieResult(**md) for md in movies_list]

    # Newest first; explicit reverse sort instead of negating the key.
    movies.sort(key=lambda m: m.year, reverse=True)
    return movies
from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, current_user, logout_user, login_required
from . import auth
from ..models import User, AnonymousUser
from .forms import LoginForm, RegistrationForm, ChangePasswordForm, \
    PasswordResetRequestForm, PasswordResetForm, ChangeEmailForm
from .. import db
from ..email import send_email


@auth.before_app_request
def before_request():
    """Refresh the user's last-seen time and funnel unconfirmed accounts.

    Fix: request.endpoint is None for URLs that match no route (404s), so
    slicing it unconditionally raised TypeError; the 'static' endpoint is
    also exempted so CSS/JS still load on the unconfirmed page.
    """
    if current_user.is_authenticated():
        current_user.ping()
        if not current_user.confirmed \
                and request.endpoint is not None \
                and request.endpoint[:5] != 'auth.' \
                and request.endpoint != 'static':
            return redirect(url_for('auth.unconfirmed'))


@auth.route('/unconfirmed')
def unconfirmed():
    """Landing page for logged-in users whose email is not yet confirmed."""
    if current_user.is_anonymous() or current_user.confirmed:
        return redirect(url_for('main.index'))
    return render_template('auth/unconfirmed.html')


@auth.route('/login', methods=['GET', 'POST'])
def login():
    """Log a user in, honouring the optional ?next= redirect target."""
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        if user is not None and user.verify_password(form.password.data):
            login_user(user, form.remember_me.data)
            return redirect(request.args.get('next') or url_for('main.index'))
        flash('Invalid username or password.')
    return render_template('auth/login.html', form=form)


@auth.route('/logout')
@login_required
def logout():
    """End the current session."""
    logout_user()
    flash('You have been logged out.')
    return redirect(url_for('main.index'))


@auth.route('/register', methods=['GET', 'POST'])
def register():
    """Create a new account and email a confirmation token."""
    form = RegistrationForm()
    if form.validate_on_submit():
        user = User(email=form.email.data,
                    username=form.username.data,
                    password=form.password.data)
        db.session.add(user)
        # Commit now so the new user has an id for the confirmation token.
        db.session.commit()
        token = user.generate_confirmation_token()
        send_email(user.email, 'Confirm Your Account',
                   'auth/email/confirm', user=user, token=token)
        flash('A confirmation email has been sent to you by email.')
        return redirect(url_for('auth.login'))
    return render_template('auth/register.html', form=form)


@auth.route('/confirm/<token>')
@login_required
def confirm(token):
    """Confirm the logged-in user's email address via a signed token."""
    if current_user.confirmed:
        return redirect(url_for('main.index'))
    if current_user.confirm(token):
        flash('You have confirmed your account. Thanks!')
    else:
        flash('The confirmation link is invalid or has expired.')
    return redirect(url_for('main.index'))


@auth.route('/confirm')
@login_required
def resend_confirmation():
    """Send a fresh confirmation token to the current user."""
    token = current_user.generate_confirmation_token()
    send_email(current_user.email, 'Confirm Your Account',
               'auth/email/confirm', user=current_user, token=token)
    flash('A new confirmation email has been sent to you by email.')
    return redirect(url_for('main.index'))


@auth.route('/change_password', methods=['GET', 'POST'])
@login_required
def change_password():
    """Change the current user's password after verifying the old one."""
    form = ChangePasswordForm()
    if form.validate_on_submit():
        if current_user.verify_password(form.old_password.data):
            current_user.password = form.password.data
            db.session.add(current_user)
            flash('Your password has been updated.')
            return redirect(url_for('main.index'))
        else:
            flash('Invalid password.')
    return render_template("auth/change_password.html", form=form)


@auth.route('/reset', methods=['GET', 'POST'])
def password_reset_request():
    """Email a password-reset token to the given address, if registered."""
    if not current_user.is_anonymous():
        return redirect(url_for('main.index'))
    form = PasswordResetRequestForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        if user:
            token = user.generate_reset_token()
            send_email(user.email, 'Reset Your Password',
                       'auth/email/reset_password',
                       user=user, token=token,
                       next=request.args.get('next'))
        # Same flash whether or not the address exists, to avoid leaking
        # which emails are registered.
        flash('An email with instructions to reset your password has been '
              'sent to you.')
        return redirect(url_for('auth.login'))
    return render_template('auth/reset_password.html', form=form)


@auth.route('/reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
    """Set a new password using a previously emailed reset token."""
    if not current_user.is_anonymous():
        return redirect(url_for('main.index'))
    form = PasswordResetForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        if user is None:
            return redirect(url_for('main.index'))
        if user.reset_password(token, form.password.data):
            flash('Your password has been updated.')
            return redirect(url_for('auth.login'))
        else:
            return redirect(url_for('main.index'))
    return render_template('auth/reset_password.html', form=form)


@auth.route('/change_email', methods=['GET', 'POST'])
@login_required
def change_email_request():
    """Start an email change: verify password, then mail a change token."""
    form = ChangeEmailForm()
    if form.validate_on_submit():
        if current_user.verify_password(form.password.data):
            new_email = form.email.data
            token = current_user.generate_email_change_token(new_email)
            send_email(new_email, 'Confirm your email address',
                       'auth/email/change_email',
                       user=current_user, token=token)
            flash('An email with instructions to confirm your new email '
                  'address has been sent to you.')
            return redirect(url_for('main.index'))
        else:
            flash('Invalid email or password.')
    return render_template("auth/change_email.html", form=form)


@auth.route('/change_email/<token>')
@login_required
def change_email(token):
    """Finish an email change using the token mailed to the new address."""
    if current_user.change_email(token):
        flash('Your email address has been updated.')
    else:
        flash('Invalid request.')
    return redirect(url_for('main.index'))
#!/usr/bin/env python
"""Launch the SwampDragon server for the ``flat`` Django project.

Usage: server.py [host:port]
"""
import os
import sys

from swampdragon.swampdragon_server import run_server

# Must be set before any Django machinery is touched.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "flat.settings")


def main():
    """Start the server, binding to the optional host:port argument."""
    # Fix: guard execution behind __main__ so importing this module no
    # longer starts the server as a side effect.
    host_port = sys.argv[1] if len(sys.argv) > 1 else None
    run_server(host_port=host_port)


if __name__ == '__main__':
    main()
""" This module signs newly installed client puppet certificates if the puppet master server is running on the same machine as the cobbler server. Based on: http://www.ithiriel.com/content/2010/03/29/writing-install-triggers-cobbler """ from builtins import str import re import cobbler.utils as utils def register(): # this pure python trigger acts as if it were a legacy shell-trigger, but is much faster. # the return of this method indicates the trigger type return "/var/lib/cobbler/triggers/install/post/*" def run(api, args, logger): objtype = args[0] # "system" or "profile" name = args[1] # name of system or profile # ip = args[2] # ip or "?" if objtype != "system": return 0 settings = api.settings() if not str(settings.puppet_auto_setup).lower() in ["1", "yes", "y", "true"]: return 0 if not str(settings.sign_puppet_certs_automatically).lower() in ["1", "yes", "y", "true"]:
return 0 system = api.find_system(name) system = utils.blender(api, False, system) hostname = system["hostname"] if not re.match(r'[\w-]+\..+', hostname): search_domains = system['name_servers_search'] if search_domains: hostname += '.' + search_domains[0] puppetca_path = settings.puppetca_path cmd = [puppetca_pa
th, 'cert', 'sign', hostname] rc = 0 try: rc = utils.subprocess_call(logger, cmd, shell=False) except: if logger is not None: logger.warning("failed to execute %s" % puppetca_path) if rc != 0: if logger is not None: logger.warning("signing of puppet cert for %s failed" % name) return 0
# Principal Component Analysis Code :
# Python 2 script: runs PCA on a haptic feature matrix, projects onto the
# top eigenvectors, then classifies with kNN via pyMVPA and plots results.

from numpy import mean,cov,double,cumsum,dot,linalg,array,rank,size,flipud
from pylab import *
import numpy as np
import matplotlib.pyplot as pp
#from enthought.mayavi import mlab
import scipy.ndimage as ni
import roslib; roslib.load_manifest('sandbox_tapo_darpa_m3')
import rospy
#import hrl_lib.mayavi2_util as mu
import hrl_lib.viz as hv
import hrl_lib.util as ut
import hrl_lib.matplotlib_util as mpu
import pickle
from mvpa.clfs.knn import kNN
from mvpa.datasets import Dataset
from mvpa.clfs.transerror import TransferError
from mvpa.misc.data_generators import normalFeatureDataset
from mvpa.algorithms.cvtranserror import CrossValidatedTransferError
from mvpa.datasets.splitters import NFoldSplitter
import sys
sys.path.insert(0, '/home/tapo/svn/robot1_data/usr/tapo/data_code/BMED_8813_HAP/Data')
from data import Fmat_original

def pca(X):
    # Covariance-method PCA.
    # X: 2-D data matrix; rows are presumably features and columns samples
    # (the mean is taken along axis=1) — TODO confirm against the caller.
    # Returns (eigenvectors, eigenvalues, mean vector, centered data,
    # covariance matrix).
    #get dimensions
    num_data,dim = X.shape
    #center data
    mean_X = X.mean(axis=1)
    M = (X-mean_X) # subtract the mean (along columns)
    Mcov = cov(M)
    ###### Sanity Check ######
    # NaN scan: x != x is only true for NaN. The 41x90 bounds are
    # hard-coded to this dataset's expected shape.
    i=0
    n=0
    while i < 41:
        j=0
        while j < 90:
            if X[i,j] != X[i,j]:
                print X[i,j]
                print i,j
                n=n+1
            j = j+1
        i=i+1
    print n
    ##########################
    print 'PCA - COV-Method used'
    val,vec = linalg.eig(Mcov)
    #return the projection matrix, the variance and the mean
    return vec,val,mean_X, M, Mcov

if __name__ == '__main__':

    # Rows 82:123 select the 41-feature slice of interest from the dataset.
    Fmat = Fmat_original[82:123,:]

    # Checking the Data-Matrix
    m_tot, n_tot = np.shape(Fmat)
    print 'Total_Matrix_Shape:',m_tot,n_tot

    eigvec_total, eigval_total, mean_data_total, B, C = pca(Fmat)
    #print eigvec_total
    #print eigval_total
    #print mean_data_total
    m_eigval_total, n_eigval_total = np.shape(np.matrix(eigval_total))
    m_eigvec_total, n_eigvec_total = np.shape(eigvec_total)
    m_mean_data_total, n_mean_data_total = np.shape(np.matrix(mean_data_total))
    print 'Eigenvalue Shape:',m_eigval_total, n_eigval_total
    print 'Eigenvector Shape:',m_eigvec_total, n_eigvec_total
    print 'Mean-Data Shape:',m_mean_data_total, n_mean_data_total

    #Recall that the cumulative sum of the eigenvalues shows the level of variance accounted by each of the corresponding eigenvectors. On the x axis there is the number of eigenvalues used.
    perc_total = cumsum(eigval_total)/sum(eigval_total)

    # Reduced Eigen-Vector Matrix according to highest Eigenvalues..(Considering First 20 based on above figure)
    # NOTE(review): slice keeps 6 components, not 20 as the comment says.
    W = eigvec_total[:,0:6]
    m_W, n_W = np.shape(W)
    print 'Reduced Dimension Eigenvector Shape:',m_W, n_W

    #Projected Data:
    Y = (W.T)*B
    m_Y, n_Y = np.shape(Y.T)
    print 'Transposed Projected Data Shape:', m_Y, n_Y

    #Using PYMVPA
    # Labels: 3 contact classes x 30 samples; chunks name the object/contact
    # combination so cross-validation folds split by acquisition run.
    PCA_data = np.array(Y.T)
    PCA_label_1 = ['Edge-1']*30 + ['Surface']*30 + ['Edge-2']*30
    PCA_chunk_1 = ['Can-Edge-1']*5 + ['Book-Edge-1']*5 + ['Brown-Cardboard-Box-Edge-1']*5 + ['Cinder-Block-Edge-1']*5 + ['Tin-Box-Edge-1']*5 + ['White-Cardboard-Box-Edge-1']*5 + ['Can-Surface']*5 + ['Book-Surface']*5 + ['Brown-Cardboard-Box-Surface']*5 + ['Cinder-Block-Surface']*5 + ['Tin-Box-Surface']*5 + ['White-Cardboard-Box-Surface']*5 + ['Can-Edge-2']*5 + ['Book-Edge-2']*5 + ['Brown-Cardboard-Box-Edge-2']*5 + ['Cinder-Block-Edge-2']*5 + ['Tin-Box-Edge-2']*5 + ['White-Cardboard-Box-Edge-2']*5
    clf = kNN(k=2)
    terr = TransferError(clf)
    ds1 = Dataset(samples=PCA_data,labels=PCA_label_1,chunks=PCA_chunk_1)
    print ds1.samples.shape
    # Leave-one-chunk-out cross-validation with a confusion matrix.
    cvterr = CrossValidatedTransferError(terr,NFoldSplitter(cvtype=1),enable_states=['confusion'])
    error = cvterr(ds1)
    print error
    print cvterr.confusion.asstring(description=False)
    figure(1)
    cvterr.confusion.plot(numbers='True')

    # Variances
    figure(2)
    title('Variances of PCs')
    stem(range(len(perc_total)),perc_total,'--b')
    axis([-0.3,30.3,0,1.2])
    grid('True')
    show()
import collections
import re
# imports


class Jison:  # extends
    """Skeleton of a Jison (Bison-style LALR) parser/lexer ported to Python.

    The ``@@...@@`` docstring markers are placeholders a code generator
    replaces with grammar-specific tables and actions; helpers referenced
    below (``lexerLex``, ``current_rules``) are presumably injected the same
    way — TODO confirm against the generator.
    """

    # NOTE(review): these tables are class-level and therefore shared by all
    # Jison instances; presumably the generator fills them once per grammar.
    symbols = {}
    terminals = {}
    productions = {}
    table = {}
    default_actions = {}
    version = '0.3.12'
    debug = False

    # parser action codes
    action_none = 0
    action_shift = 1
    action_deduce = 2
    action_accept = 3

    unput_stack = []

    def trace(self):
        """trace"""

    def __init__(self):
        """Setup Parser"""
        """@@PARSER_INJECT@@"""

    def parser_perform_action(self, yy, yystate, s, o):
        """@@ParserPerformAction@@"""

    def parser_lex(self):
        """Fetch the next token, falling back to the 'end' symbol."""
        # NOTE(review): lexerLex is not defined in this template — it is
        # presumably supplied by the generator. TODO confirm.
        token = self.lexerLex()
        if token is not None:
            return token
        # FIX: the attribute is `symbols`; `self.Symbols` raised
        # AttributeError as soon as the lexer ran dry.
        return self.symbols["end"]

    def parse_error(self, _str='', _hash=None):
        raise Exception(_str)

    def lexer_error(self, _str='', _hash=None):
        raise Exception(_str)

    def parse(self, _input):
        """Run the LALR engine over ``_input``.

        NOTE(review): the port is unfinished — the loop only implements the
        error-reporting path and never shifts/reduces.
        """
        if self.table is None:
            raise Exception("Empty ")
        self.eof = ParserSymbol("Eof", 1)
        first_action = ParserAction(0, self.table[0])
        first_cached_action = ParserCachedAction(first_action)
        # FIX: deque() expects an iterable — deque(item) iterated the item
        # itself and deque(None) raised TypeError.
        stack = collections.deque([first_cached_action])
        stack_count = 1
        vstack = collections.deque([None])
        vstack_count = 1  # FIX: was misspelled `vstach_count`
        yy = None
        _yy = None
        recovering = 0
        symbol = None
        action = None
        err_str = ''
        pre_error_symbol = None
        state = None
        self.set_input(_input)
        while True:
            # retrieve state number from top of stack
            # FIX: the top element is stack_count - 1; index stack_count was
            # always one past the end of the deque.
            state = stack[stack_count - 1].action.state
            # use default actions if available
            # FIX: .get() — a plain [] lookup raised KeyError for states
            # without a default action.
            if state is not None and self.default_actions.get(state.index):
                action = self.default_actions[state.index]
            else:
                if symbol is None:
                    symbol = self.parser_lex()
                # read action for current state and first input
                if state is not None:
                    action = state.actions[symbol.index]
                else:
                    action = None
            if action is None:
                if recovering == 0:  # FIX: `is 0` is identity, not equality
                    # Report error
                    expected = []
                    actions = self.table[state.index].actions
                    for p in actions:
                        # FIX: Python lists use append(); .push() is JavaScript.
                        if self.terminals.get(p) is not None and p > 2:
                            expected.append(self.terminals[p].name)
                    if symbol.index in self.terminals:
                        got = self.terminals[symbol.index].name
                    else:
                        got = "NOTHING"
                    # FIX: line_no is an int (cannot be `+`-concatenated) and
                    # `self.show` does not exist — show_position() does.
                    err_str = ("Parser error on line %s:\n%s\nExpecting %s. "
                               "got '%s'" % (self.yy.line_no,
                                             self.show_position(),
                                             ", ".join(expected), got))
                    # FIX: parse_error is a bound method (no explicit self),
                    # and ParserError requires its six fields.
                    self.parse_error(err_str, ParserError(
                        err_str, state, symbol, self.yy.line_no,
                        self.yy.loc, expected))

    # Jison generated lexer
    eof = None
    yy = None
    match = ''
    condition_stack = collections.deque()
    rules = {}
    conditions = {}
    done = False
    less = None
    _more = False
    input = None
    offset = None
    ranges = None
    flex = False
    line_expression = re.compile(r"(?:\r\n?|\n).*")

    def set_input(self, _input):
        """Reset the lexer over a fresh input string."""
        self.input = InputReader(_input)
        self._more = self.less = self.done = False
        self.yy = ParserValue()
        self.condition_stack.clear()
        self.condition_stack.append('INITIAL')
        self.yy.loc = ParserLocation()
        if self.ranges is not None:
            self.yy.loc.set_range(ParserRange(0, 0))
        self.offset = 0

    def input(self):
        """Consume and return one character, updating position bookkeeping.

        NOTE(review): this method is shadowed by the `self.input` instance
        attribute the moment set_input() runs; one of the two presumably
        needs renaming. Left untouched to keep the template's interface.
        """
        ch = self.input.ch()
        self.yy.text += ch
        self.yy.leng += 1
        self.offset += 1
        self.match += ch
        lines = self.line_expression.match(ch)
        if lines is not None:
            self.yy.line_no += 1
            self.yy.loc.last_line += 1
        else:
            self.yy.loc.last_column += 1
        if self.ranges is not None:
            self.yy.loc.range.y += 1
        return ch

    def unput(self, ch):
        """Push ``ch`` back onto the input, rolling back location state."""
        yy = ParserValue()
        _len = len(ch)
        lines = self.line_expression.split(ch)
        lines_count = len(lines)
        self.input.un_ch(_len)
        yy.text = self.yy.text[0: _len - 1]
        self.offset -= _len
        old_lines = self.line_expression.split(self.match)
        old_lines_count = len(old_lines)
        self.match = self.match[0:len(self.match) - 1]
        if lines_count - 1 > 0:
            yy.line_no = self.yy.line_no - lines_count - 1
        r = self.yy.loc.range
        old_lines_length = (old_lines[old_lines_count - lines_count]
                            if old_lines[old_lines_count - lines_count] is not None
                            else 0)
        # FIX: five positional args were passed to a four-parameter
        # constructor; the trailing None (marked TODO upstream) is dropped.
        yy.loc = ParserLocation(
            self.yy.loc.first_line, self.yy.line_no,
            self.yy.loc.first_column, self.yy.loc.first_line)  # TODO last_column
        if self.ranges is not None:
            # FIX: `range` is an attribute — calling it raised TypeError;
            # set_range() is the mutator.
            yy.loc.set_range(ParserRange(r.x, r.x + self.yy.leng - _len))
        # FIX: append(), not the JavaScript push().
        self.unput_stack.append(yy)

    def more(self):
        self._more = True

    def past_input(self):
        """Return (up to) the last 20 already-consumed characters."""
        matched = self.input.to_string()
        past = matched[0:len(matched) - len(self.match)]
        # FIX: past[-20] was a single character; the 20-char tail slice was meant.
        result = past[-20:].replace('\n', '')
        if len(past) > 20:
            return '...' + result
        return result

    def upcoming_input(self):
        """Return (up to) the next 20 characters of pending input."""
        # FIX: lowercase `false` was a NameError, the local `next` shadowed a
        # builtin, and the truncation kept the wrong end of the string —
        # upstream jison keeps the head plus '...'.
        pending = self.match
        if not self.done and len(pending) < 20:
            pending += self.input.to_string()[:20 - len(pending)]
        if len(pending) > 20:
            pending = pending[:20] + '...'
        return pending.replace('\n', '')

    def show_position(self):
        """Render an error pointer: consumed text, pending text, caret."""
        pre = self.past_input()
        c = '-' * len(pre)
        return pre + self.upcoming_input() + '\n' + c + '^'

    def next(self):
        """Advance the lexer one step.

        NOTE(review): unfinished — current_rules() is generator-injected and
        no token is produced yet.
        """
        if len(self.unput_stack) > 0:
            # FIX: `this.yy = yy` was leftover JavaScript; restore the
            # state buffered by unput().
            self.yy = self.unput_stack.pop()
        if self.done:
            return self.eof
        if self.input.done:
            self.done = True  # FIX: True, not the JS literal `true`
        if self._more is False:  # FIX: previously tested the bound method `more`
            self.yy.text = ''
            self.match = ''
        rules = self.current_rules()


class ParserLocation:
    """Mutable source span (1-based lines, 0-based columns)."""

    def __init__(self, first_line=1, last_line=0, first_column=1, last_column=0):
        self.first_line = first_line
        self.last_line = last_line
        self.first_column = first_column
        self.last_column = last_column
        self.range = None  # optional ParserRange, set via set_range()

    def set_range(self, range):
        self.range = range


class ParserValue:
    """Semantic value produced by the lexer (text, length, location)."""
    leng = 0
    loc = None
    line_no = 0
    # FIX: text defaulted to None, so the first `yy.text += ch` in input()
    # raised TypeError; the empty string is the working identity element.
    text = ''


class ParserCachedAction:
    """Pairs an action with the symbol it was taken on (stack entries)."""

    def __init__(self, action, symbol=None):
        self.action = action
        self.symbol = symbol


class ParserAction:
    """One table entry: action code plus optional target state/symbol."""

    def __init__(self, action, state=None, symbol=None):
        self.action = action
        self.state = state
        self.symbol = symbol


class ParserSymbol:
    """Grammar symbol plus its action lookup tables."""

    Index = 0  # NOTE(review): apparently vestigial duplicate of `index`

    def __init__(self, name, index):
        self.name = name
        self.index = index
        # FIX: the lookup tables were class-level and therefore shared by
        # every symbol instance; they are now per-instance.
        self.symbols = {}
        self.symbols_by_name = {}

    def add_action(self, parser_action):
        # NOTE(review): ParserAction defines neither `index` nor `name`;
        # presumably the generated actions carry both. TODO confirm.
        self.symbols[parser_action.index] = \
            self.symbols_by_name[parser_action.name] = parser_action


class ParserError:
    """Structured description of a parse failure."""

    def __init__(self, text, state, symbol, line_no, loc, expected):
        self.text = text
        self.state = state
        self.symbol = symbol
        self.line_no = line_no
        self.loc = loc
        self.expected = expected


class LexerError:
    """Structured description of a tokenisation failure."""

    def __init__(self, text, token, line_no):
        self.text = text
        self.token = token
        # FIX: the assignment was missing — `self.line_no` was a bare,
        # effect-free expression.
        self.line_no = line_no


class ParserState:
    """A parse-table state: its index and per-symbol actions."""

    def __init__(self, index):
        self.index = index
        # FIX: `actions` was a shared class-level list.
        self.actions = []

    def set_actions(self, actions):
        self.actions = actions


class ParserRange:
    """Half-open character offset pair (x, y)."""

    def __init__(self, x, y):
        self.x = x
        self.y = y


class InputReader:
    """Forward-only character reader with pushback over a string."""

    def __init__(self, _input):
        self.input = _input
        self.length = len(_input)
        # FIX: `matches`/`position`/`done` were class-level, so every reader
        # instance shared (and appended to) the same match list.
        self.matches = []
        self.position = 0
        self.done = False

    def add_match(self, match):
        self.matches.append(match)
        self.position += len(match)
        self.done = (self.position >= self.length)

    def ch(self):
        """Consume and return the character at the current position."""
        ch = self.input[self.position]
        self.add_match(ch)
        return ch

    def un_ch(self, ch_length):
        """Back up by ``ch_length`` characters (clamped at the start)."""
        self.position -= ch_length
        self.position = max(0, self.position)
        self.done = (self.position >= self.length)

    def substring(self, start, end):
        start = self.position if start == 0 else start + self.position
        end = self.length if end == 0 else start + end
        return self.input[start:end]

    def match(self, rule):
        """Search ``rule`` against the unconsumed remainder of the input."""
        # FIX: re.search was handed the integer position instead of text.
        matches = re.search(rule, self.input[self.position:])
        if matches is not None:
            return matches.group()
        return None

    def to_string(self):
        return ''.join(self.matches)
'sphinx.ext.*') or your custom # ones. # on_rtd is whether we are on readthedocs.org on_rtd = os.environ.get('READTHEDOCS', None) == 'True' if not on_rtd: # only import and set the theme if we're building docs locally import sphinx_rtd_theme html_theme = 'sphinx_rtd_theme' html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinxcontrib.napoleon', 'sphinx.ext.todo', 'sphinx.ext.mathjax', 'sphinx.ext.graphviz', 'sphinx.ext.intersphinx' ] intersphinx_mapping = { 'theano': ('http://theano.readthedocs.org/en/latest/', None), 'numpy': ('http://docs.scipy.org/doc/numpy/', None), 'scipy': ('http://docs.scipy.org/doc/scipy/reference/', None), 'python': ('http://docs.python.org/3.4', None), 'pandas': ('http://pandas.pydata.org/pandas-docs/stable/', None) } graphviz_dot_args = ['-Gbgcolor=#fcfcfc'] # To match the RTD theme # Render todo lists todo_include_todos = True # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Fuel' copyright = u'2014, Université de Montréal' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '0.1' # The full version, including alpha/beta/rc tags. release = '0.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. 
#today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. #html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. 
#html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path =
[] # If not
'', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'Fueldoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ ('index', 'Fuel.tex', u'Fuel Documentation', u'Université de Montréal', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'fuel', u'Fuel Documentation', [u'Université de Montréal'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'Fuel', u'Fuel Documentation', u'Université de Montréal', 'Fuel', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False import inspect from sphinx.ext.autodoc import cut_lines def skip_abc(app, w
from multiprocessing import Pool


class Process:
    """Thin convenience wrapper around :class:`multiprocessing.Pool`.

    FIX: the pool was created but never released; ``close()`` and context-
    manager support are added (backward compatible — existing callers of
    ``Exec`` are unchanged).
    """

    def __init__(self, processes=8):
        # Worker pool sized by `processes` (default 8, as before).
        self.p = Pool(processes)

    def Exec(self, f, data):
        """Apply ``f`` to every element of ``data`` in parallel.

        Returns the list of results in input order, like builtin map().
        ``f`` must be picklable (a top-level function or builtin).
        """
        return self.p.map(f, data)

    def close(self):
        """Shut down the worker pool and wait for workers to exit."""
        self.p.close()
        self.p.join()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        # Always release workers, even when the block raised.
        self.close()
        return False
# -*- coding: utf-8 -*-

"""
requests.api
~~~~~~~~~~~~

This module implements the Requests API.

:copyright: (c) 2012 by Kenneth Reitz.
:license: Apache2, see LICENSE for more details.
"""

from . import sessions


def request(method, url, **kwargs):
    """Construct and send a :class:`Request <Request>`.

    :param method: HTTP verb for the new :class:`Request` object.
    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: any keyword accepted by :meth:`Session.request`:
        ``params``, ``data``, ``json``, ``headers``, ``cookies``, ``files``,
        ``auth``, ``timeout``, ``allow_redirects``, ``proxies``, ``verify``,
        ``stream`` and ``cert``.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response

    Usage::

      >>> import requests
      >>> req = requests.request('GET', 'http://httpbin.org/get')
      <Response [200]>
    """
    # A context-managed, throwaway session guarantees the underlying
    # sockets are released on every code path — otherwise lingering
    # connections can raise ResourceWarning and look like a memory leak.
    with sessions.Session() as session:
        return session.request(method=method, url=url, **kwargs)


def get(url, params=None, **kwargs):
    """Sends a GET request.

    :param url: URL for the new :class:`Request` object.
    :param params: (optional) query-string mapping or bytes.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    # GETs follow redirects unless the caller says otherwise.
    kwargs.setdefault('allow_redirects', True)
    return request('get', url, params=params, **kwargs)


def options(url, **kwargs):
    """Sends a OPTIONS request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    kwargs.setdefault('allow_redirects', True)
    return request('options', url, **kwargs)


def head(url, **kwargs):
    """Sends a HEAD request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    # HEADs do not follow redirects by default.
    kwargs.setdefault('allow_redirects', False)
    return request('head', url, **kwargs)


def post(url, data=None, json=None, **kwargs):
    """Sends a POST request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) dict, bytes, or file-like request body.
    :param json: (optional) json data to send in the body.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request('post', url, data=data, json=json, **kwargs)


def put(url, data=None, **kwargs):
    """Sends a PUT request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) dict, bytes, or file-like request body.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request('put', url, data=data, **kwargs)


def patch(url, data=None, **kwargs):
    """Sends a PATCH request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) dict, bytes, or file-like request body.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request('patch', url, data=data, **kwargs)


def delete(url, **kwargs):
    """Sends a DELETE request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request('delete', url, **kwargs)
pr(exc_info[1]) if exprinfo and exprinfo.startswith(cls._assert_start_repr): _striptext = "AssertionError: " return cls(exc_info, _striptext) @classmethod def from_current( cls, exprinfo: Optional[str] = None ) -> "ExceptionInfo[BaseException]": """returns an ExceptionInfo matching the current traceback .. warning:: Experimental API :param exprinfo: a text string helping to determine if we should strip ``AssertionError`` from the output, defaults to the exception message/``__str__()`` """ tup = sys.exc_info() assert tup[0] is not None, "no current exception" assert tup[1] is not None, "no current exception" assert tup[2] is not None, "no current exception" exc_info = (tup[0], tup[1], tup[2]) return cls.from_exc_info(exc_info) @classmethod def for_later(cls) -> "ExceptionInfo[_E]": """return an unfilled ExceptionInfo """ return cls(None) def fill_unfilled(self, exc_info: Tuple["Type[_E]", _E, TracebackType]) -> None: """fill an unfilled ExceptionInfo created with for_later()""" assert self._excinfo is None, "ExceptionInfo was already filled" self._excinfo = exc_info @property def type(self) -> "Type[_E]": """the exception class""" assert ( self._excinfo is not None ), ".type can only be used after the context manager exits" return self._excinfo[0] @property def value(self) -> _E: """the exception value""" assert ( self._excinfo is not None ), ".value can only be used after the context manager exits" return self._excinfo[1] @property def tb(self) -> TracebackType: """the exception raw traceback""" assert ( self._excinfo is not None ), ".tb can only be used after the context manager exits" return self._excinfo[2] @property def typename(self) -> str: """the type name of the exception""" assert ( self._excinfo is not None ), ".typename can only be used after the context manager exits" return self.type.__name__ @property def traceback(self) -> Traceback: """the traceback""" if self._traceback is None: self._traceback = Traceback(self.tb, excinfo=ref(self)) return 
self._traceback @traceback.setter def traceback(self, value: Traceback) -> None: self._traceback = value def __repr__(self) -> str: if self._excinfo is None: return "<ExceptionInfo for raises contextmanager>" return "<ExceptionInfo %s tblen=%d>" % (self.typename, len(self.traceback)) def exconly(self, tryshort: bool = False) -> str: """ return the exception as a string when 'tryshort' resolves to True, and the exception is a _pytest._code._AssertionError, only the actual exception part of the exception representation is returned (so 'AssertionError: ' is removed from the beginning) """ lines = format_exception_only(self.type, self.value) text = "".join(lines) text = text.rstrip() if tryshort: if text.startswith(self._striptext): text = text[len(self._striptext) :] return text def errisinstance( self, exc: Union["Type[BaseException]", Tuple["Type[BaseException]", ...]] ) -> bool: """ return True if the exception is an instance of exc """ return isinstance(self.value, exc) def _getreprcrash(self) -> "ReprFileLocation": exconly = self.exconly(tryshort=True) entry = self.traceback.getcrashentry() path, lineno = entry.frame.code.raw.co_filename, entry.lineno return ReprFileLocation(path, lineno + 1, exconly) def getrepr( self, showlocals: bool = False, style: str = "long", abspath: bool = False, tbfilter: bool = True, funcargs: bool = False, truncate_locals: bool = True, chain: bool = True, ): """ Return str()able representation of this exception info. :param bool showlocals: Show locals per traceback entry. Ignored if ``style=="native"``. :param str style: long|short|no|native traceback style :param bool abspath: If paths should be changed to absolute or left unchanged. :param bool tbfilter: Hide entries that contain a local variable ``__tracebackhide__==True``. Ignored if ``style=="native"``. :param bool funcargs: Show fixtures ("funcargs" for legacy purposes) per traceback entry. 
:param bool truncate_locals: With ``showlocals==True``, make sure locals can be safely represented as strings. :param bool chain: if chained exceptions in Python 3 should be shown. .. versionchanged:: 3.9 Added the ``chain`` parameter. """ if style == "native": return ReprExceptionInfo( ReprTracebackNative( traceback.format_exception( self.type, self.value, self.traceback[0]._rawentry ) ), self._getreprcrash(), ) fmt = FormattedExcinfo( showlocals=showlocals, style=style, abspath=abspath, tbfilter=tbfilter, funcargs=funcargs, truncate_locals=truncate_locals, chain=chain, ) return fmt.repr_excinfo(self) def match(self, regexp: "Union[str, Pattern]") -> bool: """ Check whether the regular expression 'regexp' is found in the string representation of the exception using ``re.search``. If it matches then True is returned (so that it is possible to write ``assert excinfo.match()``). If it doesn't match an AssertionError is raised. """ __tracebackhide__ = True if not re.search(regexp, str(self.value)): assert 0, "Pattern {!r} not found in {!r}".format(regexp, str(self.value)) return True @attr.s class FormattedExcinfo: """ presenting information about failing Functions and Generators. """ # for traceback entries flow_marker = ">" fail_marker = "E" showlocals = attr.ib(default=False) style = attr.ib(default="long") abspath = attr.ib(default=True) tbfilter = attr.ib(default=True) funcargs = attr.ib(default=False) truncate_locals = attr.ib(default=True) chain = attr.ib(default=True) astcache = attr.ib(default=attr.Factory(dict), init=False, repr=False) def _getindent(self, source): # figure out indent for given source try: s = str(source.getstatement(len(source) - 1)) except KeyboardInterrupt: raise except: # noqa try: s = str(source[-1]) except KeyboardInter
rupt: raise except:
# noqa return 0 return 4 + (len(s) - len(s.lstrip())) def _getentrysource(self, entry): source = entry.getsource(self.astcache) if source is not None: source = source.deindent() return source def repr_args(self, entry): if self.funcargs: args = [] for argname, argvalue in entry.frame.getargs(var=True): args.append((argname, saferepr(argvalue))) return ReprFuncArgs(args) def get_source(self, source, line_index=-1, excinfo=None, short=False): """ return formatted and marked up source lines. """ import _pytest._code lines = [] if source is None or line_index >= len(source.lines): source = _pytest._code.Source("???") line_index = 0 if line_index < 0: line_index += len(source)
# -*- coding: utf-8 -*-
"""
Copyright 2013 Ryan Olson

This file is part of CloudApp.

CloudApp is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

CloudApp is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with CloudApp.  If not, see <http://www.gnu.org/licenses/>.
"""
from copy import copy
from pprint import pprint

from flask import current_app, session, g
from flask.ext.principal import UserNeed, RoleNeed, AnonymousIdentity
from flask.ext.couchdb import to_dict

from cloudapp.permissions import valid_user
from cloudapp.authentication.models import Session


def _load_user(user_id, identity):
    """Load ``user_id`` and attach its needs to ``identity``.

    Adds the 'Valid' user need, the user's own id need, and one role need
    per role on the user document. Raises RuntimeError when the id does not
    resolve to a user.
    """
    user = g.User.load(user_id)
    if user:
        identity.user = user
        identity.provides.add(UserNeed('Valid'))
        identity.provides.add(UserNeed(user.id))
        for role in user.roles:
            identity.provides.add(RoleNeed(role))
    else:
        raise RuntimeError("user is None; user_id not found")


def _cache_identity(identity):
    """Best-effort write-through of a loaded identity to the app cache.

    The cached copy stores the user as a plain dict (picklable) and expires
    after 600 seconds to bound staleness. No-op when caching is disabled.
    """
    if current_app.cache is None:
        return
    cached_identity = copy(identity)
    cached_identity.user = to_dict(identity.user)
    current_app.cache.set(identity.name, cached_identity, timeout=600)


def on_load_identity(sender, identity):
    """
    This function is called to load the user's identity from either data
    saved in the client's session or from a identity_changed.send
    signal/notification.

    This function should never be triggered unless we have passed a valid
    identity; however, we should do a quick double check here before loading
    the identity's allowed permissions / needs.

    In the future, we may want to avoid the user lookup and utilize memcache
    for the storage of the user's base information.
    """
    # Fast path: rebuild the identity straight from cache when possible.
    if current_app.cache is not None:
        stored_identity = current_app.cache.get(identity.name)
        if stored_identity is not None:
            identity.user = g.User.wrap(stored_identity.user)
            identity.provides = stored_identity.provides
            if current_app.testing:
                session['loaded_from'] = 'memcached'
            return

    try:
        if identity.auth_type == 'web-token':
            _load_user(identity.name, identity)
            _cache_identity(identity)
        elif identity.auth_type == 'token':
            auth_session = Session.load(identity.name)
            if auth_session:
                _load_user(auth_session.user_id, identity)
                _cache_identity(identity)
                if not session.permanent:
                    session.permanent = True
        if current_app.testing:
            session['loaded_from'] = 'couchdb'
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt. The deliberate fallback is preserved: demote to
        # an anonymous identity and scrub identity keys from the session.
        g.identity = AnonymousIdentity()
        session.pop('identity.name', None)
        session.pop('identity.auth_type', None)
        session.modified = True
def even_fib_sum(limit=4000000):
    """Return the sum of the even-valued Fibonacci terms strictly below *limit*.

    Generalized from the original flat Project Euler #2 script, which
    hard-coded the 4,000,000 bound and shadowed the builtin ``sum``;
    the default keeps the historical behaviour (result 4613732).
    """
    a, b = 1, 2
    total = 0
    while b < limit:
        if b % 2 == 0:
            total += b
        a, b = b, a + b
    return total


if __name__ == '__main__':
    # Same output as the original script; the guard makes the module
    # importable (and testable) without printing.
    print(even_fib_sum())
project) def check_credit_limit(self): from erpnext.selling.doctype.customer.customer import check_credit_limit check_credit_limit(self.customer, self.company) def check_nextdoc_docstatus(self): # Checks Delivery Note submit_dn = frappe.db.sql_list("""select t1.name from `tabDelivery Note` t1,`tabDelivery Note Item` t2 where t1.name = t2.parent and t2.against_sales_order = %s and t1.docstatus = 1""", self.name) if submit_dn: frappe.throw(_("Delivery Notes {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_dn))) # Checks Sales Invoice submit_rv = frappe.db.sql_list("""select t1.name from `tabSales Invoice` t1,`tabSales Invoice Item` t2 where t1.name = t2.parent and t2.sales_order = %s and t1.docstatus = 1""", self.name) if submit_rv: frappe.throw(_("Sales Invoice {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_rv))) #check maintenance schedule submit_ms = frappe.db.sql_list("""select t1.name from `tabMaintenance Schedule` t1, `tabMaintenance Schedule Item` t2 where t2.parent=t1.name and t2.sales_order = %s and t1.docstatus = 1""", self.name) if submit_ms: frappe.throw(_("Maintenance Schedule {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_ms))) # check maintenance visit submit_mv = frappe.db.sql_list("""select t1.name from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2 where t2.parent=t1.name and t2.prevdoc_docname = %s and t1.docstatus = 1""",self.name) if submit_mv: frappe.throw(_("Maintenance Visit {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_mv))) # check production order pro_order = frappe.db.sql_list("""select name from `tabProduction Order` where sales_order = %s and docstatus = 1""", self.name) if pro_order: frappe.throw(_("Production Order {0} must be cancelled before cancelling this Sales Order").format(comma_and(pro_order))) def check_modified_date(self): mod_db = frappe.db.get_value("Sales Order", 
self.name, "modified") date_diff = frappe.db.sql("select TIMEDIFF('%s', '%s')" % ( mod_db, cstr(self.modified))) if date_diff and date_diff[0][0]: frappe.throw(_("{0} {1} has been modified. Please refresh.").format(self.doctype, self.name)) def update_status(self, status): self.check_modified_date() self.set_status(update=True, status=status) self.update_reserved_qty() self.notify_update() clear_doctype_notifications(self) def update_reserved_qty(self, so_item_rows=None): """update requested qty (before ordered_qty is updated)""" item_wh_list = [] def _valid_for_reserve(item_code, warehouse): if item_code and warehouse and [item_code, warehouse] not in item_wh_list \ and frappe.db.get_value("Item", item_code, "is_stock_item"): item_wh_list.append([item_code, warehouse]) for d in self.get("items"): if (not so_item_rows or d.name in so_item_rows) and not d.delivered_by_supplier: if self.has_product_bundle(d.item_code): for p in self.get("packed_items"): if p.parent_detail_docname == d.name and p.parent_item == d.item_code: _valid_for_reserve(p.item_code, p.warehouse) else: _valid_for_reserve(d.item_code, d.warehouse) for item_code, warehouse in item_wh_list: update_bin_qty(item_code, warehouse, { "reserved_qty": get_reserved_qty(item_code, warehouse) }) def on_update(self): pass def before_update_after_submit(self): self.validate_po() self.validate_drop_ship() self.validate_supplier_after_submit() def validate_supplier_after_submit(self): """Check that supplier is the same after submit if PO is already made""" exc_list = [] for item in self.items: if item.supplier: supplier = frappe.db.get_value("Sales Order Item", {"parent": self.name, "item_code": item.item_code}, "supplier") if item.ordered_qty > 0.0 and item.supplier != supplier: exc_list.append(_("Row #{0}: Not allowed to change Supplier as Purchase Order already exists").format(item.idx)) if exc_list: frappe.throw('\n'.join(exc_list)) def update_delivery_status(self): """Update delivery status from Purchase 
Order for drop shipping""" tot_qty, delivered_qty = 0.0, 0.0 for item in self.items: if item.delivered_by_supplier: item_delivered_qty = frappe.db.sql("""select sum(qty) from `tabPurchase Order Item` poi, `tabPurchase Order` po where poi.sales_order_item = %s and poi.item_code = %s and poi.parent = po.name and po.docstatus = 1 and po.status = 'Delivered'""", (item.name, item.item_code)) item_delivered_qty = item_delivered_qty[0][0] if item_delivered_qty else 0 item.db_set("delivered_qty", flt(item_delivered_qty), update_modified=False) delivered_qty += item.delivered_qty tot_qty += item.qty self.db_set("per_delivered", flt(delivered_qty/tot_qty) * 100, update_modified=False) def set_indicator(self): """Set indicator for portal""" if self.per_billed < 100 and self.per_delivered < 100: self.indicator_color = "orange" self.indicator_title = _("Not Paid and Not Delivered") elif self.per_billed == 100 and self.per_delivered < 100: self.indicator_color = "orange" self.indicator_title = _("Paid and Not Delivered") else: self.indicator_color = "green" self.indicator_title = _("Paid") def get_production_order_items(self): '''Returns items with BOM that already do not have a linked production order''' items = [] for table in [self.items, self.packed_items]: for i in table: bom = get_default_bom_item(i.item_code) if bom: stock_qty = i.qty if i.doctype == 'Packed Item' else i.stock_qty items.append(dict( item_code= i.item_code, bom = bom, warehouse = i.warehouse, pending_qty= stock_qty - flt(frappe.db.sql('''select sum(qty) from `tabProduction Order` where production_item=%s and sales_order=%s''', (i.item_code, self.name))[0][0]) )) return items def on_recurring(self, reference_doc, subscription_doc): mcount = month_map[subscription_doc.frequency] self.set("delivery_date", get_next_date(reference_doc.delivery_date, mcount, cint(subscription_doc.repeat_on_day))) for d in self.get("items"): reference_delivery_date = frappe.db.get_value("Sales Order Item", {"parent": 
reference_doc.name, "item_code": d.item_code, "idx": d.idx}, "delivery_date") d.set("delivery_date", get_next_date(reference_delivery_date, mcount, cint(subscription_do
c.repeat_on_day))) def get_list_context(context=None): from erpnext.controllers.website_list_for_contact import get_list_context list_context = get_list_context(context) list_context.update({ 'show_sidebar': True, 'show_search': True, 'no_breadcrum
bs': True, 'title': _('Orders'), }) return list_context @frappe.whitelist() def close_or_unclose_sales_orders(names, status): if not frappe.has_permission("Sales Order", "write"): frappe.throw(_("Not permitted"), frappe.PermissionError) names = json.loads(names) for name in names: so = frappe.get_doc("Sales Order", name) if so.docstatus == 1: if status == "Closed": if so.status not in ("Cancelled", "Closed") and (so.per_delivered < 100 or so.per_billed < 100): so.update_status(status) else: if so.status == "Closed": so.update_status('Draft') frappe.local.message_log = [] @frappe.whitelist() def make_material_request(source_name, target_doc=None): def postprocess(source, doc): doc.material_request_type = "Purchase" def update_item(source, target, source_parent): target.project = source_parent.project doc = get_mapped_doc("Sales Order", source_name, { "Sales Order": { "doctype": "Material Request", "validation": { "docstatus": ["=", 1] } }, "Packed Item": { "doctype": "Material Request Item", "field_map": { "parent": "sales_order", "stock_uom": "uom" }, "postprocess": update_item }, "Sales Order Item": { "doctype": "Material Request Item", "field_map": { "parent": "sales_order", "stock_uom": "uom", "stock_qty": "qty" },
#####################################################################
# linktest_rsp_header.py
#
# (c) Copyright 2021, Benjamin Parzella. All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#####################################################################
"""Header for the hsms linktest response."""

from .header import HsmsHeader


class HsmsLinktestRspHeader(HsmsHeader):
    """
    Header for Linktest Response.

    Header for message with SType 6.
    """

    def __init__(self, system):
        """
        Initialize a hsms linktest response.

        :param system: message ID
        :type system: integer

        **Example**::

            >>> import secsgem.hsms
            >>>
            >>> secsgem.hsms.HsmsLinktestRspHeader(10)
            HsmsLinktestRspHeader({sessionID:0xffff, stream:00, function:00, pType:0x00, sType:0x06, \
system:0x0000000a, requireResponse:False})
        """
        # Linktest control messages are always addressed to the control
        # session (0xFFFF); the base class carries system and session id.
        super(HsmsLinktestRspHeader, self).__init__(system, 0xFFFF)
        # Fixed field values identifying a Linktest.rsp control message:
        # stream/function are unused for control messages, sType 6 marks
        # the linktest response, and a response never requests a reply.
        self.stream = 0x00
        self.function = 0x00
        self.pType = 0x00
        self.sType = 0x06
        self.requireResponse = False
def main() -> None: N = int(input()) A = [int(x) for x in input().split()] rev_A = A[:] left = [-1] * N left_cnt = [0] * N A_left = [A[0]] for i in range(1, N): if rev_A[i-1] < rev_A[i]: cnt = 0 while rev_A[i-1] pass elif rev_A[i-1] < rev_A[i] * 4: now = i-1 while left[now] != -1: now = left[now] left[i] = now A_left.append(A[i]) left[i] = i-1 else: pass ans
= 10 ** 9 for i in range(N + 1): A = AA[:] cnt = 0 if i > 0: A[i-1] *= -2 cnt += 1 for j in r
eversed(range(i-1)): A[j] *= -2 cnt += 1 while A[j] > A[j+1]: A[j] *= 4 cnt += 2 for j in range(i+1, N): while A[j-1] > A[j]: A[j] *= 4 cnt += 2 print(i, cnt, A) ans = min(ans, cnt) print(ans) if __name__ == '__main__': main()
# AlexMunoz 905 | 1/1/17

# Build the greeting backwards: reverse the characters of "Hello 2017!".
answer = "".join(reversed("Hello 2017!"))
print(answer)
'''This module contains functions related to raw metrics. The main function is :func:`~radon.raw.analyze`, and should be the only one that is used. ''' import tokenize import operator import collections try: import StringIO as io except ImportError: # pragma: no cover import io __all__ = ['OP', 'COMMENT', 'TOKEN_NUMBER', 'NL', 'EM', 'Module', '_generate', '_less_tokens', '_find', '_logical', 'analyze'] COMMENT = tokenize.COMMENT OP = tokenize.OP NL = tokenize.NL EM = tokenize.ENDMARKER # Helper for map() TOKEN_NUMBER = operator.itemgetter(0) # A module object. It contains the following data: # loc = Lines of Code (total lines) # lloc = Logical Lines of Code # comments = Comments lines # blank = Blank lines (or whitespace-only lines) Module = collections.namedtuple('Module', ['loc', 'lloc', 'sloc', 'comments', 'multi', 'blank']) def _generate(code): '''Pass the code into `tokenize.generate_tokens` and convert the result into a list. ''' return list(tokenize.generate_tokens(io.StringIO(code).readline)) def _less_tokens(tokens, remove): '''Process the output of `tokenize.generate_tokens` removing the tokens specified in `remove`. ''' for values in tokens: if values[0] in remove: continue yield values def _find(tokens, token, value): '''Return the position of the last token with the same (token, value) pair supplied. The position is the one of the rightmost term. ''' for index, token_values in enumerate(reversed(tokens)): if (token, value) == token_values[:2]: return len(tokens) - index - 1 raise ValueError('(token, value) pair not found') def _split_tokens(tokens, token, value): '''Split a list of tokens on the specified token pair (token, value), where *token* is the token type (i.e. its code) and *value* its actual value in the code. 
''' res = [[]] for token_values in tokens: if (token, value) == token_values[:2]: res.append([]) continue res[-1].append(token_values) return res def _get_all_tokens(line, lines): '''Starting from *line*, generate the necessary tokens which represent the shortest tokenization possible. This is done by catching :exc:`tokenize.TokenError` when a multi-line string or statement is encountered. ''' sloc_increment = multi_increment = 0 try: tokens = _generate(line) except tokenize.TokenError: # A multi-line string or statement has been encountered: # start adding lines and stop when tokenize stops complaining while True: sloc_increment += 1 line = '\n'.join([line, next(lines)]) try: tokens = _generate(line) except tokenize.TokenError: continue if tokens[0][0] == 3 and len(tokens) == 2: # Multi-line string detected multi_increment += line.count('\n') + 1 break return tokens, sloc_increment, multi_increment def _logical(tokens): '''Find how many logical lines are there in the current line. Normally 1 line of code is equivalent to 1 logical line of code, but there are cases when this is not true. For example:: if cond: return 0 this line actually corresponds to 2 logical lines, since it can be translated into:: if cond: return 0 Examples:: if cond: -> 1 if cond: return 0 -> 2 try: 1/0 -> 2 try: -> 1 if cond: # Only a comment -> 1 if cond: return 0 # Only a comment -> 2 ''' def aux(sub_tokens): '''The actual function which does the job.''' # Get the tokens and, in the meantime, remove comments processed = list(_less_tokens(sub_tokens, [COMMENT])) try: # Verify whether a colon is present among the tokens and that # it is the last token. token_pos = _find(processed, OP, ':') return 2 - (token_pos == len(processed) - 2) except ValueError: # The colon is not present # If the line is only composed by comments, newlines and endmarker # then it does not count as a logical line. # Otherwise it count as 1. 
if not list(_less_tokens(processed, [NL, EM])): return 0 return 1 return sum(aux(sub) for sub in _split_tokens(tokens, OP, ';')) def analyz
e(source): '''Analyze the source code and return a namedtuple with the following fields: * **loc**: The number of lines of code (total) * **lloc**: The number of logical lines of code * **sloc**: The number of source lines of code (not necessarily corresponding to the LLOC) *
**comments**: The number of Python comment lines * **multi**: The number of lines which represent multi-line strings * **blank**: The number of blank lines (or whitespace-only ones) The equation :math:`sloc + blanks = loc` should always hold. Multiline strings are not counted as comments, since, to the Python interpreter, they are not comments but strings. ''' loc = sloc = lloc = comments = multi = blank = 0 lines = iter(source.splitlines()) for lineno, line in enumerate(lines, 1): loc += 1 line = line.strip() if not line: blank += 1 continue # If this is not a blank line, then it counts as a # source line of code sloc += 1 try: # Process a logical line that spans on multiple lines tokens, sloc_incr, multi_incr = _get_all_tokens(line, lines) except StopIteration: raise SyntaxError('SyntaxError at line: {0}'.format(lineno)) # Update tracked metrics loc += sloc_incr # LOC and SLOC increments are the same sloc += sloc_incr multi += multi_incr # Add the comments comments += list(map(TOKEN_NUMBER, tokens)).count(COMMENT) # Process a logical line # Split it on semicolons because they increase the number of logical # lines for sub_tokens in _split_tokens(tokens, OP, ';'): lloc += _logical(sub_tokens) return Module(loc, lloc, sloc, comments, multi, blank)
import sys, os
import pygame as pg
import tilerender

"""Initialize pygame, create a clock, create the window with a surface to blit the map onto."""
pg.init()
fps_clock = pg.time.Clock()
main_surface = pg.display.set_mode((420, 420))
main_rect = main_surface.get_rect()

"""Load the tmx file from the current directory, create the tile_renderer object and load the tmx file."""
tmx_file = os.path.join(os.getcwd(), 'test.tmx')
tile_renderer = tilerender.Renderer(tmx_file)

"""Create the map surface using the make_map() method. Used to blit onto the main_surface."""
map_surface = tile_renderer.make_map()
map_rect = map_surface.get_rect()

"""Create a list of rects called "blockers" that the player can collide with. The getObjects() method returns a list of objects in your tile map. Each tile has properties like name, type, x, y, width, height. Double click objects in Tiled to see these properties. These properties are used to make rect objects in Pygame."""
blockers = []
# Read but unused below; presumably kept for reference — tile size in pixels.
tilewidth = tile_renderer.tmx_data.tilewidth
for tile_object in tile_renderer.tmx_data.getObjects():
    # The Tiled object's attributes (name, x, y, width, height, ...) are
    # exposed through its instance __dict__.
    properties = tile_object.__dict__
    if properties['name'] == 'blocker':
        x = properties['x']
        y = properties['y']
        width = properties['width']
        height = properties['height']
        new_rect = pg.Rect(x, y, width, height)
        blockers.append(new_rect)

"""
The Player class will be a player-controlled sprite that will collide
with the blockers we just created.  We pass in the blockers as a
constructor argument so that we can assign them as an attribute.
During the update method, we can refer to this attribute to detect
collision.
"""

class Player(pg.sprite.Sprite):
    def __init__(self, blockers):
        super(Player, self).__init__()
        # 22x22 solid-colored square as the player's image.
        self.image = pg.Surface((22, 22))
        self.image.fill((130, 100, 200))
        self.rect = self.image.get_rect(x=100, y=300)
        # Current velocity in pixels per frame, set from key state in update().
        self.x_vel = 0
        self.y_vel = 0
        self.blockers = blockers

    def update(self, keys):
        """
        Set player velocity by keys, move by velocity, check for collision.
        It's important to check collisions for both on the x-axis and y-axis,
        rather than just once.
        """
        # Vertical velocity from up/down arrows; zero when neither is held.
        if keys[pg.K_DOWN]:
            self.y_vel = 3
        elif keys[pg.K_UP]:
            self.y_vel = -3
        else:
            self.y_vel = 0
        # Horizontal velocity from left/right arrows.
        if keys[pg.K_LEFT]:
            self.x_vel = -3
        elif keys[pg.K_RIGHT]:
            self.x_vel = 3
        else:
            self.x_vel = 0
        # Move on the x-axis and undo the move on collision, so the player
        # can still slide vertically along a wall.
        self.rect.x += self.x_vel
        for blocker in self.blockers:
            if self.rect.colliderect(blocker):
                self.rect.x -= self.x_vel
                self.x_vel = 0
        # Then the same, independently, on the y-axis.
        self.rect.y += self.y_vel
        for blocker in self.blockers:
            if self.rect.colliderect(blocker):
                self.rect.y -= self.y_vel
                self.y_vel = 0

    def draw(self, surface):
        """
        Blit player image to screen.
        """
        surface.blit(self.image, self.rect)

# Module-level player instance used by the game loop below.
player = Player(blockers)
"""Simple game loop that updates the player sprite, blits the map_surface onto the main surface, and blits the player sprite onto the main surface. """ def main(): while True: keys = pg.key.get_pressed() player.update(keys) main_surface.blit(map_surface, map_rect) player.draw(main_surface) for event in pg.event.get(): if event.type == pg.QUIT: pg.quit() sys.exit()
pg.display.update() fps_clock.tick(60) if __name__ == "__main__": main()
from datetime import datetime

from django.core.exceptions import ValidationError
from django.db import models


def validate_answer_to_universe(value):
    """Field validator accepting only the value 42."""
    if value != 42:
        raise ValidationError('This is not the answer to life, universe and everything!', code='not42')


class ModelToValidate(models.Model):
    name = models.CharField(max_length=100)
    created = models.DateTimeField(default=datetime.now)
    number = models.IntegerField(db_column='number_val')
    parent = models.ForeignKey(
        'self',
        models.SET_NULL,
        blank=True, null=True,
        limit_choices_to={'number': 10},
    )
    email = models.EmailField(blank=True)
    ufm = models.ForeignKey(
        'UniqueFieldsModel',
        models.SET_NULL,
        to_field='unique_charfield',
        blank=True, null=True,
    )
    url = models.URLField(blank=True)
    f_with_custom_validator = models.IntegerField(blank=True, null=True, validators=[validate_answer_to_universe])
    f_with_iterable_of_validators = models.IntegerField(blank=True, null=True,
                                                        validators=(validate_answer_to_universe,))
    slug = models.SlugField(blank=True)

    def clean(self):
        # Model-level validation on top of per-field validation.
        super(ModelToValidate, self).clean()
        if self.number == 11:
            raise ValidationError('Invalid number supplied!')


class UniqueFieldsModel(models.Model):
    unique_charfield = models.CharField(max_length=100, unique=True)
    unique_integerfield = models.IntegerField(unique=True)
    non_unique_field = models.IntegerField()


class CustomPKModel(models.Model):
    my_pk_field = models.CharField(max_length=100, primary_key=True)


class UniqueTogetherModel(models.Model):
    cfield = models.CharField(max_length=100)
    ifield = models.IntegerField()
    efield = models.EmailField()

    class Meta:
        # Both tuple and list forms are deliberately exercised here.
        unique_together = (('ifield', 'cfield',), ['ifield', 'efield'])


class UniqueForDateModel(models.Model):
    start_date = models.DateField()
    end_date = models.DateTimeField()
    count = models.IntegerField(unique_for_date="start_date", unique_for_year="end_date")
    order = models.IntegerField(unique_for_month="end_date")
    name = models.CharField(max_length=100)


class CustomMessagesModel(models.Model):
    other = models.IntegerField(blank=True, null=True)
    number = models.IntegerField(
        db_column='number_val',
        error_messages={'null': 'NULL', 'not42': 'AAARGH', 'not_equal': '%s != me'},
        validators=[validate_answer_to_universe]
    )


class Author(models.Model):
    name = models.CharField(max_length=100)


class Article(models.Model):
    title = models.CharField(max_length=100)
    author = models.ForeignKey(Author, models.CASCADE)
    pub_date = models.DateTimeField(blank=True)

    def clean(self):
        # Default the publication date at validation time when not provided.
        if self.pub_date is None:
            self.pub_date = datetime.now()


class Post(models.Model):
    title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
    slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
    subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
    posted = models.DateField()

    def __str__(self):
        # Bug fix: this model declares no 'name' field, so the previous
        # `return self.name` always raised AttributeError. Use the title.
        return self.title


class FlexibleDatePost(models.Model):
    title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
    slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
    subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
    posted = models.DateField(blank=True, null=True)


class UniqueErrorsModel(models.Model):
    name = models.CharField(max_length=100, unique=True, error_messages={'unique': 'Custom unique name message.'})
    no = models.IntegerField(unique=True, error_messages={'unique': 'Custom unique number message.'})


class GenericIPAddressTestModel(models.Model):
    generic_ip = models.GenericIPAddressField(blank=True, null=True, unique=True)
    v4_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv4")
    v6_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv6")
    ip_verbose_name = models.GenericIPAddressField("IP Address Verbose", blank=True, null=True)
class GenericIPAddrUnpackUniqueTest(models.Model):
    # unpack_ipv4=True stores IPv4-mapped IPv6 addresses (::ffff:a.b.c.d) in
    # unpacked IPv4 form; combined with unique=True for uniqueness tests.
    generic_v4unpack_ip = models.GenericIPAddressField(null=True, blank=True, unique=True, unpack_ipv4=True)


# A model can't have multiple AutoFields
# Refs #12467.
# The invalid class is defined inside try/except so the AssertionError that
# Django raises at class-creation time can be captured at import time and
# its message checked below.
assertion_error = None
try:
    class MultipleAutoFields(models.Model):
        auto1 = models.AutoField(primary_key=True)
        auto2 = models.AutoField(primary_key=True)
except AssertionError as exc:
    assertion_error = exc
assert str(assertion_error) == "A model can't have more than one AutoField."
#------------------------------------------------------------------------------
# Name:        pychrono example
# Purpose:
#
# Author:      Alessandro Tasora
#
# Created:     1/01/2019
# Copyright:   (c) ProjectChrono 2019
#------------------------------------------------------------------------------

print("First tutorial for PyChrono: vectors, matrices etc.")

# Load the Chrono::Engine core module!
import pychrono as chrono

try:
    import numpy as np
    from numpy import linalg as LA
except ImportError:
    # Fixed message typo ("NumPyto") and re-raise: the original swallowed the
    # error and then crashed later with a confusing NameError on `np`.
    print("You need NumPy to run this demo!")
    raise

# Test logging
chrono.GetLog().Bar()
chrono.GetLog() << "result is: " << 11+1.5 << "\n"
chrono.GetLog().Bar()

# Test vectors: components can be set individually or via the constructor.
my_vect1 = chrono.ChVectorD()
my_vect1.x = 5
my_vect1.y = 2
my_vect1.z = 3
my_vect2 = chrono.ChVectorD(3, 4, 5)
my_vect4 = my_vect1*10 + my_vect2
my_len = my_vect4.Length()
print('vect sum =', my_vect1 + my_vect2)
print('vect cross =', my_vect1 % my_vect2)   # % is the cross-product operator
print('vect dot =', my_vect1 ^ my_vect2)     # ^ is the dot-product operator

# Test quaternions
my_quat = chrono.ChQuaternionD(1, 2, 3, 4)
my_qconjugate = ~my_quat
print('quat. conjugate =', my_qconjugate)
print('quat. dot product=', my_qconjugate ^ my_quat)
print('quat. product=', my_qconjugate % my_quat)

# Test matrices and NumPy interoperability
mlist = [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 16]]
ma = chrono.ChMatrixDynamicD()
ma.SetMatr(mlist)   # Create a Matrix from a list. Size is adjusted automatically.
npmat = np.asarray(ma.GetMatr())  # Create a 2D npy array from the list extracted from ChMatrixDynamic
w, v = LA.eig(npmat)  # get eigenvalues and eigenvectors using numpy
mb = chrono.ChMatrixDynamicD(4, 4)
# NOTE(review): `*` on ndarrays is element-wise, not a matrix product —
# confirm this is the intended "linear algebra operation" of the tutorial.
prod = v * npmat  # you can perform linear algebra operations with numpy and then feed results into a ChMatrixDynamicD using SetMatr
mb.SetMatr(v.tolist())   # create a ChMatrixDynamicD from the numpy eigenvectors
mr = chrono.ChMatrix33D()
mr.SetMatr([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
print(mr*my_vect1)

# Test frames -
#  create a frame representing a translation and a rotation
#  of 20 degrees on X axis
my_frame = chrono.ChFrameD(my_vect2, chrono.Q_from_AngAxis(20*chrono.CH_C_DEG_TO_RAD, chrono.ChVectorD(1, 0, 0)))
my_vect5 = my_vect1 >> my_frame   # >> transforms the vector by the frame

# Print the class hierarchy of a chrono class
import inspect
inspect.getmro(chrono.ChStreamOutAsciiFile)

# Use the ChFunction classes
my_funct = chrono.ChFunction_Sine(0, 0.5, 3)
print('function f(0.2)=', my_funct.Get_y(0.2))


# Inherit from the ChFunction, from the Python side,
# (do not forget the __init__ constructor)
class MySquareFunct(chrono.ChFunction):
    def __init__(self):
        chrono.ChFunction.__init__(self)

    def Get_y(self, x):
        return x*x


my_funct2 = MySquareFunct()
# Fixed label: the call evaluates the function at 3, not 2.
print('function f(3) =', my_funct2.Get_y(3))
print('function df/dx=', my_funct2.Get_y_dx(3))
onst import ( ATTR_CURRENT_ENERGY_KWH, ATTR_CURRENT_POWER_W, CONF_CONTROLLER, CONF_LEGACY_UNIQUE_ID, DOMAIN, VERA_ID_FORMAT, ) _LOGGER = logging.getLogger(__name__) VERA_ID_LIST_SCHEMA = vol.Schema([int]) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_CONTROLLER): cv.url, vol.Optional(CONF_EXCLUDE, default=[]): VERA_ID_LIST_SCHEMA, vol.Optional(CONF_LIGHTS, default=[]): VERA_ID_LIST_SCHEMA, } ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass: HomeAssistant, base_config: ConfigType) -> bool: """Set up for Vera controllers.""" hass.data[DOMAIN] = {} config = base_config.get(DOMAIN) if not config: return True hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=config, ) ) return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Do setup of vera.""" # Use options entered during initial config flow or provided from configuration.yml if entry.data.get(CONF_LIGHTS) or entry.data.get(CONF_EXCLUDE): hass.config_entries.async_update_entry( entry=entry, data=entry.data, options=new_options( entry.data.get(CONF_LIGHTS, []), entry.data.get(CONF_EXCLUDE, []), ), ) saved_light_ids = entry.options.get(CONF_LIGHTS, []) saved_exclude_ids = entry.options.get(CONF_EXCLUDE, []) base_url = entry.data[CONF_CONTROLLER] light_ids = fix_device_id_list(saved_light_ids) exclude_ids = fix_device_id_list(saved_exclude_ids) # If the ids were corrected. Update the config entry. if light_ids != saved_light_ids or exclude_ids != saved_exclude_ids: hass.config_entries.async_update_entry( entry=entry, options=new_options(light_ids, exclude_ids) ) # Initialize the Vera controller. 
subscription_registry = SubscriptionRegistry(hass) controller = veraApi.VeraController(base_url, subscription_registry) try: all_devices = await hass.async_add_executor_job(controller.get_devices) all_scenes = await hass.async_add_executor_job(controller.get_scenes) except RequestException as exception: # There was a network related error connecting to the Vera controller. _LOGGER.exception("Error communicating with Vera API") raise ConfigEntryNotReady from exception # Exclude devices unwanted by user. devices = [device for device in all_devices if device.device_id not in exclude_ids] vera_devices = defaultdict(list) for device in devices: device_type = map_vera_device(device, light_ids) if device_type is not None: vera_devices[device_type].append(device) vera_scenes = [] for scene in all_scenes: vera_scenes.append(scene) controller_data = ControllerData( controller=controller, devices=vera_devices, scenes=vera_scenes, config_entry=entry, ) set_controller_data(hass, entry, controller_data) # Forward the config data to the necessary platforms. 
for platform in get_configured_platforms(controller_data): hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, platform) ) def stop_subscription(event): """Stop SubscriptionRegistry updates.""" controller.stop() await hass.async_add_executor_job(controller.start) entry.async_on_unload( hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_subscription) ) entry.async_on_unload(entry.add_update_listener(_async_update_listener)) return True async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Unload Withings config entry.""" controller_data: ControllerData = get_controller_data(hass, config_entry) tasks = [ hass.config_entries.async_forward_entry_unload(config_entry, platform) for platform in get_configured_platforms(controller_data) ] tasks.append(hass.async_add_executor_job(controller_data.controller.stop)) await asyncio.gather(*tasks) return True async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry): """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) def map_vera_device(vera_device: veraApi.VeraDevice, remap: list[int]) -> str: """Map vera classes to Home Assistant types.""" type_map = { veraApi.VeraDimmer: "light", veraApi.VeraBinarySensor: "binary_sensor", veraApi.VeraSensor: "sensor", veraApi.VeraArmableDevice: "switch", veraApi.VeraLock: "lock", veraApi.VeraThermostat: "climate", veraApi.VeraCurtain: "cover", veraA
pi.VeraSceneContr
oller: "sensor", veraApi.VeraSwitch: "switch", } def map_special_case(instance_class: type, entity_type: str) -> str: if instance_class is veraApi.VeraSwitch and vera_device.device_id in remap: return "light" return entity_type return next( iter( map_special_case(instance_class, entity_type) for instance_class, entity_type in type_map.items() if isinstance(vera_device, instance_class) ), None, ) DeviceType = TypeVar("DeviceType", bound=veraApi.VeraDevice) class VeraDevice(Generic[DeviceType], Entity): """Representation of a Vera device entity.""" def __init__( self, vera_device: DeviceType, controller_data: ControllerData ) -> None: """Initialize the device.""" self.vera_device = vera_device self.controller = controller_data.controller self._name = self.vera_device.name # Append device id to prevent name clashes in HA. self.vera_id = VERA_ID_FORMAT.format( slugify(vera_device.name), vera_device.vera_device_id ) if controller_data.config_entry.data.get(CONF_LEGACY_UNIQUE_ID): self._unique_id = str(self.vera_device.vera_device_id) else: self._unique_id = f"vera_{controller_data.config_entry.unique_id}_{self.vera_device.vera_device_id}" async def async_added_to_hass(self) -> None: """Subscribe to updates.""" self.controller.register(self.vera_device, self._update_callback) def _update_callback(self, _device: DeviceType) -> None: """Update the state.""" self.schedule_update_ha_state(True) def update(self): """Force a refresh from the device if the device is unavailable.""" refresh_needed = self.vera_device.should_poll or not self.available _LOGGER.debug("%s: update called (refresh=%s)", self._name, refresh_needed) if refresh_needed: self.vera_device.refresh() @property def name(self) -> str: """Return the name of the device.""" return self._name @property def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes of the device.""" attr = {} if self.vera_device.has_battery: attr[ATTR_BATTERY_LEVEL] = self.vera_device.battery_level if 
self.vera_device.is_armable: armed = self.vera_device.is_armed attr[ATTR_ARMED] = "True" if armed else "False" if self.vera_device.is_trippable: last_tripped = self.vera_device.last_trip if last_tripped is not None: utc_time = utc_from_timestamp(int(last_tripped)) attr[ATTR_LAST_TRIP_TIME] = utc_time.isoformat() else: attr[ATTR_LAST_TRIP_TIME] = None tripped = self.vera_device.is_tripped attr[ATTR_TRIPPED] = "True" if tripped else "False" power = self.vera_device.power if power: attr[ATTR_CURRENT_POWER_W] = convert(power, float, 0.0) energy = self.vera_de
'PriceClass', 'Enabled' ] distribution_list = { list_name: [] } distributions = self.list_streaming_distributions(False) if streaming else self.list_distributions(False) for dist in distributions: temp_distribution = {} for key_name in key_list: temp_distribution[key_name] = dist[key_name] temp_distribution['Aliases'] = [alias for alias in dist['Aliases'].get('Items', [])] temp_distribution['ETag'] = self.get_etag_from_distribution_id(dist['Id'], streaming) if not streaming: temp_distribution['WebACLId'] = dist['WebACLId'] invalidation_ids = self.get_list_of_invalidation_ids_from_distribution_id(dist['Id']) if invalidation_ids: temp_distribution['Invalidations'] = invalidation_ids resource_tags = self.client.list_tags_for_resource(Resource=dist['ARN']) temp_distribution['Tags'] = boto3_tag_list_to_ansible_dict(resource_tags['Tags'].get('Items', [])) distribution_list[list_name].append(temp_distribution) return distribution_list except botocore.exceptions.ClientError as e: self.module.fail_json(msg="Error generating summary of distributions - " + str(e), exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response)) except Exception as e: self.module.fail_json(msg="Error generating summary of distributions - " + str(e), exception=traceback.format_exc()) def get_etag_from_distribution_id(self, distribution_id, streaming): distribution = {} if not streaming: distribution = self.get_distribution(distribution_id) else: distribution = self.get_streaming_distribution(distribution_id) return distribution['ETag'] def get_list_of_invalidation_ids_from_distribution_id(self, distribution_id): try: invalidation_ids = [] invalidations = self.list_invalidations(distribution_id) for invalidation in invalidations: invalidation_ids.append(invalidation['Id']) return invalidation_ids except botocore.exceptions.ClientError as e: self.module.fail_json(msg="Error getting list of invalidation ids - " + str(e), exception=traceback.format_exc(), 
**camel_dict_to_snake_dict(e.response)) def get_distribution_id_from_domain_name(self, domain_name): try: distribution_id = "" distributions = self.list_distributions(False) distributions += self.list_streaming_distributions(False) for dist in distributions: if 'Items' in dist['Aliases']: for alias in dist['Aliases']['Items']: if str(alias).lower() == domain_name.lower(): distribution_id = dist['Id'] break return distribution_id except botocore.exceptions.ClientError as e: self.module.fail_json(msg="Error getting distribution id from domain name - " + str(e), exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.resp
onse)) def get_aliases_from_distribution_id(sel
f, distribution_id): aliases = [] try: distributions = self.list_distributions(False) for dist in distributions: if dist['Id'] == distribution_id and 'Items' in dist['Aliases']: for alias in dist['Aliases']['Items']: aliases.append(alias) break return aliases except botocore.exceptions.ClientError as e: self.module.fail_json(msg="Error getting list of aliases from distribution_id - " + str(e), exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response)) def paginated_response(self, func, result_key=""): ''' Returns expanded response for paginated operations. The 'result_key' is used to define the concatenated results that are combined from each paginated response. ''' args = dict() results = dict() loop = True while loop: response = func(**args) if result_key == "": result = response result.pop('ResponseMetadata', None) else: result = response.get(result_key) results.update(result) args['NextToken'] = response.get('NextToken') loop = args['NextToken'] is not None return results def keyed_list_helper(self, list_to_key): keyed_list = dict() for item in list_to_key: distribution_id = item['Id'] if 'Items' in item['Aliases']: aliases = item['Aliases']['Items'] for alias in aliases: keyed_list.update( { alias: item } ) keyed_list.update( { distribution_id: item } ) return keyed_list def set_facts_for_distribution_id_and_alias(details, facts, distribution_id, aliases): facts[distribution_id].update(details) for alias in aliases: facts[alias].update(details) return facts def main(): argument_spec = ec2_argument_spec() argument_spec.update(dict( distribution_id=dict(required=False, type='str'), invalidation_id=dict(required=False, type='str'), origin_access_identity_id=dict(required=False, type='str'), domain_name_alias=dict(required=False, type='str'), all_lists=dict(required=False, default=False, type='bool'), distribution=dict(required=False, default=False, type='bool'), distribution_config=dict(required=False, default=False, type='bool'), 
origin_access_identity=dict(required=False, default=False, type='bool'), origin_access_identity_config=dict(required=False, default=False, type='bool'), invalidation=dict(required=False, default=False, type='bool'), streaming_distribution=dict(required=False, default=False, type='bool'), streaming_distribution_config=dict(required=False, default=False, type='bool'), list_origin_access_identities=dict(required=False, default=False, type='bool'), list_distributions=dict(required=False, default=False, type='bool'), list_distributions_by_web_acl_id=dict(required=False, default=False, type='bool'), list_invalidations=dict(required=False, default=False, type='bool'), list_streaming_distributions=dict(required=False, default=False, type='bool'), summary=dict(required=False, default=False, type='bool') )) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False) if not HAS_BOTO3: module.fail_json(msg='boto3 is required.') service_mgr = CloudFrontServiceManager(module) distribution_id = module.params.get('distribution_id') invalidation_id = module.params.get('invalidation_id') origin_access_identity_id = module.params.get('origin_access_identity_id') web_acl_id = module.params.get('web_acl_id') domain_name_alias = module.params.get('domain_name_alias') all_lists = module.params.get('all_lists') distribution = module.params.get('distribution') distribution_config = module.params.get('distribution_config') origin_access_identity = module.params.get('origin_access_identity') origin_access_identity_config = module.params.get('origin_access_identity_config') invalidation = module.params.get('invalidation') streaming_distribution = module.params.get('streaming_distribution') streaming_distribution_config = module.params.get('streaming_distribution_config') list_origin_access_identities = modul
from __future__ import unicode_literals from moto.core.responses import BaseResponse from .models import kinesisvideoarchivedmedia_backends import json class KinesisVideoArchivedMediaResponse(BaseResponse): SERVICE_NAME = "kinesis-video-archived-media" @property def kinesisvideoarchivedmedia_backend(self): return kinesisvideoarchivedmedia_backends[self.region] def get_hls_streaming_session_url(self): stream_name = self._get_param("StreamName") stream_arn = self._get_param("StreamARN") playback_mode = self._get_param("PlaybackMode") hls_fragment_selector = self._get_param("HLSFragmentSelector") container_format = self._get_param("ContainerFormat") discontinuity_mode = self._get_param("DiscontinuityMode") display_fragment_timestamp = self._get_param("DisplayFragmentTimestamp") expires = self._get_int_param("Expires") max_media_playlist_fragment_results = self._get_param( "MaxMediaPlaylistFragmentResults" ) hls_streaming_session_url = self.kinesisvideoarchivedmedia_backend.get_hls_streaming_session_url( stream_name=stream_name, stream_arn=stream_arn, playback_mode=playback_mode, hls_fragment_selector=hls_fragment_selector, container_format=container_format, discontinuity_mode=discontinuity_mode, display_fragment_timestamp=display_fragment_timestamp, expires=expires, max_media_playlist_fragment_r
esults=max_media_playlist_fragment_results, ) return json.dumps(dict(HLSStreamingSessionURL=hls_streaming_session_url)) def get_dash_streaming_session_url(self): stream_name = self._get_param("StreamName") stream_arn = self._get_param("StreamARN") playback_mode = self._get_param("PlaybackMode") display_fragment_timestamp = self._get_param("DisplayFragmentTimestamp")
display_fragment_number = self._get_param("DisplayFragmentNumber") dash_fragment_selector = self._get_param("DASHFragmentSelector") expires = self._get_int_param("Expires") max_manifest_fragment_results = self._get_param("MaxManifestFragmentResults") dash_streaming_session_url = self.kinesisvideoarchivedmedia_backend.get_dash_streaming_session_url( stream_name=stream_name, stream_arn=stream_arn, playback_mode=playback_mode, display_fragment_timestamp=display_fragment_timestamp, display_fragment_number=display_fragment_number, dash_fragment_selector=dash_fragment_selector, expires=expires, max_manifest_fragment_results=max_manifest_fragment_results, ) return json.dumps(dict(DASHStreamingSessionURL=dash_streaming_session_url)) def get_clip(self): stream_name = self._get_param("StreamName") stream_arn = self._get_param("StreamARN") clip_fragment_selector = self._get_param("ClipFragmentSelector") content_type, payload = self.kinesisvideoarchivedmedia_backend.get_clip( stream_name=stream_name, stream_arn=stream_arn, clip_fragment_selector=clip_fragment_selector, ) new_headers = {"Content-Type": content_type} return payload, new_headers
class TNumberDataLines(object):
    """
    Adapter used in TTableClipboard

    Requires: getNumberDataRows or GetNumberRows
              getNumberDataCols or GetNumberCols

    Provides: _getNumberDataRows
              _getNumberDataCols
              _getBaseDataRow
              _getBaseDataCol

    Each private accessor prefers a host-supplied override hook and
    otherwise falls back to a sensible default computed from the
    wx.grid-style GetNumberRows/GetNumberCols interface.
    """

    def _getBaseDataRow(self):
        # First data row; hosts without a getBaseDataRow hook start at 0.
        if not hasattr(self, 'getBaseDataRow'):
            return 0
        return self.getBaseDataRow()

    def _getBaseDataCol(self):
        # First data column; hosts without a getBaseDataCol hook start at 0.
        if not hasattr(self, 'getBaseDataCol'):
            return 0
        return self.getBaseDataCol()

    def _getNumberDataRows(self):
        # Row count of the data area; defaults to total rows minus
        # the leading non-data rows.
        if not hasattr(self, 'getNumberDataRows'):
            return self.GetNumberRows() - self._getBaseDataRow()
        return self.getNumberDataRows()

    def _getNumberDataCols(self):
        # Column count of the data area; defaults to total columns minus
        # the leading non-data columns.
        if not hasattr(self, 'getNumberDataCols'):
            return self.GetNumberCols() - self._getBaseDataCol()
        return self.getNumberDataCols()
#!/usr/bin/env python
# Copyright 2016 Criteo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Downsampling helpers for drivers that do not implement it server-side."""
from __future__ import absolute_import
from __future__ import print_function

import collections
import logging
import time

log = logging.getLogger(__name__)


class DelayedWriter(object):
    """Delay writes.

    Buffers low-resolution points in memory and trickles them out to the
    accessor over `period_ms` milliseconds; high-resolution points pass
    through immediately via feed().
    """

    # Default write period: 10 minutes, expressed in milliseconds.
    DEFAULT_PERIOD_MS = 600000

    def __init__(self, accessor, period_ms=DEFAULT_PERIOD_MS):
        """Create a DelayedWriter.

        The delayed writer will separate high resolution points and
        low resolution points and will write the low resolution ones
        every `period_ms` milliseconds.

        For these points the value for a given timestamp is
        frequently updated and we can safely delay the writes. In case
        of an unclean shutdown we might lose up to `period_ms` points
        of data.

        Args:
          accessor: a connected accessor.
          period_ms: delay before writing low resolution points.
        """
        self.accessor = accessor
        self.period_ms = period_ms
        # Metrics waiting to be written, sorted ascending by number of
        # queued points (see _build_queue).
        self._queue = []
        # Write-rate budget: metrics to flush per elapsed millisecond.
        self._metrics_per_ms = 0
        self._last_write_ms = 0
        # metric -> {(stage, timestamp): (value, count)}; a later feed for
        # the same (stage, timestamp) key simply overwrites the older value.
        self._points = collections.defaultdict(dict)

    def clear(self):
        """Reset internal structures."""
        self._queue = []
        self._points.clear()

    def feed(self, metric, datapoints):
        """Feed the delayed writer.

        This function will separate datapoints based on their
        resolutions and keep the low resolution points for later.

        Args:
          metric: the metric associated with these points.
          datapoints: downsampled datapoints.

        Returns:
          list(datapoints) list of high resolution points that
            should get written now.
        """
        high_res, low_res = [], []
        for datapoint in datapoints:
            _, _, _, stage = datapoint
            # In case of unclean shutdown we could lose up to
            # 25% of the data. We also allow a lag of up to 1/4th of
            # a period. stage0 are never delayed.
            # NOTE(review): the comment above talks about 1/4th of a
            # period, but the threshold below is period_ms * 4 — confirm
            # which of the two is the intended cutoff.
            if stage.stage0 or stage.precision_ms < (self.period_ms * 4):
                high_res.append(datapoint)
            else:
                low_res.append(datapoint)
        self.write_later(metric, low_res)
        # We piggy back on feed() to write delayed points, this works
        # as long as we receive points regularly. We might want to add
        # a timer at some point.
        self.write_some()
        return high_res

    def flush(self):
        """Flush all buffered points."""
        self._build_queue()
        while self._queue:
            self.write_some(flush=True)

    def size(self):
        """Number of queued metrics."""
        return len(self._points)

    def write_later(self, metric, datapoints):
        """Queue points for later."""
        for datapoint in datapoints:
            timestamp, value, count, stage = datapoint
            # Keyed by (stage, timestamp) so a fresher value for the same
            # slot replaces the stale one instead of accumulating.
            self._points[metric][(stage, timestamp)] = (value, count)
        self._build_queue()

    def _build_queue(self):
        """Build the queue of metrics to write."""
        # Only rebuild once the previous queue has been fully drained.
        if len(self._queue) > 0:
            return
        # Order by number of points.
        self._queue = sorted(self._points.keys(),
                             key=lambda k: len(self._points[k]))
        # We know that we have up to `period_ms` to write everything
        # so let's write only a few metrics per iteration.
        self._metrics_per_ms = float(len(self._queue)) / self.period_ms
        log.debug(
            "rebuilt the queues: %d metrics, %d per second",
            len(self._queue),
            self._metrics_per_ms,
        )

    def write_some(self, flush=False, now=time.time):
        """Write some points from the queue.

        Args:
          flush: when True, write everything still queued.
          now: clock function returning seconds; injectable for tests.
        """
        now = now() * 1000  # convert to ms.
        if self._last_write_ms == 0:
            self._last_write_ms = now
        delta_ms = (now - self._last_write_ms) + 1
        if flush:
            metrics_to_write = len(self._queue)
        else:
            # Proportional budget: write as many metrics as the elapsed
            # time entitles us to, given the per-ms rate.
            metrics_to_write = round(delta_ms * self._metrics_per_ms)
        if metrics_to_write == 0:
            return
        i = 0
        log.debug("writing low res points for %d metrics" % metrics_to_write)
        # pop() drains from the end of the queue, i.e. the metrics with
        # the most queued points go first (queue is sorted ascending).
        while self._queue and i < metrics_to_write:
            metric = self._queue.pop()
            datapoints = []
            # collect the points to write them.
            for k, v in self._points[metric].items():
                stage, timestamp = k
                value, count = v
                i += 1
                datapoints.append((timestamp, value, count, stage))
            self.accessor.insert_downsampled_points_async(metric, datapoints)
            # remove the points that have been written
            del self._points[metric]
        self._last_write_ms = now
""" A fake time series data server used during development to serve up JSON for live updating plot rendering in the browser. The test datafile has 25000 points and cycles through this entire series every 60 minutes. Examples -------- >>> python fake_dataserver.py >>> import requests; requests.get('http://127.0.0.1:5000/ts/10').content """ from datetime import datetime, timedelta from flask import Flask, jsonify from flask import make_response, request, current_app from functools import update_wrapper app = Flask(__name__) points_per_hour = 6000 def crossdomain(origin=None, methods=None, headers=None, max_age=21600, attach_to_all=True, automatic_options=True): if methods is not None: methods = ', '.join(sorted(x.upper() for x in methods)) if headers is not None and not isinstance(headers, basestring): headers = ', '.join(x.upper() for x in headers) if not isinstance(origin, basestring): origin = ', '.join(origin) if isinstance(max_age, timedelta): max_age = max_age.total_seconds() def get_methods(): if methods is not None: return methods options_resp = current_app.make_default_options_response() return options_resp.headers['allow'] def decorator(f): def wrapped_function(*args, **kwargs): if automatic_options and request.method == 'OPTIONS': resp = current_app.make_default_options_response() else: resp = make_response(f(*args, **kwargs)) if not attach_to_all and request.method != 'OPTIONS': return resp h = resp.headers h['Access-Control-Allow-Origin'] = origin h['Access-Control-Allow-Methods'] = get_methods() h['Access-Control-Max-Age'] = str(max_age) if headers is not None: h['Access-Control-Allow-Headers'] = headers
return resp
f.provide_automatic_options = False return update_wrapper(wrapped_function, f) return decorator # Open local datafile (omit the standard error column) with open ("time_val_error_data.dat","r") as datafile: lines = datafile.readlines()[:points_per_hour] (t, v) = zip(*[(float(l.split()[0]), float(l.split()[1])) for l in lines]) @app.route("/ts/", defaults={'points': 100}) @app.route("/ts/<int:points>", methods=["GET"]) @crossdomain(origin='*') def get_data(points): now = datetime.now() fraction=int(len(t)*(now.minute+(now.second/60.0))/60.0) ts = {'time': range(points), 'value': v[fraction:fraction+points]} #ts = {'time': t[fraction:fraction+points], # 'value': v[fraction:fraction+points]} return jsonify({'ts': ts}) if __name__ == "__main__": app.run(host='0.0.0.0', port=5000)
BlockFlag = 2 else: if BlockFlag == 1 or BlockFlag == 2: BlockFlag = 3 # # An item have Usage or Notify information and the first time get this information # elif BlockFlag == -1: BlockFlag = 4 # # Combine two comment line if they are generic comment # if CommentItemUsage == CommentItemNotify == PreUsage == PreNotify == DT.ITEM_UNDEFINED: CommentItemHelpText = PreHelpText + DT.END_OF_LINE + CommentItemHelpText # # Store this information for next line may still need combine operation. # PreHelpText = CommentItemHelpText if BlockFlag == 4: CommentItemIns = InfPpiItemCommentContent() CommentItemIns.SetUsage(CommentItemUsage) CommentItemIns.SetNotify(CommentItemNotify) CommentItemIns.SetHelpStringItem(CommentItemHelpText) CommentInsList.append(CommentItemIns) BlockFlag = -1 PreUsage = None PreNotify = None PreHelpText = '' elif BlockFlag == 3: # # Add previous help string # CommentItemIns = InfPpiItemCommentContent() CommentItemIns.SetUsage(DT.ITEM_UNDEFINED) CommentItemIns.SetNotify(DT.ITEM_UNDEFINED) if PreHelpText == '' or PreHelpText.endswith(DT.END_OF_LINE): PreHelpText += DT.END_OF_LINE CommentItemIns.SetHelpStringItem(PreHelpText) CommentInsList.append(CommentItemIns) # # Add Current help string # CommentItemIns = InfPpiItemCommentContent() CommentItemIns.SetUsage(CommentItemUsage) CommentItemIns.SetNotify(CommentItemNotify) CommentItemIns.SetHelpStringItem(CommentItemHelpText) CommentInsList.append(CommentItemIns) BlockFlag = -1 PreUsage = None PreNotify = None PreHelpText = '' else: PreUsage = CommentItemUsage PreNotify = CommentItemNotify PreHelpText = CommentItemHelpText InfPpiItemObj.SetCommentList(CommentInsList) return InfPpiItemObj class InfPpiItemCommentContent(): def __init__(self): # # ## SOMETIMES_CONSUMES ## HelpString # self.UsageItem = '' # # Help String # self.HelpStringItem = '' self.Notify = '' self.CommentList = [] def SetUsage(self, UsageItem): self.UsageItem = UsageItem def GetUsage(self): return self.UsageItem def SetNotify(self, Notify): 
if Notify != DT.ITEM_UNDEFINED: self.Notify = 'true' def GetNotify(self): return self.Notify def SetHelpStringItem(self, HelpStringItem): self.HelpStringItem = HelpStringItem def GetHelpStringItem(self): return self.HelpStringItem class InfPpiItem(): def __init__(self): self.Name = '' self.FeatureFlagExp = '' self.SupArchList = [] self.CommentList = [] def SetName(self, Name): self.Name = Name def GetName(self): return self.Name def SetSupArchList(self, SupArchList): self.SupArchList = SupArchList def GetSupArchList(self): return self.SupArchList def SetCommentList(self, CommentList): self.CommentList
= CommentList def GetCommentList(self): return self.CommentList def SetFeatureFlagExp(self, FeatureFlagExp): self.FeatureFlagExp = FeatureFlagExp def GetFeatureFlagExp(self): return self.FeatureFlagExp ## # # # class InfPpiObject(): def __init__(self): self.P
pis = Sdict() # # Macro defined in this section should be only used in this section. # self.Macros = {} def SetPpi(self, PpiList, Arch = None): __SupArchList = [] for ArchItem in Arch: # # Validate Arch # if (ArchItem == '' or ArchItem == None): ArchItem = 'COMMON' __SupArchList.append(ArchItem) for Item in PpiList: # # Get Comment content of this protocol # CommentsList = None if len(Item) == 3: CommentsList = Item[1] CurrentLineOfItem = Item[2] Item = Item[0] InfPpiItemObj = InfPpiItem() if len(Item) >= 1 and len(Item) <= 2: # # Only CName contained # if not IsValidCVariableName(Item[0]): Logger.Error("InfParser", ToolError.FORMAT_INVALID, ST.ERR_INF_PARSER_INVALID_CNAME%(Item[0]), File=CurrentLineOfItem[2], Line=CurrentLineOfItem[1], ExtraData=CurrentLineOfItem[0]) if (Item[0] != ''): InfPpiItemObj.SetName(Item[0]) else: Logger.Error("InfParser", ToolError.FORMAT_INVALID, ST.ERR_INF_PARSER_CNAME_MISSING, File=CurrentLineOfItem[2], Line=CurrentLineOfItem[1], ExtraData=CurrentLineOfItem[0]) # # Have FeatureFlag information # if len(Item) == 2: # # Contained CName and Feature Flag Express # <statements> ::= <CName> ["|" <FeatureFlagExpress>] # Item[1] should not be empty # if Item[1].strip() == '': Logger.Error("InfParser", ToolError.FORMAT_INVALID, ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING, File=CurrentLineOfItem[2], Line=CurrentLineOfItem[1], ExtraData=CurrentLineOfItem[0]) # # Validate Feature Flag Express for PPI entry # Item[1] contain FFE information # FeatureFlagRtv = IsValidFeatureFlagExp(Item[1].strip()) if not FeatureFlagRtv[0]: Logger.Error("InfParser", ToolError.FORMAT_INVALID, ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]), File=CurrentLineOfItem[2], Line=CurrentLineOfItem[1], ExtraData=CurrentLineOfItem[0]) InfPpiItemObj.SetFeatureFlagExp(Item[1]) if len(Item) != 1 and len(Item) != 2: # # Invalid format of Ppi statement # Logger.Error("InfParser", ToolError.FORMAT_INVALID, 
ST.ERR_INF_PARSER_GUID_PPI_PROTOCOL_SECTION_CONTENT_ERROR, File=CurrentLineOfItem[2], Line=CurrentLineOfItem[1], ExtraData=CurrentLineOfItem[0]) # # Get/Set Usage and HelpString for PPI entry # if CommentsList != None and len(CommentsList) != 0: InfPpiItemObj = ParsePpiComment(CommentsList, InfPpiItemObj) else: CommentItemIns = InfPpiIte
# # Copyright 2015-2016 Red Hat, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published # by the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # # Refer to the README and COPYING files for full details of the license # from __future__ import absolute_import import contextlib import os import uuid import xml.etree.ElementTree as ET import convirt import convirt.config import convirt.config.environ import convirt.xmlfile from . import testlib class XMLFileTests(testlib.TestCase): def setUp(self): self.vm_uuid = str(uuid.uuid4()) @contextlib.contextmanager def test_env(self): with testlib.named_temp_dir() as tmp_dir: with testlib.global_conf(run_dir=tmp_dir): yield convirt.xmlfile.XMLFile( self.vm_uuid, convirt.config.environ.current() ) def test_fails_without_conf(self): self.assertRaises(convirt.xmlfile.UnconfiguredXML, convirt.xmlfile.XMLFile, self.vm
_uuid, None) def test_path(self): with self.test_env() as xf: self.assertTrue(xf.path.endswith('xml'))
self.assertIn(self.vm_uuid, xf.path) def test_save(self): root = ET.fromstring(testlib.minimal_dom_xml()) with self.test_env() as xf: conf = convirt.config.environ.current() self.assertEquals(os.listdir(conf.run_dir), []) self.assertNotRaises(xf.save, root) self.assertTrue(len(os.listdir(conf.run_dir)), 1) def test_load(self): xml_data = testlib.minimal_dom_xml() root = ET.fromstring(xml_data) with self.test_env() as xf: xf.save(root) new_root = xf.load() xml_copy = convirt.xmlfile.XMLFile.encode(new_root) # FIXME: nasty trick to tidy up the XML xml_ref = convirt.xmlfile.XMLFile.encode(root) self.assertEquals(xml_ref, xml_copy) def test_clear(self): xml_data = testlib.minimal_dom_xml() root = ET.fromstring(xml_data) with self.test_env() as xf: xf.save(root) conf = convirt.config.environ.current() self.assertTrue(len(os.listdir(conf.run_dir)), 1) self.assertNotRaises(xf.clear) self.assertEquals(os.listdir(conf.run_dir), [])
ype', 'flags', 'ttl', 'grace', 'cache_key')) CACHE_FETCH_ERR = 'Unable to fetch option cache for %s' CACHE_UPDATE_ERR = 'Unable to update option cache for %s' logger = logging.getLogger('sentry') def _make_cache_key(key): return 'o:%s' % md5_text(key).hexdigest() def _make_cache_value(key, value): now = int(time()) return ( value, now + key.ttl, now + key.ttl + key.grace, ) class OptionsStore(object): """ Abstraction for the Option storage logic that should be driven by the OptionsManager. OptionsStore is gooey and raw. It provides no protection over what goes into the store. It only knows that it's reading/writing to the right place. If using the OptionsStore directly, it's your job to do validation of the data. You should probably go through OptionsManager instead, unless you need raw access to something. """ def __init__(self, cache=None, ttl=None): self.cache = cache self.ttl = ttl self.flush_local_cache() @cached_property def model(self): from sentry.models.option import Option return Option def make_key(self, name, default, type, flags, ttl, grace): return Key(name, default, type, flags, int(ttl), int(grace), _make_cache_key(name)) def get(self, key, silent=False): """ Fetches a value from the options store. """ result = self.get_cache(key, silent=silent) if result is not None: return result result = self.get_store(key, silent=silent) if result is not None: return result # As a last ditch effort, let's hope we have a key # in local cache that's possibly stale return self.get_local_cache(key, force_grace=True)
def get_cache(self, key, silent=False): """ First check agaist our local in-process cache, falling back to the network cache. """ value = self.get_local_cache(key) if value is not None:
return value if self.cache is None: return None cache_key = key.cache_key try: value = self.cache.get(cache_key) except Exception: if not silent: logger.warn(CACHE_FETCH_ERR, key.name, exc_info=True) value = None else: if key.ttl > 0: self._local_cache[cache_key] = _make_cache_value(key, value) return value def get_local_cache(self, key, force_grace=False): """ Attempt to fetch a key out of the local cache. If the key exists, but is beyond expiration, we only return it if grace=True. This forces the key to be returned in a disaster scenario as long as we're still holding onto it. This allows the OptionStore to pave over potential network hiccups by returning a stale value. """ try: value, expires, grace = self._local_cache[key.cache_key] except KeyError: return None now = int(time()) # Key is within normal expiry window, so just return it if now < expires: return value # If we're able to accept within grace window, return it if force_grace and now < grace: return value # Let's clean up values if we're beyond grace. if now > grace: try: del self._local_cache[key.cache_key] except KeyError: # This could only exist in a race condition # where another thread has already deleted this key, # but we'll guard ourselves against it Justin Case. # In this case, it's also possible that another thread # has updated the value at this key, causing us to evict # it prematurely. This isn't ideal, but not terrible # since I don't want to introduce locking to prevent this. # Even if it did happen, the consequence is just another # network hop. pass # If we're outside the grace window, even if we ask for it # in grace, too bad. The value is considered bad. return None def get_store(self, key, silent=False): """ Attempt to fetch value from the database. If successful, also set it back in the cache. Returns None in both cases, if the key doesn't actually exist, or if we errored fetching it. 
NOTE: This behavior should probably be improved to differentiate between a miss vs error, but not worth it now since the value is limited at the moment. """ try: value = self.model.objects.get(key=key.name).value except self.model.DoesNotExist: value = None except Exception as e: if not silent: logger.exception(six.text_type(e)) value = None else: # we only attempt to populate the cache if we were previously # able to successfully talk to the backend # NOTE: There is definitely a race condition here between updating # the store and the cache try: self.set_cache(key, value) except Exception: if not silent: logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True) return value def set(self, key, value): """ Store a value in the option store. Value must get persisted to database first, then attempt caches. If it fails database, the entire operation blows up. If cache fails, we ignore silently since it'll get repaired later by sync_options. A boolean is returned to indicate if the network cache was set successfully. """ assert self.cache is not None, 'cache must be configured before mutating options' self.set_store(key, value) return self.set_cache(key, value) def set_store(self, key, value): create_or_update( model=self.model, key=key.name, values={ 'value': value, 'last_updated': timezone.now(), } ) def set_cache(self, key, value): if self.cache is None: return None cache_key = key.cache_key if key.ttl > 0: self._local_cache[cache_key] = _make_cache_value(key, value) try: self.cache.set(cache_key, value, self.ttl) return True except Exception: logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True) return False def delete(self, key): """ Remove key out of option stores. This operation must succeed on the database first. If database fails, an exception is raised. If database succeeds, caches are then allowed to fail silently. A boolean is returned to indicate if the network deletion succeeds. 
""" assert self.cache is not None, 'cache must be configured before mutating options' self.delete_store(key) return self.delete_cache(key) def delete_store(self, key): self.model.objects.filter(key=key.name).delete() def delete_cache(self, key): cache_key = key.cache_key try: del self._local_cache[cache_key] except KeyError: pass try: self.cache.delete(cache_key) return True except Exception: logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True) return False def clean_local_cache(self): """ Iterate over our local cache items, and remove the keys that are beyond their grace time. """ to_expire = [] now = int(time()) try: for k, (_, _, grace) in six.iteritems(self._local_cache): if now > grace: to_expire.append(k) except RuntimeError: # It's possible for the dictionary to be mutated in an
#!/usr/bin/env python3

"""pgm2jack

Convert a P1 pbm (monochome ascii, a.k.a. "Plain PBM") file to a jack
array (nand2tetris).

One word is 16bits in jack, so image width must be a multiple!

Copyright 2013-2016 Gavin Stewart.

You are required to give attribution to the author (Gavin Stewart) for any
use of this program (GPLv3 Section 7b).

Trying to pass off my code as your own in your Elements of Computing classes
will result in a cursed life of forever buggy software.

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""

import sys
import os
import re
import math


class Parser ():
    """P1 PBM parser.

    Reads the P1 header (magic + resolution), validates that the image
    width is a multiple of 16, then serves the bitmap one '0'/'1'
    character ("bit") at a time via hasMoreBits().
    """

    def __init__ (self, filename):
        # Abort the whole program on any input problem -- this is a
        # command-line tool, so sys.exit() with a message is the error path.
        if not os.path.exists(filename):
            sys.exit('Parser Error: Input file not found: %s' % filename)
        self._fh = open(filename, mode='r')
        self._currentWord = 0        # unused accumulator (kept for compat)
        self._currentBit = 0;        # last bit character shifted off a line
        self._nextLine = None        # next non-empty line (str or list of chars)
        self._nextBit = ''
        self._explodeLines = False   # once True, lines are split into char lists

        # First non-empty line must be the "P1" magic number.
        self.hasMoreLines()
        search = re.search(r'^P1$', self._nextLine)
        if not search:
            sys.exit('Failed to identify P1 type pbm')

        # Second non-empty line is "<width> <height>".
        self.hasMoreLines()
        search = re.search(r'^(\d+)\s+(\d+)$', self._nextLine)
        if not search:
            sys.exit('Failed to identify resolution')
        self.width = int(search.group(1))
        self.height = int(search.group(2))
        if self.width%16 != 0:
            sys.exit('Width is not a multiple of 16')
        # From here on, width is measured in 16-bit words, not pixels.
        self.width = self.width // 16

        # Remaining lines are bitmap data: explode them into single
        # characters so hasMoreBits() can pop them one at a time.
        self._explodeLines = True
        self.hasMoreLines()

    def close(self):
        """Close input file"""
        self._fh.close()

    def hasMoreLines (self) :
        """Advance self._nextLine to the next non-empty data line.

        Strips comments and surrounding whitespace. Returns True if a
        line was found, False at end of file.
        """
        # Find the next non-empty line if one exists.
        line = None
        for line in self._fh:
            line = re.sub(r'#.*$', '', line)   # Remove comments
            line = re.sub(r'^\s+', '', line)   # Remove leading whitespace
            line = re.sub(r'\s+$', '', line)   # Remove trailing whitespace
            if len(line):
                break                          # Break if line not empty
        if line:
            if self._explodeLines:
                # Bitmap phase: store as a list so bits can be pop(0)'d.
                self._nextLine = list(line)
            else:
                self._nextLine = line
            return True
        return False

    def hasMoreBits (self) :
        """Advance self._currentBit to the next bitmap character.

        Returns True if a bit was available, False at end of input.
        """
        # Find the next bit.
        if not len(self._nextLine):
            if not self.hasMoreLines():
                return False
        # Shift first bit from string.
        self._currentBit = self._nextLine.pop(0)
        return True

    def buildWord (self) :
        """buildWord - build a 16bit word MSB to LSB, left to right.

        Returns a string of 16 bits.
        """
        # The caller has already consumed one bit via hasMoreBits(); it
        # becomes the lowest-order position so far. New bits are prepended,
        # so pixels read left-to-right end up MSB-to-LSB.
        word = ''
        word = self._currentBit
        for i in range(1,16):
            if self.hasMoreBits():
                word = self._currentBit + word
            else:
                # Pad short tail with zero bits.
                word = '0' + word
        return word


def formatWord (bits) :
    """Format input bit string as a 16bit 2s complement Jack word.

    Returns formatted string suitable for use as a literal value in Jack.
    """
    val = int(bits, base=2)
    if val > (2**15)-1 :
        # Ensure 2s complement 16bit int.
        # e.g. 65528: 1111111111111000
        #      32768-(65528-32768) == 8
        #      which we negate: -8
        val = -(2**15 - (val - 2**15))
    if val == -32768:
        # A limitation in the Jack compiler prevents the maximum negative number
        # as a literal, so we have to add an operation to get the value.
        return "-32767-1"
    else:
        return "{0}".format(val)


### Main ###

# Usage: <class name> <file.pbm>
if len(sys.argv) < 3:
    sys.exit('Usage: %s <class name> <file.pbm>' % sys.argv[0])

className = sys.argv[1]
infile = sys.argv[2]

filename, ext = os.path.splitext(infile)
if ext != '.pbm':
    sys.exit("Expected file extension .pbm")

# Output file is named after the Jack class being generated.
outfile = className + '.jack'
outfh = open(outfile, 'w')

parser = Parser(infile)

# Initialise data array rounded up to nearest multiple of 16, since we push
# 16 words at a time into bitmap.
data = [0] * int(math.ceil(parser.width * parser.height / 16) * 16);

### Class declaration
# Jack source emitted around the generated data; doubled braces escape
# str.format(). NOTE(review): the "Generated from" line appears redacted
# in this copy -- confirm against upstream before relying on it.
classHeader="""/**
 * Static Image factory class.
 *
 * Requires Image.jack
 *
 * Generated from "(unknown)" by pbm2jack.py
 *
 * Copyright 2013 Gavin Stewart.
 */
class {cn} {{

    /**
     * newImage - returns an Image object containing bitmap data.
     */
    function Image newImage () {{
        var Image i;
        var int width, height;

        let width = {width};
        let height = {height};
        let i = Image.newBitmap(width, height);
"""

classFooter="""
        return i;
    }}
}}
"""

outfh.write(classHeader.format(filename = os.path.basename(infile),
                               cn = className,
                               width = parser.width,
                               height = parser.height))

# Convert the bitmap into formatted 16-bit Jack literals.
index = 0
while parser.hasMoreBits():
    bitWord = parser.buildWord()
    data[index] = formatWord(bitWord);
    index += 1

# Emit the data 16 words per i.push(...) call.
index = 0
while index < len(data):
    outfh.write("        do i.push({0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14},{15});\n".
                format( data[index],
                        data[index + 1],
                        data[index + 2],
                        data[index + 3],
                        data[index + 4],
                        data[index + 5],
                        data[index + 6],
                        data[index + 7],
                        data[index + 8],
                        data[index + 9],
                        data[index + 10],
                        data[index + 11],
                        data[index + 12],
                        data[index + 13],
                        data[index + 14],
                        data[index + 15],
                      )
               )
    index += 16

outfh.write(classFooter.format())

parser.close()
outfh.close()
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Build script for the ``custom_relu_module_setup`` Paddle C++/CUDA extension.

Compiles the custom relu operator sources into a loadable extension,
falling back to a CPU-only build on macOS CI (no GPU available there).
"""

import os

from utils import paddle_includes, extra_compile_args, IS_MAC
from paddle.utils.cpp_extension import CUDAExtension, setup, CppExtension

# Mac-CI don't support GPU
Extension = CppExtension if IS_MAC else CUDAExtension
sources = ['custom_relu_op.cc', 'custom_relu_op_dup.cc']
if not IS_MAC:
    # The .cu kernel is only compiled when a CUDA toolchain is expected.
    sources.append('custom_relu_op.cu')

# custom_relu_op_dup.cc is only used for multi ops test,
# not a new op, if you want to test only one op, remove this
# source file
setup(
    name='custom_relu_module_setup',
    ext_modules=Extension(  # test for not specific name here.
        sources=sources,  # test for multi ops
        include_dirs=paddle_includes,
        extra_compile_args=extra_compile_args))
""" saltrepo ~~~~~~~~ SaltStack Repository Sphinx directives """ def source_read_handler(app, docname, source): if "|repo_primary_branch|" in source[0]: source[0] = source[0].replace( "|repo_primary_branch|", app.config.h
tml_context["repo_primary_branch"] ) def setup(app): app.connect("source-read", source_read_handler) return { "version": "builtin", "parallel_read_safe": True, "parallel_write_safe": True,
}
# Package initializer.

# Submodules exported by ``from <package> import *``.
__all__ = ['pwnedapi', 'utils']

# Convenience re-export of the main API class at package level.
from .pwnedapi import HaveIBeenPwnedApi
ANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # You should have received a copy of the GNU Lesser General # Public License along with this program. If not, see # <http://www.gnu.org/licenses/>. # An outer layer to the pipeline scripts. Depending upon the flags specified # in the command line input, this script will run either the complete / a # subsection of the pipeline. # python -m unittest tests/test_bnet_calculation.py import os import unittest from rabdam.Subroutines.CalculateBDamage import rabdam class TestClass(unittest.TestCase): def test_bnet_values(self): """ Checks that RABDAM calculates expected Bnet values for a selection of PDB entries """ import os import requests import shutil import pandas as pd exp_bnet_dict = {'2O2X': 3.300580966, '4EZF': 3.193514624, '4MWU': 3.185476349, '4MOV': 3.144130191, '3NBM': 3.141821366, '1GW1': 3.105626889, '4EWE': 3.08241654, '3F1P': 3.060628186, '3IV0': 3.054440912, '4ZWV': 3.017330004, '1T2I': 3.004830448, '3LX3': 2.962424378, '5P4N': 2.916582486, '5MAA': 2.91219352, '1E73': 2.850203561, '1YKI': 2.797739814, '4WA4': 2.720540993, '3V2J': 2.669599635, '3CUI': 2.666605946, '4XLA': 2.624366813, '4DUK': 2.854175949, '3V38': 2.500984382, '1VJF': 2.496374854, '5IO2': 2.467587911, '5CM7': 2.44869046, '2EHU': 2.448290431, '5JOW': 2.439619791, '2C54': 2.379224017, '4GZK': 2.349526276, '2NUM': 2.326904729, '5FYO': 2.319618192, '4ODK': 2.304354685, '6EV4': 2.302433369, '5P5U': 2.288966997, '3VHV': 2.285877338, '4JCK': 2.27150332, '5EKM': 2.258574341, '3H4O': 2.231817033, '5JIG': 2.247664542, '2H5S': 2.206850226, '4M5I': 2.169405117, '1Y59': 2.138787261, '4C45': 2.131256276, '5F90': 2.11287042, '4NI3': 2.088735516, '4Z6N': 2.083743584, '5M2G': 2.06566475, '5ER6': 2.05707889, '4R0X': 2.006996308, '5LLG': 1.981501196, '1FCX': 1.976990791, '5M90': 1.96542442, '3NJK': 1.955577757, '5CWG': 1.949818624, '2P7O': 1.921138477, '5SZC': 1.962633169, '2I0K': 1.901555841, '4RDK': 
1.886900766, '5MA0': 1.877853781, '4C1E': 1.877575448, '5EJ3': 1.875439995, '2WUG': 1.87334953, '4MPY': 1.842338963, '4OTZ': 1.835716553, '4IOO': 1.828349113, '4Z6O': 1.800528596, '4ZOT': 1.799163077, '5PHB': 1.783879628, '3UJC': 1.747894856, '4FR8': 1.738876799, '5PH8': 1.736825591, '5UPM': 1.736663507, '3MWX': 1.733132746, '4KDX': 1.729650659, '3WH5': 1.717975404, '4P04': 1.714107945, '5Y90': 1.695283923, '4H31': 1.674014779, '5HJE': 1.662869176, '4YKK': 1.653894709, '1Q0F': 1.646880018, '5JP6': 1.629246723, '1X7Y': 1.618817315, '4ZC8': 1.60606196, '5EPE': 1.604407869, '4ZS9': 1.582398487, '5VNX': 1.543824945, '5IHV': 1.542271159, '5J90': 1.526469901, '4K6W': 1.520316883, '3PBC': 1.512738972, '5CMB': 1.504620762, '4PSC': 1.491796934, '5UPN': 1.477252783, '4XLZ': 1.473298738, '4XGY': 1.465885549, '5M4G': 1.400219288, '3A54': 1.319587779} if not os.path.isdir('tests/temp_files/'): os.mkdir('tests/temp_files/') for code, exp_bnet in exp_bnet_dict.items(): # Checks cif file cif_text = requests.get('https://files.rcsb.org/view/%s.cif' % code) with open('tests/temp_files/%s.cif' % code, 'w') as f: f.write(cif_text.text) rabdam_run = rabdam( pathToInput='%s/tests/temp_files/%s.cif' % (os.getcwd(), code), outputDir='%s/tests/temp_files/' % os.getcwd(), batchRun=True, overwrite=True, PDT=7, windowSize=0.02, protOrNA='protein', HETATM=False, removeAtoms=[], addAtoms=[], highlightAtoms=[], createOrigpdb=False, createAUpdb=False, createUCpdb=False, createAUCpdb=False, createTApdb=False ) rabdam_run.rabdam_dataframe(test=True) rabdam_run.rabdam_analysis( output_options=['csv', 'pdb', 'cif', 'kde', 'bnet', 'summary'] ) bnet_df = pd.read_pickle('tests/temp_files/Logfiles/Bnet_protein.pkl') act_bnet_cif = bnet_df['Bnet'].tolist()[-1] self.assertEqual(round(exp_bnet, 7), round(act_bnet_cif, 7)) os.remove('tests/temp_files/%s.cif' % code) os.remove('tests/temp_files/Logfiles/Bnet_protein.pkl') # Checks PDB file pdb_text = requests.get('https://files.rcsb.org/view/%s.pdb' % code) 
with open('tests/temp_files/%s.pdb' % code, 'w') as f: f.write(pdb_text.text) rabdam_run = rabdam( pathToInput='%s/tests/temp_files/%s.pdb' % (os.getcwd(), code), outputDir='%s/tests/te
mp_files/' % os.getcwd(), batchRun=True, overwrite=True, PDT=7, windowSize=0.02, protOrNA='protein', HETATM=False, removeAtoms=[], addAtoms=[], highlightAtoms=[], createOrigpdb=False, createAUpdb=False, createUCpdb=False,
createAUCpdb=False, createTApdb=False ) rabdam_run.rabdam_dataframe(test=True) rabdam_run.rabdam_analysis( output_options=['csv', 'pdb', 'cif', 'kde', 'bnet', 'summary'] ) bnet_df = pd.read_pickle( '%s/tests/temp_files/Logfiles/Bnet_protein.pkl' % os.getcwd() ) act_bnet_pdb = bnet_df['Bnet'].tolist()[-1] self.assertEqual(round(exp_bnet, 7
04a])) x_s05 = np.ma.masked_where(z_s05 > threshold, np.array(gz['z_best'][smooth05a])) y_s05 = np.ma.masked_where(z_s05 > threshold, np.array(gz['V_rest'][smooth05a])) x_s06 = np.ma.masked_where(z_s06 > threshold, np.array(gz['z_best'][smooth06a])) y_s06 = np.ma.masked_where(z_s06 > threshold, np.array(gz['V_rest'][smooth06a])) x_s07 = np.ma.masked_where(z_s07 > threshold, np.array(gz['z_best'][smooth07a])) y_s07 = np.ma.masked_where(z_s07 > threshold, np.array(gz['V_rest'][smooth07a])) x_s08 = np.ma.masked_where(z_s08 > threshold, np.array(gz['z_best'][smooth08a])) y_s08 = np.ma.masked_where(z_s08 > threshold, np.array(gz['V_rest'][smooth08a])) # featured - Vz featured_ptf04 = np.vstack([np.array(gz['z_best'][featured04a]), np.array(gz['V_rest'][featured04a])]) featured_ptf05 = np.vstack([np.array(gz['z_best'][featured05a]), np.array(gz['V_rest'][featured05a])]) featured_ptf06 = np.vstack([np.array(gz['z_best'][featured06a]), np.array(gz['V_rest'][featured06a])]) featured_ptf07 = np.vstack([np.array(gz['z_best'][featured07a]), np.array(gz['V_rest'][featured07a])]) kde_f04 = gaussian_kde(featured_ptf04, bw_method=kde_f04.scotts_factor()*.8) kde_f05 = gaussian_kde(featured_ptf05, bw_method=kde_f05.scotts_factor()*.8) kde_f06 = gaussian_kde(featured_ptf06, bw_method=kde_f06.scotts_factor()*.8) kde_f07 = gaussian_kde(featured_ptf07, bw_method=kde_f07.scotts_factor()*.8) z_f04 = kde_f04(featured_ptf04) z_f05 = kde_f05(featured_ptf05) z_f06 = kde_f06(featured_ptf06) z_f07 = kde_f07(featured_ptf07) # mask points above density threshold x_f04 = np.ma.masked_where(z_f04 > threshold, np.array(gz['z_best'][featured04a])) y_f04 = np.ma.masked_where(z_f04 > threshold, np.array(gz['V_rest'][featured04a])) x_f05 = np.ma.masked_where(z_f05 > threshold, np.array(gz['z_best'][featured05a])) y_f05 = np.ma.masked_where(z_f05 > threshold, np.array(gz['V_rest'][featured05a])) x_f06 = np.ma.masked_where(z_f06 > threshold, np.array(gz['z_best'][featured06a])) y_f06 = 
np.ma.masked_where(z_f06 > threshold, np.array(gz['V_rest'][featured06a])) x_f07 = np.ma.masked_where(z_f07 > threshold, np.array(gz['z_best'][featured07a])) y_f07 = np.ma.masked_where(z_f07 > threshold, np.array(gz['V_rest'][featured07a])) colall = '#AAAAAA' col04 = '#006e35' col05 = '#4455CC' col06 = '#30a0ca' #col07 = '#00ccaE' col07 = "#ac0e30" colclean = "#ac0e30" sty04 = 'dashed' sty05 = 'dotted' sty06 = 'dashdot' sty07 = 'solid' styclean = 'solid' fig = plt.figure(figsize=(10, 4)) gs = gridspec.GridSpec(1,2) #gs.update(hspace=0.25, wspace=0.001) zaxis = (0.0, 4.) Vaxis = (-24.2, -10.) ax1 = fig.add_subplot(gs[0,0]) ax1.set_xlim(zaxis) ax1.set_ylim(Vaxis) ax1.invert_yaxis() # it should be ax1 not plt below but if I do that I can't get the colorbar to work #plt.hexbin(gz['z_best'][Vzlims], gz['V_rest'][Vzlims], gridsize=25, bins='log', cmap='Greys', label='_nolegend_') # plot unmasked points ax1.scatter(x, y, c=colall, marker='.', edgecolor='None') # get bounds from axes # this is a bit silly as we've already defined them above, but just in case # you need this for some other purpose later you'll maybe find this in a search xmin, xmax = ax1.get_xlim() ymin, ymax = ax1.get_ylim() #xmin = -.2 # prepare grid for density map xedges = np.linspace(xmin, xmax, bins) yedges = np.linspace(ymin, ymax, bins) xx, yy = np.meshgrid(xedges, yedges) gridpoints = np.array([xx.ravel(), yy.ravel()]) # compute density maps zz = np.reshape(kde_all(gridpoints), xx.shape) zz_s04 = np.reshape(kde_s04(gridpoints), xx.shape) zz_s05 = np.reshape(kde_s05(gridpoints), xx.shape) zz_s06 = np.reshape(kde_s06(gridpoints), xx.shape) zz_s07 = np.reshape(kde_s07(gridpoints), xx.shape) zz_s08 = np.reshape(kde_s08(gridpoints), xx.shape) # plot density map im1 = ax1.imshow(zz, cmap='Greys', interpolation='nearest', origin='lower', extent=[xmin, xmax, ymin, ymax], aspect='auto') ax1.contour(xx, yy, zz, levels=[threshold_all], colors=colall, linestyles='solid', label = '_nolegend_') # plot threshold 
contour #ax1.contour(xx, yy, zz_s04, levels=[threshold], colors=col04, linestyles=sty04, label = '$f_{\\rm smooth} \\geq 0.4$', lineweights=2) cs04 = ax1.contour(xx, yy, zz_s04, levels=[threshold], colors=col04, linestyles=sty04, label = '$f_{\\rm smooth} \\geq 0.4$', lineweights=2) cs05 = ax1.contour(xx, yy, zz_s05, levels=[threshold], colors=col05, linestyles=sty05, label = '$f_{\\rm smooth} \\geq 0.5$', lineweights=2) cs06 = ax1.contour(xx, yy, zz_s06, levels=[threshold], colors=col06, linestyles=sty06, label = '$f_{\\rm smooth} \\geq 0.6$', lineweights=2) cs07 = ax1.contour(xx, yy, zz_s07, levels=[threshold], colors=col07, linestyles=sty07, label = '$f_{\\rm smooth} \\geq 0.7$', lineweights=2) #cs08 = ax1.contour(xx, yy, zz_s08, levels=[threshold], colors=colclean, linestyles=styclean, label = '$f_{\\rm smooth} \\geq 0.8$', lineweights=3) cslabels = ['$f_{\\rm smooth} \\geq 0.4$', '$f_{\\rm smooth} \\geq 0.5$', '$f_{\\rm smooth} \\geq 0.6$', '$f_{\\rm smooth} \\geq 0.7$'] cs04.collections[0].set_label(cslabels[0]) cs05.
collections[0].set_label(cslabels[1]) cs06.collections[0].set_label(cslabels[2]) cs07.collections[0].set_label(cslabels[3]) ax1.set_xlabel('Redshift $z$') ax1.set_ylabel('Rest-frame $V$ absolute magnitude') ax1.legend(loc='lower right', frameon=True) cb1 = plt.colorbar(im1) cb1.set_label("log(N)") ax2 = fig.add_subplot(gs[0,1]) plt.xlim(zaxis) plt.ylim(Vaxis) ax2.invert_yaxis() # it should be ax1 not plt below but if I do that I can't get
the colorbar to work #plt.hexbin(gz['z_best'][Vzlims], gz['V_rest'][Vzlims], gridsize=25, bins='log', cmap='Greys', label='_nolegend_') # plot density map im2 = ax2.imshow(zz, cmap='Greys', interpolation='nearest', origin='lower', extent=[xmin, xmax, ymin, ymax], aspect='auto') ax2.contour(xx, yy, zz, levels=[threshold_all], colors=colall, linestyles='solid', label = '_nolegend_') # plot unmasked points ax2.scatter(x, y, c='#AAAAAA', marker='.', edgecolor='None') xmin, xmax = ax2.get_xlim() ymin, ymax = ax2.get_ylim() #xmin = -.2 # prepare grid for density map xedges = np.linspace(xmin, xmax, bins) yedges = np.linspace(ymin, ymax, bins) xx, yy = np.meshgrid(xedges, yedges) gridpoints = np.array([xx.ravel(), yy.ravel()]) # compute density maps zz_f04 = np.reshape(kde_f04(gridpoints), xx.shape) zz_f05 = np.reshape(kde_f05(gridpoints), xx.shape) zz_f06 = np.reshape(kde_f06(gridpoints), xx.shape) zz_f07 = np.reshape(kde_f07(gridpoints), xx.shape) # plot density map #im = ax1.imshow(zz, cmap='CMRmap_r', interpolation='nearest', origin='lower', extent=[xmin, xmax, ymin, ymax]) # plot threshold contour cf04 = ax2.contour(xx, yy, zz_f04, levels=[threshold], colors=col04, linestyles=sty04, label = '$f_{\\rm featured} \\geq 0.4$', lineweights=2) cf05 = ax2.contour(xx, yy, zz_f05, levels=[threshold], colors=col05, linestyles=sty05, label = '$f_{\\rm featured} \\geq 0.5$', lineweights=2) cf06 = ax2.contour(xx, yy, zz_f06, levels=[threshold], colors=col06, linestyles=sty06, label = '$f_{\\rm featured} \\geq 0.6$', lineweights=2) cf07 = ax2.contour(xx, yy, zz_f07, levels=[threshold], colors=col07, linestyles=sty07, label = '$f_{\\rm featured} \\geq 0.7$', lineweights=3) cflabels = ['$f_{\\rm featured} \\geq 0.4$', '$f_{\\rm featured} \\geq 0.5$', '$f_{\\rm featured} \\geq 0.6$', '$f_{\\rm featured} \\geq 0.7$'] cf04.collections[0].set_label(cflabels[0]) cf05.collections[0].set_label(cflabels[1]) cf06.collections[0].set_label(cflabels[2]) 
cf07.collections[0].set_label(cflabels[3]) ax2.set_xlabel('Redshift $z$') #ax2.ylabel('Rest-frame $V$ absolute magnitude') ax2.legend(loc='lower right', frameon=True) cb2 = plt.colorbar(im2) cb2.set_label("log(N)") plt.tight_layout() fout = 'V_z_thresholds_smooth_featured' plt.savefig('%s.png' % fout, facecolor='None', edgecolor='None') plt.savefig('%s.eps' % fout, facecolor='None', edgecolor='None') plt.close() plt.cla() plt.clf() ##################################################### ##################################################### ##################################################### ##################################################### ##################################################### #bo
from streamline import RouteBase


class MyRoute(RouteBase):
    # Example route served at '/' responding to GET.
    path = '/'

    def get(self):
        """Handle GET: set a demo response header and return the body."""
        self.response.headers['foo'] = 'bar'
        return 'Hello world!'


class MyOtherRoute(RouteBase):
    # Example route at '/other' handling several HTTP verbs.
    path = '/other'

    def post(self):
        """Handle POST."""
        return 'Posted'

    def delete(self):
        """Handle DELETE."""
        return 'Deleted'

    def patch(self):
        """Handle PATCH."""
        return 'Patched'


def main():
    """Register both routes and start the server.

    NOTE(review): ``MyRoute.bottle`` is presumably the shared bottle app
    object exposed on RouteBase subclasses -- confirm in streamline docs.
    """
    MyRoute.route()
    MyOtherRoute.route()
    MyRoute.bottle.run()


if __name__ == '__main__':
    main()
import json
import os

from errata_tool import ErrataConnector, Erratum
from errata_tool.products import ProductList
import requests
import pytest

# Directory layout: static HTTP fixtures live under tests/fixtures/,
# mirroring the errata.devel.redhat.com URL structure.
TESTS_DIR = os.path.dirname(os.path.abspath(__file__))
FIXTURES_DIR = os.path.join(TESTS_DIR, 'fixtures')


class MockResponse(object):
    """Stand-in for a ``requests`` Response backed by on-disk fixtures.

    ``url`` is assigned by RequestRecorder after construction; fixture
    lookups map the URL onto a file path under FIXTURES_DIR.
    """

    status_code = 200
    encoding = 'utf-8'
    headers = {'content-type': 'application/json; charset=utf-8'}

    def raise_for_status(self):
        # Always "succeeds" -- fixtures model 200 responses only.
        pass

    @property
    def _fixture(self):
        """ Return path to our static fixture file. """
        return self.url.replace('https://errata.devel.redhat.com/',
                                os.path.join(FIXTURES_DIR,
                                             'errata.devel.redhat.com/'))

    def json(self):
        """Parse the fixture file as JSON, hinting how to create it if missing."""
        try:
            with open(self._fixture) as fp:
                return json.load(fp)
        except IOError:
            # Point the developer at the helper script that records fixtures.
            print('Try ./new-fixture.sh %s' % self.url)
            raise

    @property
    def text(self):
        """ Return contents of our static fixture file. """
        try:
            with open(self._fixture) as fp:
                return fp.read()
        except IOError:
            print('Try ./new-fixture.sh %s' % self.url)
            raise


class RequestRecorder(object):
    """ Record args to requests.get() or requests.post() """
    def __call__(self, url, **kwargs):
        """ mocking requests.get() or requests.post() """
        # Remember the last response/kwargs so tests can inspect them.
        self.response = MockResponse()
        self.response.url = url
        self.kwargs = kwargs
        return self.response


@pytest.fixture
def mock_get():
    # Fresh recorder per test to substitute for requests.get.
    return RequestRecorder()


@pytest.fixture
def mock_post():
    # Fresh recorder per test to substitute for requests.post.
    return RequestRecorder()


@pytest.fixture
def mock_put():
    # Fresh recorder per test to substitute for requests.put.
    return RequestRecorder()


@pytest.fixture
def advisory(monkeypatch, mock_get):
    """An Erratum built entirely from fixtures (no network, no auth)."""
    # Deleting Session.request guarantees any un-mocked HTTP call fails loudly.
    monkeypatch.delattr('requests.sessions.Session.request')
    monkeypatch.setattr(ErrataConnector, '_auth', None)
    monkeypatch.setattr(requests, 'get', mock_get)
    return Erratum(errata_id=26175)


@pytest.fixture
def rhsa(monkeypatch, mock_get):
    """ Like the advisory() fixture above, but an RHSA. """
    monkeypatch.delattr('requests.sessions.Session.request')
    monkeypatch.setattr(ErrataConnector, '_auth', None)
    monkeypatch.setattr(requests, 'get', mock_get)
    return Erratum(errata_id=25856)


@pytest.fixture
def productlist(monkeypatch, mock_get):
    """A ProductList built from fixtures (no network, no auth)."""
    monkeypatch.delattr('requests.sessions.Session.request')
    monkeypatch.setattr(ErrataConnector, '_auth', None)
    monkeypatch.setattr(requests, 'get', mock_get)
    return ProductList()
# -*- encoding: utf-8 -*-

'''
Given an array of integers, every element appears three times except for one. Find that single one.

Note:
Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory?
'''


class Solution(object):
    def singleNumber(self, nums):
        """
        :type nums: List[int]
        :rtype: int

        Return the element that does not appear three times.

        Uses the classic ones/twos bit trick: for each bit position we
        count occurrences modulo 3 with two accumulators, so the element
        seen once is what remains in ``ones``. This meets the problem's
        O(n) time / O(1) extra-space requirement (the previous version
        kept an O(n) counting dict).
        """
        if not nums:
            # Preserve the old behavior of returning None on empty input.
            return None
        ones = 0  # bits currently seen a number of times == 1 (mod 3)
        twos = 0  # bits currently seen a number of times == 2 (mod 3)
        for num in nums:
            # A bit moves ones -> twos -> cleared as it is seen 1, 2, 3 times.
            ones = (ones ^ num) & ~twos
            twos = (twos ^ num) & ~ones
        return ones
'Inform6Lexer': ('pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()), 'Inform6TemplateLexer': ('pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()), 'Inform7Lexer': ('pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()), 'IniLexer': ('pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg'), ('text/x-ini',)), 'IoLexer': ('pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)), 'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)), 'IrcLogsLexer': ('pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)), 'IsabelleLexer': ('pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)), 'JadeLexer': ('pygments.lexers.html', 'Jade', ('jade',), ('*.jade',), ('text/x-jade',)), 'JagsLexer': ('pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()), 'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()), 'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)), 'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')), 'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')), 'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')), 'JavascriptLexer': 
('pygments.lexers.javascript', 'JavaScript', ('js', 'javascript'), ('*.js', '*.jsm'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')), 'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')), 'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')), 'JsonLdLexer': ('pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)), 'JsonLexer': ('pygments.lexers.data', 'JSON', ('json',), ('*.json',), ('application/json',)), 'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)), 'JuliaConsoleLexer': ('pygments.lexers.julia', 'Julia console', ('jlcon',), (), ()), 'JuliaLexer': ('pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')), 'KalLexer': ('pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')), 'KconfigLexer': ('pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)), 'KokaLexer': ('pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)), 'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt',), ('text/x-kotlin',)), 'LSLLexer': ('pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)), 'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)), 'LassoHtmlLexer': ('pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')), 'LassoJavascriptLexer': 
('pygments.lexers.templates', 'JavaScript+Lasso', ('js+lasso', 'javascript+lasso'), (), ('application/x-javascript+lasso', 'text/x-java
script+lasso', 'text/javascript+lasso')), 'LassoLexer': ('pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)), 'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',
)), 'LeanLexer': ('pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)), 'LighttpdConfLexer': ('pygments.lexers.configs', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)), 'LimboLexer': ('pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)), 'LiquidLexer': ('pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()), 'LiterateAgdaLexer': ('pygments.lexers.haskell', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)), 'LiterateCryptolLexer': ('pygments.lexers.haskell', 'Literate Cryptol', ('lcry', 'literate-cryptol', 'lcryptol'), ('*.lcry',), ('text/x-literate-cryptol',)), 'LiterateHaskellLexer': ('pygments.lexers.haskell', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), ('text/x-literate-haskell',)), 'LiterateIdrisLexer': ('pygments.lexers.haskell', 'Literate Idris', ('lidr', 'literate-idris', 'lidris'), ('*.lidr',), ('text/x-literate-idris',)), 'LiveScriptLexer': ('pygments.lexers.javascript', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)), 'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)), 'LogosLexer': ('pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), 'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)), 'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')), 'MOOCodeLexer': ('pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)), 'MakefileLexer': ('pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)), 'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)), 'MakoHtmlLexer': 
('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)), 'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')), 'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)), 'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)), 'MaqlLexer': ('pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')), 'MaskLexer': ('pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)), 'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)), 'MathematicaLexer': ('pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')), 'MatlabLexer': ('pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)), 'MatlabSessionLexer': ('pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()), 'MiniDLexer': ('pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)), 'ModelicaLexer': ('pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)), 'Modula2Lexer': ('pygments.lexers.modula2
import subprocess
import os
import errno


def download_file(url, local_fname=None, force_write=False):
    """Download ``url`` to a local file and return the local path.

    Parameters
    ----------
    url : str
        URL to fetch.
    local_fname : str, optional
        Destination path; defaults to the last path component of ``url``.
    force_write : bool, optional
        Re-download even when the destination already exists.

    Returns
    -------
    str
        Path of the downloaded (or already cached) file.

    Raises
    ------
    IOError
        If the server does not answer with HTTP 200.
    """
    if local_fname is None:
        local_fname = url.split('/')[-1]
    if not force_write and os.path.exists(local_fname):
        # Cache hit: nothing to download.
        return local_fname

    # requests is not installed by default; import lazily and only when a
    # real download is needed (the cached path above needs no network stack).
    import requests

    dir_name = os.path.dirname(local_fname)
    if dir_name != "" and not os.path.exists(dir_name):
        try:
            # Create the directory; tolerate a concurrent creator.
            os.makedirs(dir_name)
        except OSError as exc:
            if exc.errno != errno.EEXIST:
                raise

    r = requests.get(url, stream=True)
    if r.status_code != 200:
        # Raise instead of assert: asserts disappear under ``python -O``.
        raise IOError("failed to open %s (HTTP %d)" % (url, r.status_code))
    try:
        with open(local_fname, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)
    finally:
        # Release the streaming connection back to the pool.
        r.close()
    return local_fname


def get_gpus():
    """Return a range over the GPU indices reported by ``nvidia-smi -L``.

    Returns an empty list when nvidia-smi is not installed / not runnable.
    """
    try:
        # ``smi_output`` instead of the original ``re``, which shadowed
        # the ``re`` module name.
        smi_output = subprocess.check_output(["nvidia-smi", "-L"],
                                             universal_newlines=True)
    except OSError:
        # nvidia-smi binary not present -> no GPUs visible.
        return []
    # nvidia-smi lists one "GPU n: ..." line per device.
    return range(len([line for line in smi_output.split('\n') if 'GPU' in line]))
#!/usr/bin/env python
#coding=utf-8
from toughradius.radiusd.plugins import error_auth
from toughradius.radiusd import utils


def process(req=None, resp=None, user=None, radiusd=None, **kwargs):
    """Authorize a RADIUS request against the user store.

    Accepts whitelisted MAC addresses outright, rejects unknown users,
    optionally verifies the password, and maps the account status onto
    the reply. Returns the (possibly modified) response object.
    """
    datastore = radiusd.store

    # Whitelisted MACs are accepted without any further checks.
    if datastore.is_white_roster(req.get_mac_addr()):
        return resp

    # Unknown account -> authentication failure.
    if not user:
        return error_auth(resp, 'user %s not exists' % req.get_user_name())

    # Password verification, gated by the radiusd_bypass parameter.
    if datastore.get_param("radiusd_bypass") == '1' \
            and not req.is_valid_pwd(utils.decrypt(user['password'])):
        return error_auth(resp, 'user password not match')

    status = user['status']
    if status == 4:
        # Expired account: steer it into the dedicated address pool.
        resp['Framed-Pool'] = datastore.get_param("expire_addrpool")
        return resp
    if status in (0, 2, 3):
        return error_auth(resp, 'user status not ok')

    return resp
from gooey.gui.lang.i18n import _ from gooey.gui.processor import ProcessController from gooey.gui.util.wx_util import transactUI from gooey.gui.components import modals from gooey.gui import seeder class GooeyApplication(wx.Frame): """ Main window for Gooey. """ def __init__(self, buildSpec, *args, **kwargs): super(GooeyApplication, self).__init__(None, *args, **kwargs) self._state = {} self.buildSpec = buildSpec self.header = FrameHeader(self, buildSpec) self.configs = self.buildConfigPanels(self) self.navbar = self.buildNavigation() self.footer = Footer(self, buildSpec) self.console = Console(self, buildSpec) self.layoutComponent() self.clientRunner = ProcessController( self.buildSpec.get('progress_regex'), self.buildSpec.get('progress_expr'), self.buildSpec.get('encoding') ) pub.subscribe(events.WINDOW_START, self.onStart) pub.subscribe(events.WINDOW_RESTART, self.onStart) pub.subscribe(events.WINDOW_STOP, self.onStopExecution) pub.subscribe(events.WINDOW_CLOSE, self.onClose) pub.subscribe(events.WINDOW_CANCEL, self.onCancel) pub.subscribe(events.WINDOW_EDIT, self.onEdit) pub.subscribe(events.CONSOLE_UPDATE, self.console.logOutput) pub.subscribe(events.EXECUTION_COMPLETE, self.onComplete) pub.subscribe(events.PROGRESS_UPDATE, self.footer.updateProgressBar) # Top level wx close event self.Bind(wx.EVT_CLOSE, self.onClose) if self.buildSpec['poll_external_updates']: self.fetchExternalUpdates() if self.buildSpec.get('auto_start', False): self.onStart() def onStart(self, *args, **kwarg): """ Verify user input and kick off the client's program if valid """ with transactUI(self): config = self.navbar.getActiveConfig() config.resetErrors() if config.isValid(): self.clientRunner.run(self.buildCliString()) self.showConsole() else: config.displayErrors() self.Layout() def onEdit(self): """Return the user to the settings screen for further editing""" with transactUI(self): if self.buildSpec['poll_external_updates']: self.fetchExternalUpdates() self.showSettings() def 
buildCliString(self): """ Collect all of the required information from the config screen and build a CLI string which can be used to invoke the client program """ config = self.navbar.getActiveConfig() group = self.buildSpec['widgets'][self.navbar.getSelectedGroup()] positional = config.getPositionalArgs() optional = config.getOptionalArgs() print(cli.buildCliString( self.buildSpec['target'], group['command'], positional, optional )) return cli.buildCliString( self.buildSpec['target'], group['command'], positional, optional ) def onComplete(self, *args, **kwargs): """ Display the appropriate screen based on the success/fail of the host program """ with transactUI(self): if self.clientRunner.was_success(): if self.buildSpec.get('return_to_config', False): self.showSettings() else: self.showSuccess() if self.buildSpec.get('show_success_modal', True): wx.CallAfter(modals.showSuccess) else: if self.clientRunner.wasForcefullyStopped: self.showForceStopped() else: self.showError() wx.CallAfter(modals.showFailure) def onStopExecution(self): """Displays a scary message and then force-quits the executing client code if the user accepts""" if self.buildSpec['show_stop_warning'] and modals.confirmForceStop(): self.clientRunner.stop() def fetchExternalUpdates(self): """ !Experimental! Calls out to the client code requesting seed values to use in the UI !Experimental! 
""" seeds = seeder.fetchDynamicProperties( self.buildSpec['target'], self.buildSpec['encoding'] ) for config in self.configs: config.seedUI(seeds) def onCancel(self): """Close the program after confirming""" if modals.confirmExit(): self.onClose() def onClose(self, *args, **kwargs): """Cleanup the top level WxFrame and shutdown the process""" self.Destroy() sys.exit() def layoutComponent(self): sizer = wx.BoxSizer(wx.VERTICAL) sizer.Add(self.header, 0, wx.EXPAND) sizer.Add(wx_util.horizontal_rule(self), 0, wx.EXPAND) sizer.Add(self.navbar, 1, wx.EXPAND) sizer.Add(self.console, 1, wx.EXPAND) sizer.Add(wx_util.horizontal_rule(self), 0, wx.EXPAND) sizer.Add(self.footer, 0, wx.EXPAND) self.SetMinSize((400, 300)) self.SetSize(self.buildSpec['default_size']) self.SetSizer(sizer) self.console.Hide() self.Layout() self.SetIcon(wx.Icon(self.buildSpec['images']['programIcon'], wx.BIT
MAP_TYPE_ICO)) def buildNavigation(self): """ Chooses the appropriate layout navigation component based on user prefs """ if self.buildSpec['navigation'] == constants.TABBED: navigation = Tabbar(self, self.buildSpec, self.configs) else: navigation = Sidebar(self, self.buildSpec, self.configs) if self.buildSpec['navigation'] == constants.HIDDE
N: navigation.Hide() return navigation def buildConfigPanels(self, parent): page_class = TabbedConfigPage if self.buildSpec['tabbed_groups'] else ConfigPage return [page_class(parent, widgets) for widgets in self.buildSpec['widgets'].values()] def showSettings(self): self.navbar.Show(True) self.console.Show(False) self.header.setImage('settings_img') self.header.setTitle(_("settings_title")) self.header.setSubtitle(self.buildSpec['program_description']) self.footer.showButtons('cancel_button', 'start_button') self.footer.progress_bar.Show(False) def showConsole(self): self.navbar.Show(False) self.console.Show(True) self.header.setImage('running_img') self.header.setTitle(_("running_title")) self.header.setSubtitle(_('running_msg')) self.footer.showButtons('stop_button') self.footer.progress_bar.Show(True) if not self.buildSpec['progress_regex']: self.footer.progress_bar.Pulse() def showComplete(self): self.navbar.Show(False) self.console.Show(True) self.footer.showButtons('edit_button', 'restart_button', 'close_button') self.footer.progress_bar.Show(False) def showSuccess(self): self.showComplete() self.header.setImage('check_mark') self.header.setTitle(_('finished_title')) self.header.setSubtitle(_('finished_msg')) self.Layout() def showError(self): self.showComplete() self.header.setImage('error_symbol') self.header.setTitle(_('finished_title')) self.header.setSubtitle(_('finished_error')) def showForceStopped(self): self.showComplete() if self.buildSpec.get('force_stop_is_error', True): self.showError() else: self.showSuccess() self.header.setSubtitle
def getLocations(listrep, checkFunc):
    """Return the list of (row, col) pairs whose cell satisfies checkFunc.

    Parameters
    ----------
    listrep : list of list
        Grid of values; assumed rectangular — every row is scanned over
        the width of the first row, matching the original behaviour.
    checkFunc : callable
        Predicate applied to each cell value.

    Returns
    -------
    list of tuple
        All (i, j) index pairs, in row-major order, where
        ``checkFunc(listrep[i][j])`` is truthy.
    """
    # A single comprehension replaces the old build-all-then-filter pair of
    # loops. It also removes the Python-2-only tuple-unpacking lambda
    # ``lambda (i, j): ...``, which is a SyntaxError on Python 3, and
    # returns a concrete list (what py2 ``filter`` produced).
    return [(i, j)
            for i in range(len(listrep))
            for j in range(len(listrep[0]))
            if checkFunc(listrep[i][j])]
# coding: utf-8 from .base import Base from .config_auth import ConfigAuth from .config import Config from .user import User from .resource import Resource from .common import VisibilityFlags from .meta import ItemMeta from .meta import Pa
geMeta f
rom .story import Story from .module_config import ModuleConfig
# Outspline - A highly modular and extensible outliner.
# Copyright (C) 2011-2014 Dario Giovannetti <dev@dariogiovannetti.net>
#
# This file is part of Outspline.
#
# Outspline is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Outspline is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Outspline. If not, see <http://www.gnu.org/licenses/>.

# Metadata constants for the alarms extension (presumably consumed by
# Outspline's component/extension loader — confirm against the loader code).
authors = ("Dario Giovannetti <dev@dariogiovannetti.net>", )
version = "1.3"
description = "Adds the backend for managing alarm events."
website = "https://kynikos.github.io/outspline/"
# Flag indicating this extension touches the database.
affects_database = True
# Tables this extension creates/owns.
provides_tables = ("AlarmsProperties", "Alarms", "CopyAlarms", "AlarmsOffLog")
# Required components as (name, minimum-version) pairs.
dependencies = (("core", 4), ("extensions.organism", 2),
                ("extensions.organism_timer", 1))
# Components used only when present.
optional_dependencies = (("extensions.copypaste", 2), )
# Database dependency group: component versions that share this schema,
# including this extension itself.
database_dependency_group_1 = (("core", 4), ("extensions.organism", 2),
                               ("extensions.organism_timer", 1),
                               ("extensions.organism_alarms", 1))
# -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright 2012 Camptocamp SA
#    Copyright 2012 Endian Solutions BV
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv.orm import Model
from openerp.osv import fields
import decimal_precision as dp


class product_product(Model):
    """Extends product.product with a BoM-aware cost price that includes
    a per-product fixed surcharge."""

    _inherit = 'product.product'

    def _cost_price(self, cr, uid, ids, field_name, arg, context=None):
        """Function-field computation: purchase/BoM cost plus the fixed
        cost price, returned as a dict keyed by product id."""
        if context is None:
            context = {}
        uom = context.get('product_uom')
        bom_properties = context.get('properties')
        # Base cost first (standard price, or summed component cost when
        # the product has a bill of materials)...
        costs = self._compute_purchase_price(cr, uid, ids, uom,
                                             bom_properties, context=context)
        # ...then add each product's fixed surcharge on top.
        for product in self.browse(cr, uid, ids, context=context):
            costs[product.id] += product.fixed_cost_price
        return costs

    _columns = {
        'fixed_cost_price': fields.float(
            'Fixed Cost Price',
            digits_compute=dp.get_precision('Sale Price')),
        'cost_price': fields.function(
            _cost_price,
            string='Cost Price (incl. BoM)',
            digits_compute=dp.get_precision('Sale Price'),
            help="The cost price is the standard price or, if the product has a BoM, "
            "the sum of all standard prices of its components. It also takes care of the "
            "BoM costing like cost per cylce."),
    }

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# CC150 8.1
# Design the data structure for a generic deck of cards.

class suit():
    """A card suit, identified by a numeric value."""

    def __init__(self, v):
        # Keep the numeric identifier so suits can be compared/looked up.
        self.value = v
from office365.runtime.queries.service_operation_query import ServiceOperationQuery
from office365.sharepoint.base_entity_collection import BaseEntityCollection
from office365.sharepoint.webs.web import Web


class WebCollection(BaseEntityCollection):
    """Collection of SharePoint sub-webs."""

    def __init__(self, context, resource_path=None, parent_web=None):
        """
        :type parent_web: Web
        """
        super(WebCollection, self).__init__(context, Web, resource_path, parent_web)

    def add(self, web_creation_information):
        """Queue the creation of a new sub-site and return its stub entity.

        :type web_creation_information:
            office365.sharepoint.webs.web_creation_information.WebCreationInformation
        """
        new_web = Web(self.context)
        self.add_child(new_web)
        query = ServiceOperationQuery(self, "add", None,
                                      web_creation_information,
                                      "parameters", new_web)
        self.context.add_query(query)
        return new_web

    @property
    def resource_url(self):
        """Service URL for this collection, re-rooted under the parent web's
        own ``_api`` endpoint when a parent URL is available."""
        url = super(WebCollection, self).resource_url
        parent_url = self._parent.get_property("Url")
        if parent_url is not None:
            # Requests for sub-webs must target the parent web's API root,
            # not the service root of the original context.
            url = url.replace(self.context.service_root_url(), parent_url + '/_api')
        return url
#!/usr/bin/env python
# -*- coding: utf-8
"""Feature extractor: counts occurrences of listed punctuation tokens per
user (or per tweet) and saves the feature matrix plus its row index."""
from __future__ import print_function
import argparse
import codecs
import cPickle as pickle
import numpy as np
import os

from load_tweets import load_tweets

NAME = 'ef_list_punctuation'
prefix = 'list_punctuation'

if __name__ == "__main__":
    # Command-line options
    p = argparse.ArgumentParser(NAME)
    p.add_argument("DIR", default=None, action="store",
                   help="Directory with corpus")
    p.add_argument("LIST1", default=None, action="store",
                   help="File with list of emoticons")
    p.add_argument("-d", "--dir", action="store", dest="dir", default="feats",
                   help="Default directory for features [feats]")
    p.add_argument("-p", "--pref", action="store", dest="pref", default=prefix,
                   help="Prefix to save the file of features %s" % prefix)
    p.add_argument("--mix", action="store_true", dest="mix", default=True,
                   help="Mix tweets into pefiles")
    p.add_argument("--format", action="store_true", dest="format",
                   default="pan15",
                   help="Change to pan14 to use format from 2015 [feats]")
    p.add_argument("-v", "--verbose", action="store_true", dest="verbose",
                   help="Verbose mode [Off]")
    p.add_argument("--stopwords", default=None, action="store",
                   dest="stopwords",
                   help="List of stop words [data/stopwords.txt]")
    opts = p.parse_args()

    if opts.verbose:
        def verbose(*args):
            print(*args)
    else:
        verbose = lambda *a: None

    # Collect the tweets and their identifiers (tweet id and user id)
    tweets, ids = load_tweets(opts.DIR, opts.format, mix=opts.mix)

    # Print some information about the tweets
    if opts.verbose:
        for i, tweet in enumerate(tweets[:10]):
            verbose('Tweet example', i + 1, tweet[:100])
        verbose("Total tweets : ", len(tweets))
        try:
            verbose("Total usuarios : ", len(set([id for x, id in ids])))
        except ValueError:
            verbose("Total usuarios : ", len(ids))

    # Compute the features
    # - Load the token list (one token per line, empty lines skipped);
    #   use a context manager so the file handle is closed deterministically.
    with codecs.open(opts.LIST1, encoding='utf-8') as list_file:
        list_of_words1 = [line.strip() for line in list_file
                          if len(line.strip()) > 0]

    # - Count occurrences of each listed token per user. The original
    #   built the same rows through redundant temporaries
    #   (``usuario = usuario``, ``vec = vec1``), removed here.
    counts = [[usuario.count(item) for item in list_of_words1]
              for usuario in tweets]
    feats = np.asarray(counts)

    # Save the feature matrix
    with open(os.path.join(opts.dir, opts.pref + '.dat'), 'wb') as idxf:
        pickle.dump(feats, idxf, pickle.HIGHEST_PROTOCOL)

    # Print information about the matrix
    verbose("Total de features :", feats.shape[1])
    verbose("Total de renglones:", feats.shape[0])

    # Save the per-row indices of the matrix (user-or-tweet id, user id)
    with open(os.path.join(opts.dir, opts.pref + '.idx'), 'wb') as idxf:
        pickle.dump(ids, idxf, pickle.HIGHEST_PROTOCOL)
"""Test package""" import os.path import sys def setup_package(): # i
nsert top-level dir to python path # so we could easily import stuff from # tests sys.path.insert(0,
os.path.abspath("../"))
from game.models import User

# All rooms currently alive, held in process memory (module-level registry).
rooms_list = []


class Room(object):
    """An in-memory game room."""

    # Historical class-level names kept for backward compatibility (callers
    # may reference them on the class); real values live on each instance.
    id = int            # room id
    name = str          # room name
    owner = int         # owner's user id
    users = []          # member user ids
    users_status = {}   # member ready states, keyed by user id
    status = False      # room state flag

    def __init__(self, id, name):
        self.id = id
        self.name = name
        # Give every room its OWN containers: the class-level list/dict
        # above would otherwise be shared (and mutated) by all Room
        # instances — a classic mutable-class-attribute bug.
        self.users = []
        self.users_status = {}


def get_all_rooms():
    """Return every room currently registered."""
    return rooms_list


def get_room_by_id(id):
    """Return the room with the given id, or None when absent."""
    for room in rooms_list:
        if room.id == id:
            return room
    return None


def get_room_by_name(name):
    """Return the room with the given name, or None when absent."""
    for room in rooms_list:
        if room.name == name:
            return room
    return None


def add_room(new_room):
    """Register a room in the global list; returns 1 (success)."""
    rooms_list.append(new_room)
    return 1


def del_room(id):
    """Remove the room with the given id; returns 1 on success, -1 when
    no such room exists."""
    for room in rooms_list:
        if room.id == id:
            # Safe despite removing during iteration: we return immediately.
            rooms_list.remove(room)
            return 1
    return -1