repo_name
stringlengths
5
100
path
stringlengths
4
375
copies
stringclasses
991 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
csmengwan/autorest
AutoRest/Generators/Python/Python.Tests/Expected/AcceptanceTests/BodyComplex/autorestcomplextestservice/operations/polymorphism.py
5
7554
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.pipeline import ClientRawResponse from .. import models class Polymorphism(object): """Polymorphism operations. :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An objec model deserializer. """ def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self.config = config def get_valid( self, custom_headers={}, raw=False, **operation_config): """ Get complex types that are polymorphic :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`Fish <fixtures.acceptancetestsbodycomplex.models.Fish>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/complex/polymorphism/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200]: raise models.ErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('Fish', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def put_valid( self, complex_body, custom_headers={}, raw=False, **operation_config): """ Put complex types that are polymorphic :param complex_body: Please put a salmon that looks like this: { 'fishtype':'Salmon', 'location':'alaska', 'iswild':true, 'species':'king', 'length':1.0, 'siblings':[ { 'fishtype':'Shark', 'age':6, 'birthday': '2012-01-05T01:00:00Z', 'length':20.0, 'species':'predator', }, { 'fishtype':'Sawshark', 'age':105, 'birthday': '1900-01-05T01:00:00Z', 'length':10.0, 'picture': new Buffer([255, 255, 255, 255, 254]).toString('base64'), 'species':'dangerous', }, { 'fishtype': 'goblin', 'age': 1, 'birthday': '2015-08-08T00:00:00Z', 'length': 30.0, 'species': 'scary', 'jawsize': 5 } ] }; :type complex_body: :class:`Fish <fixtures.acceptancetestsbodycomplex.models.Fish>` :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: None :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/complex/polymorphism/valid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct body body_content = self._serialize.body(complex_body, 'Fish') # Construct and send request request = self._client.put(url, query_parameters) response = self._client.send( request, header_parameters, body_content, **operation_config) if response.status_code not in [200]: raise models.ErrorException(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def put_valid_missing_required( self, complex_body, custom_headers={}, raw=False, **operation_config): """ Put complex types that are polymorphic, attempting to omit required 'birthday' field - the request should not be allowed from the client :param complex_body: Please attempt put a sawshark that looks like this, the client should not allow this data to be sent: { "fishtype": "sawshark", "species": "snaggle toothed", "length": 18.5, "age": 2, "birthday": "2013-06-01T01:00:00Z", "location": "alaska", "picture": base64(FF FF FF FF FE), "siblings": [ { "fishtype": "shark", "species": "predator", "birthday": "2012-01-05T01:00:00Z", "length": 20, "age": 6 }, { "fishtype": "sawshark", "species": "dangerous", "picture": base64(FF FF FF FF FE), "length": 10, "age": 105 } ] } :type complex_body: :class:`Fish <fixtures.acceptancetestsbodycomplex.models.Fish>` :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: None :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/complex/polymorphism/missingrequired/invalid' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) # Construct body body_content = self._serialize.body(complex_body, 'Fish') # Construct and send request request = self._client.put(url, query_parameters) response = self._client.send( request, header_parameters, body_content, **operation_config) if response.status_code not in [200]: raise models.ErrorException(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response
mit
laurentb/weboob
modules/pornhub/pages.py
2
2293
# -*- coding: utf-8 -*- # Copyright(C) 2016 Roger Philibert # # This file is part of a weboob module. # # This weboob module is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This weboob module is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this weboob module. If not, see <http://www.gnu.org/licenses/>. from weboob.browser.elements import ListElement, ItemElement, method from weboob.browser.filters.html import Link, CSS, Attr from weboob.browser.filters.standard import CleanText, Duration, Regexp, Env from weboob.browser.pages import HTMLPage, pagination from weboob.capabilities.base import NotAvailable from weboob.capabilities.image import Thumbnail from weboob.capabilities.video import BaseVideo class IndexPage(HTMLPage): @pagination @method class iter_videos(ListElement): item_xpath = '//li[has-class("videoblock")]' next_page = Link(u'//a[text()="Next"]') class item(ItemElement): klass = BaseVideo obj_id = CSS('a') & Link & Regexp(pattern=r'viewkey=(.+)') obj_title = Attr('.//span[has-class("title")]/a', 'title') & CleanText obj_duration = CSS('var.duration') & CleanText & Duration | NotAvailable obj_nsfw = True def obj_thumbnail(self): thumbnail = Thumbnail(Attr('.//img[has-class("js-videoThumb")]', 'data-path')(self).replace('{index}', '1')) thumbnail.url = thumbnail.id return thumbnail class VideoPage(HTMLPage): @method class get_video(ItemElement): klass = BaseVideo obj_id = Env('id') obj_title = CleanText('//title') obj_nsfw = True obj_ext = u'mp4' obj_url = CleanText('//script') & 
Regexp(pattern=r'(https:\\/\\/[^"]+\.mp4[^"]+)"') & CleanText(replace=[('\\', '')])
lgpl-3.0
liwushuo/fapistrano
fapistrano/plugins/curl.py
1
2099
# -*- coding: utf-8 -*- import sys from contextlib import contextmanager from fabric.api import cd, env, show, hide from .. import signal, configuration from ..utils import run def init(): configuration.setdefault('curl_url', '') configuration.setdefault('curl_output', '') configuration.setdefault('curl_options', '') configuration.setdefault('curl_extract_tar', '') configuration.setdefault('curl_extract_tgz', '') configuration.setdefault('curl_user', '') configuration.setdefault('curl_postinstall_script', '') configuration.setdefault('curl_postinstall_output', True) signal.register('deploy.updating', download_artifact) class StreamFilter(object): def __init__(self, filter, stream): self.stream = stream self.filter = filter def write(self,data): if not self.filter: self.stream.write(data) self.stream.flush() else: user = self.filter[:self.filter.index(':')] data = data.replace(self.filter, '%s:**************' % user) self.stream.write(data) self.stream.flush() def flush(self): self.stream.flush() @contextmanager def credential_output(): sys_stdout = sys.stdout credential_stdout = StreamFilter(env.curl_user, sys_stdout) sys.stdout = credential_stdout yield sys.stdout = sys_stdout def download_artifact(**kwargs): with cd(env.release_path), credential_output(): cmd = 'curl --max-time 30 --retry 3 %(curl_url)s' % env if env.curl_user: cmd += ' --user %(curl_user)s' % env if env.curl_output: cmd += ' -o %(curl_output)s' % env if env.curl_options: cmd += ' %(curl_options)s' % env if env.curl_extract_tar: cmd += ' | tar -x' elif env.curl_extract_tgz: cmd += ' | tar -xz' run(cmd) if env.curl_postinstall_script: output = show if env.curl_postinstall_output else hide with output('output'): run(env.curl_postinstall_script)
mit
cmsdaq/hltd
lib/cx_Oracle-7.1/samples/ScrollableCursors.py
1
2246
#------------------------------------------------------------------------------ # Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved. # # Portions Copyright 2007-2015, Anthony Tuininga. All rights reserved. # # Portions Copyright 2001-2007, Computronix (Canada) Ltd., Edmonton, Alberta, # Canada. All rights reserved. #------------------------------------------------------------------------------ #------------------------------------------------------------------------------ # ScrollableCursors.py # This script demonstrates how to use scrollable cursors. These allow moving # forward and backward in the result set but incur additional overhead on the # server to retain this information. # # This script requires cx_Oracle 5.3 and higher. #------------------------------------------------------------------------------ from __future__ import print_function import cx_Oracle import SampleEnv connection = cx_Oracle.connect(SampleEnv.GetMainConnectString()) # show all of the rows available in the table cursor = connection.cursor() cursor.execute("select * from TestStrings order by IntCol") print("ALL ROWS") for row in cursor: print(row) print() # create a scrollable cursor cursor = connection.cursor(scrollable = True) # set array size smaller than the default (100) to force scrolling by the # database; otherwise, scrolling occurs directly within the buffers cursor.arraysize = 3 cursor.execute("select * from TestStrings order by IntCol") # scroll to last row in the result set; the first parameter is not needed and # is ignored) cursor.scroll(mode = "last") print("LAST ROW") print(cursor.fetchone()) print() # scroll to the first row in the result set; the first parameter not needed and # is ignored cursor.scroll(mode = "first") print("FIRST ROW") print(cursor.fetchone()) print() # scroll to an absolute row number cursor.scroll(5, mode = "absolute") print("ROW 5") print(cursor.fetchone()) print() # scroll forward six rows (the mode parameter defaults to 
relative) cursor.scroll(3) print("SKIP 3 ROWS") print(cursor.fetchone()) print() # scroll backward four rows (the mode parameter defaults to relative) cursor.scroll(-4) print("SKIP BACK 4 ROWS") print(cursor.fetchone()) print()
lgpl-3.0
erget/Presence
codaPresence/alpha/subsetter.py
1
2096
import netCDF4 as cdf import numpy as np import os import re """ folder paths should end with a forward slash src: root path of product dst: destination of product bounds: bounds of the subset given as [latmin, latmax, longmin, longmax] """ def subSetFolder(src, dst, bounds): FILE_COORDS = "geo_coordinates.nc" #map the bounds in lat/long onto indexes geoFile = cdf.Dataset(src+FILE_COORDS) lats = geoFile.variables['latitude'][:] longs = geoFile.variables['longitude'][:] mask = np.logical_and(np.logical_and(lats >= bounds[0], lats <= bounds[1]), np.logical_and(longs >= bounds[2], longs <= bounds[3])) #subset all files in folder for subdir, dirs, files in os.walk(src): for f in files: if re.match('[a-zA-Z0-9]{3}[0-9]_[a-z0-9]*\.nc', f) or re.match('geo_coordinates.nc', f): print(f) subSetCDF(src+f, dst+f, mask) """ creates a subset of the provided netCDF4 file, that only contains the values with indexes inside the given bounds. bounds: dictionary specifiying the bounds for each dimensions key: dimension dimName value: array [latmin, latmax, longmin, longmax] bound """ def subSetCDF(src, dst, mask): orig = cdf.Dataset(src); copy = cdf.Dataset(dst, "w"); for attr in orig.ncattrs(): copy.setncattr(attr, orig.getncattr(attr)) copyVars = {} for var in orig.variables: copyVars[var] = np.extract(mask[:], orig.variables[var][:]) for var in copyVars: copy.createDimension('dim'+var, copyVars[var].size) v = copy.createVariable(var, orig.variables[var].datatype, 'dim'+var) for attr in orig.variables[var].ncattrs(): v.setncattr(attr, orig.variables[var].getncattr(attr)) v[:] = copyVars[var][:] def testSubSetCDF(): latmin = 39.0 latmax = 43.0 longmin = -21.0 longmax = -19.0 src = "/home/wegner/Documents/testData/testIn/" dst = "/home/wegner/Documents/testData/testOut/" subSetFolder(src, dst, [latmin, latmax, longmin, longmax]) testSubSetCDF()
mit
CiscoSystems/quantum
neutron/agent/firewall.py
11
4124
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Copyright 2012, Nachi Ueno, NTT MCL, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import contextlib class FirewallDriver(object): """Firewall Driver base class. Defines methods that any driver providing security groups and provider firewall functionality should implement. Note port attribute should have information of security group ids and security group rules. the dict of port should have device : interface name fixed_ips: ips of the device mac_address: mac_address of the device security_groups: [sgid, sgid] security_group_rules : [ rule, rule ] the rule must contain ethertype and direction the rule may contain security_group_id, protocol, port_min, port_max source_ip_prefix, source_port_min, source_port_max, dest_ip_prefix, and remote_group_id Note: source_group_ip in REST API should be converted by this rule if direction is ingress: remote_group_ip will be a source_ip_prefix if direction is egress: remote_group_ip will be a dest_ip_prefix Note: remote_group_id in REST API should be converted by this rule if direction is ingress: remote_group_id will be a list of source_ip_prefix if direction is egress: remote_group_id will be a list of dest_ip_prefix remote_group_id will also remaining membership update management """ __metaclass__ = abc.ABCMeta def prepare_port_filter(self, port): """Prepare filters for the port. This method should be called before the port is created. 
""" raise NotImplementedError() def apply_port_filter(self, port): """Apply port filter. Once this method returns, the port should be firewalled appropriately. This method should as far as possible be a no-op. It's vastly preferred to get everything set up in prepare_port_filter. """ raise NotImplementedError() def update_port_filter(self, port): """Refresh security group rules from data store Gets called when an port gets added to or removed from the security group the port is a member of or if the group gains or looses a rule. """ raise NotImplementedError() def remove_port_filter(self, port): """Stop filtering port.""" raise NotImplementedError() def filter_defer_apply_on(self): """Defer application of filtering rule.""" pass def filter_defer_apply_off(self): """Turn off deferral of rules and apply the rules now.""" pass @property def ports(self): """Returns filtered ports.""" pass @contextlib.contextmanager def defer_apply(self): """Defer apply context.""" self.filter_defer_apply_on() try: yield finally: self.filter_defer_apply_off() class NoopFirewallDriver(FirewallDriver): """Noop Firewall Driver. Firewall driver which does nothing. This driver is for disabling the firewall functionality. """ def prepare_port_filter(self, port): pass def apply_port_filter(self, port): pass def update_port_filter(self, port): pass def remove_port_filter(self, port): pass def filter_defer_apply_on(self): pass def filter_defer_apply_off(self): pass @property def ports(self): return {}
apache-2.0
mlavin/django
django/db/migrations/operations/special.py
107
7792
from django.db import router from .base import Operation class SeparateDatabaseAndState(Operation): """ Take two lists of operations - ones that will be used for the database, and ones that will be used for the state change. This allows operations that don't support state change to have it applied, or have operations that affect the state or not the database, or so on. """ serialization_expand_args = ['database_operations', 'state_operations'] def __init__(self, database_operations=None, state_operations=None): self.database_operations = database_operations or [] self.state_operations = state_operations or [] def deconstruct(self): kwargs = {} if self.database_operations: kwargs['database_operations'] = self.database_operations if self.state_operations: kwargs['state_operations'] = self.state_operations return ( self.__class__.__qualname__, [], kwargs ) def state_forwards(self, app_label, state): for state_operation in self.state_operations: state_operation.state_forwards(app_label, state) def database_forwards(self, app_label, schema_editor, from_state, to_state): # We calculate state separately in here since our state functions aren't useful for database_operation in self.database_operations: to_state = from_state.clone() database_operation.state_forwards(app_label, to_state) database_operation.database_forwards(app_label, schema_editor, from_state, to_state) from_state = to_state def database_backwards(self, app_label, schema_editor, from_state, to_state): # We calculate state separately in here since our state functions aren't useful to_states = {} for dbop in self.database_operations: to_states[dbop] = to_state to_state = to_state.clone() dbop.state_forwards(app_label, to_state) # to_state now has the states of all the database_operations applied # which is the from_state for the backwards migration of the last # operation. 
for database_operation in reversed(self.database_operations): from_state = to_state to_state = to_states[database_operation] database_operation.database_backwards(app_label, schema_editor, from_state, to_state) def describe(self): return "Custom state/database change combination" class RunSQL(Operation): """ Run some raw SQL. A reverse SQL statement may be provided. Also accept a list of operations that represent the state change effected by this SQL change, in case it's custom column/table creation/deletion. """ noop = '' def __init__(self, sql, reverse_sql=None, state_operations=None, hints=None, elidable=False): self.sql = sql self.reverse_sql = reverse_sql self.state_operations = state_operations or [] self.hints = hints or {} self.elidable = elidable def deconstruct(self): kwargs = { 'sql': self.sql, } if self.reverse_sql is not None: kwargs['reverse_sql'] = self.reverse_sql if self.state_operations: kwargs['state_operations'] = self.state_operations if self.hints: kwargs['hints'] = self.hints return ( self.__class__.__qualname__, [], kwargs ) @property def reversible(self): return self.reverse_sql is not None def state_forwards(self, app_label, state): for state_operation in self.state_operations: state_operation.state_forwards(app_label, state) def database_forwards(self, app_label, schema_editor, from_state, to_state): if router.allow_migrate(schema_editor.connection.alias, app_label, **self.hints): self._run_sql(schema_editor, self.sql) def database_backwards(self, app_label, schema_editor, from_state, to_state): if self.reverse_sql is None: raise NotImplementedError("You cannot reverse this operation") if router.allow_migrate(schema_editor.connection.alias, app_label, **self.hints): self._run_sql(schema_editor, self.reverse_sql) def describe(self): return "Raw SQL operation" def _run_sql(self, schema_editor, sqls): if isinstance(sqls, (list, tuple)): for sql in sqls: params = None if isinstance(sql, (list, tuple)): elements = len(sql) if elements == 2: 
sql, params = sql else: raise ValueError("Expected a 2-tuple but got %d" % elements) schema_editor.execute(sql, params=params) elif sqls != RunSQL.noop: statements = schema_editor.connection.ops.prepare_sql_script(sqls) for statement in statements: schema_editor.execute(statement, params=None) class RunPython(Operation): """ Run Python code in a context suitable for doing versioned ORM operations. """ reduces_to_sql = False def __init__(self, code, reverse_code=None, atomic=None, hints=None, elidable=False): self.atomic = atomic # Forwards code if not callable(code): raise ValueError("RunPython must be supplied with a callable") self.code = code # Reverse code if reverse_code is None: self.reverse_code = None else: if not callable(reverse_code): raise ValueError("RunPython must be supplied with callable arguments") self.reverse_code = reverse_code self.hints = hints or {} self.elidable = elidable def deconstruct(self): kwargs = { 'code': self.code, } if self.reverse_code is not None: kwargs['reverse_code'] = self.reverse_code if self.atomic is not None: kwargs['atomic'] = self.atomic if self.hints: kwargs['hints'] = self.hints return ( self.__class__.__qualname__, [], kwargs ) @property def reversible(self): return self.reverse_code is not None def state_forwards(self, app_label, state): # RunPython objects have no state effect. To add some, combine this # with SeparateDatabaseAndState. pass def database_forwards(self, app_label, schema_editor, from_state, to_state): # RunPython has access to all models. Ensure that all models are # reloaded in case any are delayed. from_state.clear_delayed_apps_cache() if router.allow_migrate(schema_editor.connection.alias, app_label, **self.hints): # We now execute the Python code in a context that contains a 'models' # object, representing the versioned models as an app registry. # We could try to override the global cache, but then people will still # use direct imports, so we go with a documentation approach instead. 
self.code(from_state.apps, schema_editor) def database_backwards(self, app_label, schema_editor, from_state, to_state): if self.reverse_code is None: raise NotImplementedError("You cannot reverse this operation") if router.allow_migrate(schema_editor.connection.alias, app_label, **self.hints): self.reverse_code(from_state.apps, schema_editor) def describe(self): return "Raw Python operation" @staticmethod def noop(apps, schema_editor): return None
bsd-3-clause
marcharper/stationary
examples/entropic_equilibria_plots.py
1
9181
"""Figures for the publication "Entropic Equilibria Selection of Stationary Extrema in Finite Populations" """ from __future__ import print_function import math import os import pickle import sys import matplotlib from matplotlib import pyplot as plt import matplotlib.gridspec as gridspec import numpy as np import scipy.misc import ternary import stationary from stationary.processes import incentives, incentive_process ## Global Font config for plots ### font = {'size': 14} matplotlib.rc('font', **font) def compute_entropy_rate(N=30, n=2, m=None, incentive_func=None, beta=1., mu=None, exact=False, lim=1e-13, logspace=False): if not m: m = np.ones((n, n)) if not incentive_func: incentive_func = incentives.fermi if not mu: # mu = (n-1.)/n * 1./(N+1) mu = 1. / N fitness_landscape = incentives.linear_fitness_landscape(m) incentive = incentive_func(fitness_landscape, beta=beta, q=1) edges = incentive_process.multivariate_transitions( N, incentive, num_types=n, mu=mu) s = stationary.stationary_distribution(edges, exact=exact, lim=lim, logspace=logspace) e = stationary.entropy_rate(edges, s) return e, s # Entropy Characterization Plots def dict_max(d): k0, v0 = list(d.items())[0] for k, v in d.items(): if v > v0: k0, v0 = k, v return k0, v0 def plot_data_sub(domain, plot_data, gs, labels=None, sci=True, use_log=False): # Plot Entropy Rate ax1 = plt.subplot(gs[0, 0]) ax1.plot(domain, [x[0] for x in plot_data[0]], linewidth=2) # Plot Stationary Probabilities and entropies ax2 = plt.subplot(gs[1, 0]) ax3 = plt.subplot(gs[2, 0]) if use_log: transform = math.log else: transform = lambda x: x for i, ax, t in [(1, ax2, lambda x: x), (2, ax3, transform)]: if labels: for data, label in zip(plot_data, labels): ys = list(map(t, [x[i] for x in data])) ax.plot(domain, ys, linewidth=2, label=label) else: for data in plot_data: ys = list(map(t, [x[i] for x in data])) ax.plot(domain, ys, linewidth=2) ax1.set_ylabel("Entropy Rate") ax2.set_ylabel("Stationary\nExtrema") if use_log: 
ax3.set_ylabel("log RTE $H_v$") else: ax3.set_ylabel("RTE $H_v$") if sci: ax2.yaxis.get_major_formatter().set_powerlimits((0, 0)) ax3.yaxis.get_major_formatter().set_powerlimits((0, 0)) return ax1, ax2, ax3 def ER_figure_beta2(N, m, betas): """Varying Beta, two dimensional example""" # Beta test # m = [[1, 4], [4, 1]] # Compute the data ss = [] plot_data = [[]] for beta in betas: print(beta) e, s = compute_entropy_rate(N=N, m=m, beta=beta, exact=True) ss.append(s) state, s_max = dict_max(s) plot_data[0].append((e, s_max, e / s_max)) gs = gridspec.GridSpec(3, 2) ax1, ax2, ax3 = plot_data_sub(betas, plot_data, gs, sci=False) ax3.set_xlabel("Strength of Selection $\\beta$") # Plot stationary distribution ax4 = plt.subplot(gs[:, 1]) for s in ss[::4]: ax4.plot(range(0, N+1), [s[(i, N-i)] for i in range(0, N+1)]) ax4.set_title("Stationary Distributions") ax4.set_xlabel("Population States $(i , N - i)$") def remove_boundary(s): s1 = dict() for k, v in s.items(): a, b, c = k if a * b * c != 0: s1[k] = v return s1 def ER_figure_beta3(N, m, mu, betas, iss_states, labels, stationary_beta=0.35, pickle_filename="figure_beta3.pickle"): """Varying Beta, three dimensional example""" ss = [] plot_data = [[] for _ in range(len(iss_states))] if os.path.exists(pickle_filename): with open(pickle_filename, 'rb') as f: plot_data = pickle.load(f) else: for beta in betas: print(beta) e, s = compute_entropy_rate( N=N, m=m, n=3, beta=beta, exact=False, mu=mu, lim=1e-10) ss.append(s) for i, iss_state in enumerate(iss_states): s_max = s[iss_state] plot_data[i].append((e, s_max, e / s_max)) with open(pickle_filename, 'wb') as f: pickle.dump(plot_data, f) gs = gridspec.GridSpec(3, 2) ax1, ax2, ax3 = plot_data_sub(betas, plot_data, gs, labels=labels, use_log=True, sci=False) ax3.set_xlabel("Strength of selection $\\beta$") ax2.legend(loc="upper right") # Plot example stationary ax4 = plt.subplot(gs[:, 1]) _, s = compute_entropy_rate( N=N, m=m, n=3, beta=stationary_beta, exact=False, mu=mu, 
lim=1e-15) _, tax = ternary.figure(ax=ax4, scale=N,) tax.heatmap(s, cmap="jet", style="triangular") tax.ticks(axis='lbr', linewidth=1, multiple=10, offset=0.015) tax.clear_matplotlib_ticks() ax4.set_xlabel("Population States $a_1 + a_2 + a_3 = N$") # tax.left_axis_label("$a_1$") # tax.right_axis_label("$a_2$") # tax.bottom_axis_label("$a_3$") def ER_figure_N(Ns, m, beta=1, labels=None): """Varying population size.""" ss = [] plot_data = [[] for _ in range(3)] n = len(m[0]) for N in Ns: print(N) mu = 1 / N norm = float(scipy.misc.comb(N+n, n)) e, s = compute_entropy_rate( N=N, m=m, n=3, beta=beta, exact=False, mu=mu, lim=1e-10) ss.append(s) iss_states = [(N, 0, 0), (N / 2, N / 2, 0), (N / 3, N / 3, N / 3)] for i, iss_state in enumerate(iss_states): s_max = s[iss_state] plot_data[i].append((e, s_max, e / (s_max * norm))) # Plot data gs = gridspec.GridSpec(3, 1) ax1, ax2, ax3 = plot_data_sub(Ns, plot_data, gs, labels, use_log=True, sci=False) ax2.legend(loc="upper right") ax3.set_xlabel("Population Size $N$") def ER_figure_mu(N, mus, m, iss_states, labels, beta=1., pickle_filename="figure_mu.pickle"): """ Plot entropy rates and trajectory entropies for varying mu. 
""" # Compute the data ss = [] plot_data = [[] for _ in range(len(iss_states))] if os.path.exists(pickle_filename): with open(pickle_filename, 'rb') as f: plot_data = pickle.load(f) else: for mu in mus: print(mu) e, s = compute_entropy_rate( N=N, m=m, n=3, beta=beta, exact=False, mu=mu, lim=1e-10, logspace=True) ss.append(s) for i, iss_state in enumerate(iss_states): s_max = s[iss_state] plot_data[i].append((e, s_max, e / s_max)) with open(pickle_filename, 'wb') as f: pickle.dump(plot_data, f) # Plot data gs = gridspec.GridSpec(3, 1) gs.update(hspace=0.5) ax1, ax2, ax3 = plot_data_sub(mus, plot_data, gs, labels, use_log=True) ax2.legend(loc="upper right") ax3.set_xlabel("Mutation rate $\mu$") if __name__ == '__main__': fig_num = sys.argv[1] if fig_num == "1": ## Figure 1 # Varying beta, two dimensional N = 30 m = [[1, 2], [2, 1]] betas = np.arange(0, 8, 0.2) ER_figure_beta2(N, m, betas) plt.tight_layout() plt.show() if fig_num == "2": ## Figure 2 # # Varying beta, three dimensional N = 60 mu = 1. / N m = [[0, 1, 1], [1, 0, 1], [1, 1, 0]] iss_states = [(N, 0, 0), (N / 2, N / 2, 0), (N / 3, N / 3, N / 3)] labels = ["$v_0$", "$v_1$", "$v_2$"] betas = np.arange(0.02, 0.6, 0.02) ER_figure_beta3(N, m, mu, betas, iss_states, labels) plt.show() if fig_num == "3": ## Figure 3 # Varying mutation rate figure N = 42 mus = np.arange(0.0001, 0.015, 0.0005) m = [[0, 1, 1], [1, 0, 1], [1, 1, 0]] iss_states = [(N, 0, 0), (N / 2, N / 2, 0), (N / 3, N / 3, N / 3)] labels = ["$v_0$: (42, 0, 0)", "$v_1$: (21, 21, 0)", "$v_2$: (14, 14, 14)"] # labels = ["$v_0$", "$v_1$", "$v_2$"] ER_figure_mu(N, mus, m, iss_states, labels, beta=1.) plt.show() if fig_num == "4": ## Figure 4 # Note: The RPS landscape takes MUCH longer to converge! # Consider using the C++ implementation instead for larger N. N = 120 # Manuscript uses 180 mu = 1. 
/ N m = incentives.rock_paper_scissors(a=-1, b=-1) _, s = compute_entropy_rate( N=N, m=m, n=3, beta=1.5, exact=False, mu=mu, lim=1e-16) _, tax = ternary.figure(scale=N) tax.heatmap(remove_boundary(s), cmap="jet", style="triangular") tax.ticks(axis='lbr', linewidth=1, multiple=60) tax.clear_matplotlib_ticks() plt.show() if fig_num == "5": # ## Figure 5 # Varying Population Size Ns = range(6, 6*6, 6) m = [[0, 1, 1], [1, 0, 1], [1, 1, 0]] labels = ["$v_0$", "$v_1$", "$v_2$"] ER_figure_N(Ns, m, beta=1, labels=labels) plt.show()
mit
ryanjoneil/adventures-in-optimization
published/2012-09-22-lagrangian-relaxation-with-gurobipy/gap-lagrange.py
1
2422
#!/usr/bin/env python # This is the GAP per Wolsey, pg 182, using Lagrangian Relaxation from gurobipy import GRB, Model model = Model('GAP per Wolsey with Lagrangian Relaxation') model.modelSense = GRB.MAXIMIZE model.setParam('OutputFlag', False) # turns off solver chatter b = [15, 15, 15] c = [ [ 6, 10, 1], [12, 12, 5], [15, 4, 3], [10, 3, 9], [ 8, 9, 5] ] a = [ [ 5, 7, 2], [14, 8, 7], [10, 6, 12], [ 8, 4, 15], [ 6, 12, 5] ] # x[i][j] = 1 if i is assigned to j x = [] for i in range(len(c)): x_i = [] for j in c[i]: x_i.append(model.addVar(vtype=GRB.BINARY)) x.append(x_i) # As stated, the GAP has these following constraints. We dualize these into # penalties instead, using variables so we can easily extract their values. penalties = [model.addVar() for _ in x] model.update() # Dualized constraints: sum j: x_ij <= 1 for all i for p, x_i in zip(penalties, x): model.addConstr(p == 1 - sum(x_i)) # sum i: a_ij * x_ij <= b[j] for all j for j in range(len(b)): model.addConstr(sum(a[i][j] * x[i][j] for i in range(len(x))) <= b[j]) # u[i] = Lagrangian Multiplier for the set packing contraint i u = [2.0] * len(x) # Re-optimize until either we have run a certain number of iterations # or complementary slackness conditions apply. 
for k in range(1, 101): # max sum i,j: c_ij * x_ij model.setObjective( sum( # Original objective function sum(c_ij * x_ij for c_ij, x_ij in zip(c_i, x_i)) for c_i, x_i in zip(c, x) ) + sum ( # Penalties for dualized constraints u_j * p_j for u_j, p_j in zip(u, penalties) ) ) model.optimize() print 'iteration', k, 'obj =', model.objVal, \ 'u =', u, 'penalties =', [p.x for p in penalties] # Test for complementary slackness stop = True eps = 10e-6 for u_i, p_i in zip(u, penalties): if abs(u_i) > eps and abs(p_i.x) > eps: stop = False break if stop: print 'primal feasible & optimal' break else: s = 1.0 / k for i in range(len(x)): u[i] = max(u[i] - s*(penalties[i].x), 0.0) # Pull objective and variable values out of model print 'objective =', model.objVal print 'x = [' for x_i in x: print ' ', [1 if x_ij.x >= 0.5 else 0 for x_ij in x_i] print ']'
mit
ShineFan/odoo
addons/l10n_sg/__openerp__.py
331
2380
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2014 Tech Receptives (<http://techreceptives.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Singapore - Accounting', 'version': '1.0', 'author': 'Tech Receptives', 'website': 'http://www.techreceptives.com', 'category': 'Localization/Account Charts', 'description': """ Singapore accounting chart and localization. ======================================================= After installing this module, the Configuration wizard for accounting is launched. * The Chart of Accounts consists of the list of all the general ledger accounts required to maintain the transactions of Singapore. * On that particular wizard, you will be asked to pass the name of the company, the chart template to follow, the no. of digits to generate, the code for your account and bank account, currency to create journals. * The Chart of Taxes would display the different types/groups of taxes such as Standard Rates, Zeroed, Exempted, MES and Out of Scope. * The tax codes are specified considering the Tax Group and for easy accessibility of submission of GST Tax Report. 
""", 'depends': ['base', 'account', 'account_chart'], 'demo': [ ], 'data': [ 'l10n_sg_chart_tax_code.xml', 'l10n_sg_chart.xml', 'l10n_sg_chart_tax.xml', 'l10n_sg_wizard.xml', ], 'installable': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
hgl888/chromium-crosswalk-efl
third_party/google_appengine_cloudstorage/cloudstorage/common.py
120
11732
# Copyright 2012 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, # either express or implied. See the License for the specific # language governing permissions and limitations under the License. """Helpers shared by cloudstorage_stub and cloudstorage_api.""" __all__ = ['CS_XML_NS', 'CSFileStat', 'dt_str_to_posix', 'local_api_url', 'LOCAL_GCS_ENDPOINT', 'local_run', 'get_access_token', 'get_metadata', 'GCSFileStat', 'http_time_to_posix', 'memory_usage', 'posix_time_to_http', 'posix_to_dt_str', 'set_access_token', 'validate_options', 'validate_bucket_name', 'validate_bucket_path', 'validate_file_path', ] import calendar import datetime from email import utils as email_utils import logging import os import re try: from google.appengine.api import runtime except ImportError: from google.appengine.api import runtime _GCS_BUCKET_REGEX_BASE = r'[a-z0-9\.\-_]{3,63}' _GCS_BUCKET_REGEX = re.compile(_GCS_BUCKET_REGEX_BASE + r'$') _GCS_BUCKET_PATH_REGEX = re.compile(r'/' + _GCS_BUCKET_REGEX_BASE + r'$') _GCS_PATH_PREFIX_REGEX = re.compile(r'/' + _GCS_BUCKET_REGEX_BASE + r'.*') _GCS_FULLPATH_REGEX = re.compile(r'/' + _GCS_BUCKET_REGEX_BASE + r'/.*') _GCS_METADATA = ['x-goog-meta-', 'content-disposition', 'cache-control', 'content-encoding'] _GCS_OPTIONS = _GCS_METADATA + ['x-goog-acl'] CS_XML_NS = 'http://doc.s3.amazonaws.com/2006-03-01' LOCAL_GCS_ENDPOINT = '/_ah/gcs' _access_token = '' _MAX_GET_BUCKET_RESULT = 1000 def set_access_token(access_token): """Set the shared access token to authenticate with Google Cloud Storage. 
When set, the library will always attempt to communicate with the real Google Cloud Storage with this token even when running on dev appserver. Note the token could expire so it's up to you to renew it. When absent, the library will automatically request and refresh a token on appserver, or when on dev appserver, talk to a Google Cloud Storage stub. Args: access_token: you can get one by run 'gsutil -d ls' and copy the str after 'Bearer'. """ global _access_token _access_token = access_token def get_access_token(): """Returns the shared access token.""" return _access_token class GCSFileStat(object): """Container for GCS file stat.""" def __init__(self, filename, st_size, etag, st_ctime, content_type=None, metadata=None, is_dir=False): """Initialize. For files, the non optional arguments are always set. For directories, only filename and is_dir is set. Args: filename: a Google Cloud Storage filename of form '/bucket/filename'. st_size: file size in bytes. long compatible. etag: hex digest of the md5 hash of the file's content. str. st_ctime: posix file creation time. float compatible. content_type: content type. str. metadata: a str->str dict of user specified options when creating the file. Possible keys are x-goog-meta-, content-disposition, content-encoding, and cache-control. is_dir: True if this represents a directory. False if this is a real file. 
""" self.filename = filename self.is_dir = is_dir self.st_size = None self.st_ctime = None self.etag = None self.content_type = content_type self.metadata = metadata if not is_dir: self.st_size = long(st_size) self.st_ctime = float(st_ctime) if etag[0] == '"' and etag[-1] == '"': etag = etag[1:-1] self.etag = etag def __repr__(self): if self.is_dir: return '(directory: %s)' % self.filename return ( '(filename: %(filename)s, st_size: %(st_size)s, ' 'st_ctime: %(st_ctime)s, etag: %(etag)s, ' 'content_type: %(content_type)s, ' 'metadata: %(metadata)s)' % dict(filename=self.filename, st_size=self.st_size, st_ctime=self.st_ctime, etag=self.etag, content_type=self.content_type, metadata=self.metadata)) def __cmp__(self, other): if not isinstance(other, self.__class__): raise ValueError('Argument to cmp must have the same type. ' 'Expect %s, got %s', self.__class__.__name__, other.__class__.__name__) if self.filename > other.filename: return 1 elif self.filename < other.filename: return -1 return 0 def __hash__(self): if self.etag: return hash(self.etag) return hash(self.filename) CSFileStat = GCSFileStat def get_metadata(headers): """Get user defined options from HTTP response headers.""" return dict((k, v) for k, v in headers.iteritems() if any(k.lower().startswith(valid) for valid in _GCS_METADATA)) def validate_bucket_name(name): """Validate a Google Storage bucket name. Args: name: a Google Storage bucket name with no prefix or suffix. Raises: ValueError: if name is invalid. """ _validate_path(name) if not _GCS_BUCKET_REGEX.match(name): raise ValueError('Bucket should be 3-63 characters long using only a-z,' '0-9, underscore, dash or dot but got %s' % name) def validate_bucket_path(path): """Validate a Google Cloud Storage bucket path. Args: path: a Google Storage bucket path. It should have form '/bucket'. Raises: ValueError: if path is invalid. 
""" _validate_path(path) if not _GCS_BUCKET_PATH_REGEX.match(path): raise ValueError('Bucket should have format /bucket ' 'but got %s' % path) def validate_file_path(path): """Validate a Google Cloud Storage file path. Args: path: a Google Storage file path. It should have form '/bucket/filename'. Raises: ValueError: if path is invalid. """ _validate_path(path) if not _GCS_FULLPATH_REGEX.match(path): raise ValueError('Path should have format /bucket/filename ' 'but got %s' % path) def _process_path_prefix(path_prefix): """Validate and process a Google Cloud Stoarge path prefix. Args: path_prefix: a Google Cloud Storage path prefix of format '/bucket/prefix' or '/bucket/' or '/bucket'. Raises: ValueError: if path is invalid. Returns: a tuple of /bucket and prefix. prefix can be None. """ _validate_path(path_prefix) if not _GCS_PATH_PREFIX_REGEX.match(path_prefix): raise ValueError('Path prefix should have format /bucket, /bucket/, ' 'or /bucket/prefix but got %s.' % path_prefix) bucket_name_end = path_prefix.find('/', 1) bucket = path_prefix prefix = None if bucket_name_end != -1: bucket = path_prefix[:bucket_name_end] prefix = path_prefix[bucket_name_end + 1:] or None return bucket, prefix def _validate_path(path): """Basic validation of Google Storage paths. Args: path: a Google Storage path. It should have form '/bucket/filename' or '/bucket'. Raises: ValueError: if path is invalid. TypeError: if path is not of type basestring. """ if not path: raise ValueError('Path is empty') if not isinstance(path, basestring): raise TypeError('Path should be a string but is %s (%s).' % (path.__class__, path)) def validate_options(options): """Validate Google Cloud Storage options. Args: options: a str->basestring dict of options to pass to Google Cloud Storage. Raises: ValueError: if option is not supported. TypeError: if option is not of type str or value of an option is not of type basestring. 
""" if not options: return for k, v in options.iteritems(): if not isinstance(k, str): raise TypeError('option %r should be a str.' % k) if not any(k.lower().startswith(valid) for valid in _GCS_OPTIONS): raise ValueError('option %s is not supported.' % k) if not isinstance(v, basestring): raise TypeError('value %r for option %s should be of type basestring.' % (v, k)) def http_time_to_posix(http_time): """Convert HTTP time format to posix time. See http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.3.1 for http time format. Args: http_time: time in RFC 2616 format. e.g. "Mon, 20 Nov 1995 19:12:08 GMT". Returns: A float of secs from unix epoch. """ if http_time is not None: return email_utils.mktime_tz(email_utils.parsedate_tz(http_time)) def posix_time_to_http(posix_time): """Convert posix time to HTML header time format. Args: posix_time: unix time. Returns: A datatime str in RFC 2616 format. """ if posix_time: return email_utils.formatdate(posix_time, usegmt=True) _DT_FORMAT = '%Y-%m-%dT%H:%M:%S' def dt_str_to_posix(dt_str): """format str to posix. datetime str is of format %Y-%m-%dT%H:%M:%S.%fZ, e.g. 2013-04-12T00:22:27.978Z. According to ISO 8601, T is a separator between date and time when they are on the same line. Z indicates UTC (zero meridian). A pointer: http://www.cl.cam.ac.uk/~mgk25/iso-time.html This is used to parse LastModified node from GCS's GET bucket XML response. Args: dt_str: A datetime str. Returns: A float of secs from unix epoch. By posix definition, epoch is midnight 1970/1/1 UTC. """ parsable, _ = dt_str.split('.') dt = datetime.datetime.strptime(parsable, _DT_FORMAT) return calendar.timegm(dt.utctimetuple()) def posix_to_dt_str(posix): """Reverse of str_to_datetime. This is used by GCS stub to generate GET bucket XML response. Args: posix: A float of secs from unix epoch. Returns: A datetime str. 
""" dt = datetime.datetime.utcfromtimestamp(posix) dt_str = dt.strftime(_DT_FORMAT) return dt_str + '.000Z' def local_run(): """Whether we should hit GCS dev appserver stub.""" server_software = os.environ.get('SERVER_SOFTWARE') if server_software is None: return True if 'remote_api' in server_software: return False if server_software.startswith(('Development', 'testutil')): return True return False def local_api_url(): """Return URL for GCS emulation on dev appserver.""" return 'http://%s%s' % (os.environ.get('HTTP_HOST'), LOCAL_GCS_ENDPOINT) def memory_usage(method): """Log memory usage before and after a method.""" def wrapper(*args, **kwargs): logging.info('Memory before method %s is %s.', method.__name__, runtime.memory_usage().current()) result = method(*args, **kwargs) logging.info('Memory after method %s is %s', method.__name__, runtime.memory_usage().current()) return result return wrapper def _add_ns(tagname): return '{%(ns)s}%(tag)s' % {'ns': CS_XML_NS, 'tag': tagname} _T_CONTENTS = _add_ns('Contents') _T_LAST_MODIFIED = _add_ns('LastModified') _T_ETAG = _add_ns('ETag') _T_KEY = _add_ns('Key') _T_SIZE = _add_ns('Size') _T_PREFIX = _add_ns('Prefix') _T_COMMON_PREFIXES = _add_ns('CommonPrefixes') _T_NEXT_MARKER = _add_ns('NextMarker') _T_IS_TRUNCATED = _add_ns('IsTruncated')
bsd-3-clause
hiei23/servo
components/script/dom/bindings/codegen/parser/tests/test_dictionary.py
121
13609
def WebIDLTest(parser, harness): parser.parse(""" dictionary Dict2 : Dict1 { long child = 5; Dict1 aaandAnother; }; dictionary Dict1 { long parent; double otherParent; }; """) results = parser.finish() dict1 = results[1]; dict2 = results[0]; harness.check(len(dict1.members), 2, "Dict1 has two members") harness.check(len(dict2.members), 2, "Dict2 has four members") harness.check(dict1.members[0].identifier.name, "otherParent", "'o' comes before 'p'") harness.check(dict1.members[1].identifier.name, "parent", "'o' really comes before 'p'") harness.check(dict2.members[0].identifier.name, "aaandAnother", "'a' comes before 'c'") harness.check(dict2.members[1].identifier.name, "child", "'a' really comes before 'c'") # Now reset our parser parser = parser.reset() threw = False try: parser.parse(""" dictionary Dict { long prop = 5; long prop; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Should not allow name duplication in a dictionary") # Now reset our parser again parser = parser.reset() threw = False try: parser.parse(""" dictionary Dict1 : Dict2 { long prop = 5; }; dictionary Dict2 : Dict3 { long prop2; }; dictionary Dict3 { double prop; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Should not allow name duplication in a dictionary and " "its ancestor") # More reset parser = parser.reset() threw = False try: parser.parse(""" interface Iface {}; dictionary Dict : Iface { long prop; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Should not allow non-dictionary parents for dictionaries") # Even more reset parser = parser.reset() threw = False try: parser.parse(""" dictionary A : B {}; dictionary B : A {}; """) results = parser.finish() except: threw = True harness.ok(threw, "Should not allow cycles in dictionary inheritance chains") parser = parser.reset() threw = False try: parser.parse(""" dictionary A { [TreatNullAs=EmptyString] DOMString foo; }; """) results = parser.finish() except: 
threw = True harness.ok(threw, "Should not allow [TreatNullAs] on dictionary members"); parser = parser.reset() threw = False try: parser.parse(""" dictionary A { }; interface X { void doFoo(A arg); }; """) results = parser.finish() except: threw = True harness.ok(threw, "Trailing dictionary arg must be optional") parser = parser.reset() threw = False try: parser.parse(""" dictionary A { }; interface X { void doFoo((A or DOMString) arg); }; """) results = parser.finish() except: threw = True harness.ok(threw, "Trailing union arg containing a dictionary must be optional") parser = parser.reset() threw = False try: parser.parse(""" dictionary A { }; interface X { void doFoo(A arg1, optional long arg2); }; """) results = parser.finish() except: threw = True harness.ok(threw, "Dictionary arg followed by optional arg must be optional") parser = parser.reset() threw = False try: parser.parse(""" dictionary A { }; interface X { void doFoo(A arg1, optional long arg2, long arg3); }; """) results = parser.finish() except: threw = True harness.ok(not threw, "Dictionary arg followed by non-optional arg doesn't have to be optional") parser = parser.reset() threw = False try: parser.parse(""" dictionary A { }; interface X { void doFoo((A or DOMString) arg1, optional long arg2); }; """) results = parser.finish() except: threw = True harness.ok(threw, "Union arg containing dictionary followed by optional arg must " "be optional") parser = parser.reset() parser.parse(""" dictionary A { }; interface X { void doFoo(A arg1, long arg2); }; """) results = parser.finish() harness.ok(True, "Dictionary arg followed by required arg can be required") parser = parser.reset() threw = False try: parser.parse(""" dictionary A { }; interface X { void doFoo(optional A? 
arg1); }; """) results = parser.finish() except: threw = True harness.ok(threw, "Dictionary arg must not be nullable") parser = parser.reset() threw = False try: parser.parse(""" dictionary A { }; interface X { void doFoo(optional (A or long)? arg1); }; """) results = parser.finish() except: threw = True harness.ok(threw, "Dictionary arg must not be in a nullable union") parser = parser.reset() threw = False try: parser.parse(""" dictionary A { }; interface X { void doFoo(optional (A or long?) arg1); }; """) results = parser.finish() except: threw = True harness.ok(threw, "Dictionary must not be in a union with a nullable type") parser = parser.reset() threw = False try: parser.parse(""" dictionary A { }; interface X { void doFoo(optional (long? or A) arg1); }; """) results = parser.finish() except: threw = True harness.ok(threw, "A nullable type must not be in a union with a dictionary") parser = parser.reset() parser.parse(""" dictionary A { }; interface X { A? doFoo(); }; """) results = parser.finish() harness.ok(True, "Dictionary return value can be nullable") parser = parser.reset() parser.parse(""" dictionary A { }; interface X { void doFoo(optional A arg); }; """) results = parser.finish() harness.ok(True, "Dictionary arg should actually parse") parser = parser.reset() parser.parse(""" dictionary A { }; interface X { void doFoo(optional (A or DOMString) arg); }; """) results = parser.finish() harness.ok(True, "Union arg containing a dictionary should actually parse") parser = parser.reset() threw = False try: parser.parse(""" dictionary Foo { Foo foo; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Member type must not be its Dictionary.") parser = parser.reset() threw = False try: parser.parse(""" dictionary Foo3 : Foo { short d; }; dictionary Foo2 : Foo3 { boolean c; }; dictionary Foo1 : Foo2 { long a; }; dictionary Foo { Foo1 b; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Member type must not be a 
Dictionary that " "inherits from its Dictionary.") parser = parser.reset() threw = False try: parser.parse(""" dictionary Foo { (Foo or DOMString)[]? b; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Member type must not be a Nullable type " "whose inner type includes its Dictionary.") parser = parser.reset() threw = False try: parser.parse(""" dictionary Foo { (DOMString or Foo) b; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Member type must not be a Union type, one of " "whose member types includes its Dictionary.") parser = parser.reset() threw = False try: parser.parse(""" dictionary Foo { sequence<sequence<sequence<Foo>>> c; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Member type must not be a Sequence type " "whose element type includes its Dictionary.") parser = parser.reset() threw = False try: parser.parse(""" dictionary Foo { (DOMString or Foo)[] d; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Member type must not be an Array type " "whose element type includes its Dictionary.") parser = parser.reset() threw = False try: parser.parse(""" dictionary Foo { Foo1 b; }; dictionary Foo3 { Foo d; }; dictionary Foo2 : Foo3 { short c; }; dictionary Foo1 : Foo2 { long a; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Member type must not be a Dictionary, one of whose " "members or inherited members has a type that includes " "its Dictionary.") parser = parser.reset(); threw = False try: parser.parse(""" dictionary Foo { }; dictionary Bar { Foo? d; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Member type must not be a nullable dictionary") parser = parser.reset(); parser.parse(""" dictionary Foo { unrestricted float urFloat = 0; unrestricted float urFloat2 = 1.1; unrestricted float urFloat3 = -1.1; unrestricted float? 
urFloat4 = null; unrestricted float infUrFloat = Infinity; unrestricted float negativeInfUrFloat = -Infinity; unrestricted float nanUrFloat = NaN; unrestricted double urDouble = 0; unrestricted double urDouble2 = 1.1; unrestricted double urDouble3 = -1.1; unrestricted double? urDouble4 = null; unrestricted double infUrDouble = Infinity; unrestricted double negativeInfUrDouble = -Infinity; unrestricted double nanUrDouble = NaN; }; """) results = parser.finish() harness.ok(True, "Parsing default values for unrestricted types succeeded.") parser = parser.reset(); threw = False try: parser.parse(""" dictionary Foo { double f = Infinity; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to Infinity") parser = parser.reset(); threw = False try: parser.parse(""" dictionary Foo { double f = -Infinity; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to -Infinity") parser = parser.reset(); threw = False try: parser.parse(""" dictionary Foo { double f = NaN; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to NaN") parser = parser.reset(); threw = False try: parser.parse(""" dictionary Foo { float f = Infinity; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to Infinity") parser = parser.reset(); threw = False try: parser.parse(""" dictionary Foo { float f = -Infinity; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to -Infinity") parser = parser.reset(); threw = False try: parser.parse(""" dictionary Foo { float f = NaN; }; """) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to NaN")
mpl-2.0
dandv/selenium
py/selenium/webdriver/common/touch_actions.py
71
5966
# Licensed to the Software Freedom Conservancy (SFC) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The SFC licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """ The Touch Actions implementation """ from selenium.webdriver.remote.command import Command class TouchActions(object): """ Generate touch actions. Works like ActionChains; actions are stored in the TouchActions object and are fired with perform(). """ def __init__(self, driver): """ Creates a new TouchActions object. :Args: - driver: The WebDriver instance which performs user actions. It should be with touchscreen enabled. """ self._driver = driver self._actions = [] def perform(self): """ Performs all stored actions. """ for action in self._actions: action() def tap(self, on_element): """ Taps on a given element. :Args: - on_element: The element to tap. """ self._actions.append(lambda: self._driver.execute(Command.SINGLE_TAP, {'element': on_element.id})) return self def double_tap(self, on_element): """ Double taps on a given element. :Args: - on_element: The element to tap. """ self._actions.append(lambda: self._driver.execute(Command.DOUBLE_TAP, {'element': on_element.id})) return self def tap_and_hold(self, xcoord, ycoord): """ Touch down at given coordinates. :Args: - xcoord: X Coordinate to touch down. - ycoord: Y Coordinate to touch down. 
""" self._actions.append(lambda: self._driver.execute(Command.TOUCH_DOWN, { 'x': int(xcoord), 'y': int(ycoord)})) return self def move(self, xcoord, ycoord): """ Move held tap to specified location. :Args: - xcoord: X Coordinate to move. - ycoord: Y Coordinate to move. """ self._actions.append(lambda: self._driver.execute(Command.TOUCH_MOVE, { 'x': int(xcoord), 'y': int(ycoord)})) return self def release(self, xcoord, ycoord): """ Release previously issued tap 'and hold' command at specified location. :Args: - xcoord: X Coordinate to release. - ycoord: Y Coordinate to release. """ self._actions.append(lambda: self._driver.execute(Command.TOUCH_UP, { 'x': int(xcoord), 'y': int(ycoord)})) return self def scroll(self, xoffset, yoffset): """ Touch and scroll, moving by xoffset and yoffset. :Args: - xoffset: X offset to scroll to. - yoffset: Y offset to scroll to. """ self._actions.append(lambda: self._driver.execute(Command.TOUCH_SCROLL, { 'xoffset': int(xoffset), 'yoffset': int(yoffset)})) return self def scroll_from_element(self, on_element, xoffset, yoffset): """ Touch and scroll starting at on_element, moving by xoffset and yoffset. :Args: - on_element: The element where scroll starts. - xoffset: X offset to scroll to. - yoffset: Y offset to scroll to. """ self._actions.append(lambda: self._driver.execute(Command.TOUCH_SCROLL, { 'element': on_element.id, 'xoffset': int(xoffset), 'yoffset': int(yoffset)})) return self def long_press(self, on_element): """ Long press on an element. :Args: - on_element: The element to long press. """ self._actions.append(lambda: self._driver.execute(Command.LONG_PRESS, {'element': on_element.id})) return self def flick(self, xspeed, yspeed): """ Flicks, starting anywhere on the screen. :Args: - xspeed: The X speed in pixels per second. - yspeed: The Y speed in pixels per second. 
""" self._actions.append(lambda: self._driver.execute(Command.FLICK, { 'xspeed': int(xspeed), 'yspeed': int(yspeed)})) return self def flick_element(self, on_element, xoffset, yoffset, speed): """ Flick starting at on_element, and moving by the xoffset and yoffset with specified speed. :Args: - on_element: Flick will start at center of element. - xoffset: X offset to flick to. - yoffset: Y offset to flick to. - speed: Pixels per second to flick. """ self._actions.append(lambda: self._driver.execute(Command.FLICK, { 'element': on_element.id, 'xoffset': int(xoffset), 'yoffset': int(yoffset), 'speed': int(speed)})) return self # Context manager so TouchActions can be used in a 'with .. as' statements. def __enter__(self): return self # Return created instance of self. def __exit__(self, _type, _value, _traceback): pass # Do nothing, does not require additional cleanup.
apache-2.0
sdague/home-assistant
homeassistant/components/netatmo/config_flow.py
12
7124
"""Config flow for Netatmo."""
import logging
import uuid

import voluptuous as vol

from homeassistant import config_entries
from homeassistant.const import CONF_SHOW_ON_MAP
from homeassistant.core import callback
from homeassistant.helpers import config_entry_oauth2_flow, config_validation as cv

from .const import (
    CONF_AREA_NAME,
    CONF_LAT_NE,
    CONF_LAT_SW,
    CONF_LON_NE,
    CONF_LON_SW,
    CONF_NEW_AREA,
    CONF_PUBLIC_MODE,
    CONF_UUID,
    CONF_WEATHER_AREAS,
    DOMAIN,
)


class NetatmoFlowHandler(
    config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN
):
    """Config flow to handle Netatmo OAuth2 authentication."""

    DOMAIN = DOMAIN
    CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL

    @staticmethod
    @callback
    def async_get_options_flow(config_entry):
        """Get the options flow for this handler."""
        return NetatmoOptionsFlowHandler(config_entry)

    @property
    def logger(self) -> logging.Logger:
        """Return logger."""
        return logging.getLogger(__name__)

    @property
    def extra_authorize_data(self) -> dict:
        """Extra data that needs to be appended to the authorize url."""
        scopes = [
            "read_camera",
            "read_homecoach",
            "read_presence",
            "read_smokedetector",
            "read_station",
            "read_thermostat",
            "write_camera",
            "write_presence",
            "write_thermostat",
        ]

        # The cloud-hosted OAuth implementation does not grant the extended
        # camera/presence access scopes, so only request them for a custom
        # (non-cloud) credentials implementation.
        if self.flow_impl.name != "Home Assistant Cloud":
            scopes.extend(["access_camera", "access_presence"])
            scopes.sort()

        return {"scope": " ".join(scopes)}

    async def async_step_user(self, user_input=None):
        """Handle a flow start.

        Only a single Netatmo config entry is allowed; abort if one exists.
        """
        await self.async_set_unique_id(DOMAIN)

        if self._async_current_entries():
            return self.async_abort(reason="single_instance_allowed")

        return await super().async_step_user(user_input)


class NetatmoOptionsFlowHandler(config_entries.OptionsFlow):
    """Handle Netatmo options."""

    def __init__(self, config_entry: config_entries.ConfigEntry):
        """Initialize Netatmo options flow.

        Takes a mutable copy of the entry options so steps can update it
        before committing via _create_options_entry().
        """
        self.config_entry = config_entry
        self.options = dict(config_entry.options)
        self.options.setdefault(CONF_WEATHER_AREAS, {})

    async def async_step_init(self, user_input=None):
        """Manage the Netatmo options."""
        return await self.async_step_public_weather_areas()

    async def async_step_public_weather_areas(self, user_input=None):
        """Manage configuration of Netatmo public weather areas.

        Shows a multi-select of existing areas (deselecting removes them)
        plus a text field to add a new area, which chains into
        async_step_public_weather for its coordinates.
        """
        errors = {}

        if user_input is not None:
            # Pop the control fields first so the remainder of user_input
            # can be merged into self.options verbatim.
            new_client = user_input.pop(CONF_NEW_AREA, None)
            areas = user_input.pop(CONF_WEATHER_AREAS, None)
            # Keep only the areas still selected by the user.
            user_input[CONF_WEATHER_AREAS] = {
                area: self.options[CONF_WEATHER_AREAS][area] for area in areas
            }
            self.options.update(user_input)
            if new_client:
                return await self.async_step_public_weather(
                    user_input={CONF_NEW_AREA: new_client}
                )

            return self._create_options_entry()

        weather_areas = list(self.options[CONF_WEATHER_AREAS])

        data_schema = vol.Schema(
            {
                vol.Optional(
                    CONF_WEATHER_AREAS,
                    default=weather_areas,
                ): cv.multi_select(weather_areas),
                vol.Optional(CONF_NEW_AREA): str,
            }
        )
        return self.async_show_form(
            step_id="public_weather_areas",
            data_schema=data_schema,
            errors=errors,
        )

    async def async_step_public_weather(self, user_input=None):
        """Manage configuration of Netatmo public weather sensors.

        First call arrives with only CONF_NEW_AREA set and shows the form;
        the submitted form (no CONF_NEW_AREA) stores the area and returns
        to the areas overview.
        """
        if user_input is not None and CONF_NEW_AREA not in user_input:
            self.options[CONF_WEATHER_AREAS][
                user_input[CONF_AREA_NAME]
            ] = fix_coordinates(user_input)

            # Tag the area with a fresh UUID so entities can be tracked
            # across renames.
            self.options[CONF_WEATHER_AREAS][user_input[CONF_AREA_NAME]][
                CONF_UUID
            ] = str(uuid.uuid4())

            return await self.async_step_public_weather_areas()

        # Prefill from previously saved options for this area name, if any.
        orig_options = self.config_entry.options.get(CONF_WEATHER_AREAS, {}).get(
            user_input[CONF_NEW_AREA], {}
        )

        default_longitude = self.hass.config.longitude
        default_latitude = self.hass.config.latitude
        # Default bounding box: +/- 0.04 degrees around the home location.
        default_size = 0.04

        data_schema = vol.Schema(
            {
                vol.Optional(CONF_AREA_NAME, default=user_input[CONF_NEW_AREA]): str,
                vol.Optional(
                    CONF_LAT_NE,
                    default=orig_options.get(
                        CONF_LAT_NE, default_latitude + default_size
                    ),
                ): cv.latitude,
                vol.Optional(
                    CONF_LON_NE,
                    default=orig_options.get(
                        CONF_LON_NE, default_longitude + default_size
                    ),
                ): cv.longitude,
                vol.Optional(
                    CONF_LAT_SW,
                    default=orig_options.get(
                        CONF_LAT_SW, default_latitude - default_size
                    ),
                ): cv.latitude,
                vol.Optional(
                    CONF_LON_SW,
                    default=orig_options.get(
                        CONF_LON_SW, default_longitude - default_size
                    ),
                ): cv.longitude,
                vol.Required(
                    CONF_PUBLIC_MODE,
                    default=orig_options.get(CONF_PUBLIC_MODE, "avg"),
                ): vol.In(["avg", "max"]),
                vol.Required(
                    CONF_SHOW_ON_MAP,
                    default=orig_options.get(CONF_SHOW_ON_MAP, False),
                ): bool,
            }
        )

        return self.async_show_form(step_id="public_weather", data_schema=data_schema)

    def _create_options_entry(self):
        """Update config entry options."""
        return self.async_create_entry(
            title="Netatmo Public Weather", data=self.options
        )


def fix_coordinates(user_input):
    """Fix coordinates if they don't comply with the Netatmo API."""
    # Ensure coordinates have acceptable length for the Netatmo API
    # NOTE(review): split(".")[1] raises IndexError if a coordinate has no
    # decimal point (e.g. an integer value slipped through validation) —
    # confirm cv.latitude/cv.longitude always yield floats with a fraction.
    for coordinate in [CONF_LAT_NE, CONF_LAT_SW, CONF_LON_NE, CONF_LON_SW]:
        if len(str(user_input[coordinate]).split(".")[1]) < 7:
            # Netatmo requires at least 7 decimal places; nudge the value.
            user_input[coordinate] = user_input[coordinate] + 0.0000001

    # Swap coordinates if entered in wrong order
    if user_input[CONF_LAT_NE] < user_input[CONF_LAT_SW]:
        user_input[CONF_LAT_NE], user_input[CONF_LAT_SW] = (
            user_input[CONF_LAT_SW],
            user_input[CONF_LAT_NE],
        )
    if user_input[CONF_LON_NE] < user_input[CONF_LON_SW]:
        user_input[CONF_LON_NE], user_input[CONF_LON_SW] = (
            user_input[CONF_LON_SW],
            user_input[CONF_LON_NE],
        )

    return user_input
apache-2.0
xzYue/odoo
addons/l10n_be_hr_payroll/__init__.py
438
1072
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2011 OpenERP SA (<http://openerp.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/> # ############################################################################## import l10n_be_hr_payroll # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
SUSE/azure-sdk-for-python
azure-mgmt-powerbiembedded/azure/mgmt/powerbiembedded/models/update_workspace_collection_request.py
4
1156
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from .azure_sku import AzureSku from msrest.serialization import Model class UpdateWorkspaceCollectionRequest(Model): """UpdateWorkspaceCollectionRequest. Variables are only populated by the server, and will be ignored when sending a request. :param tags: :type tags: dict :ivar sku: :vartype sku: :class:`AzureSku <azure.mgmt.powerbiembedded.models.AzureSku>` """ _validation = { 'sku': {'constant': True}, } _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'sku', 'type': 'AzureSku'}, } sku = AzureSku() def __init__(self, tags=None): self.tags = tags
mit
levibostian/myBlanky
googleAppEngine/apiclient/channel.py
120
9848
"""Channel notifications support.

Classes and functions to support channel subscriptions and notifications
on those channels.

Notes:
  - This code is based on experimental APIs and is subject to change.
  - Notification does not do deduplication of notification ids, that's up to
    the receiver.
  - Storing the Channel between calls is up to the caller.


Example setting up a channel:

  # Create a new channel that gets notifications via webhook.
  channel = new_webhook_channel("https://example.com/my_web_hook")

  # Store the channel, keyed by 'channel.id'. Store it before calling the
  # watch method because notifications may start arriving before the watch
  # method returns.
  ...

  resp = service.objects().watchAll(
    bucket="some_bucket_id", body=channel.body()).execute()
  channel.update(resp)

  # Store the channel, keyed by 'channel.id'. Store it after being updated
  # since the resource_id value will now be correct, and that's needed to
  # stop a subscription.
  ...

An example Webhook implementation using webapp2. Note that webapp2 puts
headers in a case insensitive dictionary, as headers aren't guaranteed to
always be upper case.

  id = self.request.headers[X_GOOG_CHANNEL_ID]

  # Retrieve the channel by id.
  channel = ...

  # Parse notification from the headers, including validating the id.
  n = notification_from_headers(channel, self.request.headers)

  # Do app specific stuff with the notification here.
  if n.resource_state == 'sync':
    # Code to handle sync state.
  elif n.resource_state == 'exists':
    # Code to handle the exists state.
  elif n.resource_state == 'not_exists':
    # Code to handle the not exists state.

Example of unsubscribing.

  service.channels().stop(channel.body())
"""

import datetime
import uuid

from apiclient import errors
from oauth2client import util


# The unix time epoch starts at midnight 1970.
EPOCH = datetime.datetime.utcfromtimestamp(0)

# Map the names of the parameters in the JSON channel description to
# the parameter names we use in the Channel class.
CHANNEL_PARAMS = {
    'address': 'address',
    'id': 'id',
    'expiration': 'expiration',
    'params': 'params',
    'resourceId': 'resource_id',
    'resourceUri': 'resource_uri',
    'type': 'type',
    'token': 'token',
    }

# Header names used by Google push notification deliveries.
X_GOOG_CHANNEL_ID = 'X-GOOG-CHANNEL-ID'
X_GOOG_MESSAGE_NUMBER = 'X-GOOG-MESSAGE-NUMBER'
X_GOOG_RESOURCE_STATE = 'X-GOOG-RESOURCE-STATE'
X_GOOG_RESOURCE_URI = 'X-GOOG-RESOURCE-URI'
X_GOOG_RESOURCE_ID = 'X-GOOG-RESOURCE-ID'


def _upper_header_keys(headers):
  """Return a copy of headers with all keys upper-cased.

  Makes header lookup case-insensitive regardless of how the web framework
  presents the incoming request headers.
  NOTE: uses dict.iteritems, so this module is Python 2 only.
  """
  new_headers = {}
  for k, v in headers.iteritems():
    new_headers[k.upper()] = v
  return new_headers


class Notification(object):
  """A Notification from a Channel.

  Notifications are not usually constructed directly, but are returned
  from functions like notification_from_headers().

  Attributes:
    message_number: int, The unique id number of this notification.
    state: str, The state of the resource being monitored.
    uri: str, The address of the resource being monitored.
    resource_id: str, The unique identifier of the version of the
      resource at this event.
  """
  # @util.positional(5): all four arguments (plus self) must be positional.
  @util.positional(5)
  def __init__(self, message_number, state, resource_uri, resource_id):
    """Notification constructor.

    Args:
      message_number: int, The unique id number of this notification.
      state: str, The state of the resource being monitored. Can be one
        of "exists", "not_exists", or "sync".
      resource_uri: str, The address of the resource being monitored.
      resource_id: str, The identifier of the watched resource.
    """
    self.message_number = message_number
    self.state = state
    self.resource_uri = resource_uri
    self.resource_id = resource_id


class Channel(object):
  """A Channel for notifications.

  Usually not constructed directly, instead it is returned from helper
  functions like new_webhook_channel().

  Attributes:
    type: str, The type of delivery mechanism used by this channel. For
      example, 'web_hook'.
    id: str, A UUID for the channel.
    token: str, An arbitrary string associated with the channel that
      is delivered to the target address with each event delivered
      over this channel.
    address: str, The address of the receiving entity where events are
      delivered. Specific to the channel type.
    expiration: int, The time, in milliseconds from the epoch, when this
      channel will expire.
    params: dict, A dictionary of string to string, with additional parameters
      controlling delivery channel behavior.
    resource_id: str, An opaque id that identifies the resource that is
      being watched. Stable across different API versions.
    resource_uri: str, The canonicalized ID of the watched resource.
  """

  # @util.positional(5): only type, id, token and address may be positional;
  # the remaining parameters must be passed by keyword.
  @util.positional(5)
  def __init__(self, type, id, token, address, expiration=None,
               params=None, resource_id="", resource_uri=""):
    """Create a new Channel.

    In user code, this Channel constructor will not typically be called
    manually since there are functions for creating channels for each specific
    type with a more customized set of arguments to pass.

    Args:
      type: str, The type of delivery mechanism used by this channel. For
        example, 'web_hook'.
      id: str, A UUID for the channel.
      token: str, An arbitrary string associated with the channel that
        is delivered to the target address with each event delivered
        over this channel.
      address: str, The address of the receiving entity where events are
        delivered. Specific to the channel type.
      expiration: int, The time, in milliseconds from the epoch, when this
        channel will expire.
      params: dict, A dictionary of string to string, with additional parameters
        controlling delivery channel behavior.
      resource_id: str, An opaque id that identifies the resource that is
        being watched. Stable across different API versions.
      resource_uri: str, The canonicalized ID of the watched resource.
    """
    self.type = type
    self.id = id
    self.token = token
    self.address = address
    self.expiration = expiration
    self.params = params
    self.resource_id = resource_id
    self.resource_uri = resource_uri

  def body(self):
    """Build a body from the Channel.

    Constructs a dictionary that's appropriate for passing into watch()
    methods as the value of body argument.

    Returns:
      A dictionary representation of the channel.
    """
    result = {
        'id': self.id,
        'token': self.token,
        'type': self.type,
        'address': self.address
        }
    # Optional fields are only included when set, keeping the request minimal.
    if self.params:
      result['params'] = self.params
    if self.resource_id:
      result['resourceId'] = self.resource_id
    if self.resource_uri:
      result['resourceUri'] = self.resource_uri
    if self.expiration:
      result['expiration'] = self.expiration

    return result

  def update(self, resp):
    """Update a channel with information from the response of watch().

    When a request is sent to watch() a resource, the response returned
    from the watch() request is a dictionary with updated channel information,
    such as the resource_id, which is needed when stopping a subscription.

    Args:
      resp: dict, The response from a watch() method.
    """
    # Copy every recognized JSON field onto the matching attribute name.
    for json_name, param_name in CHANNEL_PARAMS.iteritems():
      value = resp.get(json_name)
      if value is not None:
        setattr(self, param_name, value)


def notification_from_headers(channel, headers):
  """Parse a notification from the webhook request headers, validate
    the notification, and return a Notification object.

  Args:
    channel: Channel, The channel that the notification is associated with.
    headers: dict, A dictionary like object that contains the request headers
      from the webhook HTTP request.

  Returns:
    A Notification object.

  Raises:
    errors.InvalidNotificationError if the notification is invalid.
    ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int.
  """
  headers = _upper_header_keys(headers)
  channel_id = headers[X_GOOG_CHANNEL_ID]
  # The channel id in the headers must match the channel we are tracking,
  # otherwise the notification is for someone else (or is forged).
  if channel.id != channel_id:
    raise errors.InvalidNotificationError(
        'Channel id mismatch: %s != %s' % (channel.id, channel_id))
  else:
    message_number = int(headers[X_GOOG_MESSAGE_NUMBER])
    state = headers[X_GOOG_RESOURCE_STATE]
    resource_uri = headers[X_GOOG_RESOURCE_URI]
    resource_id = headers[X_GOOG_RESOURCE_ID]
    return Notification(message_number, state, resource_uri, resource_id)


# @util.positional(2): only url may be passed positionally.
@util.positional(2)
def new_webhook_channel(url, token=None, expiration=None, params=None):
  """Create a new webhook Channel.

  Args:
    url: str, URL to post notifications to.
    token: str, An arbitrary string associated with the channel that
      is delivered to the target address with each notification delivered
      over this channel.
    expiration: datetime.datetime, A time in the future when the channel
      should expire. Can also be None if the subscription should use the
      default expiration. Note that different services may have different
      limits on how long a subscription lasts. Check the response from the
      watch() method to see the value the service has set for an expiration
      time.
    params: dict, Extra parameters to pass on channel creation. Currently
      not used for webhook channels.
  """
  expiration_ms = 0
  if expiration:
    # Convert the datetime to integer milliseconds since the unix epoch.
    delta = expiration - EPOCH
    expiration_ms = delta.microseconds/1000 + (
        delta.seconds + delta.days*24*3600)*1000
    if expiration_ms < 0:
      expiration_ms = 0

  return Channel('web_hook', str(uuid.uuid4()),
                 token, url, expiration=expiration_ms,
                 params=params)
mit
cmelange/ansible
lib/ansible/modules/notification/rocketchat.py
49
8618
#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2016, Deepak Kothandan <deepak.kothandan@outlook.com>
# (c) 2015, Stefan Berggren <nsg@nsg.cc>
# (c) 2014, Ramon de la Fuente <ramon@delafuente.nl>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = """
module: rocketchat
short_description: Send notifications to Rocket Chat
description:
    - The C(rocketchat) module sends notifications to Rocket Chat via the Incoming WebHook integration
version_added: "2.2"
author: "Ramon de la Fuente (@ramondelafuente)"
options:
  domain:
    description:
      - The domain for your environment without protocol. (i.e.
        C(example.com) or C(chat.example.com))
    required: true
  token:
    description:
      - Rocket Chat Incoming Webhook integration token.  This provides
        authentication to Rocket Chat's Incoming webhook for posting
        messages.
    required: true
  protocol:
    description:
      - Specify the protocol used to send notification messages before the webhook url. (i.e. http or https)
    required: false
    default: https
    choices:
      - 'http'
      - 'https'
  msg:
    description:
      - Message to be sent.
    required: false
    default: None
  channel:
    description:
      - Channel to send the message to. If absent, the message goes to the channel selected for the I(token)
        specifed during the creation of webhook.
    required: false
    default: None
  username:
    description:
      - This is the sender of the message.
    required: false
    default: "Ansible"
  icon_url:
    description:
      - URL for the message sender's icon.
    required: false
    default: "https://www.ansible.com/favicon.ico"
  icon_emoji:
    description:
      - Emoji for the message sender. The representation for the available emojis can be
        got from Rocket Chat. (for example :thumbsup:)
        (if I(icon_emoji) is set, I(icon_url) will not be used)
    required: false
    default: None
  link_names:
    description:
      - Automatically create links for channels and usernames in I(msg).
    required: false
    default: 1
    choices:
      - 1
      - 0
  validate_certs:
    description:
      - If C(no), SSL certificates will not be validated. This should only be used
        on personally controlled sites using self-signed certificates.
    required: false
    default: 'yes'
    choices:
      - 'yes'
      - 'no'
  color:
    description:
      - Allow text to use default colors - use the default of 'normal' to not send a custom color bar at the start of the message
    required: false
    default: 'normal'
    choices:
      - 'normal'
      - 'good'
      - 'warning'
      - 'danger'
  attachments:
    description:
      - Define a list of attachments.
    required: false
    default: None
"""

EXAMPLES = """
- name: Send notification message via Rocket Chat
  rocketchat:
    token: thetoken/generatedby/rocketchat
    domain: chat.example.com
    msg: '{{ inventory_hostname }} completed'
  delegate_to: localhost

- name: Send notification message via Rocket Chat all options
  rocketchat:
    domain: chat.example.com
    token: thetoken/generatedby/rocketchat
    msg: '{{ inventory_hostname }} completed'
    channel: #ansible
    username: 'Ansible on {{ inventory_hostname }}'
    icon_url: http://www.example.com/some-image-file.png
    link_names: 0
  delegate_to: localhost

- name: insert a color bar in front of the message for visibility purposes and use the default webhook icon and name configured in rocketchat
  rocketchat:
    token: thetoken/generatedby/rocketchat
    domain: chat.example.com
    msg: '{{ inventory_hostname }} is alive!'
    color: good
    username: ''
    icon_url: ''
  delegate_to: localhost

- name: Use the attachments API
  rocketchat:
    token: thetoken/generatedby/rocketchat
    domain: chat.example.com
    attachments:
      - text: Display my system load on host A and B
        color: #ff00dd
        title: System load
        fields:
          - title: System A
            value: 'load average: 0,74, 0,66, 0,63'
            short: True
          - title: System B
            value: 'load average: 5,16, 4,64, 2,43'
            short: True
  delegate_to: localhost
"""

RETURN = """
changed:
    description: A flag indicating if any change was made or not.
    returned: success
    type: boolean
    sample: false
"""

# URL template: protocol, domain, then the webhook token path segment.
ROCKETCHAT_INCOMING_WEBHOOK = '%s://%s/hooks/%s'


def build_payload_for_rocketchat(module, text, channel, username, icon_url, icon_emoji, link_names, color, attachments):
    """Assemble the JSON payload for the Rocket Chat incoming webhook.

    A non-'normal' color wraps the text in an attachment so Rocket Chat
    renders a colored bar; 'normal' sends plain text.  Returns the payload
    as a 'payload=...' form-encoded string.
    """
    payload = {}
    if color == "normal" and text is not None:
        payload = dict(text=text)
    elif text is not None:
        payload = dict(attachments=[dict(text=text, color=color)])
    if channel is not None:
        # Channels must be prefixed with '#' (or '@' for direct messages).
        if (channel[0] == '#') or (channel[0] == '@'):
            payload['channel'] = channel
        else:
            payload['channel'] = '#' + channel
    if username is not None:
        payload['username'] = username
    if icon_emoji is not None:
        # icon_emoji takes precedence over icon_url when both are set.
        payload['icon_emoji'] = icon_emoji
    else:
        payload['icon_url'] = icon_url
    if link_names is not None:
        payload['link_names'] = link_names

    if attachments is not None:
        if 'attachments' not in payload:
            payload['attachments'] = []

    if attachments is not None:
        for attachment in attachments:
            # Rocket Chat requires a 'fallback'; default it to the text.
            if 'fallback' not in attachment:
                attachment['fallback'] = attachment['text']
            payload['attachments'].append(attachment)

    payload="payload=" + module.jsonify(payload)
    return payload


def do_notify_rocketchat(module, domain, token, protocol, payload):
    """POST the payload to the Rocket Chat webhook; fail the module on error."""
    if token.count('/') < 1:
        module.fail_json(msg="Invalid Token specified, provide a valid token")

    rocketchat_incoming_webhook = ROCKETCHAT_INCOMING_WEBHOOK % (protocol, domain, token)

    response, info = fetch_url(module, rocketchat_incoming_webhook, data=payload)
    if info['status'] != 200:
        module.fail_json(msg="failed to send message, return status=%s" % str(info['status']))


def main():
    """Module entry point: parse arguments, build payload, send notification."""
    module = AnsibleModule(
        argument_spec = dict(
            domain = dict(type='str', required=True, default=None),
            token = dict(type='str', required=True, no_log=True),
            protocol = dict(type='str', default='https', choices=['http', 'https']),
            msg = dict(type='str', required=False, default=None),
            channel = dict(type='str', default=None),
            username = dict(type='str', default='Ansible'),
            icon_url = dict(type='str', default='https://www.ansible.com/favicon.ico'),
            icon_emoji = dict(type='str', default=None),
            link_names = dict(type='int', default=1, choices=[0,1]),
            validate_certs = dict(default='yes', type='bool'),
            color = dict(type='str', default='normal', choices=['normal', 'good', 'warning', 'danger']),
            attachments = dict(type='list', required=False, default=None)
        )
    )

    domain = module.params['domain']
    token = module.params['token']
    protocol = module.params['protocol']
    text = module.params['msg']
    channel = module.params['channel']
    username = module.params['username']
    icon_url = module.params['icon_url']
    icon_emoji = module.params['icon_emoji']
    link_names = module.params['link_names']
    color = module.params['color']
    attachments = module.params['attachments']

    payload = build_payload_for_rocketchat(module, text, channel, username, icon_url, icon_emoji, link_names, color, attachments)
    do_notify_rocketchat(module, domain, token, protocol, payload)

    module.exit_json(msg="OK")

# import module snippets
# NOTE: wildcard imports at the bottom are the legacy Ansible module pattern;
# they provide AnsibleModule and fetch_url used above.
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *

if __name__ == '__main__':
    main()
gpl-3.0
jjas0nn/solvem
tensorflow/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/poolmanager.py
359
13092
from __future__ import absolute_import
import collections
import functools
import logging

try:  # Python 3
    from urllib.parse import urljoin
except ImportError:
    from urlparse import urljoin

from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import port_by_scheme
from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown
from .request import RequestMethods
from .util.url import parse_url
from .util.retry import Retry


__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']


log = logging.getLogger(__name__)

# Keyword arguments that only make sense for HTTPS pools; they are stripped
# from the kwargs when constructing plain HTTP pools.
SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
                'ssl_version', 'ca_cert_dir')

# The base fields to use when determining what pool to get a connection from;
# these do not rely on the ``connection_pool_kw`` and can be determined by the
# URL and potentially the ``urllib3.connection.port_by_scheme`` dictionary.
#
# All custom key schemes should include the fields in this key at a minimum.
BasePoolKey = collections.namedtuple('BasePoolKey', ('scheme', 'host', 'port'))

# The fields to use when determining what pool to get a HTTP and HTTPS
# connection from. All additional fields must be present in the PoolManager's
# ``connection_pool_kw`` instance variable.
HTTPPoolKey = collections.namedtuple(
    'HTTPPoolKey', BasePoolKey._fields + ('timeout', 'retries', 'strict',
                                          'block', 'source_address')
)
HTTPSPoolKey = collections.namedtuple(
    'HTTPSPoolKey', HTTPPoolKey._fields + SSL_KEYWORDS
)


def _default_key_normalizer(key_class, request_context):
    """
    Create a pool key of type ``key_class`` for a request.

    According to RFC 3986, both the scheme and host are case-insensitive.
    Therefore, this function normalizes both before constructing the pool
    key for an HTTPS request. If you wish to change this behaviour, provide
    alternate callables to ``key_fn_by_scheme``.

    :param key_class:
        The class to use when constructing the key. This should be a namedtuple
        with the ``scheme`` and ``host`` keys at a minimum.

    :param request_context:
        A dictionary-like object that contain the context for a request.
        It should contain a key for each field in the :class:`HTTPPoolKey`
    """
    # Missing fields default to None via dict.get, so two contexts that omit
    # the same field still hash to the same pool key.
    context = {}
    for key in key_class._fields:
        context[key] = request_context.get(key)
    context['scheme'] = context['scheme'].lower()
    context['host'] = context['host'].lower()
    return key_class(**context)


# A dictionary that maps a scheme to a callable that creates a pool key.
# This can be used to alter the way pool keys are constructed, if desired.
# Each PoolManager makes a copy of this dictionary so they can be configured
# globally here, or individually on the instance.
key_fn_by_scheme = {
    'http': functools.partial(_default_key_normalizer, HTTPPoolKey),
    'https': functools.partial(_default_key_normalizer, HTTPSPoolKey),
}

pool_classes_by_scheme = {
    'http': HTTPConnectionPool,
    'https': HTTPSConnectionPool,
}


class PoolManager(RequestMethods):
    """
    Allows for arbitrary requests while transparently keeping track of
    necessary connection pools for you.

    :param num_pools:
        Number of connection pools to cache before discarding the least
        recently used pool.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param \\**connection_pool_kw:
        Additional parameters are used to create fresh
        :class:`urllib3.connectionpool.ConnectionPool` instances.

    Example::

        >>> manager = PoolManager(num_pools=2)
        >>> r = manager.request('GET', 'http://google.com/')
        >>> r = manager.request('GET', 'http://google.com/mail')
        >>> r = manager.request('GET', 'http://yahoo.com/')
        >>> len(manager.pools)
        2

    """

    proxy = None

    def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
        RequestMethods.__init__(self, headers)
        self.connection_pool_kw = connection_pool_kw
        # LRU container: evicted pools are closed via the dispose_func.
        self.pools = RecentlyUsedContainer(num_pools,
                                           dispose_func=lambda p: p.close())

        # Locally set the pool classes and keys so other PoolManagers can
        # override them.
        self.pool_classes_by_scheme = pool_classes_by_scheme
        self.key_fn_by_scheme = key_fn_by_scheme.copy()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.clear()
        # Return False to re-raise any potential exceptions
        return False

    def _new_pool(self, scheme, host, port):
        """
        Create a new :class:`ConnectionPool` based on host, port and scheme.

        This method is used to actually create the connection pools handed out
        by :meth:`connection_from_url` and companion methods. It is intended
        to be overridden for customization.
        """
        pool_cls = self.pool_classes_by_scheme[scheme]
        kwargs = self.connection_pool_kw
        if scheme == 'http':
            # HTTP pools must not receive SSL-only kwargs; copy and strip.
            kwargs = self.connection_pool_kw.copy()
            for kw in SSL_KEYWORDS:
                kwargs.pop(kw, None)

        return pool_cls(host, port, **kwargs)

    def clear(self):
        """
        Empty our store of pools and direct them all to close.

        This will not affect in-flight connections, but they will not be
        re-used after completion.
        """
        self.pools.clear()

    def connection_from_host(self, host, port=None, scheme='http'):
        """
        Get a :class:`ConnectionPool` based on the host, port, and scheme.

        If ``port`` isn't given, it will be derived from the ``scheme`` using
        ``urllib3.connectionpool.port_by_scheme``.
        """

        if not host:
            raise LocationValueError("No host specified.")

        request_context = self.connection_pool_kw.copy()
        request_context['scheme'] = scheme or 'http'
        if not port:
            port = port_by_scheme.get(request_context['scheme'].lower(), 80)
        request_context['port'] = port
        request_context['host'] = host

        return self.connection_from_context(request_context)

    def connection_from_context(self, request_context):
        """
        Get a :class:`ConnectionPool` based on the request context.

        ``request_context`` must at least contain the ``scheme`` key and its
        value must be a key in ``key_fn_by_scheme`` instance variable.
        """
        scheme = request_context['scheme'].lower()
        pool_key_constructor = self.key_fn_by_scheme[scheme]
        pool_key = pool_key_constructor(request_context)

        return self.connection_from_pool_key(pool_key)

    def connection_from_pool_key(self, pool_key):
        """
        Get a :class:`ConnectionPool` based on the provided pool key.

        ``pool_key`` should be a namedtuple that only contains immutable
        objects. At a minimum it must have the ``scheme``, ``host``, and
        ``port`` fields.
        """
        # Hold the container lock so a concurrent lookup of the same key
        # cannot create a duplicate pool.
        with self.pools.lock:
            # If the scheme, host, or port doesn't match existing open
            # connections, open a new ConnectionPool.
            pool = self.pools.get(pool_key)
            if pool:
                return pool

            # Make a fresh ConnectionPool of the desired type
            pool = self._new_pool(pool_key.scheme, pool_key.host, pool_key.port)
            self.pools[pool_key] = pool

        return pool

    def connection_from_url(self, url):
        """
        Similar to :func:`urllib3.connectionpool.connection_from_url` but
        doesn't pass any additional parameters to the
        :class:`urllib3.connectionpool.ConnectionPool` constructor.

        Additional parameters are taken from the :class:`.PoolManager`
        constructor.
        """
        u = parse_url(url)
        return self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

    def urlopen(self, method, url, redirect=True, **kw):
        """
        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
        with custom cross-host redirect logic and only sends the request-uri
        portion of the ``url``.

        The given ``url`` parameter must be absolute, such that an appropriate
        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
        """
        u = parse_url(url)
        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

        # Redirects are followed here (cross-host capable), not by the pool.
        kw['assert_same_host'] = False
        kw['redirect'] = False
        if 'headers' not in kw:
            kw['headers'] = self.headers

        if self.proxy is not None and u.scheme == "http":
            # Proxied plain-HTTP requests must send the absolute URL.
            response = conn.urlopen(method, url, **kw)
        else:
            response = conn.urlopen(method, u.request_uri, **kw)

        redirect_location = redirect and response.get_redirect_location()
        if not redirect_location:
            return response

        # Support relative URLs for redirecting.
        redirect_location = urljoin(url, redirect_location)

        # RFC 7231, Section 6.4.4
        if response.status == 303:
            method = 'GET'

        retries = kw.get('retries')
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect)

        try:
            retries = retries.increment(method, url, response=response, _pool=conn)
        except MaxRetryError:
            if retries.raise_on_redirect:
                raise
            return response

        kw['retries'] = retries
        kw['redirect'] = redirect

        log.info("Redirecting %s -> %s", url, redirect_location)
        # Recurse to follow the redirect; retries bound the recursion depth.
        return self.urlopen(method, redirect_location, **kw)


class ProxyManager(PoolManager):
    """
    Behaves just like :class:`PoolManager`, but sends all requests through
    the defined proxy, using the CONNECT method for HTTPS URLs.

    :param proxy_url:
        The URL of the proxy to be used.

    :param proxy_headers:
        A dictionary contaning headers that will be sent to the proxy. In case
        of HTTP they are being sent with each request, while in the
        HTTPS/CONNECT case they are sent only once. Could be used for proxy
        authentication.

    Example:
        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
        >>> r1 = proxy.request('GET', 'http://google.com/')
        >>> r2 = proxy.request('GET', 'http://httpbin.org/')
        >>> len(proxy.pools)
        1
        >>> r3 = proxy.request('GET', 'https://httpbin.org/')
        >>> r4 = proxy.request('GET', 'https://twitter.com/')
        >>> len(proxy.pools)
        3

    """

    def __init__(self, proxy_url, num_pools=10, headers=None,
                 proxy_headers=None, **connection_pool_kw):

        if isinstance(proxy_url, HTTPConnectionPool):
            proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host,
                                        proxy_url.port)
        proxy = parse_url(proxy_url)
        if not proxy.port:
            port = port_by_scheme.get(proxy.scheme, 80)
            proxy = proxy._replace(port=port)

        if proxy.scheme not in ("http", "https"):
            raise ProxySchemeUnknown(proxy.scheme)

        self.proxy = proxy
        self.proxy_headers = proxy_headers or {}

        # Forward the proxy settings to every ConnectionPool we create.
        connection_pool_kw['_proxy'] = self.proxy
        connection_pool_kw['_proxy_headers'] = self.proxy_headers

        super(ProxyManager, self).__init__(
            num_pools, headers, **connection_pool_kw)

    def connection_from_host(self, host, port=None, scheme='http'):
        # HTTPS goes through the destination-host pool (CONNECT tunnel);
        # plain HTTP connects to the proxy itself.
        if scheme == "https":
            return super(ProxyManager, self).connection_from_host(
                host, port, scheme)

        return super(ProxyManager, self).connection_from_host(
            self.proxy.host, self.proxy.port, self.proxy.scheme)

    def _set_proxy_headers(self, url, headers=None):
        """
        Sets headers needed by proxies: specifically, the Accept and Host
        headers. Only sets headers not provided by the user.
        """
        headers_ = {'Accept': '*/*'}

        netloc = parse_url(url).netloc
        if netloc:
            headers_['Host'] = netloc

        if headers:
            headers_.update(headers)
        return headers_

    def urlopen(self, method, url, redirect=True, **kw):
        "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
        u = parse_url(url)

        if u.scheme == "http":
            # For proxied HTTPS requests, httplib sets the necessary headers
            # on the CONNECT to the proxy. For HTTP, we'll definitely
            # need to set 'Host' at the very least.
            headers = kw.get('headers', self.headers)
            kw['headers'] = self._set_proxy_headers(url, headers)

        return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)


def proxy_from_url(url, **kw):
    # Convenience constructor mirroring connection_from_url for proxies.
    return ProxyManager(proxy_url=url, **kw)
mit
cmdunkers/DeeperMind
PythonEnv/lib/python2.7/site-packages/scipy/weave/accelerate_tools.py
97
12946
#**************************************************************************# #* FILE ************** accelerate_tools.py ************************# #**************************************************************************# #* Author: Patrick Miller February 9 2002 *# #**************************************************************************# """ accelerate_tools contains the interface for on-the-fly building of C++ equivalents to Python functions. """ #**************************************************************************# from __future__ import absolute_import, print_function from types import InstanceType, XRangeType import inspect from hashlib import sha256 import scipy.weave as weave from numpy.testing import assert_ from .bytecodecompiler import CXXCoder,Type_Descriptor,Function_Descriptor def CStr(s): "Hacky way to get legal C string from Python string" if s is None: return '""' assert_(isinstance(s, str), msg="only None and string allowed") r = repr('"'+s) # Better for embedded quotes return '"'+r[2:-1]+'"' ################################################################## # CLASS INSTANCE # ################################################################## class Instance(Type_Descriptor): cxxtype = 'PyObject*' def __init__(self,prototype): self.prototype = prototype def check(self,s): return "PyInstance_Check(%s)" % s def inbound(self,s): return s def outbound(self,s): return s,0 def get_attribute(self,name): proto = getattr(self.prototype,name) T = lookup_type(proto) code = 'tempPY = PyObject_GetAttrString(%%(rhs)s,"%s");\n' % name convert = T.inbound('tempPY') code += '%%(lhsType)s %%(lhs)s = %s;\n' % convert return T,code def set_attribute(self,name): proto = getattr(self.prototype,name) T = lookup_type(proto) convert,owned = T.outbound('%(rhs)s') code = 'tempPY = %s;' % convert if not owned: code += ' Py_INCREF(tempPY);' code += ' PyObject_SetAttrString(%%(lhs)s,"%s",tempPY);' % name code += ' Py_DECREF(tempPY);\n' return T,code 
##################################################################
#                           CLASS BASIC                          #
##################################################################
class Basic(Type_Descriptor):
    # Base for scalar descriptors driven by a checker/inbounder/outbounder
    # triple of C function names supplied by subclasses.
    owned = 1

    def check(self,s):
        return "%s(%s)" % (self.checker,s)

    def inbound(self,s):
        return "%s(%s)" % (self.inbounder,s)

    def outbound(self,s):
        # (expression, owned-flag) pair.
        return "%s(%s)" % (self.outbounder,s),self.owned


class Basic_Number(Basic):
    # Numbers additionally appear as C literals and in the four basic
    # binary operations.
    def literalizer(self,s):
        return str(s)

    def binop(self,symbol,a,b):
        assert_(symbol in ['+','-','*','/'], msg=symbol)
        # Result type of a homogeneous binop is the operand type itself.
        return '%s %s %s' % (a,symbol,b),self


class Integer(Basic_Number):
    # Python 2 int <-> C long via the PyInt_* API.
    cxxtype = "long"
    checker = "PyInt_Check"
    inbounder = "PyInt_AsLong"
    outbounder = "PyInt_FromLong"


class Double(Basic_Number):
    cxxtype = "double"
    checker = "PyFloat_Check"
    inbounder = "PyFloat_AsDouble"
    outbounder = "PyFloat_FromDouble"


class String(Basic):
    cxxtype = "char*"
    checker = "PyString_Check"
    inbounder = "PyString_AsString"
    outbounder = "PyString_FromString"

    def literalizer(self,s):
        return CStr(s)

# -----------------------------------------------
# Singletonize the type names
# -----------------------------------------------
Integer = Integer()
Double = Double()
String = String()

import numpy as np


class Vector(Type_Descriptor):
    # 1-D numpy array descriptor; "conversion" is only a pointer cast,
    # hence owned = 0 (no new reference changes hands).
    cxxtype = 'PyArrayObject*'
    refcount = 1
    dims = 1
    module_init_code = 'import_array();\n'
    inbounder = "(PyArrayObject*)"
    outbounder = "(PyObject*)"
    owned = 0  # Conversion is by casting!

    prerequisites = Type_Descriptor.prerequisites + \
                    ['#include "numpy/arrayobject.h"']
    dims = 1

    def check(self,s):
        # One C expression verifying array-ness, rank and element type code.
        return "PyArray_Check(%s) && ((PyArrayObject*)%s)->nd == %d && ((PyArrayObject*)%s)->descr->type_num == %s" % (
            s,s,self.dims,s,self.typecode)

    def inbound(self,s):
        return "%s(%s)" % (self.inbounder,s)

    def outbound(self,s):
        return "%s(%s)" % (self.outbounder,s),self.owned

    def getitem(self,A,v,t):
        # Strided element lvalue: *((T*)(A->data + i0*strides[0] + ...)).
        assert_(self.dims == len(v), msg='Expect dimension %d' % self.dims)
        code = '*((%s*)(%s->data' % (self.cxxbase,A)
        for i in range(self.dims):
            # assert that ''t[i]'' is an integer
            code += '+%s*%s->strides[%d]' % (v[i],A,i)
        code += '))'
        return code,self.pybase

    def setitem(self,A,v,t):
        # Same expression is an lvalue, so it also works for stores.
        return self.getitem(A,v,t)


class matrix(Vector):
    dims = 2


class IntegerVector(Vector):
    typecode = 'PyArray_INT'
    cxxbase = 'int'
    pybase = Integer


class Integermatrix(matrix):
    typecode = 'PyArray_INT'
    cxxbase = 'int'
    pybase = Integer


class LongVector(Vector):
    typecode = 'PyArray_LONG'
    cxxbase = 'long'
    pybase = Integer


class Longmatrix(matrix):
    typecode = 'PyArray_LONG'
    cxxbase = 'long'
    pybase = Integer


class DoubleVector(Vector):
    typecode = 'PyArray_DOUBLE'
    cxxbase = 'double'
    pybase = Double


class Doublematrix(matrix):
    typecode = 'PyArray_DOUBLE'
    cxxbase = 'double'
    pybase = Double


##################################################################
#                          CLASS XRANGE                          #
##################################################################
class XRange(Type_Descriptor):
    # xrange objects become a tiny C++ value type holding low/high/step.
    cxxtype = 'XRange'
    prerequisites = ['''
    class XRange {
    public:
      XRange(long aLow, long aHigh, long aStep=1)
        : low(aLow),high(aHigh),step(aStep)
      {
      }

      XRange(long aHigh)
        : low(0),high(aHigh),step(1)
      {
      }
      long low;
      long high;
      long step;
    };''']

# -----------------------------------------------
# Singletonize the type names
# -----------------------------------------------
IntegerVector = IntegerVector()
Integermatrix = Integermatrix()
LongVector = LongVector()
Longmatrix = Longmatrix()
DoubleVector = DoubleVector()
Doublematrix = Doublematrix()
XRange = XRange()

# Python type (or (ndarray, rank, scalar-type) triple) -> descriptor.
typedefs = {
    int: Integer,
    float: Double,
    str: String,
    (np.ndarray,1,int): IntegerVector,
    (np.ndarray,2,int): Integermatrix,
    (np.ndarray,1,np.long): LongVector,
    (np.ndarray,2,np.long): Longmatrix,
    (np.ndarray,1,float): DoubleVector,
    (np.ndarray,2,float): Doublematrix,
    XRangeType: XRange,
    }

import math

# (callable, argument-descriptor-tuple) -> inline C code + result type.
functiondefs = {
    (len,(String,)):
    Function_Descriptor(code='strlen(%s)',return_type=Integer),

    (len,(LongVector,)):
    Function_Descriptor(code='PyArray_Size((PyObject*)%s)',return_type=Integer),

    (float,(Integer,)):
    Function_Descriptor(code='(double)(%s)',return_type=Double),

    (range,(Integer,Integer)):
    Function_Descriptor(code='XRange(%s)',return_type=XRange),

    # NOTE(review): (Integer) is not a tuple -- this key is (range, Integer),
    # unlike every other (callable, args-tuple) key. Presumably (Integer,)
    # was intended; verify against function_by_signature lookups.
    (range,(Integer)):
    Function_Descriptor(code='XRange(%s)',return_type=XRange),

    (math.sin,(Double,)):
    Function_Descriptor(code='sin(%s)',return_type=Double),

    (math.cos,(Double,)):
    Function_Descriptor(code='cos(%s)',return_type=Double),

    (math.sqrt,(Double,)):
    Function_Descriptor(code='sqrt(%s)',return_type=Double),
    }


##################################################################
#                    FUNCTION LOOKUP_TYPE                        #
##################################################################
def lookup_type(x):
    # Map a runtime value to its Type_Descriptor singleton.
    T = type(x)
    try:
        return typedefs[T]
    except:
        # NOTE(review): T is a *type object* here, so isinstance(T, np.ndarray)
        # is always False and this branch can never fire; it also keys the
        # dict with (T, rank, dtype.char) while typedefs uses scalar types.
        # Presumably isinstance(x, np.ndarray) was intended -- TODO confirm.
        if isinstance(T,np.ndarray):
            return typedefs[(T,len(x.shape),x.dtype.char)]
        elif issubclass(T, InstanceType):
            return Instance(x)
        else:
            raise NotImplementedError(T)


##################################################################
#                       class ACCELERATE                         #
##################################################################
class accelerate(object):
    # Decorator-style wrapper: builds (and caches) a weave-compiled C++
    # specialization of *function* per argument-type signature.

    def __init__(self, function, *args, **kw):
        assert_(inspect.isfunction(function))
        self.function = function
        self.module = inspect.getmodule(function)
        if self.module is None:
            import __main__
            self.module = __main__
        self.__call_map = {}

    def __cache(self,*args):
        # Placeholder fast-path: raising TypeError routes the first call
        # into the slow dispatch in __call__.
        raise TypeError

    def __call__(self,*args):
        try:
            return self.__cache(*args)
        except TypeError:
            # NOTE(review): a TypeError raised *inside* a cached fast
            # function also lands here and triggers re-dispatch.
            # Figure out type info -- Do as tuple so its hashable
            signature = tuple(map(lookup_type,args))

            # If we know the function, call it
            try:
                fast = self.__call_map[signature]
            except:
                fast = self.singleton(signature)
                self.__cache = fast
                self.__call_map[signature] = fast
            return fast(*args)

    def signature(self,*args):
        # Figure out type info -- Do as tuple so its hashable
        signature = tuple(map(lookup_type,args))
        return self.singleton(signature)

    def singleton(self,signature):
        # Return the compiled specialization for *signature*, building the
        # <module>_weave extension on first use.
        identifier = self.identifier(signature)

        # Generate a new function, then call it
        f = self.function

        # See if we have an accelerated version of module
        try:
            print('lookup',self.module.__name__+'_weave')
            accelerated_module = __import__(self.module.__name__+'_weave')
            print('have accelerated',self.module.__name__+'_weave')
            fast = getattr(accelerated_module,identifier)
            return fast
        except ImportError:
            accelerated_module = None
        except AttributeError:
            pass

        P = self.accelerate(signature,identifier)

        E = weave.ext_tools.ext_module(self.module.__name__+'_weave')
        E.add_function(P)
        E.generate_file()
        weave.build_tools.build_extension(self.module.__name__+'_weave.cpp',verbose=2)

        if accelerated_module:
            # Extension already imported with a stale symbol table.
            raise NotImplementedError('Reload')
        else:
            accelerated_module = __import__(self.module.__name__+'_weave')

        fast = getattr(accelerated_module,identifier)
        return fast

    def identifier(self,signature):
        # Build a (truncated, see gh-3216) SHA-256 checksum over the
        # signature and the function's bytecode, so any source change
        # yields a fresh symbol name. func_code/co_code are Python 2 attrs.
        f = self.function
        co = f.func_code
        identifier = str(signature) + \
                     str(co.co_argcount) + \
                     str(co.co_consts) + \
                     str(co.co_varnames) + \
                     co.co_code
        return 'F' + sha256(identifier).hexdigest()[:32]

    def accelerate(self,signature,identifier):
        P = Python2CXX(self.function,signature,name=identifier)
        return P

    def code(self,*args):
        # Return the generated C++ source (for inspection) without
        # compiling it.
        if len(args) != self.function.func_code.co_argcount:
            raise TypeError('%s() takes exactly %d arguments (%d given)' %
                            (self.function.__name__,
                             self.function.func_code.co_argcount,
                             len(args)))
        signature = tuple(map(lookup_type,args))
        ident = self.function.__name__
        return self.accelerate(signature,ident).function_code()
################################################################## # CLASS PYTHON2CXX # ################################################################## class Python2CXX(CXXCoder): def typedef_by_value(self,v): T = lookup_type(v) if T not in self.used: self.used.append(T) return T def function_by_signature(self,signature): descriptor = functiondefs[signature] if descriptor.return_type not in self.used: self.used.append(descriptor.return_type) return descriptor def __init__(self,f,signature,name=None): # Make sure function is a function assert_(inspect.isfunction(f)) # and check the input type signature assert_(reduce(lambda x,y: x and y, map(lambda x: isinstance(x,Type_Descriptor), signature), 1), msg='%s not all type objects' % signature) self.arg_specs = [] self.customize = weave.base_info.custom_info() CXXCoder.__init__(self,f,signature,name) return def function_code(self): code = self.wrapped_code() for T in self.used: if T is not None and T.module_init_code: self.customize.add_module_init_code(T.module_init_code) return code def python_function_definition_code(self): return '{ "%s", wrapper_%s, METH_VARARGS, %s },\n' % ( self.name, self.name, CStr(self.function.__doc__))
bsd-3-clause
andim27/magiccamp
build/lib/django/template/loaders/app_directories.py
65
2766
"""
Wrapper for loading templates from "templates" directories in INSTALLED_APPS
packages.
"""

import os
import sys

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.template import TemplateDoesNotExist
from django.template.loader import BaseLoader
from django.utils._os import safe_join
from django.utils.importlib import import_module

# At compile time, cache the directories to search.
# NOTE: Python 2 module ("except ImportError, e" syntax; bytes .decode()).
fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
app_template_dirs = []
for app in settings.INSTALLED_APPS:
    try:
        mod = import_module(app)
    except ImportError, e:
        raise ImproperlyConfigured('ImportError %s: %s' % (app, e.args[0]))
    template_dir = os.path.join(os.path.dirname(mod.__file__), 'templates')
    if os.path.isdir(template_dir):
        # Store unicode paths; template_dir is a filesystem bytestring here.
        app_template_dirs.append(template_dir.decode(fs_encoding))

# It won't change, so convert it to a tuple to save memory.
app_template_dirs = tuple(app_template_dirs)


class Loader(BaseLoader):
    is_usable = True

    def get_template_sources(self, template_name, template_dirs=None):
        """
        Returns the absolute paths to "template_name", when appended to each
        directory in "template_dirs". Any paths that don't lie inside one of the
        template dirs are excluded from the result set, for security reasons.
        """
        if not template_dirs:
            template_dirs = app_template_dirs
        for template_dir in template_dirs:
            try:
                yield safe_join(template_dir, template_name)
            except UnicodeDecodeError:
                # The template dir name was a bytestring that wasn't valid UTF-8.
                raise
            except ValueError:
                # The joined path was located outside of template_dir.
                pass

    def load_template_source(self, template_name, template_dirs=None):
        # Return (source, origin-path) for the first candidate that opens;
        # unreadable candidates are skipped, not errors.
        for filepath in self.get_template_sources(template_name, template_dirs):
            try:
                # NOTE(review): `file` shadows the py2 builtin of the same name.
                file = open(filepath)
                try:
                    return (file.read().decode(settings.FILE_CHARSET), filepath)
                finally:
                    file.close()
            except IOError:
                pass
        raise TemplateDoesNotExist(template_name)

_loader = Loader()


def load_template_source(template_name, template_dirs=None):
    # For backwards compatibility
    import warnings
    warnings.warn(
        "'django.template.loaders.app_directories.load_template_source' is deprecated; use 'django.template.loaders.app_directories.Loader' instead.",
        PendingDeprecationWarning
    )
    return _loader.load_template_source(template_name, template_dirs)
load_template_source.is_usable = True
bsd-3-clause
whereismyjetpack/ansible
lib/ansible/plugins/connection/funcd.py
96
3713
# Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com> # Based on chroot.py (c) 2013, Maykel Moya <mmoya@speedyrails.com> # (c) 2013, Michael Scherer <misc@zarb.org> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # --- # The func transport permit to use ansible over func. For people who have already setup # func and that wish to play with ansible, this permit to move gradually to ansible # without having to redo completely the setup of the network. 
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

# Optional dependency: the plugin loads without func installed and fails
# only when a connection is actually attempted (see connect()).
HAVE_FUNC = False
try:
    import func.overlord.client as fc
    HAVE_FUNC = True
except ImportError:
    pass

import os
import tempfile
import shutil

from ansible.errors import AnsibleError

try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()


class Connection(object):
    ''' Func-based connections '''

    def __init__(self, runner, host, port, *args, **kwargs):
        self.runner = runner
        self.host = host
        self.has_pipelining = False
        # port is unused, this go on func
        self.port = port

    def connect(self, port=None):
        # Establish the func overlord client; *port* is accepted for
        # interface compatibility and ignored.
        if not HAVE_FUNC:
            raise AnsibleError("func is not installed")
        self.client = fc.Client(self.host)
        return self

    def exec_command(self, cmd, become_user=None, sudoable=False,
                     executable='/bin/sh', in_data=None):
        ''' run a command on the remote minion '''

        if in_data:
            raise AnsibleError("Internal Error: this module does not support optimized module pipelining")
        # totally ignores privlege escalation
        display.vvv("EXEC %s" % (cmd), host=self.host)
        p = self.client.command.run(cmd)[self.host]
        # (rc, stdout, stderr) triple from func.
        return (p[0], p[1], p[2])

    def _normalize_path(self, path, prefix):
        # Force *path* to be absolute, then re-root it under *prefix* so a
        # relative or ..-laden path cannot escape the prefix.
        if not path.startswith(os.path.sep):
            path = os.path.join(os.path.sep, path)
        normpath = os.path.normpath(path)
        return os.path.join(prefix, normpath[1:])

    def put_file(self, in_path, out_path):
        ''' transfer a file from local to remote '''

        out_path = self._normalize_path(out_path, '/')
        display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
        self.client.local.copyfile.send(in_path, out_path)

    def fetch_file(self, in_path, out_path):
        ''' fetch a file from remote to local '''

        in_path = self._normalize_path(in_path, '/')
        display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)

        # need to use a tmp dir due to difference of semantic for getfile
        # ( who take a # directory as destination) and fetch_file, who
        # take a file directly
        tmpdir = tempfile.mkdtemp(prefix="func_ansible")
        self.client.local.getfile.get(in_path, tmpdir)
        # func writes into <tmpdir>/<host>/<basename>; move it to the
        # requested destination and clean up.
        shutil.move(os.path.join(tmpdir, self.host, os.path.basename(in_path)),
                    out_path)
        shutil.rmtree(tmpdir)

    def close(self):
        ''' terminate the connection; nothing to do here '''
        pass
gpl-3.0
ASCrookes/django
django/conf/locale/lt/formats.py
504
1830
# -*- encoding: utf-8 -*- # This file is distributed under the same license as the Django package. # from __future__ import unicode_literals # The *_FORMAT strings use the Django date format syntax, # see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date DATE_FORMAT = r'Y \m. E j \d.' TIME_FORMAT = 'H:i' DATETIME_FORMAT = r'Y \m. E j \d., H:i' YEAR_MONTH_FORMAT = r'Y \m. F' MONTH_DAY_FORMAT = r'E j \d.' SHORT_DATE_FORMAT = 'Y-m-d' SHORT_DATETIME_FORMAT = 'Y-m-d H:i' FIRST_DAY_OF_WEEK = 1 # Monday # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior DATE_INPUT_FORMATS = [ '%Y-%m-%d', '%d.%m.%Y', '%d.%m.%y', # '2006-10-25', '25.10.2006', '25.10.06' ] TIME_INPUT_FORMATS = [ '%H:%M:%S', # '14:30:59' '%H:%M:%S.%f', # '14:30:59.000200' '%H:%M', # '14:30' '%H.%M.%S', # '14.30.59' '%H.%M.%S.%f', # '14.30.59.000200' '%H.%M', # '14.30' ] DATETIME_INPUT_FORMATS = [ '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' '%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200' '%Y-%m-%d %H:%M', # '2006-10-25 14:30' '%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59' '%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200' '%d.%m.%Y %H:%M', # '25.10.2006 14:30' '%d.%m.%Y', # '25.10.2006' '%d.%m.%y %H:%M:%S', # '25.10.06 14:30:59' '%d.%m.%y %H:%M:%S.%f', # '25.10.06 14:30:59.000200' '%d.%m.%y %H:%M', # '25.10.06 14:30' '%d.%m.%y %H.%M.%S', # '25.10.06 14.30.59' '%d.%m.%y %H.%M.%S.%f', # '25.10.06 14.30.59.000200' '%d.%m.%y %H.%M', # '25.10.06 14.30' '%d.%m.%y', # '25.10.06' ] DECIMAL_SEPARATOR = ',' THOUSAND_SEPARATOR = '.' NUMBER_GROUPING = 3
bsd-3-clause
sivel/ansible
test/units/module_utils/common/validation/test_check_required_arguments.py
44
2107
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import pytest

from ansible.module_utils._text import to_native
from ansible.module_utils.common.validation import check_required_arguments


@pytest.fixture
def arguments_terms():
    """Argument spec: one required option, one optional, one unmarked."""
    return {
        'foo': {'required': True},
        'bar': {'required': False},
        'tomato': {'irrelevant': 72},
    }


@pytest.fixture
def arguments_terms_multiple():
    """Argument spec with two required options."""
    return {
        'foo': {'required': True},
        'bar': {'required': True},
        'tomato': {'irrelevant': 72},
    }


def test_check_required_arguments(arguments_terms):
    # Every required option supplied -> nothing reported missing.
    supplied = {'foo': 'hello', 'bar': 'haha'}
    assert check_required_arguments(arguments_terms, supplied) == []


def test_check_required_arguments_missing(arguments_terms):
    # A single missing required option raises TypeError naming it.
    supplied = {'apples': 'woohoo'}
    with pytest.raises(TypeError) as exc_info:
        check_required_arguments(arguments_terms, supplied)
    assert to_native(exc_info.value) == "missing required arguments: foo"


def test_check_required_arguments_missing_multiple(arguments_terms_multiple):
    # Multiple missing options are reported sorted alphabetically.
    supplied = {'apples': 'woohoo'}
    with pytest.raises(TypeError) as exc_info:
        check_required_arguments(arguments_terms_multiple, supplied)
    assert to_native(exc_info.value) == "missing required arguments: bar, foo"


def test_check_required_arguments_missing_none():
    # A None argument spec means nothing can be required.
    supplied = {'foo': 'bar', 'baz': 'buzz'}
    assert check_required_arguments(None, supplied) == []


def test_check_required_arguments_no_params(arguments_terms):
    # None for the supplied parameters is rejected, not treated as empty.
    with pytest.raises(TypeError) as te:
        check_required_arguments(arguments_terms, None)
    assert "'NoneType' is not iterable" in to_native(te.value)
gpl-3.0
mollstam/UnrealPy
UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/Jinja2-2.7.3/jinja2/meta.py
659
4190
# -*- coding: utf-8 -*-
"""
    jinja2.meta
    ~~~~~~~~~~~

    This module implements various functions that exposes information
    about templates that might be interesting for various kinds of
    applications.

    :copyright: (c) 2010 by the Jinja Team, see AUTHORS for more details.
    :license: BSD, see LICENSE for more details.
"""
from jinja2 import nodes
from jinja2.compiler import CodeGenerator
from jinja2._compat import string_types


class TrackingCodeGenerator(CodeGenerator):
    """We abuse the code generator for introspection."""

    def __init__(self, environment):
        CodeGenerator.__init__(self, environment, '<introspection>',
                               '<introspection>')
        # Names the template reads from the render context.
        self.undeclared_identifiers = set()

    def write(self, x):
        """Don't write."""
        # Output is discarded; only the side effects of code generation
        # (identifier tracking) are of interest.

    def pull_locals(self, frame):
        """Remember all undeclared identifiers."""
        self.undeclared_identifiers.update(frame.identifiers.undeclared)


def find_undeclared_variables(ast):
    """Returns a set of all variables in the AST that will be looked up from
    the context at runtime.  Because at compile time it's not known which
    variables will be used depending on the path the execution takes at
    runtime, all variables are returned.

    >>> from jinja2 import Environment, meta
    >>> env = Environment()
    >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
    >>> meta.find_undeclared_variables(ast)
    set(['bar'])

    .. admonition:: Implementation

       Internally the code generator is used for finding undeclared
       variables.  This is good to know because the code generator might
       raise a :exc:`TemplateAssertionError` during compilation and as a
       matter of fact this function can currently raise that exception as
       well.
    """
    codegen = TrackingCodeGenerator(ast.environment)
    codegen.visit(ast)
    return codegen.undeclared_identifiers


def find_referenced_templates(ast):
    """Finds all the referenced templates from the AST.  This will return an
    iterator over all the hardcoded template extensions, inclusions and
    imports.  If dynamic inheritance or inclusion is used, `None` will be
    yielded.

    >>> from jinja2 import Environment, meta
    >>> env = Environment()
    >>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
    >>> list(meta.find_referenced_templates(ast))
    ['layout.html', None]

    This function is useful for dependency tracking.  For example if you want
    to rebuild parts of the website after a layout template has changed.
    """
    # Generator: yields a template-name string per hardcoded reference and
    # None for each reference that can only be resolved at runtime.
    for node in ast.find_all((nodes.Extends, nodes.FromImport, nodes.Import,
                              nodes.Include)):
        if not isinstance(node.template, nodes.Const):
            # a tuple with some non consts in there
            if isinstance(node.template, (nodes.Tuple, nodes.List)):
                for template_name in node.template.items:
                    # something const, only yield the strings and ignore
                    # non-string consts that really just make no sense
                    if isinstance(template_name, nodes.Const):
                        if isinstance(template_name.value, string_types):
                            yield template_name.value
                    # something dynamic in there
                    else:
                        yield None
            # something dynamic we don't know about here
            else:
                yield None
            continue
        # constant is a basestring, direct template name
        if isinstance(node.template.value, string_types):
            yield node.template.value
        # a tuple or list (latter *should* not happen) made of consts,
        # yield the consts that are strings.  We could warn here for
        # non string values
        elif isinstance(node, nodes.Include) and \
             isinstance(node.template.value, (tuple, list)):
            for template_name in node.template.value:
                if isinstance(template_name, string_types):
                    yield template_name
        # something else we don't care about, we could warn here
        else:
            yield None
mit
NewPresident1/kitsune
kitsune/sumo/tests/test_api.py
13
3961
import pytz
from datetime import datetime

from mock import Mock
from nose.tools import eq_

from django.test.client import RequestFactory
from django.test.utils import override_settings

from rest_framework import fields

from kitsune.sumo import api
from kitsune.sumo.tests import TestCase


@override_settings(WIKI_DEFAULT_LANGUAGE='en-US',
                   SUMO_LANGUAGES=['en-US', 'es'],
                   NON_SUPPORTED_LOCALES={'es-es': 'es', 'de': None})
class TestLanguageNegotiation(TestCase):

    def test_it_works(self):
        """Make sure that the LocaleNegotiationMixin detects locales."""
        factory = RequestFactory()
        negotiater = api.LocaleNegotiationMixin()
        request = factory.get('/', HTTP_ACCEPT_LANGUAGE='es,en-US')
        negotiater.request = request
        # First supported language from the Accept-Language header wins.
        eq_(negotiater.get_locale(), 'es')


class TestInequalityFilterBackend(TestCase):

    def setUp(self):
        # Everything is mocked; filter() returns the queryset itself so
        # chained filter calls can be inspected via method_calls.
        self.request = Mock()
        self.view = Mock()
        self.backend = api.InequalityFilterBackend()
        self.queryset = Mock()
        self.queryset.filter.return_value = self.queryset

    def test_gt_whitelist(self):
        """gt works, and that non-whitelisted variables don't get filtered."""
        self.view.filter_fields = ['x']
        # `x` should be filtered, but `y` should not, since it is not in
        # `filter_fields`
        self.request.QUERY_PARAMS = {'x__gt': 10, 'y': 5}
        self.backend.filter_queryset(self.request, self.queryset, self.view)
        self.queryset.filter.assert_called_with(x__gt=10)

    def test_lt_gte_multiple(self):
        """multiple fields, gte, and lt."""
        self.view.filter_fields = ['x', 'y']
        self.request.QUERY_PARAMS = {'x__gte': 10, 'y__lt': 5}
        self.backend.filter_queryset(self.request, self.queryset, self.view)
        # Sort for a deterministic comparison; call order is not specified.
        calls = sorted(self.queryset.method_calls)
        # Since both variables are in `filter_fields`, they both get processed.
        expected = [('filter', (), {'x__gte': 10}),
                    ('filter', (), {'y__lt': 5})]
        eq_(calls, expected)


class TestDateTimeUTCField(TestCase):

    def test_translation_of_nonnaive(self):
        # A tz-aware datetime is translated into UTC on serialization.
        field = api.DateTimeUTCField()
        as_pacific = datetime(2014, 11, 12, 13, 49, 59,
                              tzinfo=pytz.timezone('US/Pacific'))
        as_utc = field.to_native(as_pacific)
        eq_(as_utc.hour, 21)
        eq_(as_utc.tzinfo, pytz.utc)

    # TODO: How can naive datetime conversion be tested?


class TestPermissionMod(TestCase):

    def test_write_only(self):
        # write_only is True when either the explicit flag or stealth
        # mode is set.
        field = api.PermissionMod(fields.WritableField, [])()
        cases = [
            (False, False, False),
            (False, True, True),
            (True, False, True),
            (True, True, True)
        ]
        for case in cases:
            field._write_only, field._stealth, expected = case
            eq_(field.write_only, expected)

    def test_follows_permissions(self):
        allow = True
        allow_obj = True

        # Closure-based fake so each loop iteration below can flip the
        # permission results by rebinding allow/allow_obj.
        class MockPermission(object):
            def has_permission(self, *args):
                return allow

            def has_object_permission(self, *args):
                return allow_obj

        serializer = Mock()
        obj = Mock()
        obj.foo = 'bar'
        field = api.PermissionMod(fields.WritableField, [MockPermission])()
        field.initialize(serializer, 'foo')

        # If either has_permission or has_object_permission returns False,
        # then the field should act as a write_only field. Otherwise it
        # should act as a read/write field.
        cases = [
            (True, True, 'bar', False),
            (True, False, None, True),
            (False, True, None, True),
            (False, False, None, True),
        ]
        for case in cases:
            allow, allow_obj, expected_val, expected_write = case
            eq_(field.field_to_native(obj, 'foo'), expected_val)
            eq_(field.write_only, expected_write)
bsd-3-clause
houshengbo/nova_vmware_compute_driver
nova/tests/baremetal/test_ipmi.py
1
8187
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# coding=utf-8

# Copyright 2012 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2012 NTT DOCOMO, INC.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

"""
Test class for baremetal IPMI power manager.

NOTE: Python 2 module (0777-style octal literals); tests use the mox
StubOutWithMock / ReplayAll / VerifyAll record-replay pattern.
"""

import os
import stat
import tempfile

from nova.openstack.common import cfg
from nova import test
from nova.tests.baremetal.db import utils as bm_db_utils
from nova import utils
from nova.virt.baremetal import baremetal_states
from nova.virt.baremetal import ipmi
from nova.virt.baremetal import utils as bm_utils

CONF = cfg.CONF


class BareMetalIPMITestCase(test.TestCase):

    def setUp(self):
        super(BareMetalIPMITestCase, self).setUp()
        # Fake DB node; the IPMI driver reads its power-management fields.
        self.node = bm_db_utils.new_bm_node(
                id=123,
                pm_address='fake-address',
                pm_user='fake-user',
                pm_password='fake-password')
        self.ipmi = ipmi.IPMI(self.node)

    def test_construct(self):
        self.assertEqual(self.ipmi.node_id, 123)
        self.assertEqual(self.ipmi.address, 'fake-address')
        self.assertEqual(self.ipmi.user, 'fake-user')
        self.assertEqual(self.ipmi.password, 'fake-password')

    def test_make_password_file(self):
        pw_file = ipmi._make_password_file(self.node['pm_password'])
        try:
            self.assertTrue(os.path.isfile(pw_file))
            # Password file must be owner-read/write only (0600).
            self.assertEqual(os.stat(pw_file)[stat.ST_MODE] & 0777, 0600)
            with open(pw_file, "r") as f:
                pm_password = f.read()
            self.assertEqual(pm_password, self.node['pm_password'])
        finally:
            os.unlink(pw_file)

    def test_exec_ipmitool(self):
        # Verify the exact ipmitool command line and that the temporary
        # password file is cleaned up afterwards.
        pw_file = '/tmp/password_file'

        self.mox.StubOutWithMock(ipmi, '_make_password_file')
        self.mox.StubOutWithMock(utils, 'execute')
        self.mox.StubOutWithMock(bm_utils, 'unlink_without_raise')
        ipmi._make_password_file(self.ipmi.password).AndReturn(pw_file)
        args = [
                'ipmitool',
                '-I', 'lanplus',
                '-H', self.ipmi.address,
                '-U', self.ipmi.user,
                '-f', pw_file,
                'A', 'B', 'C',
                ]
        utils.execute(*args, attempts=3).AndReturn(('', ''))
        bm_utils.unlink_without_raise(pw_file).AndReturn(None)
        self.mox.ReplayAll()

        self.ipmi._exec_ipmitool('A B C')
        self.mox.VerifyAll()

    def test_is_power(self):
        self.mox.StubOutWithMock(self.ipmi, '_exec_ipmitool')
        self.ipmi._exec_ipmitool("power status").AndReturn(
                ["Chassis Power is on\n"])
        self.mox.ReplayAll()

        self.ipmi._is_power("on")
        self.mox.VerifyAll()

    def test_power_already_on(self):
        # Power-on when already on: only a status query, no "power on".
        self.flags(ipmi_power_retry=0, group='baremetal')
        self.mox.StubOutWithMock(self.ipmi, '_exec_ipmitool')

        self.ipmi._exec_ipmitool("power status").AndReturn(
                ["Chassis Power is on\n"])
        self.mox.ReplayAll()

        self.ipmi.state = baremetal_states.DELETED
        self.ipmi._power_on()
        self.mox.VerifyAll()
        self.assertEqual(self.ipmi.state, baremetal_states.ACTIVE)

    def test_power_on_ok(self):
        self.flags(ipmi_power_retry=0, group='baremetal')
        self.mox.StubOutWithMock(self.ipmi, '_exec_ipmitool')

        self.ipmi._exec_ipmitool("power status").AndReturn(
                ["Chassis Power is off\n"])
        self.ipmi._exec_ipmitool("power on").AndReturn([])
        self.ipmi._exec_ipmitool("power status").AndReturn(
                ["Chassis Power is on\n"])
        self.mox.ReplayAll()

        self.ipmi.state = baremetal_states.DELETED
        self.ipmi._power_on()
        self.mox.VerifyAll()
        self.assertEqual(self.ipmi.state, baremetal_states.ACTIVE)

    def test_power_on_fail(self):
        # Status stays "off" after "power on" and no retries are allowed
        # -> driver ends in ERROR state.
        self.flags(ipmi_power_retry=0, group='baremetal')
        self.mox.StubOutWithMock(self.ipmi, '_exec_ipmitool')

        self.ipmi._exec_ipmitool("power status").AndReturn(
                ["Chassis Power is off\n"])
        self.ipmi._exec_ipmitool("power on").AndReturn([])
        self.ipmi._exec_ipmitool("power status").AndReturn(
                ["Chassis Power is off\n"])
        self.mox.ReplayAll()

        self.ipmi.state = baremetal_states.DELETED
        self.ipmi._power_on()
        self.mox.VerifyAll()
        self.assertEqual(self.ipmi.state, baremetal_states.ERROR)

    def test_power_on_max_retries(self):
        # With ipmi_power_retry=2 the driver tries 1 + 2 times before
        # giving up (retries counter ends at 3).
        self.flags(ipmi_power_retry=2, group='baremetal')
        self.mox.StubOutWithMock(self.ipmi, '_exec_ipmitool')

        self.ipmi._exec_ipmitool("power status").AndReturn(
                ["Chassis Power is off\n"])
        self.ipmi._exec_ipmitool("power on").AndReturn([])
        self.ipmi._exec_ipmitool("power status").AndReturn(
                ["Chassis Power is off\n"])
        self.ipmi._exec_ipmitool("power on").AndReturn([])
        self.ipmi._exec_ipmitool("power status").AndReturn(
                ["Chassis Power is off\n"])
        self.ipmi._exec_ipmitool("power on").AndReturn([])
        self.ipmi._exec_ipmitool("power status").AndReturn(
                ["Chassis Power is off\n"])
        self.mox.ReplayAll()

        self.ipmi.state = baremetal_states.DELETED
        self.ipmi._power_on()
        self.mox.VerifyAll()
        self.assertEqual(self.ipmi.state, baremetal_states.ERROR)
        self.assertEqual(self.ipmi.retries, 3)

    def test_power_off_ok(self):
        self.flags(ipmi_power_retry=0, group='baremetal')
        self.mox.StubOutWithMock(self.ipmi, '_exec_ipmitool')

        self.ipmi._exec_ipmitool("power status").AndReturn(
                ["Chassis Power is on\n"])
        self.ipmi._exec_ipmitool("power off").AndReturn([])
        self.ipmi._exec_ipmitool("power status").AndReturn(
                ["Chassis Power is off\n"])
        self.mox.ReplayAll()

        self.ipmi.state = baremetal_states.ACTIVE
        self.ipmi._power_off()
        self.mox.VerifyAll()
        self.assertEqual(self.ipmi.state, baremetal_states.DELETED)

    def test_get_console_pid_path(self):
        self.flags(terminal_pid_dir='/tmp', group='baremetal')
        path = ipmi._get_console_pid_path(self.ipmi.node_id)
        self.assertEqual(path, '/tmp/%s.pid' % self.ipmi.node_id)

    def test_console_pid(self):
        # A pidfile containing an integer parses to that pid.
        fd, path = tempfile.mkstemp()
        with os.fdopen(fd, 'w') as f:
            f.write("12345\n")

        self.mox.StubOutWithMock(ipmi, '_get_console_pid_path')
        ipmi._get_console_pid_path(self.ipmi.node_id).AndReturn(path)
        self.mox.ReplayAll()

        pid = ipmi._get_console_pid(self.ipmi.node_id)
        bm_utils.unlink_without_raise(path)
        self.mox.VerifyAll()
        self.assertEqual(pid, 12345)

    def test_console_pid_nan(self):
        # Non-numeric pidfile content yields None instead of raising.
        fd, path = tempfile.mkstemp()
        with os.fdopen(fd, 'w') as f:
            f.write("hello world\n")

        self.mox.StubOutWithMock(ipmi, '_get_console_pid_path')
        ipmi._get_console_pid_path(self.ipmi.node_id).AndReturn(path)
        self.mox.ReplayAll()

        pid = ipmi._get_console_pid(self.ipmi.node_id)
        bm_utils.unlink_without_raise(path)
        self.mox.VerifyAll()
        self.assertTrue(pid is None)

    def test_console_pid_file_not_found(self):
        pid_path = ipmi._get_console_pid_path(self.ipmi.node_id)

        self.mox.StubOutWithMock(os.path, 'exists')
        os.path.exists(pid_path).AndReturn(False)
        self.mox.ReplayAll()

        pid = ipmi._get_console_pid(self.ipmi.node_id)
        self.mox.VerifyAll()
        self.assertTrue(pid is None)
apache-2.0
aaltinisik/OCBAltinkaya
openerp/tools/yaml_import.py
75
43470
# -*- coding: utf-8 -*- import threading import types import time # used to eval time.strftime expressions from datetime import datetime, timedelta import logging import openerp import openerp.sql_db as sql_db import openerp.workflow import misc from config import config import yaml_tag import yaml import re from lxml import etree from openerp import SUPERUSER_ID # YAML import needs both safe and unsafe eval, but let's # default to /safe/. unsafe_eval = eval from safe_eval import safe_eval as eval import assertion_report _logger = logging.getLogger(__name__) class YamlImportException(Exception): pass class YamlImportAbortion(Exception): pass def _is_yaml_mapping(node, tag_constructor): value = isinstance(node, types.DictionaryType) \ and len(node.keys()) == 1 \ and isinstance(node.keys()[0], tag_constructor) return value def is_comment(node): return isinstance(node, types.StringTypes) def is_assert(node): return isinstance(node, yaml_tag.Assert) \ or _is_yaml_mapping(node, yaml_tag.Assert) def is_record(node): return _is_yaml_mapping(node, yaml_tag.Record) def is_python(node): return _is_yaml_mapping(node, yaml_tag.Python) def is_menuitem(node): return isinstance(node, yaml_tag.Menuitem) \ or _is_yaml_mapping(node, yaml_tag.Menuitem) def is_function(node): return isinstance(node, yaml_tag.Function) \ or _is_yaml_mapping(node, yaml_tag.Function) def is_report(node): return isinstance(node, yaml_tag.Report) def is_workflow(node): return isinstance(node, yaml_tag.Workflow) def is_act_window(node): return isinstance(node, yaml_tag.ActWindow) def is_delete(node): return isinstance(node, yaml_tag.Delete) def is_context(node): return isinstance(node, yaml_tag.Context) def is_url(node): return isinstance(node, yaml_tag.Url) def is_eval(node): return isinstance(node, yaml_tag.Eval) def is_ref(node): return isinstance(node, yaml_tag.Ref) \ or _is_yaml_mapping(node, yaml_tag.Ref) def is_ir_set(node): return _is_yaml_mapping(node, yaml_tag.IrSet) def is_string(node): return 
isinstance(node, basestring) class RecordDictWrapper(dict): """ Used to pass a record as locals in eval: records do not strictly behave like dict, so we force them to. """ def __init__(self, record): self.record = record def __getitem__(self, key): if key in self.record: return self.record[key] return dict.__getitem__(self, key) class YamlInterpreter(object): def __init__(self, cr, module, id_map, mode, filename, report=None, noupdate=False, loglevel=logging.DEBUG): self.cr = cr self.module = module self.id_map = id_map self.mode = mode self.filename = filename if report is None: report = assertion_report.assertion_report() self.assertion_report = report self.noupdate = noupdate self.loglevel = loglevel self.pool = openerp.registry(cr.dbname) self.uid = 1 self.context = {} # opererp context self.eval_context = {'ref': self._ref(), '_ref': self._ref(), # added '_ref' so that record['ref'] is possible 'time': time, 'datetime': datetime, 'timedelta': timedelta} self.env = openerp.api.Environment(self.cr, self.uid, self.context) def _log(self, *args, **kwargs): _logger.log(self.loglevel, *args, **kwargs) def _ref(self): return lambda xml_id: self.get_id(xml_id) def get_model(self, model_name): return self.pool[model_name] def validate_xml_id(self, xml_id): id = xml_id if '.' in xml_id: module, id = xml_id.split('.', 1) assert '.' not in id, "The ID reference '%s' must contain at most one dot.\n" \ "It is used to refer to other modules ID, in the form: module.record_id" \ % (xml_id,) if module != self.module: module_count = self.pool['ir.module.module'].search_count(self.cr, self.uid, \ ['&', ('name', '=', module), ('state', 'in', ['installed'])]) assert module_count == 1, 'The ID "%s" refers to an uninstalled module.' % (xml_id,) if len(id) > 64: # TODO where does 64 come from (DB is 128)? 
should be a constant or loaded form DB _logger.error('id: %s is to long (max: 64)', id) def get_id(self, xml_id): if xml_id is False or xml_id is None: return False #if not xml_id: # raise YamlImportException("The xml_id should be a non empty string.") elif isinstance(xml_id, types.IntType): id = xml_id elif xml_id in self.id_map: id = self.id_map[xml_id] else: if '.' in xml_id: module, checked_xml_id = xml_id.split('.', 1) else: module = self.module checked_xml_id = xml_id try: _, id = self.pool['ir.model.data'].get_object_reference(self.cr, self.uid, module, checked_xml_id) self.id_map[xml_id] = id except ValueError: raise ValueError("""%r not found when processing %s. This Yaml file appears to depend on missing data. This often happens for tests that belong to a module's test suite and depend on each other.""" % (xml_id, self.filename)) return id def get_record(self, xml_id): if '.' not in xml_id: xml_id = "%s.%s" % (self.module, xml_id) return self.env.ref(xml_id) def get_context(self, node, eval_dict): context = self.context.copy() if node.context: context.update(eval(node.context, eval_dict)) return context def isnoupdate(self, node): return self.noupdate or node.noupdate or False def _get_first_result(self, results, default=False): if len(results): value = results[0] if isinstance(value, types.TupleType): value = value[0] else: value = default return value def process_comment(self, node): return node def _log_assert_failure(self, msg, *args): self.assertion_report.record_failure() _logger.error(msg, *args) def _get_assertion_id(self, assertion): if assertion.id: ids = [self.get_id(assertion.id)] elif assertion.search: q = eval(assertion.search, self.eval_context) ids = self.pool[assertion.model].search(self.cr, self.uid, q, context=assertion.context) else: raise YamlImportException('Nothing to assert: you must give either an id or a search criteria.') return ids def process_assert(self, node): if isinstance(node, dict): assertion, expressions = 
node.items()[0] else: assertion, expressions = node, [] if self.isnoupdate(assertion) and self.mode != 'init': _logger.warning('This assertion was not evaluated ("%s").', assertion.string) return model = self.get_model(assertion.model) ids = self._get_assertion_id(assertion) if assertion.count is not None and len(ids) != assertion.count: msg = 'assertion "%s" failed!\n' \ ' Incorrect search count:\n' \ ' expected count: %d\n' \ ' obtained count: %d\n' args = (assertion.string, assertion.count, len(ids)) self._log_assert_failure(msg, *args) else: context = self.get_context(assertion, self.eval_context) for id in ids: record = model.browse(self.cr, self.uid, id, context) for test in expressions: try: success = unsafe_eval(test, self.eval_context, RecordDictWrapper(record)) except Exception, e: _logger.debug('Exception during evaluation of !assert block in yaml_file %s.', self.filename, exc_info=True) raise YamlImportAbortion(e) if not success: msg = 'Assertion "%s" FAILED\ntest: %s\n' args = (assertion.string, test) for aop in ('==', '!=', '<>', 'in', 'not in', '>=', '<=', '>', '<'): if aop in test: left, right = test.split(aop,1) lmsg = '' rmsg = '' try: lmsg = unsafe_eval(left, self.eval_context, RecordDictWrapper(record)) except Exception, e: lmsg = '<exc>' try: rmsg = unsafe_eval(right, self.eval_context, RecordDictWrapper(record)) except Exception, e: rmsg = '<exc>' msg += 'values: ! 
%s %s %s' args += ( lmsg, aop, rmsg ) break self._log_assert_failure(msg, *args) return else: # all tests were successful for this assertion tag (no break) self.assertion_report.record_success() def _coerce_bool(self, value, default=False): if isinstance(value, types.BooleanType): b = value if isinstance(value, types.StringTypes): b = value.strip().lower() not in ('0', 'false', 'off', 'no') elif isinstance(value, types.IntType): b = bool(value) else: b = default return b def create_osv_memory_record(self, record, fields): model = self.get_model(record.model) context = self.get_context(record, self.eval_context) record_dict = self._create_record(model, fields) id_new = model.create(self.cr, self.uid, record_dict, context=context) self.id_map[record.id] = int(id_new) return record_dict def process_record(self, node): record, fields = node.items()[0] model = self.get_model(record.model) view_id = record.view if view_id and (view_id is not True) and isinstance(view_id, basestring): module = self.module if '.' in view_id: module, view_id = view_id.split('.',1) view_id = self.pool['ir.model.data'].get_object_reference(self.cr, SUPERUSER_ID, module, view_id)[1] if model.is_transient(): record_dict=self.create_osv_memory_record(record, fields) else: self.validate_xml_id(record.id) try: self.pool['ir.model.data']._get_id(self.cr, SUPERUSER_ID, self.module, record.id) default = False except ValueError: default = True if self.isnoupdate(record) and self.mode != 'init': id = self.pool['ir.model.data']._update_dummy(self.cr, SUPERUSER_ID, record.model, self.module, record.id) # check if the resource already existed at the last update if id: self.id_map[record] = int(id) return None else: if not self._coerce_bool(record.forcecreate): return None #context = self.get_context(record, self.eval_context) # FIXME: record.context like {'withoutemployee':True} should pass from self.eval_context. 
example: test_project.yml in project module # TODO: cleaner way to avoid resetting password in auth_signup (makes user creation costly) context = dict(record.context or {}, no_reset_password=True) view_info = False if view_id: varg = view_id if view_id is True: varg = False view_info = model.fields_view_get(self.cr, SUPERUSER_ID, varg, 'form', context) record_dict = self._create_record(model, fields, view_info, default=default) id = self.pool['ir.model.data']._update(self.cr, SUPERUSER_ID, record.model, \ self.module, record_dict, record.id, noupdate=self.isnoupdate(record), mode=self.mode, context=context) self.id_map[record.id] = int(id) if config.get('import_partial'): self.cr.commit() def _create_record(self, model, fields, view_info=None, parent={}, default=True): """This function processes the !record tag in yaml files. It simulates the record creation through an xml view (either specified on the !record tag or the default one for this object), including the calls to on_change() functions, and sending only values for fields that aren't set as readonly. 
:param model: model instance :param fields: dictonary mapping the field names and their values :param view_info: result of fields_view_get() called on the object :param parent: dictionary containing the values already computed for the parent, in case of one2many fields :param default: if True, the default values must be processed too or not :return: dictionary mapping the field names and their values, ready to use when calling the create() function :rtype: dict """ def _get_right_one2many_view(fg, field_name, view_type): one2many_view = fg[field_name]['views'].get(view_type) # if the view is not defined inline, we call fields_view_get() if not one2many_view: one2many_view = self.pool[fg[field_name]['relation']].fields_view_get(self.cr, SUPERUSER_ID, False, view_type, self.context) return one2many_view def process_val(key, val): if fg[key]['type'] == 'many2one': if type(val) in (tuple,list): val = val[0] elif fg[key]['type'] == 'one2many': if val and isinstance(val, (list,tuple)) and isinstance(val[0], dict): # we want to return only the fields that aren't readonly # For that, we need to first get the right tree view to consider for the field `key´ one2many_tree_view = _get_right_one2many_view(fg, key, 'tree') arch = etree.fromstring(one2many_tree_view['arch'].encode('utf-8')) for rec in val: # make a copy for the iteration, as we will alter `rec´ rec_copy = rec.copy() for field_key in rec_copy: # if field is missing in view or has a readonly modifier, drop it field_elem = arch.xpath("//field[@name='%s']" % field_key) if field_elem and (field_elem[0].get('modifiers', '{}').find('"readonly": true') >= 0): # TODO: currently we only support if readonly is True in the modifiers. 
Some improvement may be done in # order to support also modifiers that look like {"readonly": [["state", "not in", ["draft", "confirm"]]]} del rec[field_key] # now that unwanted values have been removed from val, we can encapsulate it in a tuple as returned value val = map(lambda x: (0,0,x), val) elif fg[key]['type'] == 'many2many': if val and isinstance(val,(list,tuple)) and isinstance(val[0], (int,long)): val = [(6,0,val)] # we want to return only the fields that aren't readonly if el.get('modifiers', '{}').find('"readonly": true') >= 0: # TODO: currently we only support if readonly is True in the modifiers. Some improvement may be done in # order to support also modifiers that look like {"readonly": [["state", "not in", ["draft", "confirm"]]]} return False return val if view_info: arch = etree.fromstring(view_info['arch'].decode('utf-8')) view = arch if len(arch) else False else: view = False fields = fields or {} if view is not False: fg = view_info['fields'] onchange_spec = model._onchange_spec(self.cr, SUPERUSER_ID, view_info, context=self.context) # gather the default values on the object. (Can't use `fields´ as parameter instead of {} because we may # have references like `base.main_company´ in the yaml file and it's not compatible with the function) defaults = default and model._add_missing_default_values(self.cr, self.uid, {}, context=self.context) or {} # copy the default values in record_dict, only if they are in the view (because that's what the client does) # the other default values will be added later on by the create(). record_dict = dict([(key, val) for key, val in defaults.items() if key in fg]) # Process all on_change calls nodes = [view] while nodes: el = nodes.pop(0) if el.tag=='field': field_name = el.attrib['name'] assert field_name in fg, "The field '%s' is defined in the form view but not on the object '%s'!" 
% (field_name, model._name) if field_name in fields: one2many_form_view = None if (view is not False) and (fg[field_name]['type']=='one2many'): # for one2many fields, we want to eval them using the inline form view defined on the parent one2many_form_view = _get_right_one2many_view(fg, field_name, 'form') field_value = self._eval_field(model, field_name, fields[field_name], one2many_form_view or view_info, parent=record_dict, default=default) #call process_val to not update record_dict if values were given for readonly fields val = process_val(field_name, field_value) if val: record_dict[field_name] = val #if (field_name in defaults) and defaults[field_name] == field_value: # print '*** You can remove these lines:', field_name, field_value #if field_name has a default value or a value is given in the yaml file, we must call its on_change() elif field_name not in defaults: continue if not el.attrib.get('on_change', False): continue if el.attrib['on_change'] in ('1', 'true'): # New-style on_change recs = model.browse(self.cr, SUPERUSER_ID, [], self.context) result = recs.onchange(record_dict, field_name, onchange_spec) else: match = re.match("([a-z_1-9A-Z]+)\((.*)\)", el.attrib['on_change'], re.DOTALL) assert match, "Unable to parse the on_change '%s'!" % (el.attrib['on_change'], ) # creating the context class parent2(object): def __init__(self, d): self.d = d def __getattr__(self, name): return self.d.get(name, False) ctx = record_dict.copy() ctx['context'] = self.context ctx['uid'] = SUPERUSER_ID ctx['parent'] = parent2(parent) for a in fg: if a not in ctx: ctx[a] = process_val(a, defaults.get(a, False)) # Evaluation args args = map(lambda x: eval(x, ctx), match.group(2).split(',')) result = getattr(model, match.group(1))(self.cr, self.uid, [], *args) for key, val in (result or {}).get('value', {}).items(): if key in fg: if key not in fields: # do not shadow values explicitly set in yaml. 
record_dict[key] = process_val(key, val) else: _logger.debug("The returning field '%s' from your on_change call '%s'" " does not exist either on the object '%s', either in" " the view '%s'", key, match.group(1), model._name, view_info['name']) else: nodes = list(el) + nodes else: record_dict = {} for field_name, expression in fields.items(): if field_name in record_dict: continue field_value = self._eval_field(model, field_name, expression, default=False) record_dict[field_name] = field_value return record_dict def process_ref(self, node, field=None): assert node.search or node.id, '!ref node should have a `search` attribute or `id` attribute' if node.search: if node.model: model_name = node.model elif field: model_name = field.comodel_name else: raise YamlImportException('You need to give a model for the search, or a field to infer it.') model = self.get_model(model_name) q = eval(node.search, self.eval_context) ids = model.search(self.cr, self.uid, q) if node.use: instances = model.browse(self.cr, self.uid, ids) value = [inst[node.use] for inst in instances] else: value = ids elif node.id: if field and field.type == 'reference': record = self.get_record(node.id) value = "%s,%s" % (record._name, record.id) else: value = self.get_id(node.id) else: value = None return value def process_eval(self, node): return eval(node.expression, self.eval_context) def _eval_field(self, model, field_name, expression, view_info=False, parent={}, default=True): # TODO this should be refactored as something like model.get_field() in bin/osv if field_name not in model._fields: raise KeyError("Object '%s' does not contain field '%s'" % (model, field_name)) field = model._fields[field_name] if is_ref(expression): elements = self.process_ref(expression, field) if field.type in ("many2many", "one2many"): value = [(6, 0, elements)] else: # many2one or reference if isinstance(elements, (list,tuple)): value = self._get_first_result(elements) else: value = elements elif field.type == 
"many2one": value = self.get_id(expression) elif field.type == "one2many": other_model = self.get_model(field.comodel_name) value = [(0, 0, self._create_record(other_model, fields, view_info, parent, default=default)) for fields in expression] elif field.type == "many2many": ids = [self.get_id(xml_id) for xml_id in expression] value = [(6, 0, ids)] elif field.type == "date" and is_string(expression): # enforce ISO format for string date values, to be locale-agnostic during tests time.strptime(expression, misc.DEFAULT_SERVER_DATE_FORMAT) value = expression elif field.type == "datetime" and is_string(expression): # enforce ISO format for string datetime values, to be locale-agnostic during tests time.strptime(expression, misc.DEFAULT_SERVER_DATETIME_FORMAT) value = expression elif field.type == "reference": record = self.get_record(expression) value = "%s,%s" % (record._name, record.id) else: # scalar field if is_eval(expression): value = self.process_eval(expression) else: value = expression # raise YamlImportException('Unsupported field "%s" or value %s:%s' % (field_name, type(expression), expression)) return value def process_context(self, node): self.context = node.__dict__ if node.uid: self.uid = self.get_id(node.uid) if node.noupdate: self.noupdate = node.noupdate self.env = openerp.api.Environment(self.cr, self.uid, self.context) def process_python(self, node): python, statements = node.items()[0] assert python.model or python.id, "!python node must have attribute `model` or `id`" if python.id is None: record = self.pool[python.model] elif isinstance(python.id, basestring): record = self.get_record(python.id) else: record = self.env[python.model].browse(python.id) if python.model: assert record._name == python.model, "`id` is not consistent with `model`" statements = "\n" * python.first_line + statements.replace("\r\n", "\n") code_context = { 'self': record, 'model': record._model, 'cr': self.cr, 'uid': self.uid, 'log': self._log, 'context': self.context, 
'openerp': openerp, } try: code_obj = compile(statements, self.filename, 'exec') unsafe_eval(code_obj, {'ref': self.get_id}, code_context) except AssertionError, e: self._log_assert_failure('AssertionError in Python code %s (line %d): %s', python.name, python.first_line, e) return except Exception, e: _logger.debug('Exception during evaluation of !python block in yaml_file %s.', self.filename, exc_info=True) raise else: self.assertion_report.record_success() def process_workflow(self, node): workflow, values = node.items()[0] if self.isnoupdate(workflow) and self.mode != 'init': return if workflow.ref: id = self.get_id(workflow.ref) else: if not values: raise YamlImportException('You must define a child node if you do not give a ref.') if not len(values) == 1: raise YamlImportException('Only one child node is accepted (%d given).' % len(values)) value = values[0] if not 'model' in value and (not 'eval' in value or not 'search' in value): raise YamlImportException('You must provide a "model" and an "eval" or "search" to evaluate.') value_model = self.get_model(value['model']) local_context = {'obj': lambda x: value_model.browse(self.cr, self.uid, x, context=self.context)} local_context.update(self.id_map) id = eval(value['eval'], self.eval_context, local_context) if workflow.uid is not None: uid = workflow.uid else: uid = self.uid self.cr.execute('select distinct signal, sequence, id from wkf_transition ORDER BY sequence,id') signals=[x['signal'] for x in self.cr.dictfetchall()] if workflow.action not in signals: raise YamlImportException('Incorrect action %s. 
No such action defined' % workflow.action) openerp.workflow.trg_validate(uid, workflow.model, id, workflow.action, self.cr) def _eval_params(self, model, params): args = [] for i, param in enumerate(params): if isinstance(param, types.ListType): value = self._eval_params(model, param) elif is_ref(param): value = self.process_ref(param) elif is_eval(param): value = self.process_eval(param) elif isinstance(param, types.DictionaryType): # supports XML syntax param_model = self.get_model(param.get('model', model)) if 'search' in param: q = eval(param['search'], self.eval_context) ids = param_model.search(self.cr, self.uid, q) value = self._get_first_result(ids) elif 'eval' in param: local_context = {'obj': lambda x: param_model.browse(self.cr, self.uid, x, self.context)} local_context.update(self.id_map) value = eval(param['eval'], self.eval_context, local_context) else: raise YamlImportException('You must provide either a !ref or at least a "eval" or a "search" to function parameter #%d.' % i) else: value = param # scalar value args.append(value) return args def process_function(self, node): function, params = node.items()[0] if self.isnoupdate(function) and self.mode != 'init': return model = self.get_model(function.model) if function.eval: args = self.process_eval(function.eval) else: args = self._eval_params(function.model, params) method = function.name getattr(model, method)(self.cr, self.uid, *args) def _set_group_values(self, node, values): if node.groups: group_names = node.groups.split(',') groups_value = [] for group in group_names: if group.startswith('-'): group_id = self.get_id(group[1:]) groups_value.append((3, group_id)) else: group_id = self.get_id(group) groups_value.append((4, group_id)) values['groups_id'] = groups_value def process_menuitem(self, node): self.validate_xml_id(node.id) if not node.parent: parent_id = False self.cr.execute('select id from ir_ui_menu where parent_id is null and name=%s', (node.name,)) res = self.cr.fetchone() values = 
{'parent_id': parent_id, 'name': node.name} else: parent_id = self.get_id(node.parent) values = {'parent_id': parent_id} if node.name: values['name'] = node.name try: res = [ self.get_id(node.id) ] except: # which exception ? res = None if node.action: action_type = node.type or 'act_window' icons = { "act_window": 'STOCK_NEW', "report.xml": 'STOCK_PASTE', "wizard": 'STOCK_EXECUTE', "url": 'STOCK_JUMP_TO', } values['icon'] = icons.get(action_type, 'STOCK_NEW') if action_type == 'act_window': action_id = self.get_id(node.action) self.cr.execute('select view_type,view_mode,name,view_id,target from ir_act_window where id=%s', (action_id,)) ir_act_window_result = self.cr.fetchone() assert ir_act_window_result, "No window action defined for this id %s !\n" \ "Verify that this is a window action or add a type argument." % (node.action,) action_type, action_mode, action_name, view_id, target = ir_act_window_result if view_id: self.cr.execute('SELECT type FROM ir_ui_view WHERE id=%s', (view_id,)) # TODO guess why action_mode is ir_act_window.view_mode above and ir_ui_view.type here action_mode = self.cr.fetchone() self.cr.execute('SELECT view_mode FROM ir_act_window_view WHERE act_window_id=%s ORDER BY sequence LIMIT 1', (action_id,)) if self.cr.rowcount: action_mode = self.cr.fetchone() if action_type == 'tree': values['icon'] = 'STOCK_INDENT' elif action_mode and action_mode.startswith('tree'): values['icon'] = 'STOCK_JUSTIFY_FILL' elif action_mode and action_mode.startswith('graph'): values['icon'] = 'terp-graph' elif action_mode and action_mode.startswith('calendar'): values['icon'] = 'terp-calendar' if target == 'new': values['icon'] = 'STOCK_EXECUTE' if not values.get('name', False): values['name'] = action_name elif action_type == 'wizard': action_id = self.get_id(node.action) self.cr.execute('select name from ir_act_wizard where id=%s', (action_id,)) ir_act_wizard_result = self.cr.fetchone() if (not values.get('name', False)) and ir_act_wizard_result: 
values['name'] = ir_act_wizard_result[0] else: raise YamlImportException("Unsupported type '%s' in menuitem tag." % action_type) if node.sequence: values['sequence'] = node.sequence if node.icon: values['icon'] = node.icon self._set_group_values(node, values) pid = self.pool['ir.model.data']._update(self.cr, SUPERUSER_ID, \ 'ir.ui.menu', self.module, values, node.id, mode=self.mode, \ noupdate=self.isnoupdate(node), res_id=res and res[0] or False) if node.id and parent_id: self.id_map[node.id] = int(pid) if node.action and pid: action_type = node.type or 'act_window' action_id = self.get_id(node.action) action = "ir.actions.%s,%d" % (action_type, action_id) self.pool['ir.model.data'].ir_set(self.cr, SUPERUSER_ID, 'action', \ 'tree_but_open', 'Menuitem', [('ir.ui.menu', int(pid))], action, True, True, xml_id=node.id) def process_act_window(self, node): assert getattr(node, 'id'), "Attribute %s of act_window is empty !" % ('id',) assert getattr(node, 'name'), "Attribute %s of act_window is empty !" % ('name',) assert getattr(node, 'res_model'), "Attribute %s of act_window is empty !" 
% ('res_model',) self.validate_xml_id(node.id) view_id = False if node.view: view_id = self.get_id(node.view) if not node.context: node.context={} context = eval(str(node.context), self.eval_context) values = { 'name': node.name, 'type': node.type or 'ir.actions.act_window', 'view_id': view_id, 'domain': node.domain, 'context': context, 'res_model': node.res_model, 'src_model': node.src_model, 'view_type': node.view_type or 'form', 'view_mode': node.view_mode or 'tree,form', 'usage': node.usage, 'limit': node.limit, 'auto_refresh': node.auto_refresh, 'multi': getattr(node, 'multi', False), } self._set_group_values(node, values) if node.target: values['target'] = node.target id = self.pool['ir.model.data']._update(self.cr, SUPERUSER_ID, \ 'ir.actions.act_window', self.module, values, node.id, mode=self.mode) self.id_map[node.id] = int(id) if node.src_model: keyword = 'client_action_relate' value = 'ir.actions.act_window,%s' % id replace = node.replace or True self.pool['ir.model.data'].ir_set(self.cr, SUPERUSER_ID, 'action', keyword, \ node.id, [node.src_model], value, replace=replace, noupdate=self.isnoupdate(node), isobject=True, xml_id=node.id) # TODO add remove ir.model.data def process_delete(self, node): assert getattr(node, 'model'), "Attribute %s of delete tag is empty !" 
% ('model',) if node.model in self.pool: if node.search: ids = self.pool[node.model].search(self.cr, self.uid, eval(node.search, self.eval_context)) else: ids = [self.get_id(node.id)] if len(ids): self.pool[node.model].unlink(self.cr, self.uid, ids) else: self._log("Record not deleted.") def process_url(self, node): self.validate_xml_id(node.id) res = {'name': node.name, 'url': node.url, 'target': node.target} id = self.pool['ir.model.data']._update(self.cr, SUPERUSER_ID, \ "ir.actions.act_url", self.module, res, node.id, mode=self.mode) self.id_map[node.id] = int(id) # ir_set if (not node.menu or eval(node.menu)) and id: keyword = node.keyword or 'client_action_multi' value = 'ir.actions.act_url,%s' % id replace = node.replace or True self.pool['ir.model.data'].ir_set(self.cr, SUPERUSER_ID, 'action', \ keyword, node.url, ["ir.actions.act_url"], value, replace=replace, \ noupdate=self.isnoupdate(node), isobject=True, xml_id=node.id) def process_ir_set(self, node): if not self.mode == 'init': return False _, fields = node.items()[0] res = {} for fieldname, expression in fields.items(): if is_eval(expression): value = eval(expression.expression, self.eval_context) else: value = expression res[fieldname] = value self.pool['ir.model.data'].ir_set(self.cr, SUPERUSER_ID, res['key'], res['key2'], \ res['name'], res['models'], res['value'], replace=res.get('replace',True), \ isobject=res.get('isobject', False), meta=res.get('meta',None)) def process_report(self, node): values = {} for dest, f in (('name','string'), ('model','model'), ('report_name','name')): values[dest] = getattr(node, f) assert values[dest], "Attribute %s of report is empty !" 
% (f,) for field,dest in (('rml','report_rml'),('file','report_rml'),('xml','report_xml'),('xsl','report_xsl'),('attachment','attachment'),('attachment_use','attachment_use')): if getattr(node, field): values[dest] = getattr(node, field) if node.auto: values['auto'] = eval(node.auto) if node.sxw: sxw_file = misc.file_open(node.sxw) try: sxw_content = sxw_file.read() values['report_sxw_content'] = sxw_content finally: sxw_file.close() if node.header: values['header'] = eval(node.header) values['multi'] = node.multi and eval(node.multi) xml_id = node.id self.validate_xml_id(xml_id) self._set_group_values(node, values) id = self.pool['ir.model.data']._update(self.cr, SUPERUSER_ID, "ir.actions.report.xml", \ self.module, values, xml_id, noupdate=self.isnoupdate(node), mode=self.mode) self.id_map[xml_id] = int(id) if not node.menu or eval(node.menu): keyword = node.keyword or 'client_print_multi' value = 'ir.actions.report.xml,%s' % id replace = node.replace or True self.pool['ir.model.data'].ir_set(self.cr, SUPERUSER_ID, 'action', \ keyword, values['name'], [values['model']], value, replace=replace, isobject=True, xml_id=xml_id) def process_none(self): """ Empty node or commented node should not pass silently. """ self._log_assert_failure("You have an empty block in your tests.") def process(self, yaml_string): """ Processes a Yaml string. Custom tags are interpreted by 'process_' instance methods. 
""" yaml_tag.add_constructors() is_preceded_by_comment = False for node in yaml.load(yaml_string): is_preceded_by_comment = self._log_node(node, is_preceded_by_comment) try: self._process_node(node) except Exception, e: _logger.exception(e) raise def _process_node(self, node): if is_comment(node): self.process_comment(node) elif is_assert(node): self.process_assert(node) elif is_record(node): self.process_record(node) elif is_python(node): self.process_python(node) elif is_menuitem(node): self.process_menuitem(node) elif is_delete(node): self.process_delete(node) elif is_url(node): self.process_url(node) elif is_context(node): self.process_context(node) elif is_ir_set(node): self.process_ir_set(node) elif is_act_window(node): self.process_act_window(node) elif is_report(node): self.process_report(node) elif is_workflow(node): if isinstance(node, types.DictionaryType): self.process_workflow(node) else: self.process_workflow({node: []}) elif is_function(node): if isinstance(node, types.DictionaryType): self.process_function(node) else: self.process_function({node: []}) elif node is None: self.process_none() else: raise YamlImportException("Can not process YAML block: %s" % node) def _log_node(self, node, is_preceded_by_comment): if is_comment(node): is_preceded_by_comment = True self._log(node) elif not is_preceded_by_comment: if isinstance(node, types.DictionaryType): msg = "Creating %s\n with %s" args = node.items()[0] self._log(msg, *args) else: self._log(node) else: is_preceded_by_comment = False return is_preceded_by_comment def yaml_import(cr, module, yamlfile, kind, idref=None, mode='init', noupdate=False, report=None): if idref is None: idref = {} loglevel = logging.DEBUG yaml_string = yamlfile.read() yaml_interpreter = YamlInterpreter(cr, module, idref, mode, filename=yamlfile.name, report=report, noupdate=noupdate, loglevel=loglevel) yaml_interpreter.process(yaml_string) # keeps convention of convert.py convert_yaml_import = yaml_import # 
vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
abutcher/Taboot
taboot/tasks/mod_jk.py
2
3518
# -*- coding: utf-8 -*-
# Taboot - Client utility for performing deployments with Func.
# Copyright © 2009-2011, Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from taboot.tasks import BaseTask, FuncTask, TaskResult

# Symbolic mod_jk toggle actions.  JKBaseTask resolves these constants by
# name ("JK_" + action.upper()) via getattr on this module, so this naming
# scheme is load-bearing.
JK_ENABLE = 0
JK_DISABLE = 1
JK_STOP = 2

# Func minion method invoked for each action, and the past-tense verb used
# when reporting a successful toggle.  Replaces the previous duplicated
# if/elif chains in ToggleHost.
_JK_COMMANDS = {
    JK_ENABLE: 'taboot.modjk.enable_host',
    JK_DISABLE: 'taboot.modjk.disable_host',
    JK_STOP: 'taboot.modjk.stop_host',
}
_JK_VERBS = {
    JK_ENABLE: 'Enabled',
    JK_DISABLE: 'Disabled',
    JK_STOP: 'Stopped',
}


class ToggleHost(FuncTask):
    """
    Func task that enables, disables or stops a single AJP worker host
    on one proxy.

    :Parameters:
      - `action`: one of JK_ENABLE, JK_DISABLE or JK_STOP
      - `proxyhost`: the proxy host to run the modjk func method on
    """

    def __init__(self, action, proxyhost, **kwargs):
        super(ToggleHost, self).__init__(proxyhost, **kwargs)
        self._action = action
        try:
            self._command = _JK_COMMANDS[action]
        except KeyError:
            # Same failure mode as before: an unknown action aborts setup.
            raise Exception("Undefined toggle action")

    def _process_result(self, result):
        """
        Build a TaskResult from the func call's return value.

        `result` is a sequence of (balancer, worker) pairs; an empty
        sequence means the worker host could not be found on the proxy.
        """
        t = TaskResult(self)
        if result:
            t.success = True
            pairs = "".join("%s: %s\n" % (balancer, worker)
                            for balancer, worker in result)
            t.output = "%s AJP on the following balancer/worker " \
                "pairs:\n%s" % (_JK_VERBS[self._action], pairs)
        else:
            t.success = False
            t.output = "Failed to find worker host"
        return t


class JKBaseTask(BaseTask):
    """
    Common driver for mod_jk rotation tasks: toggles this task's host on
    every configured proxy in turn, stopping at the first failure.
    """

    def __init__(self, proxies, action, **kwargs):
        super(JKBaseTask, self).__init__(**kwargs)
        from sys import modules
        self.proxies = proxies
        # Map the textual action ("stop", "enable", ...) onto the matching
        # JK_* constant defined at module level.
        self.jkaction = getattr(modules[self.__module__],
                                "JK_%s" % action.upper())

    def run(self, runner):
        """Toggle self._host on each proxy; collect per-proxy output."""
        output = ""
        success = True
        for proxy in self.proxies:
            toggler = ToggleHost(self.jkaction, self._host, host=proxy)
            result = toggler.run(runner)
            output += "%s:\n%s\n" % (proxy, result.output)
            if not result.success:
                success = False
                break
        return TaskResult(self, success=success, output=output)


class OutOfRotation(JKBaseTask):
    """
    Remove an AJP node from rotation on a proxy via modjkapi
    access on the proxy with func.

    :Parameters:
      - `proxies`: A list of URLs to AJP jkmanage interfaces
    """

    def __init__(self, proxies, action="stop", **kwargs):
        super(OutOfRotation, self).__init__(proxies, action, **kwargs)


class InRotation(JKBaseTask):
    """
    Put an AJP node in rotation on a proxy via modjkapi
    access on the proxy with func.

    :Parameters:
      - `proxies`: A list of URLs to AJP jkmanage interfaces
    """

    def __init__(self, proxies, action="enable", **kwargs):
        super(InRotation, self).__init__(proxies, action, **kwargs)
gpl-3.0
probcomp/bayeslite
tests/test_guess.py
1
7801
# -*- coding: utf-8 -*-

#   Copyright (c) 2010-2016, MIT Probabilistic Computing Project
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.

import itertools
import math
import pytest

import bayeslite

from bayeslite.guess import bayesdb_guess_population
from bayeslite.guess import bayesdb_guess_stattypes
from bayeslite.exception import BQLError


def test_guess_stattypes():
    """Exercise bayesdb_guess_stattypes on synthetic two-column data.

    Each `rows` fixture is built so one guess per column
    (key/nominal/numerical/ignore) is expected; inline comments give the
    rationale.  Also checks validation of the `overrides` argument.
    """
    n = ['a', 'b']
    a_z = range(ord('a'), ord('z') + 1)
    rows = [[chr(c), c % 2] for c in a_z]
    with pytest.raises(ValueError):
        # Duplicate column names.
        bayesdb_guess_stattypes(['a', 'a'], rows)
    with pytest.raises(ValueError):
        # Too many columns in data.
        bayesdb_guess_stattypes(['a'], rows)
    with pytest.raises(ValueError):
        # Too few columns in data.
        bayesdb_guess_stattypes(['a', 'b', 'c'], rows)
    assert [st[0] for st in bayesdb_guess_stattypes(n, rows)] == \
        ['key', 'nominal']
    rows = [[chr(c), c % 2] for c in a_z] + [['q', ord('q') % 2]]
    # Ignore the first column, rather than calling it nominal, because
    # it's almost entirely unique, so one category cannot say much about
    # others.
    assert [st[0] for st in bayesdb_guess_stattypes(n, rows)] == \
        ['ignore', 'nominal']
    rows = [[c % 2, chr(c)] for c in a_z]
    assert [st[0] for st in bayesdb_guess_stattypes(n, rows)] == \
        ['nominal', 'key']
    rows = [[c % 2, chr(c)] for c in a_z] + [[0, 'k']]
    # Ignore the second column because it is almost unique, as above.
    assert [st[0] for st in bayesdb_guess_stattypes(n, rows)] == \
        ['nominal', 'ignore']
    rows = [[chr(c), i] for i, c in enumerate(a_z)]
    assert [st[0] for st in bayesdb_guess_stattypes(n, rows)] == \
        ['key', 'numerical']
    rows = [[chr(c), math.sqrt(i)] for i, c in enumerate(a_z)]
    assert [st[0] for st in bayesdb_guess_stattypes(n, rows)] == \
        ['key', 'numerical']
    rows = [[chr(c) + chr(d), isqrt(i)]
        for i, (c, d) in enumerate(itertools.product(a_z, a_z))]
    assert [st[0] for st in bayesdb_guess_stattypes(n, rows)] == \
        ['key', 'numerical']
    rows = [[chr(c) + chr(d) + chr(e), isqrt(i)]
        for i, (c, d, e) in enumerate(itertools.product(a_z, a_z, a_z))]
    assert [st[0] for st in bayesdb_guess_stattypes(n, rows)] == \
        ['key', 'nominal']
    rows = [[i, chr(c)] for i, c in enumerate(a_z)]
    # second field is unique, and we already have a key.
    assert [st[0] for st in bayesdb_guess_stattypes(n, rows)] == \
        ['key', 'ignore']
    rows = [[isqrt(i), chr(c) + chr(d)]
        for i, (c, d) in enumerate(itertools.product(a_z, a_z))]
    assert [st[0] for st in bayesdb_guess_stattypes(n, rows)] == \
        ['numerical', 'key']
    rows = [[isqrt(i), chr(c) + chr(d) + chr(e)]
        for i, (c, d, e) in enumerate(itertools.product(a_z, a_z, a_z))]
    assert [st[0] for st in bayesdb_guess_stattypes(n, rows)] == \
        ['nominal', 'key']
    with pytest.raises(ValueError):
        # Nonunique key.
        bayesdb_guess_stattypes(n, rows, overrides=[('a', 'key')])
    with pytest.raises(ValueError):
        # Two keys.
        bayesdb_guess_stattypes(n, rows,
            overrides=[('a', 'key'), ('b', 'key')])
    with pytest.raises(ValueError):
        # No such column.
        bayesdb_guess_stattypes(n, rows, overrides=[('c', 'numerical')])
    with pytest.raises(ValueError):
        # Column overridden twice.
        bayesdb_guess_stattypes(n, rows,
            overrides=[('a', 'key'), ('a', 'ignore')])
    with pytest.raises(ValueError):
        # Column overridden twice, even to the same stattype.
        bayesdb_guess_stattypes(n, rows,
            overrides=[('a', 'key'), ('a', 'key')])
    assert [st[0] for st in
            bayesdb_guess_stattypes(n, rows, overrides=[('b', 'key')])] == \
        ['nominal', 'key']
    assert [st[0] for st in
            bayesdb_guess_stattypes(n, rows, overrides=[('b', 'ignore')])] == \
        ['nominal', 'ignore']
    assert [st[0] for st in
            bayesdb_guess_stattypes(n, rows, overrides=[('a', 'numerical')])] \
        == ['numerical', 'key']
    rows = [['none' if c < ord('m') else c, chr(c)] for c in a_z]
    # Nullify 'none' because it is in the nullify list.
    # Nominal because <20 remaining.
    assert [st[0] for st in bayesdb_guess_stattypes(n, rows)] == \
        ['nominal', 'key']
    rows = [[3 if c < ord('y') else 5, chr(c)] for c in a_z]
    # Nullify 3 because it holds so many of the values.
    # Ignore because <2 remaining.
    assert [st[0] for st in bayesdb_guess_stattypes(n, rows)] == \
        ['ignore', 'key']
    # Ensure columns of unique floats are only taken to be keys when they
    # are integer-valued, not otherwise.
    rows = [[math.sqrt(c), c + 0.5] for c in a_z]
    assert [st[0] for st in bayesdb_guess_stattypes(n, rows)] == \
        ['numerical', 'numerical']
    rows = [[c + 0.5, float(c)] for c in a_z]
    assert [st[0] for st in bayesdb_guess_stattypes(n, rows)] == \
        ['numerical', 'key']
    # A column with a mix of ints and non-integer-valued floats should be
    # numerical.
    rows = [[c + 0.5, float(c + 0.5) if c % 2 == 0 else int(c)]
        for c in a_z]
    assert [st[0] for st in bayesdb_guess_stattypes(n, rows)] == \
        ['numerical', 'numerical']


def test_guess_population():
    """Guess a population over a 3-column table and check the resulting
    bayesdb_variable rows (x is taken as the key, so only y and z are
    modeled)."""
    with bayeslite.bayesdb_open() as bdb:
        bdb.sql_execute('CREATE TABLE t(x NUMERIC, y NUMERIC, z NUMERIC)')
        a_z = range(ord('a'), ord('z') + 1)
        aa_zz = ((c, d) for c in a_z for d in a_z)
        data = ((chr(c) + chr(d), (c + d) % 2, math.sqrt(c + d))
            for c, d in aa_zz)
        for row in data:
            bdb.sql_execute('INSERT INTO t (x, y, z) VALUES (?, ?, ?)', row)
        with pytest.raises(ValueError):
            # No modeled columns.  (x is key.)
            bayesdb_guess_population(bdb, 'p', 't',
                overrides=[('y', 'ignore'), ('z', 'ignore')])
        bayesdb_guess_population(bdb, 'p', 't')
        with pytest.raises(ValueError):
            # Population already exists.
            bayesdb_guess_population(bdb, 'p', 't')
        assert bdb.sql_execute('SELECT * FROM bayesdb_variable').fetchall() \
            == [
            (1, None, 1, 'y', 'nominal'),
            (1, None, 2, 'z', 'numerical'),
        ]


def test_guess_schema():
    """Check the GUESS SCHEMA BQL statement: result shape (column, stattype,
    num_distinct, reason) and the error for a missing table."""
    with bayeslite.bayesdb_open() as bdb:
        bdb.sql_execute('CREATE TABLE t(x NUMERIC, y NUMERIC, z NUMERIC)')
        a_z = range(ord('a'), ord('z') + 1)
        aa_zz = ((c, d) for c in a_z for d in a_z)
        data = ((chr(c) + chr(d), (c + d) % 2, math.sqrt(c + d))
            for c, d in aa_zz)
        for row in data:
            bdb.sql_execute('INSERT INTO t (x, y, z) VALUES (?, ?, ?)', row)
        with pytest.raises(BQLError):
            bdb.execute('GUESS SCHEMA FOR non_existant_table')
        guess = bdb.execute('GUESS SCHEMA FOR t')
        assert len(guess.description) == 4
        assert guess.description[0][0] == u'column'
        assert guess.description[1][0] == u'stattype'
        assert guess.description[2][0] == u'num_distinct'
        assert guess.description[3][0] == u'reason'
        assert len(guess.fetchall()) == 3


def isqrt(n):
    """Return floor(sqrt(n)) for a nonnegative integer n, via Newton's
    (Heron's) integer iteration."""
    x = n
    y = (x + 1)//2
    while y < x:
        x = y
        y = (x + n//x)//2
    return x
apache-2.0
ClearCorp/odoo-clearcorp
TODO-7.0/sneldev_magento/wizard/sneldev_magento_products_import.py
4
1759
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

import pooler
import os
from export_tools import *
from osv import osv, fields


class wiz_sneldev_products_import(osv.osv_memory):
    """Transient (wizard) model that triggers a Magento category and
    product import through the 'sneldev.magento' connector."""
    _name = 'sneldev.products.import'
    # NOTE(review): says 'Import orders' although this wizard imports
    # products/categories -- looks like a copy-paste slip, but it is
    # user-visible metadata so it is left unchanged.  TODO confirm.
    _description = 'Import orders'
    _columns = {
    }
    _defaults = {
    }

    def do_products_import(self, cr, uid, ids, context=None):
        """Import Magento categories, then products.

        Raises osv.except_osv when the product import reports failure
        (negative return code); details are in the connector's log file.
        """
        # The connector receives an empty product id; presumably that means
        # "all products" -- verify against sneldev.magento.import_products.
        product_id = ''
        magento = self.pool.get('sneldev.magento')
        magento.import_categories(cr, uid)
        if magento.import_products(cr, uid, product_id) < 0:
            raise osv.except_osv(
                'Warning',
                'Import failed, please refer to log file for failure '
                'details.')
        return {'type': 'ir.actions.act_window_close'}

wiz_sneldev_products_import()

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
josircg/raizcidadanista
raizcidadanista/financeiro/migrations/0004_auto__add_field_receita_nota.py
1
11730
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    # South schema migration for the 'financeiro' app: adds the free-text
    # 'nota' column to the Receita table.

    def forwards(self, orm):
        """Apply: add nullable/blank TextField 'nota' to financeiro_receita."""
        # Adding field 'Receita.nota'
        db.add_column('financeiro_receita', 'nota',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        """Revert: drop the 'nota' column from financeiro_receita."""
        # Deleting field 'Receita.nota'
        db.delete_column('financeiro_receita', 'nota')

    # Frozen ORM snapshot used by South to build the `orm` object passed to
    # forwards()/backwards().  Auto-generated -- do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'cadastro.membro': {
            'Meta': {'ordering': "['nome']", 'object_name': 'Membro', '_ormbases': ['cadastro.Pessoa']},
            'aprovador': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'membro_aprovador'", 'null': 'True', 'to': "orm['auth.User']"}),
            'assinado': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'atividade_profissional': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
            'contrib_prox_pgto': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'contrib_tipo': ('django.db.models.fields.CharField', [], {'default': "'N'", 'max_length': '1'}),
            'contrib_valor': ('utils.fields.BRDecimalField', [], {'default': '0', 'max_digits': '7', 'decimal_places': '2'}),
            'cpf': ('django.db.models.fields.CharField', [], {'max_length': '14', 'null': 'True', 'blank': 'True'}),
            'dt_prefiliacao': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'dtnascimento': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'endereco': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'endereco_cep': ('django.db.models.fields.CharField', [], {'max_length': '9', 'null': 'True', 'blank': 'True'}),
            'endereco_complemento': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'endereco_num': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
            'estadocivil': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
            'facebook_access_token': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'facebook_id': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'filiacao_partidaria': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'filiado': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'fundador': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'municipio_eleitoral': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
            'municipio_naturalidade': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
            'nome_da_mae': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True', 'blank': 'True'}),
            'pessoa_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cadastro.Pessoa']", 'unique': 'True', 'primary_key': 'True'}),
            'rg': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'secao_eleitoral': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
            'titulo_eleitoral': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'twitter_id': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'twitter_oauth_token': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'twitter_oauth_token_secret': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'uf_eleitoral': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['municipios.UF']", 'null': 'True', 'blank': 'True'}),
            'uf_naturalidade': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'uf_naturalidade'", 'null': 'True', 'to': "orm['municipios.UF']"}),
            'usuario': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'membro'", 'null': 'True', 'to': "orm['auth.User']"}),
            'zona_eleitoral': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'})
        },
        'cadastro.pessoa': {
            'Meta': {'ordering': "['nome']", 'object_name': 'Pessoa'},
            'celular': ('django.db.models.fields.CharField', [], {'max_length': '14', 'null': 'True', 'blank': 'True'}),
            'dtcadastro': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'municipio': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
            'nome': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'residencial': ('django.db.models.fields.CharField', [], {'max_length': '14', 'null': 'True', 'blank': 'True'}),
            'sexo': ('django.db.models.fields.CharField', [], {'default': "'O'", 'max_length': '1'}),
            'status_email': ('django.db.models.fields.CharField', [], {'default': "'N'", 'max_length': '1'}),
            'uf': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['municipios.UF']"})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'financeiro.conta': {
            'Meta': {'ordering': "('conta',)", 'object_name': 'Conta'},
            'ativa': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'conta': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}),
            'descricao': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nota': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'tipo': ('django.db.models.fields.CharField', [], {'default': "'M'", 'max_length': '1'})
        },
        'financeiro.metaarrecadacao': {
            'Meta': {'object_name': 'MetaArrecadacao'},
            'data_inicial': ('django.db.models.fields.DateField', [], {}),
            'data_limite': ('django.db.models.fields.DateField', [], {}),
            'descricao': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'valor': ('utils.fields.BRDecimalField', [], {'max_digits': '12', 'decimal_places': '2'})
        },
        'financeiro.receita': {
            'Meta': {'ordering': "('conta__conta',)", 'object_name': 'Receita'},
            'colaborador': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cadastro.Membro']", 'null': 'True', 'blank': 'True'}),
            'conta': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['financeiro.Conta']"}),
            'dtaviso': ('django.db.models.fields.DateField', [], {}),
            'dtpgto': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nota': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'valor': ('utils.fields.BRDecimalField', [], {'max_digits': '12', 'decimal_places': '2'})
        },
        'municipios.uf': {
            'Meta': {'ordering': "(u'nome',)", 'object_name': 'UF'},
            'id_ibge': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
            'nome': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'regiao': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'uf': ('django.db.models.fields.CharField', [], {'max_length': '2'})
        }
    }

    complete_apps = ['financeiro']
gpl-3.0
tumbl3w33d/ansible
lib/ansible/modules/network/fortios/fortios_switch_controller_802_1X_settings.py
13
9400
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

__metaclass__ = type

ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'metadata_version': '1.1'}

DOCUMENTATION = '''
---
module: fortios_switch_controller_802_1X_settings
short_description: Configure global 802.1X settings in Fortinet's FortiOS and FortiGate.
description:
    - This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
      user to set and modify switch_controller feature and 802_1X_settings category.
      Examples include all parameters and values need to be adjusted to datasources before usage.
      Tested with FOS v6.0.5
version_added: "2.9"
author:
    - Miguel Angel Munoz (@mamunozgonzalez)
    - Nicolas Thomas (@thomnico)
notes:
    - Requires fortiosapi library developed by Fortinet
    - Run as a local_action in your playbook
requirements:
    - fortiosapi>=0.9.8
options:
    host:
        description:
            - FortiOS or FortiGate IP address.
        type: str
        required: false
    username:
        description:
            - FortiOS or FortiGate username.
        type: str
        required: false
    password:
        description:
            - FortiOS or FortiGate password.
        type: str
        default: ""
    vdom:
        description:
            - Virtual domain, among those defined previously. A vdom is a
              virtual instance of the FortiGate that can be configured and
              used as a different unit.
        type: str
        default: root
    https:
        description:
            - Indicates if the requests towards FortiGate must use HTTPS protocol.
        type: bool
        default: true
    ssl_verify:
        description:
            - Ensures FortiGate certificate must be verified by a proper CA.
        type: bool
        default: true
    switch_controller_802_1X_settings:
        description:
            - Configure global 802.1X settings.
        default: null
        type: dict
        suboptions:
            link_down_auth:
                description:
                    - Interface-reauthentication state to set if a link is down.
                type: str
                choices:
                    - set-unauth
                    - no-action
            max_reauth_attempt:
                description:
                    - Maximum number of authentication attempts (0 - 15).
                type: int
            reauth_period:
                description:
                    - Period of time to allow for reauthentication (1 - 1440 sec).
                type: int
'''

EXAMPLES = '''
- hosts: localhost
  vars:
   host: "192.168.122.40"
   username: "admin"
   password: ""
   vdom: "root"
   ssl_verify: "False"
  tasks:
  - name: Configure global 802.1X settings.
    fortios_switch_controller_802_1X_settings:
      host: "{{ host }}"
      username: "{{ username }}"
      password: "{{ password }}"
      vdom: "{{ vdom }}"
      https: "False"
      switch_controller_802_1X_settings:
        link_down_auth: "set-unauth"
        max_reauth_attempt: "4"
        reauth_period: "5"
'''

RETURN = '''
build:
  description: Build number of the fortigate image
  returned: always
  type: str
  sample: '1547'
http_method:
  description: Last method used to provision the content into FortiGate
  returned: always
  type: str
  sample: 'PUT'
http_status:
  description: Last result given by FortiGate on last operation applied
  returned: always
  type: str
  sample: "200"
mkey:
  description: Master key (id) used in the last call to FortiGate
  returned: success
  type: str
  sample: "id"
name:
  description: Name of the table used to fulfill the request
  returned: always
  type: str
  sample: "urlfilter"
path:
  description: Path of the table used to fulfill the request
  returned: always
  type: str
  sample: "webfilter"
revision:
  description: Internal revision number
  returned: always
  type: str
  sample: "17.0.2.10658"
serial:
  description: Serial number of the unit
  returned: always
  type: str
  sample: "FGVMEVYYQT3AB5352"
status:
  description: Indication of the operation's result
  returned: always
  type: str
  sample: "success"
vdom:
  description: Virtual domain used
  returned: always
  type: str
  sample: "root"
version:
  description: Version of the FortiGate
  returned: always
  type: str
  sample: "v5.6.3"
'''

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG


def login(data, fos):
    """Log in to the FortiGate using the legacy fortiosapi client."""
    host = data['host']
    username = data['username']
    password = data['password']
    ssl_verify = data['ssl_verify']

    fos.debug('on')
    if 'https' in data and not data['https']:
        fos.https('off')
    else:
        fos.https('on')

    fos.login(host, username, password, verify=ssl_verify)


def filter_switch_controller_802_1X_settings_data(json):
    """Keep only the options this endpoint understands, dropping None values."""
    option_list = ['link_down_auth', 'max_reauth_attempt',
                   'reauth_period']
    dictionary = {}

    for attribute in option_list:
        if attribute in json and json[attribute] is not None:
            dictionary[attribute] = json[attribute]

    return dictionary


def underscore_to_hyphen(data):
    """Recursively rewrite dict keys from foo_bar to the foo-bar form the
    FortiOS REST API expects; lists are converted in place."""
    if isinstance(data, list):
        # BUGFIX: write the converted element back into the list.  The
        # previous code rebound the loop variable only, so dicts nested
        # inside lists were never converted.
        for i, elem in enumerate(data):
            data[i] = underscore_to_hyphen(elem)
    elif isinstance(data, dict):
        new_data = {}
        for k, v in data.items():
            new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
        data = new_data

    return data


def switch_controller_802_1X_settings(data, fos):
    """Push the filtered 802.1X settings to the device in the requested vdom."""
    vdom = data['vdom']
    switch_controller_802_1X_settings_data = data['switch_controller_802_1X_settings']
    filtered_data = underscore_to_hyphen(filter_switch_controller_802_1X_settings_data(switch_controller_802_1X_settings_data))

    return fos.set('switch-controller',
                   '802-1X-settings',
                   data=filtered_data,
                   vdom=vdom)


def is_successful_status(status):
    """Treat a DELETE returning 404 as success (object already absent)."""
    return status['status'] == "success" or \
        status['http_method'] == "DELETE" and status['http_status'] == 404


def fortios_switch_controller(data, fos):
    """Dispatch to the configured endpoint.

    Returns (is_error, has_changed, raw_response).
    """
    # NOTE(review): if 'switch_controller_802_1X_settings' is empty/None,
    # `resp` is unbound and the return raises NameError.  This mirrors the
    # upstream generated code and is left unchanged.
    if data['switch_controller_802_1X_settings']:
        resp = switch_controller_802_1X_settings(data, fos)

    return not is_successful_status(resp), \
        resp['status'] == "success", \
        resp


def main():
    """Entry point: parse arguments, pick HTTPAPI or legacy transport, run."""
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "switch_controller_802_1X_settings": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "link_down_auth": {"required": False, "type": "str",
                                   "choices": ["set-unauth", "no-action"]},
                "max_reauth_attempt": {"required": False, "type": "int"},
                "reauth_period": {"required": False, "type": "int"}
            }
        }
    }

    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)

    # legacy_mode refers to using fortiosapi instead of HTTPAPI
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
                  'username' in module.params and module.params['username'] is not None and \
                  'password' in module.params and module.params['password'] is not None

    if not legacy_mode:
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)

            is_error, has_changed, result = fortios_switch_controller(module.params, fos)
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")

        fos = FortiOSAPI()

        login(module.params, fos)
        is_error, has_changed, result = fortios_switch_controller(module.params, fos)
        fos.logout()

    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)


if __name__ == '__main__':
    main()
gpl-3.0
projectcalico/calico-nova
nova/tests/unit/objects/test_instance.py
2
61984
# Copyright 2013 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime import iso8601 import mock from mox3 import mox import netaddr from oslo.serialization import jsonutils from oslo.utils import timeutils from nova.cells import rpcapi as cells_rpcapi from nova.compute import flavors from nova import db from nova import exception from nova.network import model as network_model from nova import notifications from nova import objects from nova.objects import instance from nova.objects import instance_info_cache from nova.objects import pci_device from nova.objects import security_group from nova import test from nova.tests.unit.api.openstack import fakes from nova.tests.unit import fake_instance from nova.tests.unit.objects import test_instance_fault from nova.tests.unit.objects import test_instance_info_cache from nova.tests.unit.objects import test_instance_numa_topology from nova.tests.unit.objects import test_instance_pci_requests from nova.tests.unit.objects import test_objects from nova.tests.unit.objects import test_security_group from nova import utils class _TestInstanceObject(object): @property def fake_instance(self): fake_instance = fakes.stub_instance(id=2, access_ipv4='1.2.3.4', access_ipv6='::1') fake_instance['cell_name'] = 'api!child' fake_instance['scheduled_at'] = None fake_instance['terminated_at'] = None fake_instance['deleted_at'] = None fake_instance['created_at'] = None fake_instance['updated_at'] = None fake_instance['launched_at'] = ( 
fake_instance['launched_at'].replace( tzinfo=iso8601.iso8601.Utc(), microsecond=0)) fake_instance['deleted'] = False fake_instance['info_cache']['instance_uuid'] = fake_instance['uuid'] fake_instance['security_groups'] = [] fake_instance['pci_devices'] = [] fake_instance['user_id'] = self.context.user_id fake_instance['project_id'] = self.context.project_id fake_instance['tags'] = [] return fake_instance def test_datetime_deserialization(self): red_letter_date = timeutils.parse_isotime( timeutils.isotime(datetime.datetime(1955, 11, 5))) inst = instance.Instance(uuid='fake-uuid', launched_at=red_letter_date) primitive = inst.obj_to_primitive() expected = {'nova_object.name': 'Instance', 'nova_object.namespace': 'nova', 'nova_object.version': '1.17', 'nova_object.data': {'uuid': 'fake-uuid', 'launched_at': '1955-11-05T00:00:00Z'}, 'nova_object.changes': ['launched_at', 'uuid']} self.assertEqual(primitive, expected) inst2 = instance.Instance.obj_from_primitive(primitive) self.assertIsInstance(inst2.launched_at, datetime.datetime) self.assertEqual(inst2.launched_at, red_letter_date) def test_ip_deserialization(self): inst = instance.Instance(uuid='fake-uuid', access_ip_v4='1.2.3.4', access_ip_v6='::1') primitive = inst.obj_to_primitive() expected = {'nova_object.name': 'Instance', 'nova_object.namespace': 'nova', 'nova_object.version': '1.17', 'nova_object.data': {'uuid': 'fake-uuid', 'access_ip_v4': '1.2.3.4', 'access_ip_v6': '::1'}, 'nova_object.changes': ['uuid', 'access_ip_v6', 'access_ip_v4']} self.assertEqual(primitive, expected) inst2 = instance.Instance.obj_from_primitive(primitive) self.assertIsInstance(inst2.access_ip_v4, netaddr.IPAddress) self.assertIsInstance(inst2.access_ip_v6, netaddr.IPAddress) self.assertEqual(inst2.access_ip_v4, netaddr.IPAddress('1.2.3.4')) self.assertEqual(inst2.access_ip_v6, netaddr.IPAddress('::1')) def test_get_without_expected(self): self.mox.StubOutWithMock(db, 'instance_get_by_uuid') db.instance_get_by_uuid(self.context, 
'uuid', columns_to_join=[], use_slave=False ).AndReturn(self.fake_instance) self.mox.ReplayAll() inst = instance.Instance.get_by_uuid(self.context, 'uuid', expected_attrs=[]) for attr in instance.INSTANCE_OPTIONAL_ATTRS: self.assertFalse(inst.obj_attr_is_set(attr)) self.assertRemotes() def test_get_with_expected(self): self.mox.StubOutWithMock(db, 'instance_get_by_uuid') self.mox.StubOutWithMock(db, 'instance_fault_get_by_instance_uuids') self.mox.StubOutWithMock( db, 'instance_extra_get_by_instance_uuid') exp_cols = instance.INSTANCE_OPTIONAL_ATTRS[:] exp_cols.remove('fault') exp_cols.remove('numa_topology') exp_cols.remove('pci_requests') exp_cols.extend(['extra', 'extra.numa_topology', 'extra.pci_requests']) fake_topology = (test_instance_numa_topology. fake_db_topology['numa_topology']) fake_requests = jsonutils.dumps(test_instance_pci_requests. fake_pci_requests) fake_instance = dict(self.fake_instance, extra={ 'numa_topology': fake_topology, 'pci_requests': fake_requests, }) db.instance_get_by_uuid( self.context, 'uuid', columns_to_join=exp_cols, use_slave=False ).AndReturn(fake_instance) fake_faults = test_instance_fault.fake_faults db.instance_fault_get_by_instance_uuids( self.context, [fake_instance['uuid']] ).AndReturn(fake_faults) self.mox.ReplayAll() inst = instance.Instance.get_by_uuid( self.context, 'uuid', expected_attrs=instance.INSTANCE_OPTIONAL_ATTRS) for attr in instance.INSTANCE_OPTIONAL_ATTRS: self.assertTrue(inst.obj_attr_is_set(attr)) self.assertRemotes() def test_get_by_id(self): self.mox.StubOutWithMock(db, 'instance_get') db.instance_get(self.context, 'instid', columns_to_join=['info_cache', 'security_groups'] ).AndReturn(self.fake_instance) self.mox.ReplayAll() inst = instance.Instance.get_by_id(self.context, 'instid') self.assertEqual(inst.uuid, self.fake_instance['uuid']) self.assertRemotes() def test_load(self): self.mox.StubOutWithMock(db, 'instance_get_by_uuid') fake_uuid = self.fake_instance['uuid'] 
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(self.fake_instance)
        fake_inst2 = dict(self.fake_instance,
                          system_metadata=[{'key': 'foo', 'value': 'bar'}])
        # Second expected call: the lazy-load joins only system_metadata.
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['system_metadata'],
                                use_slave=False
                                ).AndReturn(fake_inst2)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
        self.assertFalse(hasattr(inst, '_system_metadata'))
        sys_meta = inst.system_metadata
        self.assertEqual(sys_meta, {'foo': 'bar'})
        self.assertTrue(hasattr(inst, '_system_metadata'))
        # Make sure we don't run load again
        sys_meta2 = inst.system_metadata
        self.assertEqual(sys_meta2, {'foo': 'bar'})
        self.assertRemotes()

    # Lazy-loading an unknown attribute must raise ObjectActionError.
    def test_load_invalid(self):
        inst = instance.Instance(context=self.context, uuid='fake-uuid')
        self.assertRaises(exception.ObjectActionError,
                          inst.obj_load_attr, 'foo')

    # get_by_uuid() maps DB columns straight onto object fields (IPs are
    # stringifiable, launched_at compares equal to the DB value).
    def test_get_remote(self):
        # isotime doesn't have microseconds and is always UTC
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        fake_instance = self.fake_instance
        db.instance_get_by_uuid(self.context, 'fake-uuid',
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(fake_instance)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, 'fake-uuid')
        self.assertEqual(inst.id, fake_instance['id'])
        self.assertEqual(inst.launched_at, fake_instance['launched_at'])
        self.assertEqual(str(inst.access_ip_v4),
                         fake_instance['access_ip_v4'])
        self.assertEqual(str(inst.access_ip_v6),
                         fake_instance['access_ip_v6'])
        self.assertRemotes()

    # refresh() re-reads the row: the mox expectations record two fetches,
    # first returning host='orig-host', then host='new-host'.
    def test_refresh(self):
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        fake_uuid = self.fake_instance['uuid']
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(dict(self.fake_instance,
                                                 host='orig-host'))
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(dict(self.fake_instance,
                                                 host='new-host'))
        self.mox.StubOutWithMock(instance_info_cache.InstanceInfoCache,
                                 'refresh')
        instance_info_cache.InstanceInfoCache.refresh()
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
        self.assertEqual(inst.host, 'orig-host')
        inst.refresh()
        # After refresh() the object carries the new DB state and is clean.
        self.assertEqual(inst.host, 'new-host')
        self.assertRemotes()
        self.assertEqual(set([]), inst.obj_what_changed())

    # If get_by_uuid() hands back an object without a context, refresh()
    # must raise OrphanedObjectError rather than recurse into another load.
    def test_refresh_does_not_recurse(self):
        inst = instance.Instance(context=self.context, uuid='fake-uuid',
                                 metadata={})
        inst_copy = instance.Instance()
        inst_copy.uuid = inst.uuid
        self.mox.StubOutWithMock(instance.Instance, 'get_by_uuid')
        instance.Instance.get_by_uuid(self.context, uuid=inst.uuid,
                                      expected_attrs=['metadata'],
                                      use_slave=False
                                      ).AndReturn(inst_copy)
        self.mox.ReplayAll()
        self.assertRaises(exception.OrphanedObjectError, inst.refresh)

    def _save_test_helper(self, cell_type, save_kwargs):
        """Common code for testing save() for cells/non-cells."""
        # cell_type is None, 'api' or 'compute'; it selects which cells RPC
        # (if any) save() is expected to make.  save_kwargs are forwarded to
        # inst.save() after being inspected for the expected_* assertions.
        if cell_type:
            self.flags(enable=True, cell_type=cell_type, group='cells')
        else:
            self.flags(enable=False, group='cells')

        old_ref = dict(self.fake_instance, host='oldhost', user_data='old',
                       vm_state='old', task_state='old')
        fake_uuid = old_ref['uuid']

        expected_updates = dict(vm_state='meow', task_state='wuff',
                                user_data='new')

        new_ref = dict(old_ref, host='newhost', **expected_updates)
        exp_vm_state = save_kwargs.get('expected_vm_state')
        exp_task_state = save_kwargs.get('expected_task_state')
        admin_reset = save_kwargs.get('admin_state_reset', False)
        if exp_vm_state:
            expected_updates['expected_vm_state'] = exp_vm_state
        if exp_task_state:
            # At object version 1.9 an expected 'image_snapshot' task state
            # is widened to also accept 'image_snapshot_pending'.
            if (exp_task_state == 'image_snapshot' and
                    'instance_version' in save_kwargs and
                    save_kwargs['instance_version'] == '1.9'):
                expected_updates['expected_task_state'] = [
                    'image_snapshot', 'image_snapshot_pending']
            else:
                expected_updates['expected_task_state'] = exp_task_state
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        self.mox.StubOutWithMock(db, 'instance_update_and_get_original')
        self.mox.StubOutWithMock(db, 'instance_info_cache_update')
        cells_api_mock = self.mox.CreateMock(cells_rpcapi.CellsAPI)
        self.mox.StubOutWithMock(cells_api_mock,
                                 'instance_update_at_top')
        self.mox.StubOutWithMock(cells_api_mock,
                                 'instance_update_from_api')
        self.mox.StubOutWithMock(cells_rpcapi, 'CellsAPI',
                                 use_mock_anything=True)
        self.mox.StubOutWithMock(notifications, 'send_update')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(old_ref)
        db.instance_update_and_get_original(
            self.context, fake_uuid, expected_updates,
            update_cells=False,
            columns_to_join=['info_cache', 'security_groups',
                             'system_metadata']
            ).AndReturn((old_ref, new_ref))
        # api cells forward the update via instance_update_from_api; compute
        # cells push the new record upward via instance_update_at_top.
        if cell_type == 'api':
            cells_rpcapi.CellsAPI().AndReturn(cells_api_mock)
            cells_api_mock.instance_update_from_api(
                self.context, mox.IsA(instance.Instance),
                exp_vm_state, exp_task_state, admin_reset)
        elif cell_type == 'compute':
            cells_rpcapi.CellsAPI().AndReturn(cells_api_mock)
            cells_api_mock.instance_update_at_top(self.context, new_ref)
        notifications.send_update(self.context, mox.IgnoreArg(),
                                  mox.IgnoreArg())

        self.mox.ReplayAll()

        inst = instance.Instance.get_by_uuid(self.context, old_ref['uuid'])
        if 'instance_version' in save_kwargs:
            inst.VERSION = save_kwargs.pop('instance_version')
        self.assertEqual('old', inst.task_state)
        self.assertEqual('old', inst.vm_state)
        self.assertEqual('old', inst.user_data)
        inst.vm_state = 'meow'
        inst.task_state = 'wuff'
        inst.user_data = 'new'
        inst.save(**save_kwargs)
        # save() must absorb the DB's post-update row and leave no dirt.
        self.assertEqual('newhost', inst.host)
        self.assertEqual('meow', inst.vm_state)
        self.assertEqual('wuff', inst.task_state)
        self.assertEqual('new', inst.user_data)
        self.assertEqual(set([]), inst.obj_what_changed())

    # Thin wrappers driving _save_test_helper through each cells mode.
    def test_save(self):
        self._save_test_helper(None, {})

    def test_save_in_api_cell(self):
        self._save_test_helper('api', {})

    def test_save_in_compute_cell(self):
        self._save_test_helper('compute',
                               {})

    # Wrappers exercising the expected_vm_state / expected_task_state /
    # admin_state_reset paths of _save_test_helper.
    def test_save_exp_vm_state(self):
        self._save_test_helper(None, {'expected_vm_state': ['meow']})

    def test_save_exp_task_state(self):
        self._save_test_helper(None, {'expected_task_state': ['meow']})

    def test_save_exp_task_state_havana(self):
        self._save_test_helper(None, {
            'expected_task_state': 'image_snapshot',
            'instance_version': '1.9'})

    def test_save_exp_vm_state_api_cell(self):
        self._save_test_helper('api', {'expected_vm_state': ['meow']})

    def test_save_exp_task_state_api_cell(self):
        self._save_test_helper('api', {'expected_task_state': ['meow']})

    def test_save_exp_task_state_api_cell_admin_reset(self):
        self._save_test_helper('api', {'admin_state_reset': True})

    def test_save_rename_sends_notification(self):
        # Tests that simply changing the 'display_name' on the instance
        # will send a notification.
        self.flags(enable=False, group='cells')
        old_ref = dict(self.fake_instance, display_name='hello')
        fake_uuid = old_ref['uuid']
        expected_updates = dict(display_name='goodbye')
        new_ref = dict(old_ref, **expected_updates)
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        self.mox.StubOutWithMock(db, 'instance_update_and_get_original')
        self.mox.StubOutWithMock(notifications, 'send_update')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(old_ref)
        db.instance_update_and_get_original(
            self.context, fake_uuid, expected_updates,
            update_cells=False,
            columns_to_join=['info_cache', 'security_groups',
                             'system_metadata']
            ).AndReturn((old_ref, new_ref))
        notifications.send_update(self.context, mox.IgnoreArg(),
                                  mox.IgnoreArg())

        self.mox.ReplayAll()

        inst = instance.Instance.get_by_uuid(self.context, old_ref['uuid'],
                                             use_slave=False)
        self.assertEqual('hello', inst.display_name)
        inst.display_name = 'goodbye'
        inst.save()
        self.assertEqual('goodbye', inst.display_name)
        self.assertEqual(set([]), inst.obj_what_changed())

    # Setting a related object to None must still route through its _save_*
    # handler rather than being skipped.
    def test_save_related_object_if_none(self):
        with mock.patch.object(instance.Instance,
                               '_save_pci_requests'
                               ) as save_mock:
            inst = instance.Instance()
            inst = instance.Instance._from_db_object(self.context, inst,
                                                     self.fake_instance)
            inst.pci_requests = None
            inst.save()
            self.assertTrue(save_mock.called)

    @mock.patch('nova.db.instance_update_and_get_original')
    @mock.patch('nova.objects.Instance._from_db_object')
    def test_save_does_not_refresh_pci_devices(self, mock_fdo, mock_update):
        # NOTE(danms): This tests that we don't update the pci_devices
        # field from the contents of the database. This is not because we
        # don't necessarily want to, but because the way pci_devices is
        # currently implemented it causes versioning issues. When that is
        # resolved, this test should go away.
        mock_update.return_value = None, None
        inst = instance.Instance(context=self.context, id=123)
        inst.uuid = 'foo'
        inst.pci_devices = pci_device.PciDeviceList()
        inst.save()
        self.assertNotIn('pci_devices',
                         mock_fdo.call_args_list[0][1]['expected_attrs'])

    # Saving numa_topology goes through instance_extra_update_by_uuid: a set
    # topology is stored as JSON, clearing it stores None.
    @mock.patch('nova.db.instance_extra_update_by_uuid')
    @mock.patch('nova.db.instance_update_and_get_original')
    @mock.patch('nova.objects.Instance._from_db_object')
    def test_save_updates_numa_topology(self, mock_fdo, mock_update,
            mock_extra_update):
        fake_obj_numa_topology = objects.InstanceNUMATopology(cells=[
            objects.InstanceNUMACell(id=0, cpuset=set([0]), memory=128),
            objects.InstanceNUMACell(id=1, cpuset=set([1]), memory=128)])
        fake_obj_numa_topology.instance_uuid = 'fake-uuid'
        jsonified = fake_obj_numa_topology._to_json()

        mock_update.return_value = None, None
        inst = instance.Instance(
            context=self.context, id=123, uuid='fake-uuid')
        inst.numa_topology = fake_obj_numa_topology
        inst.save()

        mock_extra_update.assert_called_once_with(
            self.context, inst.uuid, {'numa_topology': jsonified})

        mock_extra_update.reset_mock()
        inst.numa_topology = None
        inst.save()
        mock_extra_update.assert_called_once_with(
            self.context, inst.uuid, {'numa_topology': None})

    # The DB's integer 'deleted' column (here 123) must surface as bool.
    def test_get_deleted(self):
        fake_inst = dict(self.fake_instance, id=123, deleted=123)
        fake_uuid = \
            fake_inst['uuid']
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
        # NOTE(danms): Make sure it's actually a bool
        self.assertEqual(inst.deleted, True)

    # DB NULL 'cleaned' must come back as the bool False.
    def test_get_not_cleaned(self):
        fake_inst = dict(self.fake_instance, id=123, cleaned=None)
        fake_uuid = fake_inst['uuid']
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
        # NOTE(mikal): Make sure it's actually a bool
        self.assertEqual(inst.cleaned, False)

    # DB integer 'cleaned'=1 must come back as the bool True.
    def test_get_cleaned(self):
        fake_inst = dict(self.fake_instance, id=123, cleaned=1)
        fake_uuid = fake_inst['uuid']
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
        # NOTE(mikal): Make sure it's actually a bool
        self.assertEqual(inst.cleaned, True)

    # Changing info_cache.network_info and saving the instance must write
    # the new JSON via instance_info_cache_update.
    def test_with_info_cache(self):
        fake_inst = dict(self.fake_instance)
        fake_uuid = fake_inst['uuid']
        nwinfo1 = network_model.NetworkInfo.hydrate([{'address': 'foo'}])
        nwinfo2 = network_model.NetworkInfo.hydrate([{'address': 'bar'}])
        nwinfo1_json = nwinfo1.json()
        nwinfo2_json = nwinfo2.json()
        fake_inst['info_cache'] = dict(
            test_instance_info_cache.fake_info_cache,
            network_info=nwinfo1_json,
            instance_uuid=fake_uuid)
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        self.mox.StubOutWithMock(db, 'instance_update_and_get_original')
        self.mox.StubOutWithMock(db, 'instance_info_cache_update')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        db.instance_info_cache_update(self.context, fake_uuid,
                                      {'network_info': nwinfo2_json})
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
        self.assertEqual(inst.info_cache.network_info, nwinfo1)
        self.assertEqual(inst.info_cache.instance_uuid, fake_uuid)
        inst.info_cache.network_info = nwinfo2
        inst.save()

    # A NULL info_cache row must surface as info_cache=None, not an object.
    def test_with_info_cache_none(self):
        fake_inst = dict(self.fake_instance, info_cache=None)
        fake_uuid = fake_inst['uuid']
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid,
                                             ['info_cache'])
        self.assertIsNone(inst.info_cache)

    # Joined security-group rows hydrate into SecurityGroup objects; saving
    # the instance persists only the group whose description changed.
    def test_with_security_groups(self):
        fake_inst = dict(self.fake_instance)
        fake_uuid = fake_inst['uuid']
        fake_inst['security_groups'] = [
            {'id': 1, 'name': 'secgroup1', 'description': 'fake-desc',
             'user_id': 'fake-user', 'project_id': 'fake_project',
             'created_at': None, 'updated_at': None, 'deleted_at': None,
             'deleted': False},
            {'id': 2, 'name': 'secgroup2', 'description': 'fake-desc',
             'user_id': 'fake-user', 'project_id': 'fake_project',
             'created_at': None, 'updated_at': None, 'deleted_at': None,
             'deleted': False},
        ]
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        self.mox.StubOutWithMock(db, 'instance_update_and_get_original')
        self.mox.StubOutWithMock(db, 'security_group_update')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        db.security_group_update(self.context, 1, {'description': 'changed'}
                                 ).AndReturn(fake_inst['security_groups'][0])
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
        self.assertEqual(len(inst.security_groups), 2)
        for index, group in enumerate(fake_inst['security_groups']):
            for \
                    key in group:
                self.assertEqual(group[key],
                                 inst.security_groups[index][key])
                self.assertIsInstance(inst.security_groups[index],
                                      security_group.SecurityGroup)
        self.assertEqual(inst.security_groups.obj_what_changed(), set())
        inst.security_groups[0].description = 'changed'
        inst.save()
        self.assertEqual(inst.security_groups.obj_what_changed(), set())

    # An instance with no security-group rows yields an empty list object.
    def test_with_empty_security_groups(self):
        fake_inst = dict(self.fake_instance, security_groups=[])
        fake_uuid = fake_inst['uuid']
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
        self.assertEqual(0, len(inst.security_groups))

    # Same for pci_devices: empty join result -> empty PciDeviceList.
    def test_with_empty_pci_devices(self):
        fake_inst = dict(self.fake_instance, pci_devices=[])
        fake_uuid = fake_inst['uuid']
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['pci_devices'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid,
                                             ['pci_devices'])
        self.assertEqual(len(inst.pci_devices), 0)

    # Joined pci_devices rows hydrate into PciDevice objects carrying the
    # owning instance's uuid.
    def test_with_pci_devices(self):
        fake_inst = dict(self.fake_instance)
        fake_uuid = fake_inst['uuid']
        fake_inst['pci_devices'] = [
            {'created_at': None,
             'updated_at': None,
             'deleted_at': None,
             'deleted': None,
             'id': 2,
             'compute_node_id': 1,
             'address': 'a1',
             'vendor_id': 'v1',
             'numa_node': 0,
             'product_id': 'p1',
             'dev_type': 't',
             'status': 'allocated',
             'dev_id': 'i',
             'label': 'l',
             'instance_uuid': fake_uuid,
             'request_id': None,
             'extra_info': '{}'},
            {'created_at': None,
             'updated_at': None,
             'deleted_at': None,
             'deleted': None,
             'id': 1,
             'compute_node_id': 1,
             'address': 'a',
             'vendor_id': 'v',
             'numa_node': 1,
             'product_id': 'p',
             'dev_type': 't',
             'status': 'allocated',
             'dev_id': 'i',
             'label': 'l',
             'instance_uuid': fake_uuid,
             'request_id': None,
             'extra_info':
                 '{}'},
        ]
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['pci_devices'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid,
                                             ['pci_devices'])
        self.assertEqual(len(inst.pci_devices), 2)
        self.assertEqual(inst.pci_devices[0].instance_uuid, fake_uuid)
        self.assertEqual(inst.pci_devices[1].instance_uuid, fake_uuid)

    # 'fault' loads via a separate faults query; the first returned fault
    # row becomes inst.fault.
    def test_with_fault(self):
        fake_inst = dict(self.fake_instance)
        fake_uuid = fake_inst['uuid']
        fake_faults = [dict(x, instance_uuid=fake_uuid)
                       for x in test_instance_fault.fake_faults['fake-uuid']]
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        self.mox.StubOutWithMock(db, 'instance_fault_get_by_instance_uuids')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=[],
                                use_slave=False
                                ).AndReturn(self.fake_instance)
        db.instance_fault_get_by_instance_uuids(
            self.context, [fake_uuid]).AndReturn({fake_uuid: fake_faults})
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid,
                                             expected_attrs=['fault'])
        self.assertEqual(fake_faults[0], dict(inst.fault.items()))
        self.assertRemotes()

    # items() must include synthetic attributes like 'name' alongside the
    # regular fields.
    def test_iteritems_with_extra_attrs(self):
        self.stubs.Set(instance.Instance, 'name', 'foo')
        inst = instance.Instance(uuid='fake-uuid')
        self.assertEqual(inst.items(),
                         {'uuid': 'fake-uuid',
                          'name': 'foo',
                          }.items())

    # In-place mutation of (system_)metadata must register as a change and
    # be clearable with obj_reset_changes().
    def _test_metadata_change_tracking(self, which):
        inst = instance.Instance(uuid='fake-uuid')
        setattr(inst, which, {})
        inst.obj_reset_changes()
        getattr(inst, which)['foo'] = 'bar'
        self.assertEqual(set([which]), inst.obj_what_changed())
        inst.obj_reset_changes()
        self.assertEqual(set(), inst.obj_what_changed())

    def test_metadata_change_tracking(self):
        self._test_metadata_change_tracking('metadata')

    def test_system_metadata_change_tracking(self):
        self._test_metadata_change_tracking('system_metadata')

    # create() must forward exactly the set fields (plus an empty 'extra')
    # to db.instance_create.
    def test_create_stubbed(self):
        self.mox.StubOutWithMock(db, 'instance_create')
        vals = {'host':
                    'foo-host',
                'memory_mb': 128,
                'system_metadata': {'foo': 'bar'},
                'extra': {}}
        fake_inst = fake_instance.fake_db_instance(**vals)
        db.instance_create(self.context, vals).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance(context=self.context,
                                 host='foo-host', memory_mb=128,
                                 system_metadata={'foo': 'bar'})
        inst.create()

    # With nothing set, create() still sends an empty 'extra' dict and maps
    # the returned row onto the object.
    def test_create(self):
        self.mox.StubOutWithMock(db, 'instance_create')
        db.instance_create(self.context, {'extra': {}}).AndReturn(
            self.fake_instance)
        self.mox.ReplayAll()
        inst = instance.Instance(context=self.context)
        inst.create()
        self.assertEqual(self.fake_instance['id'], inst.id)

    # Round-trip through the (fake) DB layer: values set before create() are
    # persisted and visible on a fresh get_by_uuid().
    def test_create_with_values(self):
        inst1 = instance.Instance(context=self.context,
                                  user_id=self.context.user_id,
                                  project_id=self.context.project_id,
                                  host='foo-host')
        inst1.create()
        self.assertEqual(inst1.host, 'foo-host')
        inst2 = instance.Instance.get_by_uuid(self.context, inst1.uuid)
        self.assertEqual(inst2.host, 'foo-host')

    # numa_topology and pci_requests given at create() time must be stored
    # and retrievable through their own objects afterwards.
    def test_create_with_extras(self):
        inst = instance.Instance(context=self.context,
            uuid=self.fake_instance['uuid'],
            numa_topology=test_instance_numa_topology.fake_obj_numa_topology,
            pci_requests=objects.InstancePCIRequests(
                requests=[
                    objects.InstancePCIRequest(count=123,
                                               spec=[])]))
        inst.create()
        self.assertIsNotNone(inst.numa_topology)
        self.assertIsNotNone(inst.pci_requests)
        got_numa_topo = objects.InstanceNUMATopology.get_by_instance_uuid(
            self.context, inst.uuid)
        self.assertEqual(inst.numa_topology.instance_uuid,
                         got_numa_topo.instance_uuid)
        got_pci_requests = objects.InstancePCIRequests.get_by_instance_uuid(
            self.context, inst.uuid)
        self.assertEqual(123, got_pci_requests.requests[0].count)

    # Calling create() on an already-created object is an error.
    def test_recreate_fails(self):
        inst = instance.Instance(context=self.context,
                                 user_id=self.context.user_id,
                                 project_id=self.context.project_id,
                                 host='foo-host')
        inst.create()
        self.assertRaises(exception.ObjectActionError, inst.create,
                          self.context)

    # Nested objects (security groups, info cache) are flattened into the
    # values dict handed to db.instance_create.
    def test_create_with_special_things(self):
        self.mox.StubOutWithMock(db, 'instance_create')
        fake_inst \
            = fake_instance.fake_db_instance()
        db.instance_create(self.context,
                           {'host': 'foo-host',
                            'security_groups': ['foo', 'bar'],
                            'info_cache': {'network_info': '[]'},
                            'extra': {},
                            }
                           ).AndReturn(fake_inst)
        self.mox.ReplayAll()
        secgroups = security_group.SecurityGroupList()
        secgroups.objects = []
        for name in ('foo', 'bar'):
            secgroup = security_group.SecurityGroup()
            secgroup.name = name
            secgroups.objects.append(secgroup)
        info_cache = instance_info_cache.InstanceInfoCache()
        info_cache.network_info = network_model.NetworkInfo()
        inst = instance.Instance(context=self.context,
                                 host='foo-host', security_groups=secgroups,
                                 info_cache=info_cache)
        inst.create()

    # destroy() must reflect the DB's deleted/deleted_at back onto the
    # object (times compared after normalization).
    def test_destroy_stubbed(self):
        self.mox.StubOutWithMock(db, 'instance_destroy')
        deleted_at = datetime.datetime(1955, 11, 6)
        fake_inst = fake_instance.fake_db_instance(deleted_at=deleted_at,
                                                   deleted=True)
        db.instance_destroy(self.context, 'fake-uuid',
                            constraint=None).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance(context=self.context, id=1,
                                 uuid='fake-uuid', host='foo')
        inst.destroy()
        self.assertEqual(timeutils.normalize_time(inst.deleted_at),
                         timeutils.normalize_time(deleted_at))
        self.assertTrue(inst.deleted)

    # destroy() against the (fake) DB removes the row entirely.
    def test_destroy(self):
        values = {'user_id': self.context.user_id,
                  'project_id': self.context.project_id}
        db_inst = db.instance_create(self.context, values)
        inst = instance.Instance(context=self.context, id=db_inst['id'],
                                 uuid=db_inst['uuid'])
        inst.destroy()
        self.assertRaises(exception.InstanceNotFound,
                          db.instance_get_by_uuid, self.context,
                          db_inst['uuid'])

    # Clearing 'host' locally makes the destroy host-constraint
    # unsatisfiable, which surfaces as ObjectActionError.
    def test_destroy_host_constraint(self):
        values = {'user_id': self.context.user_id,
                  'project_id': self.context.project_id,
                  'host': 'foo'}
        db_inst = db.instance_create(self.context, values)
        inst = instance.Instance.get_by_uuid(self.context, db_inst['uuid'])
        inst.host = None
        self.assertRaises(exception.ObjectActionError,
                          inst.destroy)

    # Computing inst.name (from the template) must not lazy-load optional
    # attributes such as 'fault'.
    def test_name_does_not_trigger_lazy_loads(self):
        values = {'user_id': self.context.user_id,
                  'project_id':
                      self.context.project_id,
                  'host': 'foo'}
        db_inst = db.instance_create(self.context, values)
        inst = instance.Instance.get_by_uuid(self.context, db_inst['uuid'])
        self.assertFalse(inst.obj_attr_is_set('fault'))
        self.flags(instance_name_template='foo-%(uuid)s')
        self.assertEqual('foo-%s' % db_inst['uuid'], inst.name)
        self.assertFalse(inst.obj_attr_is_set('fault'))

    # _from_db_object() must keep the info_cache object already attached to
    # the instance rather than replacing it with a fresh one.
    def test_from_db_object_not_overwrite_info_cache(self):
        info_cache = instance_info_cache.InstanceInfoCache()
        inst = instance.Instance(context=self.context,
                                 info_cache=info_cache)
        db_inst = fake_instance.fake_db_instance()
        db_inst['info_cache'] = dict(
            test_instance_info_cache.fake_info_cache)
        inst._from_db_object(self.context, inst, db_inst,
                             expected_attrs=['info_cache'])
        self.assertIs(info_cache, inst.info_cache)

    # Backlevelling to version 1.6 coerces the unicode string fields: the
    # unencodable snowman (u'\u2603') becomes '?' on the wire.
    def test_compat_strings(self):
        unicode_attributes = ['user_id', 'project_id', 'image_ref',
                              'kernel_id', 'ramdisk_id', 'hostname',
                              'key_name', 'key_data', 'host', 'node',
                              'user_data', 'availability_zone',
                              'display_name', 'display_description',
                              'launched_on', 'locked_by', 'os_type',
                              'architecture', 'vm_mode', 'root_device_name',
                              'default_ephemeral_device',
                              'default_swap_device', 'config_drive',
                              'cell_name']
        inst = instance.Instance()
        expected = {}
        for key in unicode_attributes:
            inst[key] = u'\u2603'
            expected[key] = '?'
        primitive = inst.obj_to_primitive(target_version='1.6')
        self.assertEqual(expected, primitive['nova_object.data'])
        self.assertEqual('1.6', primitive['nova_object.version'])

    # pci_devices is newer than 1.5, so a 1.5 primitive must omit it.
    def test_compat_pci_devices(self):
        inst = instance.Instance()
        inst.pci_devices = pci_device.PciDeviceList()
        primitive = inst.obj_to_primitive(target_version='1.5')
        self.assertNotIn('pci_devices', primitive)

    # Backlevelling the Instance to 1.9 must also backlevel the embedded
    # InstanceInfoCache to 1.4.
    def test_compat_info_cache(self):
        inst = instance.Instance()
        inst.info_cache = instance_info_cache.InstanceInfoCache()
        primitive = inst.obj_to_primitive(target_version='1.9')
        self.assertEqual(
            '1.4',
            primitive['nova_object.data']['info_cache']['nova_object.version'])

    # expected_attrs=['pci_requests'] must populate pci_requests via
    # InstancePCIRequests.get_by_instance_uuid (mocked here).
    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid')
    def test_get_with_pci_requests(self, mock_get):
        mock_get.return_value = objects.InstancePCIRequests()
        db_instance = db.instance_create(self.context, {
            'user_id': self.context.user_id,
            'project_id': self.context.project_id})
        instance = objects.Instance.get_by_uuid(
            self.context, db_instance['uuid'],
            expected_attrs=['pci_requests'])
        self.assertTrue(instance.obj_attr_is_set('pci_requests'))
        self.assertIsNotNone(instance.pci_requests)

    # Flavor info lives in system_metadata under an optional namespace
    # prefix ('<namespace>_'); namespace=None means the unprefixed flavor.
    def _test_get_flavor(self, namespace):
        prefix = '%s_' % namespace if namespace is not None else ''
        db_inst = db.instance_create(self.context, {
            'user_id': self.context.user_id,
            'project_id': self.context.project_id,
            'system_metadata': flavors.save_flavor_info(
                {}, flavors.get_default_flavor(), prefix)})
        db_flavor = flavors.extract_flavor(db_inst, prefix)
        inst = instance.Instance.get_by_uuid(self.context, db_inst['uuid'])
        flavor = inst.get_flavor(namespace)
        self.assertEqual(db_flavor['flavorid'], flavor.flavorid)

    def test_get_flavor(self):
        self._test_get_flavor(None)
        self._test_get_flavor('foo')

    # set_flavor() must persist the flavor into the DB row's
    # system_metadata under the given namespace prefix.
    def _test_set_flavor(self, namespace):
        prefix = '%s_' % namespace if namespace is not None else ''
        db_inst = db.instance_create(self.context, {
            'user_id': self.context.user_id,
            'project_id': self.context.project_id,
            })
        inst = \
            instance.Instance.get_by_uuid(self.context, db_inst['uuid'])
        db_flavor = flavors.get_default_flavor()
        inst.set_flavor(db_flavor, namespace)
        db_inst = db.instance_get(self.context, db_inst['id'])
        self.assertEqual(db_flavor['flavorid'],
                         flavors.extract_flavor(db_inst, prefix)['flavorid'])

    def test_set_flavor(self):
        self._test_set_flavor(None)
        self._test_set_flavor('foo')

    # delete_flavor() strips the namespaced flavor keys out of the DB row's
    # system_metadata.
    def test_delete_flavor(self):
        namespace = 'foo'
        prefix = '%s_' % namespace
        db_inst = db.instance_create(self.context, {
            'user_id': self.context.user_id,
            'project_id': self.context.project_id,
            'system_metadata': flavors.save_flavor_info(
                {}, flavors.get_default_flavor(), prefix)})
        inst = instance.Instance.get_by_uuid(self.context, db_inst['uuid'])
        inst.delete_flavor(namespace)
        db_inst = db.instance_get(self.context, db_inst['id'])
        self.assertEqual({}, utils.instance_sys_meta(db_inst))

    # A missing/empty namespace is rejected with KeyError.
    def test_delete_flavor_no_namespace_fails(self):
        inst = instance.Instance(system_metadata={})
        self.assertRaises(KeyError, inst.delete_flavor, None)
        self.assertRaises(KeyError, inst.delete_flavor, '')

    # delete_metadata_key() updates the DB and the local dict without
    # leaving the metadata field marked dirty.
    @mock.patch.object(db, 'instance_metadata_delete')
    def test_delete_metadata_key(self, db_delete):
        inst = instance.Instance(context=self.context,
                                 id=1, uuid='fake-uuid')
        inst.metadata = {'foo': '1', 'bar': '2'}
        inst.obj_reset_changes()
        inst.delete_metadata_key('foo')
        self.assertEqual({'bar': '2'}, inst.metadata)
        self.assertEqual({}, inst.obj_get_changes())
        db_delete.assert_called_once_with(self.context, inst.uuid, 'foo')

    # obj_reset_changes(['metadata']) snapshots only that field's baseline,
    # leaving _orig_system_metadata untouched.
    def test_reset_changes(self):
        inst = instance.Instance()
        inst.metadata = {'1985': 'present'}
        inst.system_metadata = {'1955': 'past'}
        self.assertEqual({}, inst._orig_metadata)
        inst.obj_reset_changes(['metadata'])
        self.assertEqual({'1985': 'present'}, inst._orig_metadata)
        self.assertEqual({}, inst._orig_system_metadata)

    # Unknown lazy-load attributes funnel through _load_generic().
    def test_load_generic_calls_handler(self):
        inst = instance.Instance(context=self.context,
                                 uuid='fake-uuid')
        with mock.patch.object(inst, '_load_generic') as mock_load:
            def fake_load(name):
                inst.system_metadata = {}

            mock_load.side_effect = fake_load
            inst.system_metadata
            mock_load.assert_called_once_with('system_metadata')

    # 'fault' has its own dedicated lazy-loader, _load_fault().
    def test_load_fault_calls_handler(self):
        inst = instance.Instance(context=self.context, uuid='fake-uuid')
        with mock.patch.object(inst, '_load_fault') as mock_load:
            def fake_load():
                inst.fault = None

            mock_load.side_effect = fake_load
            inst.fault
            mock_load.assert_called_once_with()

    # _load_generic() re-fetches the instance with just the wanted attr and
    # copies it across without marking the field dirty.
    @mock.patch('nova.objects.Instance.get_by_uuid')
    def test_load_generic(self, mock_get):
        inst2 = instance.Instance(metadata={'foo': 'bar'})
        mock_get.return_value = inst2
        inst = instance.Instance(context=self.context, uuid='fake-uuid')
        inst.metadata
        self.assertEqual({'foo': 'bar'}, inst.metadata)
        mock_get.assert_called_once_with(self.context,
                                         uuid='fake-uuid',
                                         expected_attrs=['metadata'])
        self.assertNotIn('metadata', inst.obj_what_changed())

    # _load_fault() queries the faults table and keeps the object clean.
    @mock.patch('nova.db.instance_fault_get_by_instance_uuids')
    def test_load_fault(self, mock_get):
        fake_fault = test_instance_fault.fake_faults['fake-uuid'][0]
        mock_get.return_value = {'fake': [fake_fault]}
        inst = instance.Instance(context=self.context, uuid='fake')
        fault = inst.fault
        mock_get.assert_called_once_with(self.context, ['fake'])
        self.assertEqual(fake_fault['id'], fault.id)
        self.assertNotIn('metadata', inst.obj_what_changed())

    # pci_requests stored at create() is only loaded when explicitly asked
    # for via expected_attrs.
    def test_get_with_extras(self):
        pci_requests = objects.InstancePCIRequests(requests=[
            objects.InstancePCIRequest(count=123, spec=[])])
        inst = objects.Instance(context=self.context,
                                user_id=self.context.user_id,
                                project_id=self.context.project_id,
                                pci_requests=pci_requests)
        inst.create()
        uuid = inst.uuid
        inst = objects.Instance.get_by_uuid(self.context, uuid)
        self.assertFalse(inst.obj_attr_is_set('pci_requests'))
        inst = objects.Instance.get_by_uuid(
            self.context, uuid, expected_attrs=['pci_requests'])
        self.assertTrue(inst.obj_attr_is_set('pci_requests'))


# Concrete classes: run the shared _TestInstanceObject cases both locally
# and through the remote (RPC) object path.
class TestInstanceObject(test_objects._LocalTest,
                         _TestInstanceObject):
    pass


class TestRemoteInstanceObject(test_objects._RemoteTest,
                               _TestInstanceObject):
    pass


class _TestInstanceListObject(object):
    # Build a canonical fake DB instance row for the list tests; 'updates'
    # overrides individual columns.
    # NOTE(review): the 'id' argument is ignored — stub_instance is always
    # called with id=2; confirm whether that is intentional.
    def fake_instance(self, id, updates=None):
        fake_instance = fakes.stub_instance(id=2,
                                            access_ipv4='1.2.3.4',
                                            access_ipv6='::1')
        fake_instance['scheduled_at'] = None
        fake_instance['terminated_at'] = None
        fake_instance['deleted_at'] = None
        fake_instance['created_at'] = None
        fake_instance['updated_at'] = None
        fake_instance['launched_at'] = (
            fake_instance['launched_at'].replace(
                tzinfo=iso8601.iso8601.Utc(), microsecond=0))
        fake_instance['info_cache'] = {'network_info': '[]',
                                       'instance_uuid':
                                           fake_instance['uuid']}
        fake_instance['security_groups'] = []
        fake_instance['deleted'] = 0
        if updates:
            fake_instance.update(updates)
        return fake_instance

    # A single sort key/dir routes through the legacy non-sorted DB API and
    # hydrates each row into an Instance.
    def test_get_all_by_filters(self):
        fakes = [self.fake_instance(1), self.fake_instance(2)]
        self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
        db.instance_get_all_by_filters(self.context, {'foo': 'bar'}, 'uuid',
                                       'asc', limit=None, marker=None,
                                       columns_to_join=['metadata'],
                                       use_slave=False).AndReturn(fakes)
        self.mox.ReplayAll()
        inst_list = instance.InstanceList.get_by_filters(
            self.context, {'foo': 'bar'}, 'uuid', 'asc',
            expected_attrs=['metadata'], use_slave=False)

        for i in range(0, len(fakes)):
            self.assertIsInstance(inst_list.objects[i], instance.Instance)
            self.assertEqual(inst_list.objects[i].uuid, fakes[i]['uuid'])
        self.assertRemotes()

    # Multiple sort keys/dirs route through instance_get_all_by_filters_sort.
    def test_get_all_by_filters_sorted(self):
        fakes = [self.fake_instance(1), self.fake_instance(2)]
        self.mox.StubOutWithMock(db, 'instance_get_all_by_filters_sort')
        db.instance_get_all_by_filters_sort(self.context, {'foo': 'bar'},
                                            limit=None, marker=None,
                                            columns_to_join=['metadata'],
                                            use_slave=False,
                                            sort_keys=['uuid'],
                                            sort_dirs=['asc']).AndReturn(fakes)
        self.mox.ReplayAll()
        inst_list = instance.InstanceList.get_by_filters(
            self.context, {'foo': 'bar'}, expected_attrs=['metadata'],
            use_slave=False, sort_keys=['uuid'], sort_dirs=['asc'])

        for i in range(0, len(fakes)):
            self.assertIsInstance(inst_list.objects[i],
                                  instance.Instance)
            self.assertEqual(inst_list.objects[i].uuid, fakes[i]['uuid'])
        self.assertRemotes()

    @mock.patch.object(db, 'instance_get_all_by_filters_sort')
    @mock.patch.object(db, 'instance_get_all_by_filters')
    def test_get_all_by_filters_calls_non_sort(self,
                                               mock_get_by_filters,
                                               mock_get_by_filters_sort):
        '''Verifies InstanceList.get_by_filters calls correct DB function.'''
        # Single sort key/direction is set, call non-sorted DB function
        instance.InstanceList.get_by_filters(
            self.context, {'foo': 'bar'}, sort_key='key', sort_dir='dir',
            limit=100, marker='uuid', use_slave=True)
        mock_get_by_filters.assert_called_once_with(
            self.context, {'foo': 'bar'}, 'key', 'dir', limit=100,
            marker='uuid', columns_to_join=None, use_slave=True)
        self.assertEqual(0, mock_get_by_filters_sort.call_count)

    @mock.patch.object(db, 'instance_get_all_by_filters_sort')
    @mock.patch.object(db, 'instance_get_all_by_filters')
    def test_get_all_by_filters_calls_sort(self,
                                           mock_get_by_filters,
                                           mock_get_by_filters_sort):
        '''Verifies InstanceList.get_by_filters calls correct DB function.'''
        # Multiple sort keys/directions are set, call sorted DB function
        instance.InstanceList.get_by_filters(
            self.context, {'foo': 'bar'}, limit=100, marker='uuid',
            use_slave=True, sort_keys=['key1', 'key2'],
            sort_dirs=['dir1', 'dir2'])
        mock_get_by_filters_sort.assert_called_once_with(
            self.context, {'foo': 'bar'}, limit=100,
            marker='uuid', columns_to_join=None, use_slave=True,
            sort_keys=['key1', 'key2'], sort_dirs=['dir1', 'dir2'])
        self.assertEqual(0, mock_get_by_filters.call_count)

    # With read_deleted='yes', cleaned=False must select only rows that are
    # deleted but not yet cleaned.
    def test_get_all_by_filters_works_for_cleaned(self):
        fakes = [self.fake_instance(1),
                 self.fake_instance(2, updates={'deleted': 2,
                                                'cleaned': None})]
        self.context.read_deleted = 'yes'
        self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
        db.instance_get_all_by_filters(self.context,
                                       {'deleted': True, 'cleaned': False},
                                       'uuid', 'asc', limit=None,
                                       marker=None,
                                       columns_to_join=['metadata'],
                                       use_slave=False).AndReturn(
                                           [fakes[1]])
self.mox.ReplayAll() inst_list = instance.InstanceList.get_by_filters( self.context, {'deleted': True, 'cleaned': False}, 'uuid', 'asc', expected_attrs=['metadata'], use_slave=False) self.assertEqual(1, len(inst_list)) self.assertIsInstance(inst_list.objects[0], instance.Instance) self.assertEqual(inst_list.objects[0].uuid, fakes[1]['uuid']) self.assertRemotes() def test_get_by_host(self): fakes = [self.fake_instance(1), self.fake_instance(2)] self.mox.StubOutWithMock(db, 'instance_get_all_by_host') db.instance_get_all_by_host(self.context, 'foo', columns_to_join=None, use_slave=False).AndReturn(fakes) self.mox.ReplayAll() inst_list = instance.InstanceList.get_by_host(self.context, 'foo') for i in range(0, len(fakes)): self.assertIsInstance(inst_list.objects[i], instance.Instance) self.assertEqual(inst_list.objects[i].uuid, fakes[i]['uuid']) self.assertEqual(inst_list.objects[i]._context, self.context) self.assertEqual(inst_list.obj_what_changed(), set()) self.assertRemotes() def test_get_by_host_and_node(self): fakes = [self.fake_instance(1), self.fake_instance(2)] self.mox.StubOutWithMock(db, 'instance_get_all_by_host_and_node') db.instance_get_all_by_host_and_node(self.context, 'foo', 'bar', columns_to_join=None).AndReturn( fakes) self.mox.ReplayAll() inst_list = instance.InstanceList.get_by_host_and_node(self.context, 'foo', 'bar') for i in range(0, len(fakes)): self.assertIsInstance(inst_list.objects[i], instance.Instance) self.assertEqual(inst_list.objects[i].uuid, fakes[i]['uuid']) self.assertRemotes() def test_get_by_host_and_not_type(self): fakes = [self.fake_instance(1), self.fake_instance(2)] self.mox.StubOutWithMock(db, 'instance_get_all_by_host_and_not_type') db.instance_get_all_by_host_and_not_type(self.context, 'foo', type_id='bar').AndReturn( fakes) self.mox.ReplayAll() inst_list = instance.InstanceList.get_by_host_and_not_type( self.context, 'foo', 'bar') for i in range(0, len(fakes)): self.assertIsInstance(inst_list.objects[i], instance.Instance) 
self.assertEqual(inst_list.objects[i].uuid, fakes[i]['uuid']) self.assertRemotes() def test_get_hung_in_rebooting(self): fakes = [self.fake_instance(1), self.fake_instance(2)] dt = timeutils.isotime() self.mox.StubOutWithMock(db, 'instance_get_all_hung_in_rebooting') db.instance_get_all_hung_in_rebooting(self.context, dt).AndReturn( fakes) self.mox.ReplayAll() inst_list = instance.InstanceList.get_hung_in_rebooting(self.context, dt) for i in range(0, len(fakes)): self.assertIsInstance(inst_list.objects[i], instance.Instance) self.assertEqual(inst_list.objects[i].uuid, fakes[i]['uuid']) self.assertRemotes() def test_get_active_by_window_joined(self): fakes = [self.fake_instance(1), self.fake_instance(2)] # NOTE(mriedem): Send in a timezone-naive datetime since the # InstanceList.get_active_by_window_joined method should convert it # to tz-aware for the DB API call, which we'll assert with our stub. dt = timeutils.utcnow() def fake_instance_get_active_by_window_joined(context, begin, end, project_id, host, columns_to_join): # make sure begin is tz-aware self.assertIsNotNone(begin.utcoffset()) self.assertIsNone(end) self.assertEqual(['metadata'], columns_to_join) return fakes with mock.patch.object(db, 'instance_get_active_by_window_joined', fake_instance_get_active_by_window_joined): inst_list = instance.InstanceList.get_active_by_window_joined( self.context, dt, expected_attrs=['metadata']) for fake, obj in zip(fakes, inst_list.objects): self.assertIsInstance(obj, instance.Instance) self.assertEqual(obj.uuid, fake['uuid']) self.assertRemotes() def test_with_fault(self): fake_insts = [ fake_instance.fake_db_instance(uuid='fake-uuid', host='host'), fake_instance.fake_db_instance(uuid='fake-inst2', host='host'), ] fake_faults = test_instance_fault.fake_faults self.mox.StubOutWithMock(db, 'instance_get_all_by_host') self.mox.StubOutWithMock(db, 'instance_fault_get_by_instance_uuids') db.instance_get_all_by_host(self.context, 'host', columns_to_join=[], use_slave=False 
).AndReturn(fake_insts) db.instance_fault_get_by_instance_uuids( self.context, [x['uuid'] for x in fake_insts] ).AndReturn(fake_faults) self.mox.ReplayAll() instances = instance.InstanceList.get_by_host(self.context, 'host', expected_attrs=['fault'], use_slave=False) self.assertEqual(2, len(instances)) self.assertEqual(fake_faults['fake-uuid'][0], dict(instances[0].fault.iteritems())) self.assertIsNone(instances[1].fault) def test_fill_faults(self): self.mox.StubOutWithMock(db, 'instance_fault_get_by_instance_uuids') inst1 = instance.Instance(uuid='uuid1') inst2 = instance.Instance(uuid='uuid2') insts = [inst1, inst2] for inst in insts: inst.obj_reset_changes() db_faults = { 'uuid1': [{'id': 123, 'instance_uuid': 'uuid1', 'code': 456, 'message': 'Fake message', 'details': 'No details', 'host': 'foo', 'deleted': False, 'deleted_at': None, 'updated_at': None, 'created_at': None, } ]} db.instance_fault_get_by_instance_uuids(self.context, [x.uuid for x in insts], ).AndReturn(db_faults) self.mox.ReplayAll() inst_list = instance.InstanceList() inst_list._context = self.context inst_list.objects = insts faulty = inst_list.fill_faults() self.assertEqual(faulty, ['uuid1']) self.assertEqual(inst_list[0].fault.message, db_faults['uuid1'][0]['message']) self.assertIsNone(inst_list[1].fault) for inst in inst_list: self.assertEqual(inst.obj_what_changed(), set()) def test_get_by_security_group(self): fake_secgroup = dict(test_security_group.fake_secgroup) fake_secgroup['instances'] = [ fake_instance.fake_db_instance(id=1, system_metadata={'foo': 'bar'}), fake_instance.fake_db_instance(id=2), ] with mock.patch.object(db, 'security_group_get') as sgg: sgg.return_value = fake_secgroup secgroup = security_group.SecurityGroup() secgroup.id = fake_secgroup['id'] instances = instance.InstanceList.get_by_security_group( self.context, secgroup) self.assertEqual(2, len(instances)) self.assertEqual([1, 2], [x.id for x in instances]) 
self.assertTrue(instances[0].obj_attr_is_set('system_metadata')) self.assertEqual({'foo': 'bar'}, instances[0].system_metadata) class TestInstanceListObject(test_objects._LocalTest, _TestInstanceListObject): pass class TestRemoteInstanceListObject(test_objects._RemoteTest, _TestInstanceListObject): pass class TestInstanceObjectMisc(test.NoDBTestCase): def test_expected_cols(self): self.stubs.Set(instance, '_INSTANCE_OPTIONAL_JOINED_FIELDS', ['bar']) self.assertEqual(['bar'], instance._expected_cols(['foo', 'bar'])) self.assertIsNone(instance._expected_cols(None)) def test_expected_cols_extra(self): self.assertEqual(['metadata', 'extra', 'extra.numa_topology'], instance._expected_cols(['metadata', 'numa_topology']))
apache-2.0
rossoldfield/kms
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py
12980
5411
# SchedGui.py - Python extension for perf script, basic GUI code for # traces drawing and overview. # # Copyright (C) 2010 by Frederic Weisbecker <fweisbec@gmail.com> # # This software is distributed under the terms of the GNU General # Public License ("GPL") version 2 as published by the Free Software # Foundation. try: import wx except ImportError: raise ImportError, "You need to install the wxpython lib for this script" class RootFrame(wx.Frame): Y_OFFSET = 100 RECT_HEIGHT = 100 RECT_SPACE = 50 EVENT_MARKING_WIDTH = 5 def __init__(self, sched_tracer, title, parent = None, id = -1): wx.Frame.__init__(self, parent, id, title) (self.screen_width, self.screen_height) = wx.GetDisplaySize() self.screen_width -= 10 self.screen_height -= 10 self.zoom = 0.5 self.scroll_scale = 20 self.sched_tracer = sched_tracer self.sched_tracer.set_root_win(self) (self.ts_start, self.ts_end) = sched_tracer.interval() self.update_width_virtual() self.nr_rects = sched_tracer.nr_rectangles() + 1 self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)) # whole window panel self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height)) # scrollable container self.scroll = wx.ScrolledWindow(self.panel) self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale) self.scroll.EnableScrolling(True, True) self.scroll.SetFocus() # scrollable drawing area self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2)) self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint) self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press) self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down) self.scroll.Bind(wx.EVT_PAINT, self.on_paint) self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press) self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down) self.scroll.Fit() self.Fit() self.scroll_panel.SetDimensions(-1, -1, 
self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING) self.txt = None self.Show(True) def us_to_px(self, val): return val / (10 ** 3) * self.zoom def px_to_us(self, val): return (val / self.zoom) * (10 ** 3) def scroll_start(self): (x, y) = self.scroll.GetViewStart() return (x * self.scroll_scale, y * self.scroll_scale) def scroll_start_us(self): (x, y) = self.scroll_start() return self.px_to_us(x) def paint_rectangle_zone(self, nr, color, top_color, start, end): offset_px = self.us_to_px(start - self.ts_start) width_px = self.us_to_px(end - self.ts_start) offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)) width_py = RootFrame.RECT_HEIGHT dc = self.dc if top_color is not None: (r, g, b) = top_color top_color = wx.Colour(r, g, b) brush = wx.Brush(top_color, wx.SOLID) dc.SetBrush(brush) dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH) width_py -= RootFrame.EVENT_MARKING_WIDTH offset_py += RootFrame.EVENT_MARKING_WIDTH (r ,g, b) = color color = wx.Colour(r, g, b) brush = wx.Brush(color, wx.SOLID) dc.SetBrush(brush) dc.DrawRectangle(offset_px, offset_py, width_px, width_py) def update_rectangles(self, dc, start, end): start += self.ts_start end += self.ts_start self.sched_tracer.fill_zone(start, end) def on_paint(self, event): dc = wx.PaintDC(self.scroll_panel) self.dc = dc width = min(self.width_virtual, self.screen_width) (x, y) = self.scroll_start() start = self.px_to_us(x) end = self.px_to_us(x + width) self.update_rectangles(dc, start, end) def rect_from_ypixel(self, y): y -= RootFrame.Y_OFFSET rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE) height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE) if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT: return -1 return rect def update_summary(self, txt): if self.txt: self.txt.Destroy() self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50)) def on_mouse_down(self, event): (x, y) = 
event.GetPositionTuple() rect = self.rect_from_ypixel(y) if rect == -1: return t = self.px_to_us(x) + self.ts_start self.sched_tracer.mouse_down(rect, t) def update_width_virtual(self): self.width_virtual = self.us_to_px(self.ts_end - self.ts_start) def __zoom(self, x): self.update_width_virtual() (xpos, ypos) = self.scroll.GetViewStart() xpos = self.us_to_px(x) / self.scroll_scale self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos) self.Refresh() def zoom_in(self): x = self.scroll_start_us() self.zoom *= 2 self.__zoom(x) def zoom_out(self): x = self.scroll_start_us() self.zoom /= 2 self.__zoom(x) def on_key_press(self, event): key = event.GetRawKeyCode() if key == ord("+"): self.zoom_in() return if key == ord("-"): self.zoom_out() return key = event.GetKeyCode() (x, y) = self.scroll.GetViewStart() if key == wx.WXK_RIGHT: self.scroll.Scroll(x + 1, y) elif key == wx.WXK_LEFT: self.scroll.Scroll(x - 1, y) elif key == wx.WXK_DOWN: self.scroll.Scroll(x, y + 1) elif key == wx.WXK_UP: self.scroll.Scroll(x, y - 1)
gpl-2.0
sjsucohort6/openstack
python/venv/lib/python2.7/site-packages/neutronclient/neutron/v2_0/network.py
2
6507
# Copyright 2012 OpenStack Foundation. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # import argparse from neutronclient.common import exceptions from neutronclient.common import utils from neutronclient.i18n import _ from neutronclient.neutron import v2_0 as neutronV20 from neutronclient.neutron.v2_0.qos import policy as qos_policy def _format_subnets(network): try: return '\n'.join([' '.join([s['id'], s.get('cidr', '')]) for s in network['subnets']]) except (TypeError, KeyError): return '' class ListNetwork(neutronV20.ListCommand): """List networks that belong to a given tenant.""" # Length of a query filter on subnet id # id=<uuid>& (with len(uuid)=36) subnet_id_filter_len = 40 resource = 'network' _formatters = {'subnets': _format_subnets, } list_columns = ['id', 'name', 'subnets'] pagination_support = True sorting_support = True def extend_list(self, data, parsed_args): """Add subnet information to a network list.""" neutron_client = self.get_client() search_opts = {'fields': ['id', 'cidr']} if self.pagination_support: page_size = parsed_args.page_size if page_size: search_opts.update({'limit': page_size}) subnet_ids = [] for n in data: if 'subnets' in n: subnet_ids.extend(n['subnets']) def _get_subnet_list(sub_ids): search_opts['id'] = sub_ids return neutron_client.list_subnets( **search_opts).get('subnets', []) try: subnets = _get_subnet_list(subnet_ids) except exceptions.RequestURITooLong as uri_len_exc: # The URI is too long because of too many 
subnet_id filters # Use the excess attribute of the exception to know how many # subnet_id filters can be inserted into a single request subnet_count = len(subnet_ids) max_size = ((self.subnet_id_filter_len * subnet_count) - uri_len_exc.excess) chunk_size = max_size // self.subnet_id_filter_len subnets = [] for i in range(0, subnet_count, chunk_size): subnets.extend( _get_subnet_list(subnet_ids[i: i + chunk_size])) subnet_dict = dict([(s['id'], s) for s in subnets]) for n in data: if 'subnets' in n: n['subnets'] = [(subnet_dict.get(s) or {"id": s}) for s in n['subnets']] class ListExternalNetwork(ListNetwork): """List external networks that belong to a given tenant.""" pagination_support = True sorting_support = True def retrieve_list(self, parsed_args): external = '--router:external=True' if external not in self.values_specs: self.values_specs.append('--router:external=True') return super(ListExternalNetwork, self).retrieve_list(parsed_args) class ShowNetwork(neutronV20.ShowCommand): """Show information of a given network.""" resource = 'network' class CreateNetwork(neutronV20.CreateCommand, qos_policy.CreateQosPolicyMixin): """Create a network for a given tenant.""" resource = 'network' def add_known_arguments(self, parser): parser.add_argument( '--admin-state-down', dest='admin_state', action='store_false', help=_('Set admin state up to false.')) parser.add_argument( '--admin_state_down', dest='admin_state', action='store_false', help=argparse.SUPPRESS) parser.add_argument( '--shared', action='store_true', help=_('Set the network as shared.'), default=argparse.SUPPRESS) parser.add_argument( '--provider:network_type', metavar='<network_type>', help=_('The physical mechanism by which the virtual network' ' is implemented.')) parser.add_argument( '--provider:physical_network', metavar='<physical_network_name>', help=_('Name of the physical network over which the virtual' ' network is implemented.')) parser.add_argument( '--provider:segmentation_id', 
metavar='<segmentation_id>', help=_('VLAN ID for VLAN networks or tunnel-id for GRE/VXLAN' ' networks.')) utils.add_boolean_argument( parser, '--vlan-transparent', default=argparse.SUPPRESS, help=_('Create a vlan transparent network.')) parser.add_argument( 'name', metavar='NAME', help=_('Name of network to create.')) self.add_arguments_qos_policy(parser) def args2body(self, parsed_args): body = {'network': { 'name': parsed_args.name, 'admin_state_up': parsed_args.admin_state}, } neutronV20.update_dict(parsed_args, body['network'], ['shared', 'tenant_id', 'vlan_transparent', 'provider:network_type', 'provider:physical_network', 'provider:segmentation_id']) self.args2body_qos_policy(parsed_args, body['network']) return body class DeleteNetwork(neutronV20.DeleteCommand): """Delete a given network.""" resource = 'network' class UpdateNetwork(neutronV20.UpdateCommand, qos_policy.UpdateQosPolicyMixin): """Update network's information.""" resource = 'network' def add_known_arguments(self, parser): self.add_arguments_qos_policy(parser) def args2body(self, parsed_args): body = {'network': {}} self.args2body_qos_policy(parsed_args, body['network']) return body
mit
msabramo/ansible
lib/ansible/modules/cloud/ovirt/ovirt_vms.py
26
46681
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (c) 2016 Red Hat, Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: ovirt_vms short_description: "Module to manage Virtual Machines in oVirt" version_added: "2.2" author: "Ondra Machacek (@machacekondra)" description: - "This module manages whole lifecycle of the Virtual Machine(VM) in oVirt. Since VM can hold many states in oVirt, this see notes to see how the states of the VM are handled." options: name: description: - "Name of the the Virtual Machine to manage. If VM don't exists C(name) is required. Otherwise C(id) or C(name) can be used." id: description: - "ID of the the Virtual Machine to manage." state: description: - "Should the Virtual Machine be running/stopped/present/absent/suspended/next_run." - "I(present) and I(running) are equal states." - "I(next_run) state updates the VM and if the VM has next run configuration it will be rebooted." - "Please check I(notes) to more detailed description of states." choices: ['running', 'stopped', 'present', 'absent', 'suspended', 'next_run'] default: present cluster: description: - "Name of the cluster, where Virtual Machine should be created. Required if creating VM." 
template: description: - "Name of the template, which should be used to create Virtual Machine. Required if creating VM." - "If template is not specified and VM doesn't exist, VM will be created from I(Blank) template." template_version: description: - "Version number of the template to be used for VM." - "By default the latest available version of the template is used." version_added: "2.3" use_latest_template_version: description: - "Specify if latest template version should be used, when running a stateless VM." - "If this parameter is set to I(true) stateless VM is created." version_added: "2.3" memory: description: - "Amount of memory of the Virtual Machine. Prefix uses IEC 60027-2 standard (for example 1GiB, 1024MiB)." - "Default value is set by engine." memory_guaranteed: description: - "Amount of minimal guaranteed memory of the Virtual Machine. Prefix uses IEC 60027-2 standard (for example 1GiB, 1024MiB)." - "C(memory_guaranteed) parameter can't be lower than C(memory) parameter. Default value is set by engine." cpu_shares: description: - "Set a CPU shares for this Virtual Machine. Default value is set by oVirt engine." cpu_cores: description: - "Number of virtual CPUs cores of the Virtual Machine. Default value is set by oVirt engine." cpu_sockets: description: - "Number of virtual CPUs sockets of the Virtual Machine. Default value is set by oVirt engine." type: description: - "Type of the Virtual Machine. Default value is set by oVirt engine." choices: [server, desktop] operating_system: description: - "Operating system of the Virtual Machine. Default value is set by oVirt engine." 
choices: [ rhel_6_ppc64, other, freebsd, windows_2003x64, windows_10, rhel_6x64, rhel_4x64, windows_2008x64, windows_2008R2x64, debian_7, windows_2012x64, ubuntu_14_04, ubuntu_12_04, ubuntu_13_10, windows_8x64, other_linux_ppc64, windows_2003, other_linux, windows_10x64, windows_2008, rhel_3, rhel_5, rhel_4, other_ppc64, sles_11, rhel_6, windows_xp, rhel_7x64, freebsdx64, rhel_7_ppc64, windows_7, rhel_5x64, ubuntu_14_04_ppc64, sles_11_ppc64, windows_8, windows_2012R2x64, windows_2008r2x64, ubuntu_13_04, ubuntu_12_10, windows_7x64 ] boot_devices: description: - "List of boot devices which should be used to boot. Choices I(network), I(hd) and I(cdrom)." - "For example: ['cdrom', 'hd']. Default value is set by oVirt engine." host: description: - "Specify host where Virtual Machine should be running. By default the host is chosen by engine scheduler." - "This parameter is used only when C(state) is I(running) or I(present)." high_availability: description: - "If I(True) Virtual Machine will be set as highly available." - "If I(False) Virtual Machine won't be set as highly available." - "If no value is passed, default value is set by oVirt engine." delete_protected: description: - "If I(True) Virtual Machine will be set as delete protected." - "If I(False) Virtual Machine won't be set as delete protected." - "If no value is passed, default value is set by oVirt engine." stateless: description: - "If I(True) Virtual Machine will be set as stateless." - "If I(False) Virtual Machine will be unset as stateless." - "If no value is passed, default value is set by oVirt engine." clone: description: - "If I(True) then the disks of the created virtual machine will be cloned and independent of the template." - "This parameter is used only when C(state) is I(running) or I(present) and VM didn't exist before." 
default: False clone_permissions: description: - "If I(True) then the permissions of the template (only the direct ones, not the inherited ones) will be copied to the created virtual machine." - "This parameter is used only when C(state) is I(running) or I(present) and VM didn't exist before." default: False cd_iso: description: - "ISO file from ISO storage domain which should be attached to Virtual Machine." - "If you pass empty string the CD will be ejected from VM." - "If used with C(state) I(running) or I(present) and VM is running the CD will be attached to VM." - "If used with C(state) I(running) or I(present) and VM is down the CD will be attached to VM persistently." force: description: - "Please check to I(Synopsis) to more detailed description of force parameter, it can behave differently in different situations." default: False nics: description: - "List of NICs, which should be attached to Virtual Machine. NIC is described by following dictionary:" - "C(name) - Name of the NIC." - "C(profile_name) - Profile name where NIC should be attached." - "C(interface) - Type of the network interface. One of following: I(virtio), I(e1000), I(rtl8139), default is I(virtio)." - "C(mac_address) - Custom MAC address of the network interface, by default it's obtained from MAC pool." - "C(Note:)" - "This parameter is used only when C(state) is I(running) or I(present) and is able to only create NICs. To manage NICs of the VM in more depth please use M(ovirt_nics) module instead." disks: description: - "List of disks, which should be attached to Virtual Machine. Disk is described by following dictionary:" - "C(name) - Name of the disk. Either C(name) or C(id) is reuqired." - "C(id) - ID of the disk. Either C(name) or C(id) is reuqired." - "C(interface) - Interface of the disk, either I(virtio) or I(IDE), default is I(virtio)." - "C(bootable) - I(True) if the disk should be bootable, default is non bootable." 
- "C(activate) - I(True) if the disk should be activated, default is activated." - "C(Note:)" - "This parameter is used only when C(state) is I(running) or I(present) and is able to only attach disks. To manage disks of the VM in more depth please use M(ovirt_disks) module instead." sysprep: description: - "Dictionary with values for Windows Virtual Machine initialization using sysprep:" - "C(host_name) - Hostname to be set to Virtual Machine when deployed." - "C(active_directory_ou) - Active Directory Organizational Unit, to be used for login of user." - "C(org_name) - Organization name to be set to Windows Virtual Machine." - "C(domain) - Domain to be set to Windows Virtual Machine." - "C(timezone) - Timezone to be set to Windows Virtual Machine." - "C(ui_language) - UI language of the Windows Virtual Machine." - "C(system_locale) - System localization of the Windows Virtual Machine." - "C(input_locale) - Input localization of the Windows Virtual Machine." - "C(windows_license_key) - License key to be set to Windows Virtual Machine." - "C(user_name) - Username to be used for set password to Windows Virtual Machine." - "C(root_password) - Password to be set for username to Windows Virtual Machine." cloud_init: description: - "Dictionary with values for Unix-like Virtual Machine initialization using cloud init:" - "C(host_name) - Hostname to be set to Virtual Machine when deployed." - "C(timezone) - Timezone to be set to Virtual Machine when deployed." - "C(user_name) - Username to be used to set password to Virtual Machine when deployed." - "C(root_password) - Password to be set for user specified by C(user_name) parameter." - "C(authorized_ssh_keys) - Use this SSH keys to login to Virtual Machine." - "C(regenerate_ssh_keys) - If I(True) SSH keys will be regenerated on Virtual Machine." - "C(custom_script) - Cloud-init script which will be executed on Virtual Machine when deployed." - "C(dns_servers) - DNS servers to be configured on Virtual Machine." 
- "C(dns_search) - DNS search domains to be configured on Virtual Machine." - "C(nic_boot_protocol) - Set boot protocol of the network interface of Virtual Machine. Can be one of none, dhcp or static." - "C(nic_ip_address) - If boot protocol is static, set this IP address to network interface of Virtual Machine." - "C(nic_netmask) - If boot protocol is static, set this netmask to network interface of Virtual Machine." - "C(nic_gateway) - If boot protocol is static, set this gateway to network interface of Virtual Machine." - "C(nic_name) - Set name to network interface of Virtual Machine." - "C(nic_on_boot) - If I(True) network interface will be set to start on boot." cloud_init_nics: description: - "List of dictionaries representing network interafaces to be setup by cloud init." - "This option is used, when user needs to setup more network interfaces via cloud init." - "If one network interface is enough, user should use C(cloud_init) I(nic_*) parameters. C(cloud_init) I(nic_*) parameters are merged with C(cloud_init_nics) parameters." - "Dictionary can contain following values:" - "C(nic_boot_protocol) - Set boot protocol of the network interface of Virtual Machine. Can be one of none, dhcp or static." - "C(nic_ip_address) - If boot protocol is static, set this IP address to network interface of Virtual Machine." - "C(nic_netmask) - If boot protocol is static, set this netmask to network interface of Virtual Machine." - "C(nic_gateway) - If boot protocol is static, set this gateway to network interface of Virtual Machine." - "C(nic_name) - Set name to network interface of Virtual Machine." - "C(nic_on_boot) - If I(True) network interface will be set to start on boot." version_added: "2.3" kernel_path: description: - "Path to a kernel image used to boot the virtual machine." - "Kernel image must be stored on either the ISO domain or on the host's storage." 
version_added: "2.3" initrd_path: description: - "Path to an initial ramdisk to be used with the kernel specified by C(kernel_path) option." - "Ramdisk image must be stored on either the ISO domain or on the host's storage." version_added: "2.3" kernel_params: description: - "Kernel command line parameters (formatted as string) to be used with the kernel specified by C(kernel_path) option." version_added: "2.3" instance_type: description: - "Name of virtual machine's hardware configuration." - "By default no instance type is used." version_added: "2.3" description: description: - "Description of the Virtual Machine." version_added: "2.3" comment: description: - "Comment of the Virtual Machine." version_added: "2.3" timezone: description: - "Sets time zone offset of the guest hardware clock." - "For example: Etc/GMT" version_added: "2.3" serial_policy: description: - "Specify a serial number policy for the Virtual Machine." - "Following options are supported:" - "C(vm) - Sets the Virtual Machine's UUID as its serial number." - "C(host) - Sets the host's UUID as the Virtual Machine's serial number." - "C(custom) - Allows you to specify a custom serial number in C(serial_policy_value)." version_added: "2.3" serial_policy_value: description: - "Allows you to specify a custom serial number." - "This parameter is used only when C(serial_policy) is I(custom)." version_added: "2.3" notes: - "If VM is in I(UNASSIGNED) or I(UNKNOWN) state before any operation, the module will fail. If VM is in I(IMAGE_LOCKED) state before any operation, we try to wait for VM to be I(DOWN). If VM is in I(SAVING_STATE) state before any operation, we try to wait for VM to be I(SUSPENDED). If VM is in I(POWERING_DOWN) state before any operation, we try to wait for VM to be I(UP) or I(DOWN). VM can get into I(UP) state from I(POWERING_DOWN) state, when there is no ACPI or guest agent running inside VM, or if the shutdown operation fails. 
When user specify I(UP) C(state), we always wait to VM to be in I(UP) state in case VM is I(MIGRATING), I(REBOOTING), I(POWERING_UP), I(RESTORING_STATE), I(WAIT_FOR_LAUNCH). In other states we run start operation on VM. When user specify I(stopped) C(state), and If user pass C(force) parameter set to I(true) we forcibly stop the VM in any state. If user don't pass C(force) parameter, we always wait to VM to be in UP state in case VM is I(MIGRATING), I(REBOOTING), I(POWERING_UP), I(RESTORING_STATE), I(WAIT_FOR_LAUNCH). If VM is in I(PAUSED) or I(SUSPENDED) state, we start the VM. Then we gracefully shutdown the VM. When user specify I(suspended) C(state), we always wait to VM to be in UP state in case VM is I(MIGRATING), I(REBOOTING), I(POWERING_UP), I(RESTORING_STATE), I(WAIT_FOR_LAUNCH). If VM is in I(PAUSED) or I(DOWN) state, we start the VM. Then we suspend the VM. When user specify I(absent) C(state), we forcibly stop the VM in any state and remove it." extends_documentation_fragment: ovirt ''' EXAMPLES = ''' # Examples don't contain auth parameter for simplicity, # look at ovirt_auth module to see how to reuse authentication: # Creates a new Virtual Machine from template named 'rhel7_template' ovirt_vms: state: present name: myvm template: rhel7_template # Creates a stateless VM which will always use latest template version: ovirt_vms: name: myvm template: rhel7 cluster: mycluster use_latest_template_version: true # Creates a new server rhel7 Virtual Machine from Blank template # on brq01 cluster with 2GiB memory and 2 vcpu cores/sockets # and attach bootable disk with name rhel7_disk and attach virtio NIC ovirt_vms: state: present cluster: brq01 name: myvm memory: 2GiB cpu_cores: 2 cpu_sockets: 2 cpu_shares: 1024 type: server operating_system: rhel_7x64 disks: - name: rhel7_disk bootable: True nics: - name: nic1 # Run VM with cloud init: ovirt_vms: name: rhel7 template: rhel7 cluster: Default memory: 1GiB high_availability: true cloud_init: nic_boot_protocol: 
static nic_ip_address: 10.34.60.86 nic_netmask: 255.255.252.0 nic_gateway: 10.34.63.254 nic_name: eth1 nic_on_boot: true host_name: example.com custom_script: | write_files: - content: | Hello, world! path: /tmp/greeting.txt permissions: '0644' user_name: root root_password: super_password # Run VM with cloud init, with multiple network interfaces: ovirt_vms: name: rhel7_4 template: rhel7 cluster: mycluster cloud_init_nics: - nic_name: eth0 nic_boot_protocol: dhcp nic_on_boot: true - nic_name: eth1 nic_boot_protocol: static nic_ip_address: 10.34.60.86 nic_netmask: 255.255.252.0 nic_gateway: 10.34.63.254 nic_on_boot: true # Run VM with sysprep: ovirt_vms: name: windows2012R2_AD template: windows2012R2 cluster: Default memory: 3GiB high_availability: true sysprep: host_name: windowsad.example.com user_name: Administrator root_password: SuperPassword123 # Migrate/Run VM to/on host named 'host1' ovirt_vms: state: running name: myvm host: host1 # Change Vm's CD: ovirt_vms: name: myvm cd_iso: drivers.iso # Eject Vm's CD: ovirt_vms: name: myvm cd_iso: '' # Boot VM from CD: ovirt_vms: name: myvm cd_iso: centos7_x64.iso boot_devices: - cdrom # Stop vm: ovirt_vms: state: stopped name: myvm # Upgrade memory to already created VM: ovirt_vms: name: myvm memory: 4GiB # Hot plug memory to already created and running VM: # (VM won't be restarted) ovirt_vms: name: myvm memory: 4GiB # When change on the VM needs restart of the VM, use next_run state, # The VM will be updated and rebooted if there are any changes. # If present state would be used, VM won't be restarted. ovirt_vms: state: next_run name: myvm boot_devices: - network # Remove VM, if VM is running it will be stopped: ovirt_vms: state: absent name: myvm ''' RETURN = ''' id: description: ID of the VM which is managed returned: On success if VM is found. type: str sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c vm: description: "Dictionary of all the VM attributes. 
VM attributes can be found on your oVirt instance at following url: https://ovirt.example.com/ovirt-engine/api/model#types/vm." returned: On success if VM is found. ''' import traceback try: import ovirtsdk4.types as otypes except ImportError: pass from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.ovirt import ( BaseModule, check_params, check_sdk, convert_to_bytes, create_connection, equal, get_entity, get_link_name, get_id_by_name, ovirt_full_argument_spec, search_by_name, wait, ) class VmsModule(BaseModule): def __get_template_with_version(self): """ oVirt in version 4.1 doesn't support search by template+version_number, so we need to list all templates with specific name and then iterate through it's version until we find the version we look for. """ template = None if self.param('template'): templates_service = self._connection.system_service().templates_service() templates = templates_service.list(search='name=%s' % self.param('template')) if self.param('template_version'): templates = [ t for t in templates if t.version.version_number == self.param('template_version') ] if templates: template = templates[0] return template def build_entity(self): template = self.__get_template_with_version() return otypes.Vm( name=self.param('name'), cluster=otypes.Cluster( name=self.param('cluster') ) if self.param('cluster') else None, template=otypes.Template( id=template.id, ) if template else None, use_latest_template_version=self.param('use_latest_template_version'), stateless=self.param('stateless') or self.param('use_latest_template_version'), delete_protected=self.param('delete_protected'), high_availability=otypes.HighAvailability( enabled=self.param('high_availability') ) if self.param('high_availability') is not None else None, cpu=otypes.Cpu( topology=otypes.CpuTopology( cores=self.param('cpu_cores'), sockets=self.param('cpu_sockets'), ) ) if ( self.param('cpu_cores') or self.param('cpu_sockets') ) else None, 
cpu_shares=self.param('cpu_shares'), os=otypes.OperatingSystem( type=self.param('operating_system'), boot=otypes.Boot( devices=[ otypes.BootDevice(dev) for dev in self.param('boot_devices') ], ) if self.param('boot_devices') else None, ) if ( self.param('operating_system') or self.param('boot_devices') ) else None, type=otypes.VmType( self.param('type') ) if self.param('type') else None, memory=convert_to_bytes( self.param('memory') ) if self.param('memory') else None, memory_policy=otypes.MemoryPolicy( guaranteed=convert_to_bytes(self.param('memory_guaranteed')), ) if self.param('memory_guaranteed') else None, instance_type=otypes.InstanceType( id=get_id_by_name( self._connection.system_service().instance_types_service(), self.param('instance_type'), ), ) if self.param('instance_type') else None, description=self.param('description'), comment=self.param('comment'), time_zone=otypes.TimeZone( name=self.param('timezone'), ) if self.param('timezone') else None, serial_number=otypes.SerialNumber( policy=otypes.SerialNumberPolicy(self.param('serial_policy')), value=self.param('serial_policy_value'), ) if ( self.param('serial_policy') is not None or self.param('serial_policy_value') is not None ) else None, ) def update_check(self, entity): return ( equal(self.param('cluster'), get_link_name(self._connection, entity.cluster)) and equal(convert_to_bytes(self.param('memory')), entity.memory) and equal(convert_to_bytes(self.param('memory_guaranteed')), entity.memory_policy.guaranteed) and equal(self.param('cpu_cores'), entity.cpu.topology.cores) and equal(self.param('cpu_sockets'), entity.cpu.topology.sockets) and equal(self.param('type'), str(entity.type)) and equal(self.param('operating_system'), str(entity.os.type)) and equal(self.param('high_availability'), entity.high_availability.enabled) and equal(self.param('stateless'), entity.stateless) and equal(self.param('cpu_shares'), entity.cpu_shares) and equal(self.param('delete_protected'), entity.delete_protected) and 
equal(self.param('use_latest_template_version'), entity.use_latest_template_version) and equal(self.param('boot_devices'), [str(dev) for dev in getattr(entity.os, 'devices', [])]) and equal(self.param('instance_type'), get_link_name(self._connection, entity.instance_type), ignore_case=True) and equal(self.param('description'), entity.description) and equal(self.param('comment'), entity.comment) and equal(self.param('timezone'), entity.time_zone.name) and equal(self.param('serial_policy'), str(getattr(entity.serial_number, 'policy', None))) and equal(self.param('serial_policy_value'), getattr(entity.serial_number, 'value', None)) ) def pre_create(self, entity): # If VM don't exists, and template is not specified, set it to Blank: if entity is None: if self.param('template') is None: self._module.params['template'] = 'Blank' def post_update(self, entity): self.post_create(entity) def post_create(self, entity): # After creation of the VM, attach disks and NICs: self.changed = self.__attach_disks(entity) self.changed = self.__attach_nics(entity) def pre_remove(self, entity): # Forcibly stop the VM, if it's not in DOWN state: if entity.status != otypes.VmStatus.DOWN: if not self._module.check_mode: self.changed = self.action( action='stop', action_condition=lambda vm: vm.status != otypes.VmStatus.DOWN, wait_condition=lambda vm: vm.status == otypes.VmStatus.DOWN, )['changed'] def __suspend_shutdown_common(self, vm_service): if vm_service.get().status in [ otypes.VmStatus.MIGRATING, otypes.VmStatus.POWERING_UP, otypes.VmStatus.REBOOT_IN_PROGRESS, otypes.VmStatus.WAIT_FOR_LAUNCH, otypes.VmStatus.UP, otypes.VmStatus.RESTORING_STATE, ]: self._wait_for_UP(vm_service) def _pre_shutdown_action(self, entity): vm_service = self._service.vm_service(entity.id) self.__suspend_shutdown_common(vm_service) if entity.status in [otypes.VmStatus.SUSPENDED, otypes.VmStatus.PAUSED]: vm_service.start() self._wait_for_UP(vm_service) return vm_service.get() def _pre_suspend_action(self, 
entity): vm_service = self._service.vm_service(entity.id) self.__suspend_shutdown_common(vm_service) if entity.status in [otypes.VmStatus.PAUSED, otypes.VmStatus.DOWN]: vm_service.start() self._wait_for_UP(vm_service) return vm_service.get() def _post_start_action(self, entity): vm_service = self._service.service(entity.id) self._wait_for_UP(vm_service) self._attach_cd(vm_service.get()) self._migrate_vm(vm_service.get()) def _attach_cd(self, entity): cd_iso = self.param('cd_iso') if cd_iso is not None: vm_service = self._service.service(entity.id) current = vm_service.get().status == otypes.VmStatus.UP cdroms_service = vm_service.cdroms_service() cdrom_device = cdroms_service.list()[0] cdrom_service = cdroms_service.cdrom_service(cdrom_device.id) cdrom = cdrom_service.get(current=current) if getattr(cdrom.file, 'id', '') != cd_iso: if not self._module.check_mode: cdrom_service.update( cdrom=otypes.Cdrom( file=otypes.File(id=cd_iso) ), current=current, ) self.changed = True return entity def _migrate_vm(self, entity): vm_host = self.param('host') vm_service = self._service.vm_service(entity.id) if vm_host is not None: # In case VM is preparing to be UP, wait to be up, to migrate it: if entity.status == otypes.VmStatus.UP: hosts_service = self._connection.system_service().hosts_service() current_vm_host = hosts_service.host_service(entity.host.id).get().name if vm_host != current_vm_host: if not self._module.check_mode: vm_service.migrate(host=otypes.Host(name=vm_host)) self._wait_for_UP(vm_service) self.changed = True return entity def _wait_for_UP(self, vm_service): wait( service=vm_service, condition=lambda vm: vm.status == otypes.VmStatus.UP, wait=self.param('wait'), timeout=self.param('timeout'), ) def wait_for_down(self, vm): """ This function will first wait for the status DOWN of the VM. Then it will find the active snapshot and wait until it's state is OK for stateless VMs and statless snaphot is removed. 
""" vm_service = self._service.vm_service(vm.id) wait( service=vm_service, condition=lambda vm: vm.status == otypes.VmStatus.DOWN, wait=self.param('wait'), timeout=self.param('timeout'), ) if vm.stateless: snapshots_service = vm_service.snapshots_service() snapshots = snapshots_service.list() snap_active = [ snap for snap in snapshots if snap.snapshot_type == otypes.SnapshotType.ACTIVE ][0] snap_stateless = [ snap for snap in snapshots if snap.snapshot_type == otypes.SnapshotType.STATELESS ] # Stateless snapshot may be already removed: if snap_stateless: wait( service=snapshots_service.snapshot_service(snap_stateless[0].id), condition=lambda snap: snap is None, wait=self.param('wait'), timeout=self.param('timeout'), ) wait( service=snapshots_service.snapshot_service(snap_active.id), condition=lambda snap: snap.snapshot_status == otypes.SnapshotStatus.OK, wait=self.param('wait'), timeout=self.param('timeout'), ) return True def __attach_disks(self, entity): disks_service = self._connection.system_service().disks_service() for disk in self.param('disks'): # If disk ID is not specified, find disk by name: disk_id = disk.get('id') if disk_id is None: disk_id = getattr( search_by_name( service=disks_service, name=disk.get('name') ), 'id', None ) # Attach disk to VM: disk_attachments_service = self._service.service(entity.id).disk_attachments_service() if get_entity(disk_attachments_service.attachment_service(disk_id)) is None: if not self._module.check_mode: disk_attachments_service.add( otypes.DiskAttachment( disk=otypes.Disk( id=disk_id, ), active=disk.get('activate', True), interface=otypes.DiskInterface( disk.get('interface', 'virtio') ), bootable=disk.get('bootable', False), ) ) self.changed = True def __get_vnic_profile_id(self, nic): """ Return VNIC profile ID looked up by it's name, because there can be more VNIC profiles with same name, other criteria of filter is cluster. 
""" vnics_service = self._connection.system_service().vnic_profiles_service() clusters_service = self._connection.system_service().clusters_service() cluster = search_by_name(clusters_service, self.param('cluster')) profiles = [ profile for profile in vnics_service.list() if profile.name == nic.get('profile_name') ] cluster_networks = [ net.id for net in self._connection.follow_link(cluster.networks) ] try: return next( profile.id for profile in profiles if profile.network.id in cluster_networks ) except StopIteration: raise Exception( "Profile '%s' was not found in cluster '%s'" % ( nic.get('profile_name'), self.param('cluster') ) ) def __attach_nics(self, entity): # Attach NICs to VM, if specified: nics_service = self._service.service(entity.id).nics_service() for nic in self.param('nics'): if search_by_name(nics_service, nic.get('name')) is None: if not self._module.check_mode: nics_service.add( otypes.Nic( name=nic.get('name'), interface=otypes.NicInterface( nic.get('interface', 'virtio') ), vnic_profile=otypes.VnicProfile( id=self.__get_vnic_profile_id(nic), ) if nic.get('profile_name') else None, mac=otypes.Mac( address=nic.get('mac_address') ) if nic.get('mac_address') else None, ) ) self.changed = True def _get_initialization(sysprep, cloud_init, cloud_init_nics): initialization = None if cloud_init or cloud_init_nics: initialization = otypes.Initialization( nic_configurations=[ otypes.NicConfiguration( boot_protocol=otypes.BootProtocol( nic.pop('nic_boot_protocol').lower() ) if nic.get('nic_boot_protocol') else None, name=nic.pop('nic_name', None), on_boot=nic.pop('nic_on_boot', None), ip=otypes.Ip( address=nic.pop('nic_ip_address', None), netmask=nic.pop('nic_netmask', None), gateway=nic.pop('nic_gateway', None), ) if ( nic.get('nic_gateway') is not None or nic.get('nic_netmask') is not None or nic.get('nic_ip_address') is not None ) else None, ) for nic in cloud_init_nics if ( nic.get('nic_gateway') is not None or nic.get('nic_netmask') is not None or 
nic.get('nic_ip_address') is not None or nic.get('nic_boot_protocol') is not None or nic.get('nic_on_boot') is not None ) ] if cloud_init_nics else None, **cloud_init ) elif sysprep: initialization = otypes.Initialization( **sysprep ) return initialization def control_state(vm, vms_service, module): if vm is None: return force = module.params['force'] state = module.params['state'] vm_service = vms_service.vm_service(vm.id) if vm.status == otypes.VmStatus.IMAGE_LOCKED: wait( service=vm_service, condition=lambda vm: vm.status == otypes.VmStatus.DOWN, ) elif vm.status == otypes.VmStatus.SAVING_STATE: # Result state is SUSPENDED, we should wait to be suspended: wait( service=vm_service, condition=lambda vm: vm.status == otypes.VmStatus.SUSPENDED, ) elif ( vm.status == otypes.VmStatus.UNASSIGNED or vm.status == otypes.VmStatus.UNKNOWN ): # Invalid states: module.fail_json(msg="Not possible to control VM, if it's in '{}' status".format(vm.status)) elif vm.status == otypes.VmStatus.POWERING_DOWN: if (force and state == 'stopped') or state == 'absent': vm_service.stop() wait( service=vm_service, condition=lambda vm: vm.status == otypes.VmStatus.DOWN, ) else: # If VM is powering down, wait to be DOWN or UP. 
# VM can end in UP state in case there is no GA # or ACPI on the VM or shutdown operation crashed: wait( service=vm_service, condition=lambda vm: vm.status in [otypes.VmStatus.DOWN, otypes.VmStatus.UP], ) def main(): argument_spec = ovirt_full_argument_spec( state=dict( choices=['running', 'stopped', 'present', 'absent', 'suspended', 'next_run'], default='present', ), name=dict(default=None), id=dict(default=None), cluster=dict(default=None), template=dict(default=None), template_version=dict(default=None, type='int'), use_latest_template_version=dict(default=None, type='bool'), disks=dict(default=[], type='list'), memory=dict(default=None), memory_guaranteed=dict(default=None), cpu_sockets=dict(default=None, type='int'), cpu_cores=dict(default=None, type='int'), cpu_shares=dict(default=None, type='int'), type=dict(choices=['server', 'desktop']), operating_system=dict( default=None, choices=[ 'rhel_6_ppc64', 'other', 'freebsd', 'windows_2003x64', 'windows_10', 'rhel_6x64', 'rhel_4x64', 'windows_2008x64', 'windows_2008R2x64', 'debian_7', 'windows_2012x64', 'ubuntu_14_04', 'ubuntu_12_04', 'ubuntu_13_10', 'windows_8x64', 'other_linux_ppc64', 'windows_2003', 'other_linux', 'windows_10x64', 'windows_2008', 'rhel_3', 'rhel_5', 'rhel_4', 'other_ppc64', 'sles_11', 'rhel_6', 'windows_xp', 'rhel_7x64', 'freebsdx64', 'rhel_7_ppc64', 'windows_7', 'rhel_5x64', 'ubuntu_14_04_ppc64', 'sles_11_ppc64', 'windows_8', 'windows_2012R2x64', 'windows_2008r2x64', 'ubuntu_13_04', 'ubuntu_12_10', 'windows_7x64', ], ), cd_iso=dict(default=None), boot_devices=dict(default=None, type='list'), high_availability=dict(type='bool'), stateless=dict(type='bool'), delete_protected=dict(type='bool'), force=dict(type='bool', default=False), nics=dict(default=[], type='list'), cloud_init=dict(type='dict'), cloud_init_nics=dict(defaul=[], type='list'), sysprep=dict(type='dict'), host=dict(default=None), clone=dict(type='bool', default=False), clone_permissions=dict(type='bool', default=False), 
kernel_path=dict(default=None), initrd_path=dict(default=None), kernel_params=dict(default=None), instance_type=dict(default=None), description=dict(default=None), comment=dict(default=None), timezone=dict(default=None), serial_policy=dict(default=None, choices=['vm', 'host', 'custom']), serial_policy_value=dict(default=None), ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, ) check_sdk(module) check_params(module) try: state = module.params['state'] auth = module.params.pop('auth') connection = create_connection(auth) vms_service = connection.system_service().vms_service() vms_module = VmsModule( connection=connection, module=module, service=vms_service, ) vm = vms_module.search_entity() control_state(vm, vms_service, module) if state == 'present' or state == 'running' or state == 'next_run': sysprep = module.params['sysprep'] cloud_init = module.params['cloud_init'] cloud_init_nics = module.params['cloud_init_nics'] or [] if cloud_init is not None: cloud_init_nics.append(cloud_init) # In case VM don't exist, wait for VM DOWN state, # otherwise don't wait for any state, just update VM: vms_module.create( entity=vm, result_state=otypes.VmStatus.DOWN if vm is None else None, clone=module.params['clone'], clone_permissions=module.params['clone_permissions'], ) initialization = _get_initialization(sysprep, cloud_init, cloud_init_nics) ret = vms_module.action( action='start', post_action=vms_module._post_start_action, action_condition=lambda vm: ( vm.status not in [ otypes.VmStatus.MIGRATING, otypes.VmStatus.POWERING_UP, otypes.VmStatus.REBOOT_IN_PROGRESS, otypes.VmStatus.WAIT_FOR_LAUNCH, otypes.VmStatus.UP, otypes.VmStatus.RESTORING_STATE, ] ), wait_condition=lambda vm: vm.status == otypes.VmStatus.UP, # Start action kwargs: use_cloud_init=cloud_init is not None or len(cloud_init_nics) > 0, use_sysprep=sysprep is not None, vm=otypes.Vm( placement_policy=otypes.VmPlacementPolicy( hosts=[otypes.Host(name=module.params['host'])] ) if 
module.params['host'] else None, initialization=initialization, os=otypes.OperatingSystem( cmdline=module.params.get('kernel_params'), initrd=module.params.get('initrd_path'), kernel=module.params.get('kernel_path'), ) if ( module.params.get('kernel_params') or module.params.get('initrd_path') or module.params.get('kernel_path') ) else None, ) if ( module.params.get('kernel_params') or module.params.get('initrd_path') or module.params.get('kernel_path') or module.params.get('host') or initialization ) else None, ) if state == 'next_run': # Apply next run configuration, if needed: vm = vms_service.vm_service(ret['id']).get() if vm.next_run_configuration_exists: ret = vms_module.action( action='reboot', entity=vm, action_condition=lambda vm: vm.status == otypes.VmStatus.UP, wait_condition=lambda vm: vm.status == otypes.VmStatus.UP, ) elif state == 'stopped': vms_module.create( result_state=otypes.VmStatus.DOWN if vm is None else None, clone=module.params['clone'], clone_permissions=module.params['clone_permissions'], ) if module.params['force']: ret = vms_module.action( action='stop', post_action=vms_module._attach_cd, action_condition=lambda vm: vm.status != otypes.VmStatus.DOWN, wait_condition=vms_module.wait_for_down, ) else: ret = vms_module.action( action='shutdown', pre_action=vms_module._pre_shutdown_action, post_action=vms_module._attach_cd, action_condition=lambda vm: vm.status != otypes.VmStatus.DOWN, wait_condition=vms_module.wait_for_down, ) elif state == 'suspended': vms_module.create( result_state=otypes.VmStatus.DOWN if vm is None else None, clone=module.params['clone'], clone_permissions=module.params['clone_permissions'], ) ret = vms_module.action( action='suspend', pre_action=vms_module._pre_suspend_action, action_condition=lambda vm: vm.status != otypes.VmStatus.SUSPENDED, wait_condition=lambda vm: vm.status == otypes.VmStatus.SUSPENDED, ) elif state == 'absent': ret = vms_module.remove() module.exit_json(**ret) except Exception as e: 
module.fail_json(msg=str(e), exception=traceback.format_exc()) finally: connection.close(logout='token' not in module.params['auth']) if __name__ == "__main__": main()
gpl-3.0
sszlm/MissionPlanner
Lib/site-packages/numpy/core/tests/test_records.py
53
6522
"""Tests for np.rec record-array construction and field access.

Reconstructed from a whitespace-mangled dump. Behavior is preserved except
for the fixes noted inline: a leaked file handle in test_recarray_fromfile,
the py2-only ``xrange`` builtin, and the ``np.object`` alias (removed in
NumPy 1.24; it was always just builtin ``object``).
"""
import sys
from os import path
import numpy as np
from numpy.testing import *
from numpy.compat import asbytes, asunicode

import warnings


class TestFromrecords(TestCase):
    """Construction of recarrays via the np.rec factory helpers."""

    def test_fromrecords(self):
        r = np.rec.fromrecords([[456, 'dbe', 1.2], [2, 'de', 1.3]],
                               names='col1,col2,col3')
        assert_equal(r[0].item(), (456, 'dbe', 1.2))

    @dec.skipif(sys.platform == 'cli',
                'Buffer support is not available on IronPython yet.')
    def test_method_array(self):
        r = np.rec.array(asbytes('abcdefg') * 100, formats='i2,a3,i4',
                         shape=3, byteorder='big')
        assert_equal(r[1].item(), (25444, asbytes('efg'), 1633837924))

    def test_method_array2(self):
        r = np.rec.array([(1, 11, 'a'), (2, 22, 'b'), (3, 33, 'c'),
                          (4, 44, 'd'), (5, 55, 'ex'), (6, 66, 'f'),
                          (7, 77, 'g')], formats='u1,f4,a1')
        if sys.platform == 'cli':
            # asbytes() doesn't work correctly on IronPython.
            assert_equal(r[1].item(), (2, 22.0, b'b'))
        else:
            assert_equal(r[1].item(), (2, 22.0, asbytes('b')))

    def test_recarray_slices(self):
        r = np.rec.array([(1, 11, 'a'), (2, 22, 'b'), (3, 33, 'c'),
                          (4, 44, 'd'), (5, 55, 'ex'), (6, 66, 'f'),
                          (7, 77, 'g')], formats='u1,f4,a1')
        if sys.platform == 'cli':
            # asbytes() doesn't work correctly on IronPython.
            assert_equal(r[1::2][1].item(), (4, 44.0, b'd'))
        else:
            assert_equal(r[1::2][1].item(), (4, 44.0, asbytes('d')))

    def test_recarray_fromarrays(self):
        x1 = np.array([1, 2, 3, 4])
        x2 = np.array(['a', 'dd', 'xyz', '12'])
        x3 = np.array([1.1, 2, 3, 4])
        r = np.rec.fromarrays([x1, x2, x3], names='a,b,c')
        assert_equal(r[1].item(), (2, 'dd', 2.0))
        x1[1] = 34
        # fromarrays copies its inputs, so mutating x1 must not leak through.
        assert_equal(r.a, np.array([1, 2, 3, 4]))

    @dec.skipif(sys.platform == 'cli',
                "Array from file descriptors are not yet supported on IronPython")
    def test_recarray_fromfile(self):
        data_dir = path.join(path.dirname(__file__), 'data')
        filename = path.join(data_dir, 'recarray_from_file.fits')
        # FIX: the original opened the file and never closed it; use a
        # context manager so the handle is released deterministically.
        with open(filename, 'rb') as fd:
            fd.seek(2880 * 2)
            r = np.rec.fromfile(fd, formats='f8,i4,a5', shape=3,
                                byteorder='big')

    def test_recarray_from_obj(self):
        count = 10
        a = np.zeros(count, dtype='O')
        b = np.zeros(count, dtype='f8')
        c = np.zeros(count, dtype='f8')
        for i in range(len(a)):
            a[i] = range(1, 10)

        mine = np.rec.fromarrays([a, b, c], names='date,data1,data2')
        for i in range(len(a)):
            assert (mine.date[i] == range(1, 10))
            assert (mine.data1[i] == 0.0)
            assert (mine.data2[i] == 0.0)

    def test_recarray_from_repr(self):
        x = np.rec.array([(1, 2)],
                         dtype=[('a', np.int8), ('b', np.int8)])
        # repr() of a recarray must round-trip through eval.
        y = eval("np." + repr(x))
        assert isinstance(y, np.recarray)
        assert_equal(y, x)

    def test_recarray_from_names(self):
        ra = np.rec.array([
            (1, 'abc', 3.7000002861022949, 0),
            (2, 'xy', 6.6999998092651367, 1),
            (0, ' ', 0.40000000596046448, 0)],
            names='c1, c2, c3, c4')
        pa = np.rec.fromrecords([
            (1, 'abc', 3.7000002861022949, 0),
            (2, 'xy', 6.6999998092651367, 1),
            (0, ' ', 0.40000000596046448, 0)],
            names='c1, c2, c3, c4')
        assert ra.dtype == pa.dtype
        assert ra.shape == pa.shape
        # FIX: was py2-only xrange(); range() behaves identically here.
        for k in range(len(ra)):
            assert ra[k].item() == pa[k].item()

    def test_recarray_conflict_fields(self):
        # Field names that shadow recarray attributes/methods must still be
        # assignable and readable via the mapping interface.
        ra = np.rec.array([(1, 'abc', 2.3), (2, 'xyz', 4.2),
                           (3, 'wrs', 1.3)],
                          names='field, shape, mean')
        ra.mean = [1.1, 2.2, 3.3]
        assert_array_almost_equal(ra['mean'], [1.1, 2.2, 3.3])
        assert type(ra.mean) is type(ra.var)
        ra.shape = (1, 3)
        assert ra.shape == (1, 3)
        ra.shape = ['A', 'B', 'C']
        assert_array_equal(ra['shape'], [['A', 'B', 'C']])
        ra.field = 5
        assert_array_equal(ra['field'], [[5, 5, 5]])
        assert callable(ra.field)

    def test_fromrecords_with_explicit_dtype(self):
        # FIX: np.object was a deprecated alias for builtin object (removed
        # in NumPy 1.24); using object directly is behavior-identical.
        a = np.rec.fromrecords([(1, 'a'), (2, 'bbb')],
                               dtype=[('a', int), ('b', object)])
        assert_equal(a.a, [1, 2])
        assert_equal(a[0].a, 1)
        assert_equal(a.b, ['a', 'bbb'])
        assert_equal(a[-1].b, 'bbb')
        #
        ndtype = np.dtype([('a', int), ('b', object)])
        a = np.rec.fromrecords([(1, 'a'), (2, 'bbb')], dtype=ndtype)
        assert_equal(a.a, [1, 2])
        assert_equal(a[0].a, 1)
        assert_equal(a.b, ['a', 'bbb'])
        assert_equal(a[-1].b, 'bbb')


class TestRecord(TestCase):
    """Assignment and field access on individual record rows."""

    def setUp(self):
        self.data = np.rec.fromrecords([(1, 2, 3), (4, 5, 6)],
                                       dtype=[("col1", "<i4"),
                                              ("col2", "<i4"),
                                              ("col3", "<i4")])

    def test_assignment1(self):
        a = self.data
        assert_equal(a.col1[0], 1)
        a[0].col1 = 0
        assert_equal(a.col1[0], 0)

    def test_assignment2(self):
        a = self.data
        assert_equal(a.col1[0], 1)
        a.col1[0] = 0
        assert_equal(a.col1[0], 0)

    def test_invalid_assignment(self):
        a = self.data

        def assign_invalid_column(x):
            x[0].col5 = 1

        self.assertRaises(AttributeError, assign_invalid_column, a)

    def test_out_of_order_fields(self):
        """Ticket #1431. Current behavior deprecated in numpy 1.5"""
        x = self.data[['col1', 'col2']]
        y = self.data[['col2', 'col1']]
        # make sure change is applied in 1.6/2.0
        if np.version.short_version[:3] == '1.5':
            assert_array_equal(x, y)
        elif float(np.version.short_version[:3]) >= 1.6 and np.version.release:
            assert_(y[0][0] == 4)
        warnings.filterwarnings(
            'ignore',
            message="Out of order field selection on recarrays")


def test_find_duplicate():
    """find_duplicate returns each duplicated value once, in first-seen order."""
    l1 = [1, 2, 3, 4, 5, 6]
    assert np.rec.find_duplicate(l1) == []

    l2 = [1, 2, 1, 4, 5, 6]
    assert np.rec.find_duplicate(l2) == [1]

    l3 = [1, 2, 1, 4, 1, 6, 2, 3]
    assert np.rec.find_duplicate(l3) == [1, 2]

    l3 = [2, 2, 1, 4, 1, 6, 2, 3]
    assert np.rec.find_duplicate(l3) == [2, 1]


if __name__ == "__main__":
    run_module_suite()
gpl-3.0
dongxingshui/virt-manager
virtManager/baseclass.py
1
7607
#
# Copyright (C) 2010, 2013 Red Hat, Inc.
# Copyright (C) 2010 Cole Robinson <crobinso@redhat.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
#
import logging
import os
import sys
import threading
import traceback

from . import config

from gi.repository import Gdk
from gi.repository import GLib
from gi.repository import GObject
from gi.repository import Gtk


class vmmGObject(GObject.GObject):
    """Base GObject for virt-manager objects.

    Tracks signal handles, timeouts, and gsettings notifiers so cleanup()
    can unregister everything, and (optionally) registers with the config
    object for leak checking.
    """

    _leak_check = True

    @staticmethod
    def idle_add(func, *args, **kwargs):
        """
        Make sure idle functions are run thread safe
        """
        def cb():
            try:
                return func(*args, **kwargs)
            except Exception:
                # FIX: was a py2 'print' statement (a SyntaxError on py3,
                # while the rest of this file uses py3-era GI imports);
                # route the traceback through logging instead.
                logging.error("Error in idle callback:\n%s",
                              traceback.format_exc())
            return False

        return GLib.idle_add(cb)

    def __init__(self):
        GObject.GObject.__init__(self)
        self.config = config.RUNNING_CONFIG

        self._gobject_handles = []
        self._gobject_timeouts = []
        self._gsettings_handles = []

        self._signal_id_map = {}
        self._next_signal_id = 1

        self.object_key = str(self)

        # Config might not be available if we error early in startup
        if self.config and self._leak_check:
            self.config.add_object(self.object_key)

    def cleanup(self):
        # Do any cleanup required to drop reference counts so object is
        # actually reaped by python. Usually means unregistering callbacks
        try:
            for h in self._gsettings_handles[:]:
                self.remove_gsettings_handle(h)
            for h in self._gobject_handles[:]:
                if GObject.GObject.handler_is_connected(self, h):
                    self.disconnect(h)
            for h in self._gobject_timeouts[:]:
                self.remove_gobject_timeout(h)

            self._cleanup()
        except Exception:
            # FIX: was a bare 'except:' which also swallowed SystemExit and
            # KeyboardInterrupt; Exception is the intended scope here.
            logging.exception("Error cleaning up %s", self)

    def _cleanup(self):
        raise NotImplementedError("_cleanup must be implemented in subclass")

    # pylint: disable=arguments-differ
    # Newer pylint can detect, but warns that overridden arguments are wrong
    def connect(self, name, callback, *args):
        # Wrap GObject.connect so the handle is tracked for cleanup()
        ret = GObject.GObject.connect(self, name, callback, *args)
        self._gobject_handles.append(ret)
        return ret

    def disconnect(self, handle):
        ret = GObject.GObject.disconnect(self, handle)
        self._gobject_handles.remove(handle)
        return ret

    def add_gsettings_handle(self, handle):
        self._gsettings_handles.append(handle)

    def remove_gsettings_handle(self, handle):
        self.config.remove_notifier(handle)
        self._gsettings_handles.remove(handle)

    def add_gobject_timeout(self, handle):
        self._gobject_timeouts.append(handle)

    def remove_gobject_timeout(self, handle):
        GLib.source_remove(handle)
        self._gobject_timeouts.remove(handle)

    def _logtrace(self, msg=""):
        if msg:
            msg += " "
        logging.debug("%s(%s %s)\n:%s",
                      msg, self.object_key, self._refcount(),
                      "".join(traceback.format_stack()))

    def _refcount(self):
        # Function generates 2 temporary refs, so adjust total accordingly
        return (sys.getrefcount(self) - 2)

    def _start_thread(self, target=None, name=None, args=None, kwargs=None):
        # Helper for starting a daemonized thread
        t = threading.Thread(target=target, name=name,
                             args=args or [], kwargs=kwargs or {})
        t.daemon = True
        t.start()

    def connect_once(self, signal, func, *args):
        # Connect a handler that disconnects itself after its first call
        id_list = []

        def wrap_func(*wrapargs):
            if id_list:
                self.disconnect(id_list[0])
            return func(*wrapargs)

        conn_id = self.connect(signal, wrap_func, *args)
        id_list.append(conn_id)
        return conn_id

    def connect_opt_out(self, signal, func, *args):
        # Connect a handler that disconnects itself once it returns truthy
        id_list = []

        def wrap_func(*wrapargs):
            ret = func(*wrapargs)
            if ret and id_list:
                self.disconnect(id_list[0])

        conn_id = self.connect(signal, wrap_func, *args)
        id_list.append(conn_id)
        return conn_id

    def idle_emit(self, signal, *args):
        """
        Safe wrapper for using 'self.emit' with GLib.idle_add
        """
        def emitwrap(_s, *_a):
            self.emit(_s, *_a)
            return False

        self.idle_add(emitwrap, signal, *args)

    def timeout_add(self, timeout, func, *args):
        """
        Make sure timeout functions are run thread safe
        """
        def cb():
            try:
                return func(*args)
            except Exception:
                # FIX: was a py2 'print' statement; see idle_add above.
                logging.error("Error in timeout callback:\n%s",
                              traceback.format_exc())
            return False

        ret = GLib.timeout_add(timeout, cb)
        self.add_gobject_timeout(ret)
        return ret

    def emit(self, signal_name, *args):
        return GObject.GObject.emit(self, signal_name, *args)

    def __del__(self):
        try:
            if self.config and self._leak_check:
                self.config.remove_object(self.object_key)
        except Exception:
            logging.exception("Error removing %s", self.object_key)


class vmmGObjectUI(vmmGObject):
    """vmmGObject that owns a Gtk.Builder UI file and a toplevel window."""

    @staticmethod
    def bind_escape_key_close_helper(topwin, close_cb):
        def close_on_escape(src_ignore, event):
            if Gdk.keyval_name(event.keyval) == "Escape":
                close_cb()
        topwin.connect("key-press-event", close_on_escape)

    def __init__(self, filename, windowname, builder=None, topwin=None):
        """Either pass a UI filename + window name, or an existing
        builder/topwin pair (external topwins are not destroyed on cleanup).
        """
        vmmGObject.__init__(self)
        self._external_topwin = bool(topwin)

        if filename:
            uifile = os.path.join(self.config.get_ui_dir(), filename)

            self.builder = Gtk.Builder()
            self.builder.set_translation_domain("virt-manager")
            # FIX: original used the py2-only file() builtin and leaked the
            # handle; open() + context manager is behavior-identical.
            with open(uifile) as f:
                self.builder.add_from_string(f.read())

            if not topwin:
                self.topwin = self.widget(windowname)
                self.topwin.hide()
            else:
                self.topwin = topwin
        else:
            self.builder = builder
            self.topwin = topwin

        self._err = None

    def _get_err(self):
        # Lazily build the error dialog; imported here to avoid a cycle
        if self._err is None:
            from . import error
            self._err = error.vmmErrorDialog(self.topwin)
        return self._err
    err = property(_get_err)

    def widget(self, name):
        return self.builder.get_object(name)

    def cleanup(self):
        self.close()
        vmmGObject.cleanup(self)
        self.builder = None
        if not self._external_topwin:
            self.topwin.destroy()
        self.topwin = None
        self._err = None

    def _cleanup(self):
        raise NotImplementedError("_cleanup must be implemented in subclass")

    def close(self, ignore1=None, ignore2=None):
        pass

    def bind_escape_key_close(self):
        self.bind_escape_key_close_helper(self.topwin, self.close)
gpl-2.0
xsmart/opencvr
3rdparty/protobuf/gmock/gtest/test/gtest_shuffle_test.py
3023
12549
#!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Verifies that test shuffling works."""

__author__ = 'wan@google.com (Zhanyong Wan)'

import os
import gtest_test_utils

# Command to run the gtest_shuffle_test_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_shuffle_test_')

# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'

# Filter used by the FILTERED_TESTS lists below.
TEST_FILTER = 'A*.A:A*.B:C*'

# Lazily-populated caches of test lists (see CalculateTestLists()).
ALL_TESTS = []
ACTIVE_TESTS = []
FILTERED_TESTS = []
SHARDED_TESTS = []

SHUFFLED_ALL_TESTS = []
SHUFFLED_ACTIVE_TESTS = []
SHUFFLED_FILTERED_TESTS = []
SHUFFLED_SHARDED_TESTS = []


# Helpers that build gtest command-line flags.
def AlsoRunDisabledTestsFlag():
  return '--gtest_also_run_disabled_tests'


def FilterFlag(test_filter):
  return '--gtest_filter=%s' % (test_filter,)


def RepeatFlag(n):
  return '--gtest_repeat=%s' % (n,)


def ShuffleFlag():
  return '--gtest_shuffle'


def RandomSeedFlag(n):
  return '--gtest_random_seed=%s' % (n,)


def RunAndReturnOutput(extra_env, args):
  """Runs the test program and returns its output."""

  environ_copy = os.environ.copy()
  environ_copy.update(extra_env)

  return gtest_test_utils.Subprocess([COMMAND] + args, env=environ_copy).output


def GetTestsForAllIterations(extra_env, args):
  """Runs the test program and returns a list of test lists.

  Args:
    extra_env: a map from environment variables to their values
    args: command line flags to pass to gtest_shuffle_test_

  Returns:
    A list where the i-th element is the list of tests run in the i-th
    test iteration.
  """

  test_iterations = []
  for line in RunAndReturnOutput(extra_env, args).split('\n'):
    if line.startswith('----'):
      # A '----' separator line starts a new iteration's test list.
      tests = []
      test_iterations.append(tests)
    elif line.strip():
      tests.append(line.strip())  # 'TestCaseName.TestName'

  return test_iterations


def GetTestCases(tests):
  """Returns a list of test cases in the given full test names.

  Args:
    tests: a list of full test names

  Returns:
    A list of test cases from 'tests', in their original order.
    Consecutive duplicates are removed.
  """
  # NOTE(review): the membership test below removes ALL duplicates, not only
  # consecutive ones as the docstring says — confirm intent against upstream.

  test_cases = []
  for test in tests:
    test_case = test.split('.')[0]
    if not test_case in test_cases:
      test_cases.append(test_case)

  return test_cases


def CalculateTestLists():
  """Calculates the list of tests run under different flags."""
  # Each list is populated at most once; later calls are no-ops, so setUp()
  # can invoke this cheaply before every test.

  if not ALL_TESTS:
    ALL_TESTS.extend(
        GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])

  if not ACTIVE_TESTS:
    ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])

  if not FILTERED_TESTS:
    FILTERED_TESTS.extend(
        GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])

  if not SHARDED_TESTS:
    SHARDED_TESTS.extend(
        GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                  SHARD_INDEX_ENV_VAR: '1'},
                                 [])[0])

  if not SHUFFLED_ALL_TESTS:
    SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(
        {}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])

  if not SHUFFLED_ACTIVE_TESTS:
    SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1)])[0])

  if not SHUFFLED_FILTERED_TESTS:
    SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0])

  if not SHUFFLED_SHARDED_TESTS:
    SHUFFLED_SHARDED_TESTS.extend(
        GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                  SHARD_INDEX_ENV_VAR: '1'},
                                 [ShuffleFlag(), RandomSeedFlag(1)])[0])


class GTestShuffleUnitTest(gtest_test_utils.TestCase):
  """Tests test shuffling."""

  def setUp(self):
    CalculateTestLists()

  def testShufflePreservesNumberOfTests(self):
    self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))
    self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))
    self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))
    self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))

  def testShuffleChangesTestOrder(self):
    self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
    self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
    self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
                 SHUFFLED_FILTERED_TESTS)
    self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
                 SHUFFLED_SHARDED_TESTS)

  def testShuffleChangesTestCaseOrder(self):
    self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
                 GetTestCases(SHUFFLED_ALL_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
        GetTestCases(SHUFFLED_ACTIVE_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
        GetTestCases(SHUFFLED_FILTERED_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
        GetTestCases(SHUFFLED_SHARDED_TESTS))

  def testShuffleDoesNotRepeatTest(self):
    for test in SHUFFLED_ALL_TESTS:
      self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_ACTIVE_TESTS:
      self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_FILTERED_TESTS:
      self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_SHARDED_TESTS:
      self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
                       '%s appears more than once' % (test,))

  def testShuffleDoesNotCreateNewTest(self):
    for test in SHUFFLED_ALL_TESTS:
      self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_ACTIVE_TESTS:
      self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_FILTERED_TESTS:
      self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_SHARDED_TESTS:
      self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))

  def testShuffleIncludesAllTests(self):
    for test in ALL_TESTS:
      self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
    for test in ACTIVE_TESTS:
      self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
    for test in FILTERED_TESTS:
      self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
    for test in SHARDED_TESTS:
      self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))

  def testShuffleLeavesDeathTestsAtFront(self):
    non_death_test_found = False
    for test in SHUFFLED_ACTIVE_TESTS:
      if 'DeathTest.' in test:
        self.assert_(not non_death_test_found,
                     '%s appears after a non-death test' % (test,))
      else:
        non_death_test_found = True

  def _VerifyTestCasesDoNotInterleave(self, tests):
    # NOTE(review): the original file's indentation was lost; the grouping
    # below follows the token order, but with an initially-empty test_cases
    # list the 'if' branch can never fire on the first case, so the check
    # looks weaker than its name suggests — confirm against upstream gtest.
    test_cases = []
    for test in tests:
      [test_case, _] = test.split('.')
      if test_cases and test_cases[-1] != test_case:
        test_cases.append(test_case)
        self.assertEqual(1, test_cases.count(test_case),
                         'Test case %s is not grouped together in %s' %
                         (test_case, tests))

  def testShuffleDoesNotInterleaveTestCases(self):
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)

  def testShuffleRestoresOrderAfterEachIteration(self):
    # Get the test lists in all 3 iterations, using random seed 1, 2,
    # and 3 respectively.  Google Test picks a different seed in each
    # iteration, and this test depends on the current implementation
    # picking successive numbers.  This dependency is not ideal, but
    # makes the test much easier to write.
    [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
        GetTestsForAllIterations(
            {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))

    # Make sure running the tests with random seed 1 gets the same
    # order as in iteration 1 above.
    [tests_with_seed1] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1)])
    self.assertEqual(tests_in_iteration1, tests_with_seed1)

    # Make sure running the tests with random seed 2 gets the same
    # order as in iteration 2 above.  Success means that Google Test
    # correctly restores the test order before re-shuffling at the
    # beginning of iteration 2.
    [tests_with_seed2] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(2)])
    self.assertEqual(tests_in_iteration2, tests_with_seed2)

    # Make sure running the tests with random seed 3 gets the same
    # order as in iteration 3 above.  Success means that Google Test
    # correctly restores the test order before re-shuffling at the
    # beginning of iteration 3.
    [tests_with_seed3] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(3)])
    self.assertEqual(tests_in_iteration3, tests_with_seed3)

  def testShuffleGeneratesNewOrderInEachIteration(self):
    [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
        GetTestsForAllIterations(
            {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))

    self.assert_(tests_in_iteration1 != tests_in_iteration2,
                 tests_in_iteration1)
    self.assert_(tests_in_iteration1 != tests_in_iteration3,
                 tests_in_iteration1)
    self.assert_(tests_in_iteration2 != tests_in_iteration3,
                 tests_in_iteration2)

  def testShuffleShardedTestsPreservesPartition(self):
    # If we run M tests on N shards, the same M tests should be run in
    # total, regardless of the random seeds used by the shards.
    [tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '0'},
                                        [ShuffleFlag(), RandomSeedFlag(1)])
    [tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '1'},
                                        [ShuffleFlag(), RandomSeedFlag(20)])
    [tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '2'},
                                        [ShuffleFlag(), RandomSeedFlag(25)])
    sorted_sharded_tests = tests1 + tests2 + tests3
    sorted_sharded_tests.sort()
    sorted_active_tests = []
    sorted_active_tests.extend(ACTIVE_TESTS)
    sorted_active_tests.sort()
    self.assertEqual(sorted_active_tests, sorted_sharded_tests)

if __name__ == '__main__':
  gtest_test_utils.Main()
mit
rchicoli/linux
scripts/gdb/linux/radixtree.py
27
2542
#
# gdb helper commands and functions for Linux kernel debugging
#
#  Radix Tree Parser
#
# Copyright (c) 2016 Linaro Ltd
#
# Authors:
#  Kieran Bingham <kieran.bingham@linaro.org>
#
# This work is licensed under the terms of the GNU GPL version 2.
#

import gdb

from linux import utils
from linux import constants

radix_tree_root_type = utils.CachedType("struct radix_tree_root")
radix_tree_node_type = utils.CachedType("struct radix_tree_node")


def is_indirect_ptr(node):
    """Return a truthy value when *node* carries the indirect-pointer tag,
    i.e. it points to an internal radix_tree_node rather than a data entry."""
    long_type = utils.get_long_type()
    return (node.cast(long_type) & constants.LX_RADIX_TREE_INDIRECT_PTR)


def indirect_to_ptr(node):
    """Strip the indirect-pointer tag from *node* and return a pointer of the
    original type that can be dereferenced."""
    long_type = utils.get_long_type()
    node_type = node.type
    indirect_ptr = node.cast(long_type) & ~constants.LX_RADIX_TREE_INDIRECT_PTR
    return indirect_ptr.cast(node_type)


def maxindex(height):
    """Return the maximum index representable by a tree of *height* by reading
    the kernel's height_to_maxindex[] lookup table from the inferior."""
    height = height & constants.LX_RADIX_TREE_HEIGHT_MASK
    return gdb.parse_and_eval("height_to_maxindex[" + str(height) + "]")


def lookup(root, index):
    """Walk the radix tree at *root* and return the entry stored at *index*,
    or None when there is no such entry.

    *root* must be a gdb.Value of type struct radix_tree_root (or a pointer
    to one); anything else raises gdb.GdbError.
    """
    if root.type == radix_tree_root_type.get_type().pointer():
        root = root.dereference()
    elif root.type != radix_tree_root_type.get_type():
        raise gdb.GdbError("Must be struct radix_tree_root not {}"
                           .format(root.type))

    node = root['rnode']
    # BUGFIX: was 'node is 0' — that compares object identity between a
    # gdb.Value and the int literal 0, which is never true, so an empty
    # tree was not detected.  Use value equality instead.
    if node == 0:
        return None

    if not (is_indirect_ptr(node)):
        # A direct (untagged) rnode is a single entry stored at index 0.
        if (index > 0):
            return None
        return node

    node = indirect_to_ptr(node)

    height = node['path'] & constants.LX_RADIX_TREE_HEIGHT_MASK
    if (index > maxindex(height)):
        return None

    shift = (height - 1) * constants.LX_RADIX_TREE_MAP_SHIFT

    while True:
        new_index = (index >> shift) & constants.LX_RADIX_TREE_MAP_MASK
        slot = node['slots'][new_index]

        node = slot.cast(node.type.pointer()).dereference()
        # BUGFIX: same identity-vs-equality fix as above — a NULL slot must
        # terminate the walk.
        if node == 0:
            return None

        shift -= constants.LX_RADIX_TREE_MAP_SHIFT
        height -= 1

        if (height <= 0):
            break

    return node


class LxRadixTree(gdb.Function):
    """ Lookup and return a node from a RadixTree.

$lx_radix_tree_lookup(root_node [, index]): Return the node at the given index.
If index is omitted, the root node is dereferenced and returned."""

    def __init__(self):
        super(LxRadixTree, self).__init__("lx_radix_tree_lookup")

    def invoke(self, root, index=0):
        result = lookup(root, index)
        if result is None:
            raise gdb.GdbError("No entry in tree at index {}".format(index))
        return result

LxRadixTree()
gpl-2.0
xrmx/django
django/contrib/messages/storage/session.py
478
1714
import json

from django.contrib.messages.storage.base import BaseStorage
from django.contrib.messages.storage.cookie import (
    MessageDecoder, MessageEncoder,
)
from django.utils import six


class SessionStorage(BaseStorage):
    """
    Stores messages in the session (that is, django.contrib.sessions).
    """
    session_key = '_messages'

    def __init__(self, request, *args, **kwargs):
        assert hasattr(request, 'session'), (
            "The session-based temporary message storage requires session "
            "middleware to be installed, and come before the message "
            "middleware in the MIDDLEWARE_CLASSES list.")
        super(SessionStorage, self).__init__(request, *args, **kwargs)

    def _get(self, *args, **kwargs):
        """
        Retrieve all messages stored in the request's session.

        This backend keeps everything it is given, so the second element of
        the returned tuple (the all_retrieved flag) is always True.
        """
        raw = self.request.session.get(self.session_key)
        return self.deserialize_messages(raw), True

    def _store(self, messages, response, *args, **kwargs):
        """
        Persist *messages* in the request's session; an empty message list
        removes the session key entirely.  Nothing is ever left unstored,
        hence the empty return list.
        """
        if not messages:
            self.request.session.pop(self.session_key, None)
        else:
            self.request.session[self.session_key] = self.serialize_messages(messages)
        return []

    def serialize_messages(self, messages):
        # Compact JSON (no whitespace after separators) keeps the session
        # payload small.
        return MessageEncoder(separators=(',', ':')).encode(messages)

    def deserialize_messages(self, data):
        # Non-string data (or an empty value) is passed through untouched.
        if not (data and isinstance(data, six.string_types)):
            return data
        return json.loads(data, cls=MessageDecoder)
bsd-3-clause
leezu/mxnet
python/mxnet/contrib/io.py
11
3384
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

# coding: utf-8
"""Contrib data iterators for common data formats."""
from ..io import DataIter, DataDesc
from .. import ndarray as nd


class DataLoaderIter(DataIter):
    """Adapts a ``mx.gluon.data.DataLoader`` to the ``DataIter`` interface so
    a gluon dataloader can feed a symbolic module.

    Parameters
    ----------
    loader : mxnet.gluon.data.Dataloader
        Gluon dataloader instance
    data_name : str, optional
        The data name.
    label_name : str, optional
        The label name.
    dtype : str, optional
        The dtype specifier, can be float32 or float16

    Examples
    --------
    >>> import mxnet as mx
    >>> from mxnet.gluon.data.vision import MNIST
    >>> from mxnet.gluon.data import DataLoader
    >>> train_dataset = MNIST(train=True)
    >>> train_data = mx.gluon.data.DataLoader(train_dataset, 32, shuffle=True, num_workers=4)
    >>> dataiter = mx.io.DataloaderIter(train_data)
    >>> for batch in dataiter:
    ...     batch.data[0].shape
    ...
    (32L, 28L, 28L, 1L)
    """
    def __init__(self, loader, data_name='data', label_name='softmax_label',
                 dtype='float32'):
        super(DataLoaderIter, self).__init__()
        self._loader = loader
        self._iter = iter(self._loader)
        # Peek at the first batch to discover shapes, then rewind.
        first_data, first_label = next(self._iter)
        self.batch_size = first_data.shape[0]
        self.dtype = dtype
        self.provide_data = [DataDesc(data_name, first_data.shape, dtype)]
        self.provide_label = [DataDesc(label_name, first_label.shape, dtype)]
        self._current_batch = None
        self.reset()

    def reset(self):
        # Restart iteration from the beginning of the loader.
        self._iter = iter(self._loader)

    def iter_next(self):
        # Advance to the next batch; False signals exhaustion.
        try:
            batch = next(self._iter)
        except StopIteration:
            self._current_batch = None
            return False
        self._current_batch = batch
        return True

    def _padded_part(self, index):
        # Return element *index* (0 = data, 1 = label) of the current batch,
        # cast to self.dtype and padded up to batch_size when the final batch
        # is short.  Padding rows are uninitialized, matching getpad().
        part = self._current_batch[index]
        if self.getpad():
            shape = part.shape
            out = nd.empty(shape=([self.batch_size] + list(shape[1:])))
            out[:shape[0]] = part.astype(self.dtype)
            return out
        return part.astype(self.dtype)

    def getdata(self):
        return [self._padded_part(0)]

    def getlabel(self):
        return [self._padded_part(1)]

    def getpad(self):
        # Number of missing rows in the current (possibly short) batch.
        return self.batch_size - self._current_batch[0].shape[0]

    def getindex(self):
        return None
apache-2.0
yury-s/v8-inspector
Source/chrome/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
2
17172
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# NOTE: this module is Python 2 (urlparse module); fixes below keep that
# dialect.

import base64
import logging
import urlparse

from common.chrome_proxy_measurements import ChromeProxyValidation
from integration_tests import chrome_proxy_metrics as metrics
from metrics import loading
from telemetry.core import exceptions
from telemetry.page import page_test


class ChromeProxyDataSaving(page_test.PageTest):
  """Chrome proxy data saving measurement."""
  def __init__(self, *args, **kwargs):
    super(ChromeProxyDataSaving, self).__init__(*args, **kwargs)
    self._metrics = metrics.ChromeProxyMetric()
    self._enable_proxy = True

  def CustomizeBrowserOptions(self, options):
    if self._enable_proxy:
      options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')

  def WillNavigateToPage(self, page, tab):
    tab.ClearCache(force=True)
    self._metrics.Start(page, tab)

  def ValidateAndMeasurePage(self, page, tab, results):
    # Wait for the load event.
    tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
    self._metrics.Stop(page, tab)
    self._metrics.AddResultsForDataSaving(tab, results)


class ChromeProxyHeaders(ChromeProxyValidation):
  """Correctness measurement for response headers."""

  def __init__(self):
    super(ChromeProxyHeaders, self).__init__(
        restart_after_each_page=True,
        metrics=metrics.ChromeProxyMetric())

  def AddResults(self, tab, results):
    self._metrics.AddResultsForHeaderValidation(tab, results)


class ChromeProxyBypass(ChromeProxyValidation):
  """Correctness measurement for bypass responses."""

  def __init__(self):
    super(ChromeProxyBypass, self).__init__(
        restart_after_each_page=True,
        metrics=metrics.ChromeProxyMetric())

  def AddResults(self, tab, results):
    self._metrics.AddResultsForBypass(tab, results)


class ChromeProxyCorsBypass(ChromeProxyValidation):
  """Correctness measurement for bypass responses for CORS requests."""

  def __init__(self):
    super(ChromeProxyCorsBypass, self).__init__(
        restart_after_each_page=True,
        metrics=metrics.ChromeProxyMetric())

  def ValidateAndMeasurePage(self, page, tab, results):
    # The test page sets window.xhrRequestCompleted to true when the XHR fetch
    # finishes.
    tab.WaitForJavaScriptExpression('window.xhrRequestCompleted', 300)
    super(ChromeProxyCorsBypass,
          self).ValidateAndMeasurePage(page, tab, results)

  def AddResults(self, tab, results):
    self._metrics.AddResultsForCorsBypass(tab, results)


class ChromeProxyBlockOnce(ChromeProxyValidation):
  """Correctness measurement for block-once responses."""

  def __init__(self):
    super(ChromeProxyBlockOnce, self).__init__(
        restart_after_each_page=True,
        metrics=metrics.ChromeProxyMetric())

  def AddResults(self, tab, results):
    self._metrics.AddResultsForBlockOnce(tab, results)


class ChromeProxySafebrowsingOn(ChromeProxyValidation):
  """Correctness measurement for safebrowsing."""

  def __init__(self):
    super(ChromeProxySafebrowsingOn, self).__init__(
        metrics=metrics.ChromeProxyMetric())

  def AddResults(self, tab, results):
    self._metrics.AddResultsForSafebrowsingOn(tab, results)


class ChromeProxySafebrowsingOff(ChromeProxyValidation):
  """Correctness measurement for safebrowsing."""

  def __init__(self):
    super(ChromeProxySafebrowsingOff, self).__init__(
        metrics=metrics.ChromeProxyMetric())

  def AddResults(self, tab, results):
    self._metrics.AddResultsForSafebrowsingOff(tab, results)


_FAKE_PROXY_AUTH_VALUE = 'aabbccdd3b7579186c1b0620614fdb1f0000ffff'
_TEST_SERVER = 'chromeproxy-test.appspot.com'
_TEST_SERVER_DEFAULT_URL = 'http://' + _TEST_SERVER + '/default'


# We rely on the chromeproxy-test server to facilitate some of the tests.
# The test server code is at <TBD location> and runs at _TEST_SERVER
#
# The test server allow request to override response status, headers, and
# body through query parameters. See GetResponseOverrideURL.
def GetResponseOverrideURL(url=_TEST_SERVER_DEFAULT_URL, respStatus=0,
                           respHeader="", respBody=""):
  """ Compose the request URL with query parameters to override
  the chromeproxy-test server response.
  """
  queries = []
  if respStatus > 0:
    queries.append('respStatus=%d' % respStatus)
  if respHeader:
    queries.append('respHeader=%s' % base64.b64encode(respHeader))
  if respBody:
    queries.append('respBody=%s' % base64.b64encode(respBody))
  if len(queries) == 0:
    return url
  # BUGFIX: a stray '"&".join(queries)' expression statement stood here;
  # its value was discarded, so it was dead code and has been removed.
  # url has query already
  if urlparse.urlparse(url).query:
    return url + '&' + "&".join(queries)
  else:
    return url + '?' + "&".join(queries)


class ChromeProxyHTTPFallbackProbeURL(ChromeProxyValidation):
  """Correctness measurement for proxy fallback.

  In this test, the probe URL does not return 'OK'. Chrome is expected
  to use the fallback proxy.
  """

  def __init__(self):
    super(ChromeProxyHTTPFallbackProbeURL, self).__init__(
        restart_after_each_page=True,
        metrics=metrics.ChromeProxyMetric())

  def CustomizeBrowserOptions(self, options):
    super(ChromeProxyHTTPFallbackProbeURL,
          self).CustomizeBrowserOptions(options)
    # Set the secure proxy check URL to the google.com favicon, which will be
    # interpreted as a secure proxy check failure since the response body is not
    # "OK". The google.com favicon is used because it will load reliably fast,
    # and there have been problems with chromeproxy-test.appspot.com being slow
    # and causing tests to flake.
    options.AppendExtraBrowserArgs(
        '--data-reduction-proxy-secure-proxy-check-url='
        'http://www.google.com/favicon.ico')

  def AddResults(self, tab, results):
    self._metrics.AddResultsForHTTPFallback(tab, results)


class ChromeProxyHTTPFallbackViaHeader(ChromeProxyValidation):
  """Correctness measurement for proxy fallback.

  In this test, the configured proxy is the chromeproxy-test server which
  will send back a response without the expected Via header. Chrome is
  expected to use the fallback proxy and add the configured proxy to the
  bad proxy list.
  """

  def __init__(self):
    super(ChromeProxyHTTPFallbackViaHeader, self).__init__(
        restart_after_each_page=True,
        metrics=metrics.ChromeProxyMetric())

  def CustomizeBrowserOptions(self, options):
    super(ChromeProxyHTTPFallbackViaHeader,
          self).CustomizeBrowserOptions(options)
    options.AppendExtraBrowserArgs('--ignore-certificate-errors')
    options.AppendExtraBrowserArgs(
        '--spdy-proxy-auth-origin=http://%s' % _TEST_SERVER)

  def AddResults(self, tab, results):
    self._metrics.AddResultsForHTTPFallback(tab, results)


class ChromeProxyClientVersion(ChromeProxyValidation):
  """Correctness measurement for version directives in Chrome-Proxy header.

  The test verifies that the version information provided in the Chrome-Proxy
  request header overrides any version, if specified, that is provided in the
  user agent string.
  """

  def __init__(self):
    super(ChromeProxyClientVersion, self).__init__(
        metrics=metrics.ChromeProxyMetric())

  def CustomizeBrowserOptions(self, options):
    super(ChromeProxyClientVersion,
          self).CustomizeBrowserOptions(options)
    options.AppendExtraBrowserArgs('--user-agent="Chrome/32.0.1700.99"')

  def AddResults(self, tab, results):
    self._metrics.AddResultsForClientVersion(tab, results)


class ChromeProxyClientType(ChromeProxyValidation):
  """Correctness measurement for Chrome-Proxy header client type directives."""

  def __init__(self):
    super(ChromeProxyClientType, self).__init__(
        restart_after_each_page=True,
        metrics=metrics.ChromeProxyMetric())
    self._chrome_proxy_client_type = None

  def AddResults(self, tab, results):
    # Get the Chrome-Proxy client type from the first page in the page set, so
    # that the client type value can be used to determine which of the later
    # pages in the page set should be bypassed.
    if not self._chrome_proxy_client_type:
      client_type = self._metrics.GetClientTypeFromRequests(tab)
      if client_type:
        self._chrome_proxy_client_type = client_type

    self._metrics.AddResultsForClientType(tab, results,
                                          self._chrome_proxy_client_type,
                                          self._page.bypass_for_client_type)


class ChromeProxyLoFi(ChromeProxyValidation):
  """Correctness measurement for Lo-Fi in Chrome-Proxy header."""

  def __init__(self):
    super(ChromeProxyLoFi, self).__init__(restart_after_each_page=True,
                                          metrics=metrics.ChromeProxyMetric())

  def CustomizeBrowserOptions(self, options):
    super(ChromeProxyLoFi, self).CustomizeBrowserOptions(options)
    options.AppendExtraBrowserArgs('--enable-data-reduction-proxy-lo-fi')

  def AddResults(self, tab, results):
    self._metrics.AddResultsForLoFi(tab, results)


class ChromeProxyExpDirective(ChromeProxyValidation):
  """Correctness measurement for experiment directives in Chrome-Proxy header.

  This test verifies that "exp=test" in the Chrome-Proxy request header
  causes a bypass on the experiment test page.
  """

  def __init__(self):
    super(ChromeProxyExpDirective, self).__init__(
        restart_after_each_page=True,
        metrics=metrics.ChromeProxyMetric())

  def CustomizeBrowserOptions(self, options):
    super(ChromeProxyExpDirective, self).CustomizeBrowserOptions(options)
    options.AppendExtraBrowserArgs('--data-reduction-proxy-experiment=test')

  def AddResults(self, tab, results):
    self._metrics.AddResultsForBypass(tab, results, url_pattern='/exptest/')


class ChromeProxyPassThrough(ChromeProxyValidation):
  """Correctness measurement for Chrome-Proxy pass-through directives.

  This test verifies that "pass-through" in the Chrome-Proxy request header
  causes a resource to be loaded without Data Reduction Proxy transformations.
  """

  def __init__(self):
    super(ChromeProxyPassThrough, self).__init__(
        restart_after_each_page=True,
        metrics=metrics.ChromeProxyMetric())

  def CustomizeBrowserOptions(self, options):
    super(ChromeProxyPassThrough, self).CustomizeBrowserOptions(options)

  def AddResults(self, tab, results):
    self._metrics.AddResultsForPassThrough(tab, results)


class ChromeProxyHTTPToDirectFallback(ChromeProxyValidation):
  """Correctness measurement for HTTP proxy fallback to direct."""

  def __init__(self):
    super(ChromeProxyHTTPToDirectFallback, self).__init__(
        restart_after_each_page=True,
        metrics=metrics.ChromeProxyMetric())

  def CustomizeBrowserOptions(self, options):
    super(ChromeProxyHTTPToDirectFallback,
          self).CustomizeBrowserOptions(options)
    # Set the primary proxy to something that will fail to be resolved so that
    # this test will run using the HTTP fallback proxy.
    options.AppendExtraBrowserArgs(
        '--spdy-proxy-auth-origin=http://nonexistent.googlezip.net')

  def WillNavigateToPage(self, page, tab):
    super(ChromeProxyHTTPToDirectFallback, self).WillNavigateToPage(page, tab)
    # Attempt to load a page through the nonexistent primary proxy in order to
    # cause a proxy fallback, and have this test run starting from the HTTP
    # fallback proxy.
    tab.Navigate(_TEST_SERVER_DEFAULT_URL)
    tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)

  def AddResults(self, tab, results):
    self._metrics.AddResultsForHTTPToDirectFallback(tab, results, _TEST_SERVER)


class ChromeProxyReenableAfterBypass(ChromeProxyValidation):
  """Correctness measurement for re-enabling proxies after bypasses.

  This test loads a page that causes all data reduction proxies to be
  bypassed for 1 to 5 minutes, then waits 5 minutes and verifies that the
  proxy is no longer bypassed.
  """

  def __init__(self):
    super(ChromeProxyReenableAfterBypass, self).__init__(
        restart_after_each_page=True,
        metrics=metrics.ChromeProxyMetric())

  def AddResults(self, tab, results):
    self._metrics.AddResultsForReenableAfterBypass(
        tab, results, self._page.bypass_seconds_min,
        self._page.bypass_seconds_max)


class ChromeProxySmoke(ChromeProxyValidation):
  """Smoke measurement for basic chrome proxy correctness."""

  def __init__(self):
    super(ChromeProxySmoke, self).__init__(restart_after_each_page=True,
                                           metrics=metrics.ChromeProxyMetric())

  def WillNavigateToPage(self, page, tab):
    super(ChromeProxySmoke, self).WillNavigateToPage(page, tab)

  def AddResults(self, tab, results):
    # Map a page name to its AddResults func.
    page_to_metrics = {
        'header validation': [self._metrics.AddResultsForHeaderValidation],
        'compression: image': [
            self._metrics.AddResultsForHeaderValidation,
            self._metrics.AddResultsForDataSaving,
            ],
        'compression: javascript': [
            self._metrics.AddResultsForHeaderValidation,
            self._metrics.AddResultsForDataSaving,
            ],
        'compression: css': [
            self._metrics.AddResultsForHeaderValidation,
            self._metrics.AddResultsForDataSaving,
            ],
        'bypass': [self._metrics.AddResultsForBypass],
        }
    if not self._page.name in page_to_metrics:
      raise page_test.MeasurementFailure(
          'Invalid page name (%s) in smoke. Page name must be one of:\n%s' % (
          self._page.name, page_to_metrics.keys()))
    for add_result in page_to_metrics[self._page.name]:
      add_result(tab, results)


PROXIED = metrics.PROXIED
DIRECT = metrics.DIRECT


class ChromeProxyVideoValidation(page_test.PageTest):
  """Validation for video pages.

  Measures pages using metrics.ChromeProxyVideoMetric. Pages can be fetched
  either direct from the origin server or via the proxy. If a page is fetched
  both ways, then the PROXIED and DIRECT measurements are compared to ensure
  the same video was loaded in both cases.
  """

  def __init__(self):
    super(ChromeProxyVideoValidation, self).__init__(
        needs_browser_restart_after_each_page=True,
        clear_cache_before_each_run=True)
    # The type is _allMetrics[url][PROXIED,DIRECT][metricName] = value,
    # where (metricName,value) is a metric computed by videowrapper.js.
    self._allMetrics = {}

  def CustomizeBrowserOptionsForSinglePage(self, page, options):
    if page.use_chrome_proxy:
      options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')
      options.AppendExtraBrowserArgs('--data-reduction-proxy-experiment=video')

  def DidNavigateToPage(self, page, tab):
    self._currMetrics = metrics.ChromeProxyVideoMetric(tab)
    self._currMetrics.Start(page, tab)

  def ValidateAndMeasurePage(self, page, tab, results):
    assert self._currMetrics
    self._currMetrics.Stop(page, tab)
    if page.url not in self._allMetrics:
      self._allMetrics[page.url] = {}

    # Verify this page.
    if page.use_chrome_proxy:
      self._currMetrics.AddResultsForProxied(tab, results)
      self._allMetrics[page.url][PROXIED] = self._currMetrics.videoMetrics
    else:
      self._currMetrics.AddResultsForDirect(tab, results)
      self._allMetrics[page.url][DIRECT] = self._currMetrics.videoMetrics
    self._currMetrics = None

    # Compare proxied and direct results for this url, if they exist.
    m = self._allMetrics[page.url]
    if PROXIED in m and DIRECT in m:
      self._CompareProxiedAndDirectMetrics(page.url, m[PROXIED], m[DIRECT])

  def _CompareProxiedAndDirectMetrics(self, url, pm, dm):
    """Compare metrics from PROXIED and DIRECT fetches.

    Compares video metrics computed by videowrapper.js for pages that were
    fetch both PROXIED and DIRECT.

    Args:
      url: The url for the page being tested.
      pm: Metrics when loaded by the Flywheel proxy.
      dm: Metrics when loaded directly from the origin server.

    Raises:
      ChromeProxyMetricException on failure.
    """
    def err(s):
      # BUGFIX: was 'raise ChromeProxyMetricException, s' — that name is not
      # imported in this module and would itself raise NameError; qualify it
      # through the metrics module instead.
      raise metrics.ChromeProxyMetricException(s)

    # BUGFIX: the error messages below referenced 'page.url', but no 'page'
    # exists in this scope (the parameter is 'url'); any failure would have
    # raised NameError instead of the intended diagnostic.
    if not pm['ready']:
      err('Proxied page did not load video: %s' % url)
    if not dm['ready']:
      err('Direct page did not load video: %s' % url)

    # Compare metrics that should match for PROXIED and DIRECT.
    for x in ('video_height', 'video_width', 'video_duration',
              'decoded_frames'):
      if x not in pm:
        err('Proxied page has no %s: %s' % (x, url))
      if x not in dm:
        err('Direct page has no %s: %s' % (x, url))
      if pm[x] != dm[x]:
        err('Mismatch for %s (proxied=%s direct=%s): %s' %
            (x, str(pm[x]), str(dm[x]), url))

    # Proxied XOCL should match direct CL.
    pxocl = pm['x_original_content_length_header']
    dcl = dm['content_length_header']
    if pxocl != dcl:
      err('Mismatch for content length (proxied=%s direct=%s): %s' %
          (str(pxocl), str(dcl), url))
bsd-3-clause
faywong/FFPlayer
project/jni/python/src/Lib/idlelib/GrepDialog.py
67
4023
import os
import fnmatch
import sys
from Tkinter import *
import SearchEngine
from SearchDialogBase import SearchDialogBase

def grep(text, io=None, flist=None):
    """Open the (singleton) grep dialog for the root window of *text*.

    The dialog instance is cached on the shared SearchEngine so repeated
    invocations reuse the same window.  The current selection of *text*
    seeds the search phrase.
    """
    root = text._root()
    engine = SearchEngine.get(root)
    if not hasattr(engine, "_grepdialog"):
        engine._grepdialog = GrepDialog(root, engine, flist)
    dialog = engine._grepdialog
    # Raises TclError if there is no selection; callers rely on that.
    searchphrase = text.get("sel.first", "sel.last")
    dialog.open(text, searchphrase, io)

class GrepDialog(SearchDialogBase):
    """Dialog for searching a regex across a set of files ("Find in Files")."""

    title = "Find in Files Dialog"
    icon = "Grep"
    needwrapbutton = 0  # grep has no wrap-around option

    def __init__(self, root, engine, flist):
        """Create dialog state.

        flist is the IDLE file list used to open an OutputWindow for hits.
        globvar holds the file glob pattern; recvar the recurse checkbox.
        """
        SearchDialogBase.__init__(self, root, engine)
        self.flist = flist
        self.globvar = StringVar(root)
        self.recvar = BooleanVar(root)

    def open(self, text, searchphrase, io=None):
        """Show the dialog, pre-filling the glob from the current file.

        Defaults the pattern to "<dir>/*<ext>" of the file being edited,
        falling back to "*.py" when the file has no extension.
        """
        SearchDialogBase.open(self, text, searchphrase)
        if io:
            path = io.filename or ""
        else:
            path = ""
        dir, base = os.path.split(path)
        head, tail = os.path.splitext(base)
        if not tail:
            tail = ".py"
        self.globvar.set(os.path.join(dir, "*" + tail))

    def create_entries(self):
        """Add the "In files:" glob entry below the base dialog's entries."""
        SearchDialogBase.create_entries(self)
        self.globent = self.make_entry("In files:", self.globvar)

    def create_other_buttons(self):
        """Add the "Recurse down subdirectories" checkbox (on by default)."""
        f = self.make_frame()
        btn = Checkbutton(f, anchor="w",
                variable=self.recvar,
                text="Recurse down subdirectories")
        btn.pack(side="top", fill="both")
        btn.select()

    def create_command_buttons(self):
        """Add the "Search Files" default command button."""
        SearchDialogBase.create_command_buttons(self)
        self.make_button("Search Files", self.default_command, 1)

    def default_command(self, event=None):
        """Run the search, writing results to a new OutputWindow.

        sys.stdout is temporarily redirected so the print statements in
        grep_it land in the OutputWindow; it is always restored.
        """
        prog = self.engine.getprog()
        if not prog:
            return
        path = self.globvar.get()
        if not path:
            self.top.bell()
            return
        from OutputWindow import OutputWindow
        save = sys.stdout
        try:
            sys.stdout = OutputWindow(self.flist)
            self.grep_it(prog, path)
        finally:
            sys.stdout = save

    def grep_it(self, prog, path):
        """Search compiled regex *prog* in every file matching glob *path*.

        Prints "file: lineno: line" for each hit, then a summary count.
        Unreadable files are reported and skipped rather than aborting.
        """
        dir, base = os.path.split(path)
        list = self.findfiles(dir, base, self.recvar.get())
        list.sort()
        self.close()
        pat = self.engine.getpat()
        print "Searching %r in %s ..." % (pat, path)
        hits = 0
        for fn in list:
            try:
                f = open(fn)
            except IOError, msg:
                print msg
                continue
            lineno = 0
            while 1:
                # Read in ~100KB batches to bound memory on huge files.
                block = f.readlines(100000)
                if not block:
                    break
                for line in block:
                    lineno = lineno + 1
                    if line[-1:] == '\n':
                        line = line[:-1]
                    if prog.search(line):
                        sys.stdout.write("%s: %s: %s\n" %
                                         (fn, lineno, line))
                        hits = hits + 1
        if hits:
            if hits == 1:
                s = ""
            else:
                s = "s"
            print "Found", hits, "hit%s." % s
            print "(Hint: right-click to open locations.)"
        else:
            print "No hits."

    def findfiles(self, dir, base, rec):
        """Return files under *dir* matching glob *base*.

        Recurses into subdirectories when *rec* is true.  Directory
        listing errors are printed and yield an empty result.
        """
        try:
            names = os.listdir(dir or os.curdir)
        except os.error, msg:
            print msg
            return []
        list = []
        subdirs = []
        for name in names:
            fn = os.path.join(dir, name)
            if os.path.isdir(fn):
                subdirs.append(fn)
            else:
                if fnmatch.fnmatch(name, base):
                    list.append(fn)
        if rec:
            for subdir in subdirs:
                list.extend(self.findfiles(subdir, base, rec))
        return list

    def close(self, event=None):
        """Hide the dialog and release the grab, if it is showing."""
        if self.top:
            self.top.grab_release()
            self.top.withdraw()
lgpl-2.1
imcom/pyleus
examples/apparent_temperature/apparent_temperature/temperature_generator.py
9
1148
from __future__ import absolute_import import logging from collections import namedtuple import random from apparent_temperature.measure_generator import MeasureGeneratorSpout log = logging.getLogger('temperature_generator') TemperatureMeasure = namedtuple( "TemperatureMeasure", "id_sensor timestamp temperature") class TemperatureSpout(MeasureGeneratorSpout): OUTPUT_FIELDS = TemperatureMeasure SENSORS = { 1042: (48, 12), 1077: (75, 10), 1078: (84, 20), 1079: (67, 8), 1082: (72, 4), 1126: (38, 10), 1156: (81, 5), 1178: (37, 11), 1201: (43, 14), 1234: (29, 16), 1312: (31, 6), 1448: (88, 8), 2089: (86, 6), } def measure(self, *args): return random.normalvariate(*args) def log(self, measure): log.debug("id: {0}, time: {1}, temperature: {2} F" .format(*measure)) if __name__ == '__main__': logging.basicConfig( level=logging.DEBUG, filename='/tmp/apparent_temperature_temperature.log', filemode='a', ) TemperatureSpout().run()
apache-2.0
NewpTone/stacklab-nova
debian/python-nova/usr/share/pyshared/nova/openstack/common/policy.py
6
9316
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright (c) 2011 OpenStack, LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Common Policy Engine Implementation""" import logging import urllib import urllib2 from nova.openstack.common.gettextutils import _ from nova.openstack.common import jsonutils LOG = logging.getLogger(__name__) _BRAIN = None def set_brain(brain): """Set the brain used by enforce(). Defaults use Brain() if not set. """ global _BRAIN _BRAIN = brain def reset(): """Clear the brain used by enforce().""" global _BRAIN _BRAIN = None def enforce(match_list, target_dict, credentials_dict, exc=None, *args, **kwargs): """Enforces authorization of some rules against credentials. :param match_list: nested tuples of data to match against The basic brain supports three types of match lists: 1) rules looks like: ``('rule:compute:get_instance',)`` Retrieves the named rule from the rules dict and recursively checks against the contents of the rule. 2) roles looks like: ``('role:compute:admin',)`` Matches if the specified role is in credentials_dict['roles']. 3) generic looks like: ``('tenant_id:%(tenant_id)s',)`` Substitutes values from the target dict into the match using the % operator and matches them against the creds dict. Combining rules: The brain returns True if any of the outer tuple of rules match and also True if all of the inner tuples match. You can use this to perform simple boolean logic. 
For example, the following rule would return True if the creds contain the role 'admin' OR the if the tenant_id matches the target dict AND the the creds contains the role 'compute_sysadmin': :: { "rule:combined": ( 'role:admin', ('tenant_id:%(tenant_id)s', 'role:compute_sysadmin') ) } Note that rule and role are reserved words in the credentials match, so you can't match against properties with those names. Custom brains may also add new reserved words. For example, the HttpBrain adds http as a reserved word. :param target_dict: dict of object properties Target dicts contain as much information as we can about the object being operated on. :param credentials_dict: dict of actor properties Credentials dicts contain as much information as we can about the user performing the action. :param exc: exception to raise Class of the exception to raise if the check fails. Any remaining arguments passed to enforce() (both positional and keyword arguments) will be passed to the exception class. If exc is not provided, returns False. 
:return: True if the policy allows the action :return: False if the policy does not allow the action and exc is not set """ global _BRAIN if not _BRAIN: _BRAIN = Brain() if not _BRAIN.check(match_list, target_dict, credentials_dict): if exc: raise exc(*args, **kwargs) return False return True class Brain(object): """Implements policy checking.""" _checks = {} @classmethod def _register(cls, name, func): cls._checks[name] = func @classmethod def load_json(cls, data, default_rule=None): """Init a brain using json instead of a rules dictionary.""" rules_dict = jsonutils.loads(data) return cls(rules=rules_dict, default_rule=default_rule) def __init__(self, rules=None, default_rule=None): if self.__class__ != Brain: LOG.warning(_("Inheritance-based rules are deprecated; use " "the default brain instead of %s.") % self.__class__.__name__) self.rules = rules or {} self.default_rule = default_rule def add_rule(self, key, match): self.rules[key] = match def _check(self, match, target_dict, cred_dict): try: match_kind, match_value = match.split(':', 1) except Exception: LOG.exception(_("Failed to understand rule %(match)r") % locals()) # If the rule is invalid, fail closed return False func = None try: old_func = getattr(self, '_check_%s' % match_kind) except AttributeError: func = self._checks.get(match_kind, self._checks.get(None, None)) else: LOG.warning(_("Inheritance-based rules are deprecated; update " "_check_%s") % match_kind) func = (lambda brain, kind, value, target, cred: old_func(value, target, cred)) if not func: LOG.error(_("No handler for matches of kind %s") % match_kind) # Fail closed return False return func(self, match_kind, match_value, target_dict, cred_dict) def check(self, match_list, target_dict, cred_dict): """Checks authorization of some rules against credentials. Detailed description of the check with examples in policy.enforce(). 
:param match_list: nested tuples of data to match against :param target_dict: dict of object properties :param credentials_dict: dict of actor properties :returns: True if the check passes """ if not match_list: return True for and_list in match_list: if isinstance(and_list, basestring): and_list = (and_list,) if all([self._check(item, target_dict, cred_dict) for item in and_list]): return True return False class HttpBrain(Brain): """A brain that can check external urls for policy. Posts json blobs for target and credentials. Note that this brain is deprecated; the http check is registered by default. """ pass def register(name, func=None): """ Register a function as a policy check. :param name: Gives the name of the check type, e.g., 'rule', 'role', etc. If name is None, a default function will be registered. :param func: If given, provides the function to register. If not given, returns a function taking one argument to specify the function to register, allowing use as a decorator. """ # Perform the actual decoration by registering the function. # Returns the function for compliance with the decorator # interface. 
def decorator(func): # Register the function Brain._register(name, func) return func # If the function is given, do the registration if func: return decorator(func) return decorator @register("rule") def _check_rule(brain, match_kind, match, target_dict, cred_dict): """Recursively checks credentials based on the brains rules.""" try: new_match_list = brain.rules[match] except KeyError: if brain.default_rule and match != brain.default_rule: new_match_list = ('rule:%s' % brain.default_rule,) else: return False return brain.check(new_match_list, target_dict, cred_dict) @register("role") def _check_role(brain, match_kind, match, target_dict, cred_dict): """Check that there is a matching role in the cred dict.""" return match.lower() in [x.lower() for x in cred_dict['roles']] @register('http') def _check_http(brain, match_kind, match, target_dict, cred_dict): """Check http: rules by calling to a remote server. This example implementation simply verifies that the response is exactly 'True'. A custom brain using response codes could easily be implemented. """ url = 'http:' + (match % target_dict) data = {'target': jsonutils.dumps(target_dict), 'credentials': jsonutils.dumps(cred_dict)} post_data = urllib.urlencode(data) f = urllib2.urlopen(url, post_data) return f.read() == "True" @register(None) def _check_generic(brain, match_kind, match, target_dict, cred_dict): """Check an individual match. Matches look like: tenant:%(tenant_id)s role:compute:admin """ # TODO(termie): do dict inspection via dot syntax match = match % target_dict if match_kind in cred_dict: return match == unicode(cred_dict[match_kind]) return False
apache-2.0
patcon/open-cabinet
venv/lib/python2.7/site-packages/setuptools/site-patch.py
720
2389
def __boot(): import sys import os PYTHONPATH = os.environ.get('PYTHONPATH') if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH): PYTHONPATH = [] else: PYTHONPATH = PYTHONPATH.split(os.pathsep) pic = getattr(sys,'path_importer_cache',{}) stdpath = sys.path[len(PYTHONPATH):] mydir = os.path.dirname(__file__) #print "searching",stdpath,sys.path for item in stdpath: if item==mydir or not item: continue # skip if current dir. on Windows, or my own directory importer = pic.get(item) if importer is not None: loader = importer.find_module('site') if loader is not None: # This should actually reload the current module loader.load_module('site') break else: try: import imp # Avoid import loop in Python >= 3.3 stream, path, descr = imp.find_module('site',[item]) except ImportError: continue if stream is None: continue try: # This should actually reload the current module imp.load_module('site',stream,path,descr) finally: stream.close() break else: raise ImportError("Couldn't find the real 'site' module") #print "loaded", __file__ known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp oldpos = getattr(sys,'__egginsert',0) # save old insertion position sys.__egginsert = 0 # and reset the current one for item in PYTHONPATH: addsitedir(item) sys.__egginsert += oldpos # restore effective old position d, nd = makepath(stdpath[0]) insert_at = None new_path = [] for item in sys.path: p, np = makepath(item) if np==nd and insert_at is None: # We've hit the first 'system' path entry, so added entries go here insert_at = len(new_path) if np in known_paths or insert_at is None: new_path.append(item) else: # new path after the insert point, back-insert it new_path.insert(insert_at, item) insert_at += 1 sys.path[:] = new_path if __name__=='site': __boot() del __boot
mit
log2timeline/l2tscaffolder
tests/test_helper/path_helper.py
1
1073
# -*- coding: utf-8 -*- """helper for the test""" import os __file__ = os.path.abspath(__file__) def TemplatePath() -> str: """ generating the template path for the tests Returns: str: the template path """ return os.path.join( os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'l2tscaffolder', 'bll', 'templates') def TestTemplatePath() -> str: """ generating the template path for the tests Returns: str: the template path """ return os.path.join( os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'tests', 'test_template') def TestDatabasePath() -> str: """ generating the template path for the tests Returns: str: the template path """ return os.path.join( os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'tests', 'test_database') def YapfStyleFilePath() -> str: """ gets the path to the yapf style file. Returns: str: the yapf file path """ return os.path.join( os.path.dirname(os.path.dirname(__file__)), '.style.yapf')
apache-2.0
quaddra/engage
python_pkg/engage/engine/install_context.py
1
2561
"""Global context for install engine. Currently, this is just a bunch of
global variables initialized via install_engine.py. Eventually, should
refactor into a context object which is passed to each resource manager.
"""
import os.path
import sys
import getpass

import engage.utils.pw_repository as pwr

# Module-level state initialized by setup_context().
password_repository = None
config_dir = None
cipher_file = None
salt_file = None
engage_file_layout = None

# we save the package library in a global so that resource managers can load
# additional packages (e.g. patches) during the install
package_library = None


def setup_context(engage_file_layout_, subprocess_mode, package_library_,
                  pw_database):
    """Setup some global state.

    Load the password repository and set the password_repository module
    variable. This requires obtaining a password from the user. If running
    in subprocess mode, we read the password from stdin. Otherwise, we use
    getpass to prompt the user for the password from the tty.

    In the event that we already have a password database, we just use that.
    """
    global password_repository, config_dir, cipher_file, salt_file, \
           package_library, engage_file_layout
    config_dir = engage_file_layout_.get_password_file_directory()
    package_library = package_library_
    engage_file_layout = engage_file_layout_
    if pw_database:
        # If we already have an in-memory password database,
        # use that and don't try to read from the file.
        password_repository = pw_database
    else:
        cipher_file = os.path.join(config_dir, pwr.REPOSITORY_FILE_NAME)
        salt_file = os.path.join(config_dir, pwr.SALT_FILE_NAME)
        if subprocess_mode:
            # Parent process pipes the master password over stdin.
            user_key = sys.stdin.read().rstrip()
        else:
            user_key = getpass.getpass()
        password_repository = pwr.PasswordRepository.load_from_file(cipher_file,
                                                                    salt_file,
                                                                    user_key)


def get_sudo_password(username=None):
    """Return the sudo password entry. If no entry is present, returns None.
    If the username is not specified, we use getpass.getuser(), the same
    as used by system_info.py.
    """
    global password_repository
    if not password_repository:
        # setup_context() has not been called (or produced no repository).
        return None
    if not username:
        username = getpass.getuser()
    key = "GenForma/%s/sudo_password" % username
    if not password_repository.has_key(key):
        return None
    return password_repository.get_value(key)
apache-2.0
jborean93/ansible
lib/ansible/vars/hostvars.py
51
5183
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type from ansible.module_utils.common._collections_compat import Mapping from ansible.template import Templar, AnsibleUndefined STATIC_VARS = [ 'ansible_version', 'ansible_play_hosts', 'ansible_dependent_role_names', 'ansible_play_role_names', 'ansible_role_names', 'inventory_hostname', 'inventory_hostname_short', 'inventory_file', 'inventory_dir', 'groups', 'group_names', 'omit', 'playbook_dir', 'play_hosts', 'role_names', 'ungrouped', ] __all__ = ['HostVars', 'HostVarsVars'] # Note -- this is a Mapping, not a MutableMapping class HostVars(Mapping): ''' A special view of vars_cache that adds values from the inventory when needed. 
''' def __init__(self, inventory, variable_manager, loader): self._inventory = inventory self._loader = loader self._variable_manager = variable_manager variable_manager._hostvars = self def set_variable_manager(self, variable_manager): self._variable_manager = variable_manager variable_manager._hostvars = self def set_inventory(self, inventory): self._inventory = inventory def _find_host(self, host_name): # does not use inventory.hosts so it can create localhost on demand return self._inventory.get_host(host_name) def raw_get(self, host_name): ''' Similar to __getitem__, however the returned data is not run through the templating engine to expand variables in the hostvars. ''' host = self._find_host(host_name) if host is None: return AnsibleUndefined(name="hostvars['%s']" % host_name) return self._variable_manager.get_vars(host=host, include_hostvars=False) def __setstate__(self, state): self.__dict__.update(state) # Methods __getstate__ and __setstate__ of VariableManager do not # preserve _loader and _hostvars attributes to improve pickle # performance and memory utilization. Since HostVars holds values # of those attributes already, assign them if needed. 
if self._variable_manager._loader is None: self._variable_manager._loader = self._loader if self._variable_manager._hostvars is None: self._variable_manager._hostvars = self def __getitem__(self, host_name): data = self.raw_get(host_name) if isinstance(data, AnsibleUndefined): return data return HostVarsVars(data, loader=self._loader) def set_host_variable(self, host, varname, value): self._variable_manager.set_host_variable(host, varname, value) def set_nonpersistent_facts(self, host, facts): self._variable_manager.set_nonpersistent_facts(host, facts) def set_host_facts(self, host, facts): self._variable_manager.set_host_facts(host, facts) def __contains__(self, host_name): # does not use inventory.hosts so it can create localhost on demand return self._find_host(host_name) is not None def __iter__(self): for host in self._inventory.hosts: yield host def __len__(self): return len(self._inventory.hosts) def __repr__(self): out = {} for host in self._inventory.hosts: out[host] = self.get(host) return repr(out) def __deepcopy__(self, memo): # We do not need to deepcopy because HostVars is immutable, # however we have to implement the method so we can deepcopy # variables' dicts that contain HostVars. return self class HostVarsVars(Mapping): def __init__(self, variables, loader): self._vars = variables self._loader = loader def __getitem__(self, var): templar = Templar(variables=self._vars, loader=self._loader) foo = templar.template(self._vars[var], fail_on_undefined=False, static_vars=STATIC_VARS) return foo def __contains__(self, var): return (var in self._vars) def __iter__(self): for var in self._vars.keys(): yield var def __len__(self): return len(self._vars.keys()) def __repr__(self): templar = Templar(variables=self._vars, loader=self._loader) return repr(templar.template(self._vars, fail_on_undefined=False, static_vars=STATIC_VARS))
gpl-3.0
ekostat/ekostat_calculator
core/jupyter_eventhandlers.py
1
1699
# -*- coding: utf-8 -*- """ Created on Thu Oct 5 17:03:55 2017 @author: a002087 """ import difflib import random import requests import ipywidgets as widgets import core def MultiCheckboxWidget(descriptions): """ Widget with a search field and lots of checkboxes """ search_widget = widgets.Text() options_dict = {description: widgets.Checkbox(description=description, value=False) for description in descriptions} options = [options_dict[description] for description in descriptions] options_widget = widgets.VBox(options, layout = widgets.Layout(overflow = 'scroll')) multi_select = widgets.VBox([search_widget, options_widget]) # Wire the search field to the checkboxes def on_text_change(change): search_input = change['new'] if search_input == '': # Reset search field new_options = [options_dict[description] for description in descriptions] else: # Filter by search field using difflib. close_matches = difflib.get_close_matches(search_input, descriptions, cutoff = 0.0) new_options = [options_dict[description] for description in close_matches] options_widget.children = new_options search_widget.observe(on_text_change, names='value') return multi_select ## Example of using the widget # ## Get lots of words for our options #words_url = 'https://svnweb.freebsd.org/csrg/share/dict/words?view=co&content-type=text/plain' #response = requests.get(words_url) #response.raise_for_status() #words = response.text #words = set([word.lower() for word in words.splitlines()]) #descriptions = random.sample(words, 100) # #multi_checkbox_widget(descriptions)
mit
ruibarreira/linuxtrail
usr/lib/python3.4/lib2to3/pgen2/parse.py
569
8053
# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Parser engine for the grammar tables generated by pgen. The grammar table must be loaded first. See Parser/parser.c in the Python distribution for additional info on how this parsing engine works. """ # Local imports from . import token class ParseError(Exception): """Exception to signal the parser is stuck.""" def __init__(self, msg, type, value, context): Exception.__init__(self, "%s: type=%r, value=%r, context=%r" % (msg, type, value, context)) self.msg = msg self.type = type self.value = value self.context = context class Parser(object): """Parser engine. The proper usage sequence is: p = Parser(grammar, [converter]) # create instance p.setup([start]) # prepare for parsing <for each input token>: if p.addtoken(...): # parse a token; may raise ParseError break root = p.rootnode # root of abstract syntax tree A Parser instance may be reused by calling setup() repeatedly. A Parser instance contains state pertaining to the current token sequence, and should not be used concurrently by different threads to parse separate token sequences. See driver.py for how to get input tokens by tokenizing a file or string. Parsing is complete when addtoken() returns True; the root of the abstract syntax tree can then be retrieved from the rootnode instance variable. When a syntax error occurs, addtoken() raises the ParseError exception. There is no error recovery; the parser cannot be used after a syntax error was reported (but it can be reinitialized by calling setup()). """ def __init__(self, grammar, convert=None): """Constructor. The grammar argument is a grammar.Grammar instance; see the grammar module for more information. The parser is not ready yet for parsing; you must call the setup() method to get it started. The optional convert argument is a function mapping concrete syntax tree nodes to abstract syntax tree nodes. 
If not given, no conversion is done and the syntax tree produced is the concrete syntax tree. If given, it must be a function of two arguments, the first being the grammar (a grammar.Grammar instance), and the second being the concrete syntax tree node to be converted. The syntax tree is converted from the bottom up. A concrete syntax tree node is a (type, value, context, nodes) tuple, where type is the node type (a token or symbol number), value is None for symbols and a string for tokens, context is None or an opaque value used for error reporting (typically a (lineno, offset) pair), and nodes is a list of children for symbols, and None for tokens. An abstract syntax tree node may be anything; this is entirely up to the converter function. """ self.grammar = grammar self.convert = convert or (lambda grammar, node: node) def setup(self, start=None): """Prepare for parsing. This *must* be called before starting to parse. The optional argument is an alternative start symbol; it defaults to the grammar's start symbol. You can use a Parser instance to parse any number of programs; each time you call setup() the parser is reset to an initial state determined by the (implicit or explicit) start symbol. """ if start is None: start = self.grammar.start # Each stack entry is a tuple: (dfa, state, node). # A node is a tuple: (type, value, context, children), # where children is a list of nodes or None, and context may be None. 
newnode = (start, None, None, []) stackentry = (self.grammar.dfas[start], 0, newnode) self.stack = [stackentry] self.rootnode = None self.used_names = set() # Aliased to self.rootnode.used_names in pop() def addtoken(self, type, value, context): """Add a token; return True iff this is the end of the program.""" # Map from token to label ilabel = self.classify(type, value, context) # Loop until the token is shifted; may raise exceptions while True: dfa, state, node = self.stack[-1] states, first = dfa arcs = states[state] # Look for a state with this label for i, newstate in arcs: t, v = self.grammar.labels[i] if ilabel == i: # Look it up in the list of labels assert t < 256 # Shift a token; we're done with it self.shift(type, value, newstate, context) # Pop while we are in an accept-only state state = newstate while states[state] == [(0, state)]: self.pop() if not self.stack: # Done parsing! return True dfa, state, node = self.stack[-1] states, first = dfa # Done with this token return False elif t >= 256: # See if it's a symbol and if we're in its first set itsdfa = self.grammar.dfas[t] itsstates, itsfirst = itsdfa if ilabel in itsfirst: # Push a symbol self.push(t, self.grammar.dfas[t], newstate, context) break # To continue the outer while loop else: if (0, state) in arcs: # An accepting state, pop it and try something else self.pop() if not self.stack: # Done parsing, but another token is input raise ParseError("too much input", type, value, context) else: # No success finding a transition raise ParseError("bad input", type, value, context) def classify(self, type, value, context): """Turn a token into a label. 
(Internal)""" if type == token.NAME: # Keep a listing of all used names self.used_names.add(value) # Check for reserved words ilabel = self.grammar.keywords.get(value) if ilabel is not None: return ilabel ilabel = self.grammar.tokens.get(type) if ilabel is None: raise ParseError("bad token", type, value, context) return ilabel def shift(self, type, value, newstate, context): """Shift a token. (Internal)""" dfa, state, node = self.stack[-1] newnode = (type, value, context, None) newnode = self.convert(self.grammar, newnode) if newnode is not None: node[-1].append(newnode) self.stack[-1] = (dfa, newstate, node) def push(self, type, newdfa, newstate, context): """Push a nonterminal. (Internal)""" dfa, state, node = self.stack[-1] newnode = (type, None, context, []) self.stack[-1] = (dfa, newstate, node) self.stack.append((newdfa, 0, newnode)) def pop(self): """Pop a nonterminal. (Internal)""" popdfa, popstate, popnode = self.stack.pop() newnode = self.convert(self.grammar, popnode) if newnode is not None: if self.stack: dfa, state, node = self.stack[-1] node[-1].append(newnode) else: self.rootnode = newnode self.rootnode.used_names = self.used_names
gpl-3.0
rm-hull/pcd8544
tests/test_hd44780.py
2
6780
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Richard Hull and contributors # See LICENSE.rst for details. """ Tests for the :py:class:`luma.lcd.device.hd44780` device. """ from luma.lcd.device import hd44780 from luma.core.render import canvas from luma.core.util import bytes_to_nibbles from luma.core.framebuffer import full_frame, diff_to_previous from luma.lcd.const import hd44780 as CONST from PIL import Image, ImageDraw from unittest.mock import Mock, call interface = Mock(unsafe=True, _bitmode=4) gpio = Mock() def test_init_4bitmode(): """ Test initialization of display using 4 bit mode """ hd44780(interface, gpio=gpio, framebuffer=full_frame()) to_8 = \ [call(0x3), call(0x3), call(0x3, 0x3)] * 3 to_4 = \ [call(0x3), call(0x3), call(0x3, 0x02)] fs = [CONST.FUNCTIONSET | CONST.DL4 | CONST.LINES2] calls = \ to_8 + \ to_4 + \ [call(*bytes_to_nibbles(fs))] + \ [call(*bytes_to_nibbles([CONST.DISPLAYOFF]))] + \ [call(*bytes_to_nibbles([CONST.ENTRY]))] + \ [call(*bytes_to_nibbles([CONST.DISPLAYON]))] + \ [call(*bytes_to_nibbles([CONST.DDRAMADDR]))] + \ [call(*bytes_to_nibbles([CONST.DDRAMADDR | CONST.LINES[1]]))] + \ [call(*bytes_to_nibbles([CONST.CLEAR]))] interface.command.assert_has_calls(calls) # Data to clear the screen calls = \ [call(bytes_to_nibbles([0x20] * 16))] + \ [call(bytes_to_nibbles([0x20] * 16))] interface.data.assert_has_calls(calls) def test_init_8bitmode(): """ Test initialization of display using 4 bit mode """ interface._bitmode = 8 hd44780(interface, gpio=gpio, framebuffer=full_frame()) to_8 = \ [call(0x30)] * 3 fs = [CONST.FUNCTIONSET | CONST.DL8 | CONST.LINES2] calls = \ to_8 + \ [call(*fs)] + \ [call(*[CONST.DISPLAYOFF])] + \ [call(*[CONST.ENTRY])] + \ [call(*[CONST.DISPLAYON])] + \ [call(*[CONST.DDRAMADDR])] + \ [call(*[CONST.DDRAMADDR | CONST.LINES[1]])] + \ [call(*[CONST.CLEAR])] interface.command.assert_has_calls(calls) # Data to clear the screen calls = \ [call([0x20] * 16)] + \ [call([0x20] * 16)] 
interface.data.assert_has_calls(calls) def test_display(): """ Test the display with a line of text and a rectangle to demonstrate correct functioning of the auto-create feature """ device = hd44780(interface, bitmode=8, gpio=gpio, framebuffer=full_frame()) interface.reset_mock() # Use canvas to create a screen worth of data with canvas(device) as drw: # Include unprintable character to show it gets ignored size = device.font.getsize('This is a test\uFFFF') drw.text(((80 - size[0]) // 2, 0), 'This is a test\uFFFF', font=device.font, fill='white') drw.rectangle((10, 10, 69, 14), fill='black', outline='white') drw.rectangle((10, 10, 49, 14), fill='white', outline='white') # Send DDRAMADDR and ascii for the line of text line1 = [call.command(0x80)] + \ [call.data([0x20, 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, 0x61, 0x20, 0x74, 0x65, 0x73, 0x74, 0x20])] # Create custom characters for the scrollbar custom = [call.command(0x40), call.data([0x00, 0x00, 0x1f, 0x1f, 0x1f, 0x1f, 0x1f, 0x00])] + \ [call.command(0x48), call.data([0x00, 0x00, 0x1f, 0x00, 0x00, 0x00, 0x1f, 0x00])] + \ [call.command(0x50), call.data([0x00, 0x00, 0x1f, 0x01, 0x01, 0x01, 0x1f, 0x00])] # Print the resulting custom characters to form the image of the scrollbar line2 = [call.command(0xc0), call.data([0x20, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x02, 0x20, 0x20])] interface.assert_has_calls(line1 + custom + line2) def test_custom_full(): """ Auto-create feature runs out of custom character space """ device = hd44780(interface, bitmode=8, gpio=gpio, framebuffer=diff_to_previous(num_segments=1)) # Consume 8 special character positions img = Image.new('1', (80, 16), 0) drw = ImageDraw.Draw(img) for i in range(8): drw.rectangle((i * 5, 0, (i + 1) * 5, i), fill='white', outline='white') device.display(img) interface.reset_mock() # Consume one more (on the last char position on screen) drw.line((75, 8, 79, 15), fill='white') device.display(img) 
interface.assert_has_calls([ call.command(0x40), call.data([0x10, 0x08, 0x08, 0x04, 0x04, 0x02, 0x02, 0x01]), call.command(0xcf), call.data([0x0])]) def test_get_font(): """ Test get font capability by requesting two fonts and printing a single character from each that will be different between the two fonts """ device = hd44780(interface, bitmode=8, gpio=gpio, framebuffer=full_frame()) img = Image.new('1', (10, 8), 0) a00 = device.get_font(0) a02 = device.get_font(1) drw = ImageDraw.Draw(img) assert a00.getsize('\u00E0') == (5, 8) drw.text((0, 0), '\u00E0', font=a00, fill='white') drw.text((5, 0), '\u00E0', font=a02, fill='white') assert img.tobytes() == \ b'\x02\x00\x01\x00H\x00\xab\x80\x90@\x93\xc0l@\x03\xc0' def test_no_contrast(): """ HD44780 should ignore requests to change contrast """ device = hd44780(interface, bitmode=8, gpio=gpio, framebuffer=full_frame()) device.contrast(100) def test_i2c_backlight(): """ Test of i2c_backlight """ def _mask(pin): """ Return a mask that contains a 1 in the pin position """ return 1 << pin interface = Mock(unsafe=True, _bitmode=4, _backlight_enabled=0, _mask=_mask) hd44780(interface, bitmode=8, backpack_pin=3, gpio=gpio, framebuffer=full_frame()) assert interface._backlight_enabled == 8 def test_i2c_does_not_support_backlight(): """ An exception is thrown if supplied interface does not support a backlight """ import luma.core interface = Mock(spec_set=luma.core.interface.serial.i2c) flag = False try: hd44780(interface, gpio=gpio, backpack_pin=3, framebuffer=full_frame()) except luma.core.error.UnsupportedPlatform as ex: assert str(ex) == "This I2C interface does not support a backlight" flag = True assert flag, "Expected exception but none occured" def test_unsupported_display_mode(): """ An exception is thrown if an unsupported display mode is requested """ import luma.core try: hd44780(interface, width=12, height=3, gpio=gpio, framebuffer=full_frame()) except luma.core.error.DeviceDisplayModeError as ex: assert str(ex) 
== "Unsupported display mode: 12 x 3"
mit
mmmavis/bedrock
bedrock/legal/views.py
4
2653
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from lib import l10n_utils import jingo from django.core.mail import EmailMessage from django.shortcuts import redirect from django.views.decorators.csrf import csrf_protect from funfactory.urlresolvers import reverse from forms import FraudReportForm FRAUD_REPORT_EMAIL_FROM = 'Mozilla.com <noreply@mozilla.com>' FRAUD_REPORT_EMAIL_SUBJECT = 'New violating website report' FRAUD_REPORT_EMAIL_TO = ['trademarks@mozilla.com', 'mozilla@mofo.com'] def submit_form(request, form): form_submitted = True if form.is_valid(): form_error = False subject = FRAUD_REPORT_EMAIL_SUBJECT sender = FRAUD_REPORT_EMAIL_FROM to = FRAUD_REPORT_EMAIL_TO msg = jingo.render_to_string(request, 'legal/emails/fraud-report.txt', form.cleaned_data) email = EmailMessage(subject, msg, sender, to) attachment = form.cleaned_data['input_attachment'] if (attachment): email.attach(attachment.name, attachment.read(), attachment.content_type) email.send() else: form_error = True return {'form_submitted': form_submitted, 'form_error': form_error} @csrf_protect def fraud_report(request): form = FraudReportForm(auto_id='%s') form_submitted = False form_error = False if request.method == 'POST': form = FraudReportForm(request.POST, request.FILES) form_results = submit_form(request, form) form_submitted = form_results['form_submitted'] form_error = form_results['form_error'] template_vars = { 'form': form, 'form_submitted': form_submitted, 'form_error': form_error, } if request.POST and not form_error: # Seeing the form was submitted without error, redirect, do not simply # send a response to avoid problem described below. 
# @see https://bugzilla.mozilla.org/show_bug.cgi?id=873476 (3.2) response = redirect(reverse('legal.fraud-report'), template_vars) response['Location'] += '?submitted=%s' % form_submitted return response else: # If the below is called after a redirect the template_vars will be lost, therefore # we need to update the form_submitted state from the submitted url parameter. submitted = request.GET.get('submitted') == 'True' template_vars['form_submitted'] = submitted return l10n_utils.render(request, 'legal/fraud-report.html', template_vars)
mpl-2.0
mattclark/osf.io
addons/mendeley/tests/test_views.py
23
1210
# -*- coding: utf-8 -*- import mock import pytest import urlparse from addons.base.tests import views from addons.base.tests.utils import MockFolder from addons.mendeley.models import Mendeley from addons.mendeley.tests.utils import MendeleyTestCase, mock_responses from tests.base import OsfTestCase from addons.mendeley.provider import MendeleyCitationsProvider from addons.mendeley.serializer import MendeleySerializer API_URL = 'https://api.mendeley.com' pytestmark = pytest.mark.django_db class TestAuthViews(MendeleyTestCase, views.OAuthAddonAuthViewsTestCaseMixin, OsfTestCase): pass class TestConfigViews(MendeleyTestCase, views.OAuthCitationAddonConfigViewsTestCaseMixin, OsfTestCase): folder = MockFolder() Serializer = MendeleySerializer client = Mendeley citationsProvider = MendeleyCitationsProvider foldersApiUrl = urlparse.urljoin(API_URL, 'folders') documentsApiUrl = urlparse.urljoin(API_URL, 'documents') mockResponses = mock_responses @mock.patch('addons.mendeley.models.NodeSettings._fetch_folder_name', mock.PropertyMock(return_value='Fake Name')) def test_deauthorize_node(self): super(TestConfigViews, self).test_deauthorize_node()
apache-2.0
vightel/FloodMapsWorkshop
python/download_landsat8.py
3
4636
#! /usr/bin/env python import os,sys,math,urllib2,urllib import optparse ########################################################################### class OptionParser (optparse.OptionParser): def check_required (self, opt): option = self.get_option(opt) # Assumes the option's 'default' is set to None! if getattr(self.values, option.dest) is None: self.error("%s option not supplied" % option) #############################"Connection Earth explorer sans proxy def connect_earthexplorer_no_proxy(usgs): opener = urllib2.build_opener(urllib2.HTTPCookieProcessor()) urllib2.install_opener(opener) params = urllib.urlencode(dict(username=usgs['account'],password= usgs['passwd'])) f = opener.open("https://earthexplorer.usgs.gov/login/", params) data = f.read() f.close() if data.find('You must sign in as a registered user to download data or place orders for USGS EROS products')>0 : print "Authentification failed" sys.exit(-1) return #############################"pour des gros fichiers def downloadChunks(url,rep,nom_fic): """Telecharge de gros fichiers par morceaux inspire de http://josh.gourneau.com """ try: req = urllib2.urlopen(url) #taille du fichier if (req.info().gettype()=='text/html'): print "erreur : le fichier est au format html" lignes=req.read() if lignes.find('Download Not Found')>0 : raise TypeError else: print lignes print sys.exit(-1) total_size = int(req.info().getheader('Content-Length').strip()) if (total_size<50000): print "erreur : le fichier est trop petit pour etre une image landsat" print url sys.exit(-1) print nom_fic,total_size downloaded = 0 CHUNK = 1024 * 1024 *8 with open(rep+'/'+nom_fic, 'wb') as fp: while True: chunk = req.read(CHUNK) downloaded += len(chunk) sys.stdout.write(str(math.floor((float(downloaded) / total_size) * 100 )) +'%') sys.stdout.flush() if not chunk: break fp.write(chunk) print 'fini' except urllib2.HTTPError, e: print "HTTP Error:",e.code , url return False except urllib2.URLError, e: print "URL Error:",e.reason , url return 
False return rep,nom_fic ###################################################################################### ###############""main ################################################################################# ################Lecture des arguments if len(sys.argv) == 1: prog = os.path.basename(sys.argv[0]) print ' '+sys.argv[0]+' [options]' print " Aide : ", prog, " --help" print " ou : ", prog, " -h" print "example (scene): python %s -s scene -u usgs.txt"%sys.argv[0] sys.exit(-1) else: usage = "usage: %prog [options] " parser = OptionParser(usage=usage) parser.add_option("-s", "--scene", dest="scene", action="store", type="string", help="coordonnees WRS2 de la scene (ex 198030)", default=None) (options, args) = parser.parse_args() parser.check_required("-s") # Load USGS Account/Password from Env usgs_account = os.environ['USGS_ACCOUNT'] usgs_password = os.environ['USGS_PASSWORD'] assert(usgs_account), "USGS_ACCOUNT undefined" assert(usgs_password), "USGS_PASSWORD undefined" usgs={'account':usgs_account,'passwd':usgs_password} rep='../data/l8' if not os.path.exists(rep): os.mkdir(rep) ############Telechargement des produits par scene repert = 4923 url="http://earthexplorer.usgs.gov/download/%s/%s/STANDARD/EE"%(repert,options.scene) tgz = rep+'/'+options.scene+'.tar.gz' if not(os.path.exists(tgz)): try: connect_earthexplorer_no_proxy(usgs) downloadChunks(url,rep,options.scene+'.tar.gz') except TypeError: print ' produit %s non trouve'%options.scene else : print ' produit %s deja telecharge'%options.scene # # Let's start the processing # # Generate RGB Composite cmd = "./landsat8_composite_toa.py --scene "+options.scene + " --red 4 --green 3 --blue 2" print cmd err = os.system(cmd) if err: print "Error generating l8 composite" sys.exit(-1) # Generate surface water cmd = "./landsat8_toa_watermap.py --scene "+options.scene print cmd err = os.system(cmd) if err: print "Error generating l8 surface water map" sys.exit(-1) # Generate browse image cmd = 
"./landsat8_to_topojson.py --scene "+options.scene print cmd err = os.system(cmd) if err: print "Error generating l8 browseimage" sys.exit(-1) # Generate browse image cmd = "./landsat8_browseimage.py --scene "+options.scene print cmd err = os.system(cmd) if err: print "Error generating l8 browseimage" sys.exit(-1)
apache-2.0
polynomial/dd-agent
tests/checks/mock/test_windows_service.py
29
4073
# stdlib from mock import Mock # project from checks import AgentCheck from tests.checks.common import AgentCheckTest WinHttpAutoProxySvc_attr = { # Running Windows Service 'AcceptPause': False, 'AcceptStop': True, 'Caption': "WinHTTP Web Proxy Auto-Discovery Service", 'CheckPoint': 0, 'CreationClassName': "Win32_Service", 'Description': "WinHTTP implements the client HTTP stack and provides developers" " with a Win32 API and COM Automation component for sending HTTP requests" " and receiving responses. In addition, WinHTTP provides support " " for auto-discovering a proxy configuration via its implementation" " of the Web Proxy Auto-Discovery (WPAD) protocol.", 'DesktopInteract': False, 'DisplayName': "WinHTTP Web Proxy Auto-Discovery Service", 'ErrorControl': "Normal", 'ExitCode': 0, 'Name': "WinHttpAutoProxySvc", 'PathName': "C:\\Windows\\system32\\svchost.exe -k LocalService", 'ProcessId': 864, 'ServiceSpecificExitCode': 0, 'ServiceType': "Share Process", 'Started': True, 'StartMode': "Manual", 'StartName': "NT AUTHORITY\\LocalService", 'State': "Running", 'Status': "OK", 'SystemCreationClassName': "Win32_ComputerSystem", 'SystemName': "WIN-7022K3K6GF8", 'TagId': 0, 'WaitHint': 0, } WSService_attr = { # Stopped Windows Service 'AcceptPause': False, 'AcceptStop': False, 'Caption': "Windows Store Service (WSService)", 'CheckPoint': 0, 'CreationClassName': "Win32_Service", 'Description': "Provides infrastructure support for Windows Store." 
"This service is started on demand and if ded applications" " bought using Windows Store will not behave correctly.", 'DesktopInteract': False, 'DisplayName': "Windows Store Service (WSService)", 'ErrorControl': "Normal", 'ExitCode': 1077, 'Name': "WSService", 'PathName': "C:\\Windows\\System32\\svchost.exe -k wsappx", 'ProcessId': 0, 'ServiceSpecificExitCode': 0, 'ServiceType': "Share Process", 'Started': False, 'StartMode': "Manual", 'StartName': "LocalSystem", 'State': "Stopped", 'Status': "OK", 'SystemCreationClassName': "Win32_ComputerSystem", 'SystemName': "WIN-7022K3K6GF8", 'TagId': 0, 'WaitHint': 0, } class Mocked_Win32_Service(object): """ Generate Mocked Win32 Service from given attributes """ def __init__(self, **entries): self.__dict__.update(entries) class Mocked_WMI(Mock): """ Mock WMI methods for test purpose """ def Win32_Service(self, name): """ Returns mock match Win32 Service """ if name == "WinHttpAutoProxySvc": return [Mocked_Win32_Service(**WinHttpAutoProxySvc_attr)] if name == "WSService": return [Mocked_Win32_Service(**WSService_attr)] return [] class WindowsServiceTestCase(AgentCheckTest): CHECK_NAME = 'windows_service' WIN_SERVICES_CONFIG = { 'host': ".", 'services': ["WinHttpAutoProxySvc", "WSService"] } def test_check(self): """ Returns the right service checks """ # Mocking `wmi` Python package import sys sys.modules['wmi'] = Mocked_WMI() # Run check config = { 'instances': [self.WIN_SERVICES_CONFIG] } self.run_check(config) # Test service checks self.assertServiceCheck('windows_service.state', status=AgentCheck.OK, count=1, tags=[u'service:WinHttpAutoProxySvc', u'host:' + self.check.hostname]) self.assertServiceCheck('windows_service.state', status=AgentCheck.CRITICAL, count=1, tags=[u'service:WSService', u'host:' + self.check.hostname]) self.coverage_report()
bsd-3-clause
xingyepei/edx-platform
lms/djangoapps/courseware/tests/test_view_authentication.py
89
17038
import datetime import pytz from django.core.urlresolvers import reverse from mock import patch from nose.plugins.attrib import attr from courseware.access import has_access from courseware.tests.helpers import CourseAccessTestMixin, LoginEnrollmentTestCase from courseware.tests.factories import ( BetaTesterFactory, StaffFactory, GlobalStaffFactory, InstructorFactory, OrgStaffFactory, OrgInstructorFactory, ) from xmodule.modulestore.django import modulestore from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory from student.tests.factories import UserFactory, CourseEnrollmentFactory @attr('shard_1') class TestViewAuth(ModuleStoreTestCase, LoginEnrollmentTestCase): """ Check that view authentication works properly. """ ACCOUNT_INFO = [('view@test.com', 'foo'), ('view2@test.com', 'foo')] @staticmethod def _reverse_urls(names, course): """ Reverse a list of course urls. `names` is a list of URL names that correspond to sections in a course. `course` is the instance of CourseDescriptor whose section URLs are to be returned. Returns a list URLs corresponding to section in the passed in course. """ return [reverse(name, kwargs={'course_id': course.id.to_deprecated_string()}) for name in names] def _check_non_staff_light(self, course): """ Check that non-staff have access to light urls. `course` is an instance of CourseDescriptor. """ urls = [reverse('about_course', kwargs={'course_id': course.id.to_deprecated_string()}), reverse('courses')] for url in urls: self.assert_request_status_code(200, url) def _check_non_staff_dark(self, course): """ Check that non-staff don't have access to dark urls. 
""" names = ['courseware', 'instructor_dashboard', 'progress'] urls = self._reverse_urls(names, course) urls.extend([ reverse('book', kwargs={'course_id': course.id.to_deprecated_string(), 'book_index': index}) for index, __ in enumerate(course.textbooks) ]) for url in urls: self.assert_request_status_code(404, url) def _check_staff(self, course): """ Check that access is right for staff in course. """ names = ['about_course', 'instructor_dashboard', 'progress'] urls = self._reverse_urls(names, course) urls.extend([ reverse('book', kwargs={'course_id': course.id.to_deprecated_string(), 'book_index': index}) for index in xrange(len(course.textbooks)) ]) for url in urls: self.assert_request_status_code(200, url) # The student progress tab is not accessible to a student # before launch, so the instructor view-as-student feature # should return a 404 as well. # TODO (vshnayder): If this is not the behavior we want, will need # to make access checking smarter and understand both the effective # user (the student), and the requesting user (the prof) url = reverse( 'student_progress', kwargs={ 'course_id': course.id.to_deprecated_string(), 'student_id': self.enrolled_user.id, } ) self.assert_request_status_code(404, url) # The courseware url should redirect, not 200 url = self._reverse_urls(['courseware'], course)[0] self.assert_request_status_code(302, url) def login(self, user): return super(TestViewAuth, self).login(user.email, 'test') def setUp(self): super(TestViewAuth, self).setUp() self.course = CourseFactory.create(number='999', display_name='Robot_Super_Course') self.courseware_chapter = ItemFactory.create(display_name='courseware') self.overview_chapter = ItemFactory.create( parent_location=self.course.location, display_name='Super Overview' ) self.welcome_section = ItemFactory.create( parent_location=self.overview_chapter.location, display_name='Super Welcome' ) self.welcome_unit = ItemFactory.create( parent_location=self.welcome_section.location, 
display_name='Super Unit' ) self.course = modulestore().get_course(self.course.id) self.test_course = CourseFactory.create(org=self.course.id.org) self.other_org_course = CourseFactory.create(org='Other_Org_Course') self.sub_courseware_chapter = ItemFactory.create( parent_location=self.test_course.location, display_name='courseware' ) self.sub_overview_chapter = ItemFactory.create( parent_location=self.sub_courseware_chapter.location, display_name='Overview' ) self.sub_welcome_section = ItemFactory.create( parent_location=self.sub_overview_chapter.location, display_name='Welcome' ) self.sub_welcome_unit = ItemFactory.create( parent_location=self.sub_welcome_section.location, display_name='New Unit' ) self.test_course = modulestore().get_course(self.test_course.id) self.global_staff_user = GlobalStaffFactory() self.unenrolled_user = UserFactory(last_name="Unenrolled") self.enrolled_user = UserFactory(last_name="Enrolled") CourseEnrollmentFactory(user=self.enrolled_user, course_id=self.course.id) CourseEnrollmentFactory(user=self.enrolled_user, course_id=self.test_course.id) self.staff_user = StaffFactory(course_key=self.course.id) self.instructor_user = InstructorFactory(course_key=self.course.id) self.org_staff_user = OrgStaffFactory(course_key=self.course.id) self.org_instructor_user = OrgInstructorFactory(course_key=self.course.id) def test_redirection_unenrolled(self): """ Verify unenrolled student is redirected to the 'about' section of the chapter instead of the 'Welcome' section after clicking on the courseware tab. """ self.login(self.unenrolled_user) response = self.client.get(reverse('courseware', kwargs={'course_id': self.course.id.to_deprecated_string()})) self.assertRedirects( response, reverse( 'about_course', args=[self.course.id.to_deprecated_string()] ) ) def test_redirection_enrolled(self): """ Verify enrolled student is redirected to the 'Welcome' section of the chapter after clicking on the courseware tab. 
""" self.login(self.enrolled_user) response = self.client.get( reverse( 'courseware', kwargs={'course_id': self.course.id.to_deprecated_string()} ) ) self.assertRedirects( response, reverse( 'courseware_section', kwargs={'course_id': self.course.id.to_deprecated_string(), 'chapter': self.overview_chapter.url_name, 'section': self.welcome_section.url_name} ) ) def test_instructor_page_access_nonstaff(self): """ Verify non-staff cannot load the instructor dashboard, the grade views, and student profile pages. """ self.login(self.enrolled_user) urls = [reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()}), reverse('instructor_dashboard', kwargs={'course_id': self.test_course.id.to_deprecated_string()})] # Shouldn't be able to get to the instructor pages for url in urls: self.assert_request_status_code(404, url) def test_staff_course_access(self): """ Verify staff can load the staff dashboard, the grade views, and student profile pages for their course. """ self.login(self.staff_user) # Now should be able to get to self.course, but not self.test_course url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()}) self.assert_request_status_code(200, url) url = reverse('instructor_dashboard', kwargs={'course_id': self.test_course.id.to_deprecated_string()}) self.assert_request_status_code(404, url) def test_instructor_course_access(self): """ Verify instructor can load the instructor dashboard, the grade views, and student profile pages for their course. 
""" self.login(self.instructor_user) # Now should be able to get to self.course, but not self.test_course url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()}) self.assert_request_status_code(200, url) url = reverse('instructor_dashboard', kwargs={'course_id': self.test_course.id.to_deprecated_string()}) self.assert_request_status_code(404, url) def test_org_staff_access(self): """ Verify org staff can load the instructor dashboard, the grade views, and student profile pages for course in their org. """ self.login(self.org_staff_user) url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()}) self.assert_request_status_code(200, url) url = reverse('instructor_dashboard', kwargs={'course_id': self.test_course.id.to_deprecated_string()}) self.assert_request_status_code(200, url) url = reverse('instructor_dashboard', kwargs={'course_id': self.other_org_course.id.to_deprecated_string()}) self.assert_request_status_code(404, url) def test_org_instructor_access(self): """ Verify org instructor can load the instructor dashboard, the grade views, and student profile pages for course in their org. """ self.login(self.org_instructor_user) url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()}) self.assert_request_status_code(200, url) url = reverse('instructor_dashboard', kwargs={'course_id': self.test_course.id.to_deprecated_string()}) self.assert_request_status_code(200, url) url = reverse('instructor_dashboard', kwargs={'course_id': self.other_org_course.id.to_deprecated_string()}) self.assert_request_status_code(404, url) def test_global_staff_access(self): """ Verify the global staff user can access any course. 
""" self.login(self.global_staff_user) # and now should be able to load both urls = [reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()}), reverse('instructor_dashboard', kwargs={'course_id': self.test_course.id.to_deprecated_string()})] for url in urls: self.assert_request_status_code(200, url) @patch.dict('courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False}) def test_dark_launch_enrolled_student(self): """ Make sure that before course start, students can't access course pages. """ # Make courses start in the future now = datetime.datetime.now(pytz.UTC) tomorrow = now + datetime.timedelta(days=1) self.course.start = tomorrow self.test_course.start = tomorrow self.course = self.update_course(self.course, self.user.id) self.test_course = self.update_course(self.test_course, self.user.id) self.assertFalse(self.course.has_started()) self.assertFalse(self.test_course.has_started()) # First, try with an enrolled student self.login(self.enrolled_user) # shouldn't be able to get to anything except the light pages self._check_non_staff_light(self.course) self._check_non_staff_dark(self.course) self._check_non_staff_light(self.test_course) self._check_non_staff_dark(self.test_course) @patch.dict('courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False}) def test_dark_launch_instructor(self): """ Make sure that before course start instructors can access the page for their course. """ now = datetime.datetime.now(pytz.UTC) tomorrow = now + datetime.timedelta(days=1) self.course.start = tomorrow self.test_course.start = tomorrow self.course = self.update_course(self.course, self.user.id) self.test_course = self.update_course(self.test_course, self.user.id) self.login(self.instructor_user) # Enroll in the classes---can't see courseware otherwise. 
self.enroll(self.course, True) self.enroll(self.test_course, True) # should now be able to get to everything for self.course self._check_non_staff_light(self.test_course) self._check_non_staff_dark(self.test_course) self._check_staff(self.course) @patch.dict('courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False}) def test_dark_launch_global_staff(self): """ Make sure that before course start staff can access course pages. """ now = datetime.datetime.now(pytz.UTC) tomorrow = now + datetime.timedelta(days=1) self.course.start = tomorrow self.test_course.start = tomorrow self.course = self.update_course(self.course, self.user.id) self.test_course = self.update_course(self.test_course, self.user.id) self.login(self.global_staff_user) self.enroll(self.course, True) self.enroll(self.test_course, True) # and now should be able to load both self._check_staff(self.course) self._check_staff(self.test_course) @patch.dict('courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False}) def test_enrollment_period(self): """ Check that enrollment periods work. 
""" # Make courses start in the future now = datetime.datetime.now(pytz.UTC) tomorrow = now + datetime.timedelta(days=1) nextday = tomorrow + datetime.timedelta(days=1) yesterday = now - datetime.timedelta(days=1) # self.course's enrollment period hasn't started self.course.enrollment_start = tomorrow self.course.enrollment_end = nextday # test_course course's has self.test_course.enrollment_start = yesterday self.test_course.enrollment_end = tomorrow self.course = self.update_course(self.course, self.user.id) self.test_course = self.update_course(self.test_course, self.user.id) # First, try with an enrolled student self.login(self.unenrolled_user) self.assertFalse(self.enroll(self.course)) self.assertTrue(self.enroll(self.test_course)) self.logout() self.login(self.instructor_user) self.assertTrue(self.enroll(self.course)) # unenroll and try again self.login(self.global_staff_user) self.assertTrue(self.enroll(self.course)) @attr('shard_1') class TestBetatesterAccess(ModuleStoreTestCase, CourseAccessTestMixin): """ Tests for the beta tester feature """ def setUp(self): super(TestBetatesterAccess, self).setUp() now = datetime.datetime.now(pytz.UTC) tomorrow = now + datetime.timedelta(days=1) self.course = CourseFactory(days_early_for_beta=2, start=tomorrow) self.content = ItemFactory(parent=self.course) self.normal_student = UserFactory() self.beta_tester = BetaTesterFactory(course_key=self.course.id) @patch.dict('courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False}) def test_course_beta_period(self): """ Check that beta-test access works for courses. """ self.assertFalse(self.course.has_started()) self.assertCannotAccessCourse(self.normal_student, 'load', self.course) self.assertCanAccessCourse(self.beta_tester, 'load', self.course) @patch.dict('courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False}) def test_content_beta_period(self): """ Check that beta-test access works for content. 
""" # student user shouldn't see it self.assertFalse(has_access(self.normal_student, 'load', self.content, self.course.id)) # now the student should see it self.assertTrue(has_access(self.beta_tester, 'load', self.content, self.course.id))
agpl-3.0
YangSongzhou/django
django/contrib/auth/forms.py
116
14378
from __future__ import unicode_literals

from django import forms
from django.contrib.auth import (
    authenticate, get_user_model, password_validation,
)
from django.contrib.auth.hashers import (
    UNUSABLE_PASSWORD_PREFIX, identify_hasher,
)
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.shortcuts import get_current_site
from django.core.mail import EmailMultiAlternatives
from django.forms.utils import flatatt
from django.template import loader
from django.utils.encoding import force_bytes
from django.utils.html import format_html, format_html_join
from django.utils.http import urlsafe_base64_encode
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
from django.utils.translation import ugettext, ugettext_lazy as _


class ReadOnlyPasswordHashWidget(forms.Widget):
    """Widget that renders a read-only summary of a stored password hash.

    It never renders an ``<input>`` element, so the hash can be shown in the
    admin without ever being exposed in (or accepted back from) form data.
    """

    def render(self, name, value, attrs):
        """Render a ``<div>`` describing the hash in ``value``."""
        # ``value`` is the encoded hash string straight from the database.
        encoded = value
        final_attrs = self.build_attrs(attrs)

        if not encoded or encoded.startswith(UNUSABLE_PASSWORD_PREFIX):
            summary = mark_safe("<strong>%s</strong>" % ugettext("No password set."))
        else:
            try:
                hasher = identify_hasher(encoded)
            except ValueError:
                # Hash prefix didn't match any configured hasher.
                summary = mark_safe("<strong>%s</strong>" % ugettext(
                    "Invalid password format or unknown hashing algorithm."))
            else:
                # e.g. "algorithm: pbkdf2_sha256  iterations: 24000  ..."
                summary = format_html_join(
                    '', "<strong>{}</strong>: {} ",
                    ((ugettext(key), value)
                     for key, value in hasher.safe_summary(encoded).items())
                )

        return format_html("<div{}>{}</div>", flatatt(final_attrs), summary)


class ReadOnlyPasswordHashField(forms.Field):
    """Form field backing :class:`ReadOnlyPasswordHashWidget`.

    Display-only: it is not required, never reports a change, and always
    resolves to its initial value regardless of submitted data.
    """
    widget = ReadOnlyPasswordHashWidget

    def __init__(self, *args, **kwargs):
        kwargs.setdefault("required", False)
        super(ReadOnlyPasswordHashField, self).__init__(*args, **kwargs)

    def bound_data(self, data, initial):
        # Always return initial because the widget doesn't
        # render an input field.
        return initial

    def has_changed(self, initial, data):
        # Read-only field: never counts toward ``changed_data``.
        return False


class UserCreationForm(forms.ModelForm):
    """
    A form that creates a user, with no privileges, from the given username and
    password.
    """
    error_messages = {
        'password_mismatch': _("The two password fields didn't match."),
    }
    password1 = forms.CharField(label=_("Password"),
                                widget=forms.PasswordInput)
    password2 = forms.CharField(label=_("Password confirmation"),
                                widget=forms.PasswordInput,
                                help_text=_("Enter the same password as before, for verification."))

    class Meta:
        model = User
        fields = ("username",)

    def clean_password2(self):
        """Check that the two password entries match and pass the validators."""
        password1 = self.cleaned_data.get("password1")
        password2 = self.cleaned_data.get("password2")
        if password1 and password2 and password1 != password2:
            raise forms.ValidationError(
                self.error_messages['password_mismatch'],
                code='password_mismatch',
            )
        # Set the username on the (unsaved) instance first so that password
        # validators that compare against user attributes can see it.
        self.instance.username = self.cleaned_data.get('username')
        password_validation.validate_password(self.cleaned_data.get('password2'), self.instance)
        return password2

    def save(self, commit=True):
        """Save the new user with a properly hashed password."""
        user = super(UserCreationForm, self).save(commit=False)
        user.set_password(self.cleaned_data["password1"])
        if commit:
            user.save()
        return user


class UserChangeForm(forms.ModelForm):
    """Admin form for editing an existing user; the password is display-only."""
    password = ReadOnlyPasswordHashField(label=_("Password"),
        help_text=_("Raw passwords are not stored, so there is no way to see "
                    "this user's password, but you can change the password "
                    "using <a href=\"../password/\">this form</a>."))

    class Meta:
        model = User
        fields = '__all__'

    def __init__(self, *args, **kwargs):
        super(UserChangeForm, self).__init__(*args, **kwargs)
        f = self.fields.get('user_permissions')
        if f is not None:
            # Avoid one query per permission when rendering the widget.
            f.queryset = f.queryset.select_related('content_type')

    def clean_password(self):
        # Regardless of what the user provides, return the initial value.
        # This is done here, rather than on the field, because the
        # field does not have access to the initial value
        return self.initial["password"]


class AuthenticationForm(forms.Form):
    """
    Base class for authenticating users. Extend this to get a form that accepts
    username/password logins.
    """
    username = forms.CharField(max_length=254)
    password = forms.CharField(label=_("Password"), widget=forms.PasswordInput)

    error_messages = {
        'invalid_login': _("Please enter a correct %(username)s and password. "
                           "Note that both fields may be case-sensitive."),
        'inactive': _("This account is inactive."),
    }

    def __init__(self, request=None, *args, **kwargs):
        """
        The 'request' parameter is set for custom auth use by subclasses.
        The form data comes in via the standard 'data' kwarg.
        """
        self.request = request
        self.user_cache = None  # set by clean() on successful authentication
        super(AuthenticationForm, self).__init__(*args, **kwargs)

        # Set the label for the "username" field.
        UserModel = get_user_model()
        self.username_field = UserModel._meta.get_field(UserModel.USERNAME_FIELD)
        if self.fields['username'].label is None:
            self.fields['username'].label = capfirst(self.username_field.verbose_name)

    def clean(self):
        """Authenticate the credentials and cache the resulting user."""
        username = self.cleaned_data.get('username')
        password = self.cleaned_data.get('password')

        if username and password:
            self.user_cache = authenticate(username=username,
                                           password=password)
            if self.user_cache is None:
                raise forms.ValidationError(
                    self.error_messages['invalid_login'],
                    code='invalid_login',
                    params={'username': self.username_field.verbose_name},
                )
            else:
                self.confirm_login_allowed(self.user_cache)

        return self.cleaned_data

    def confirm_login_allowed(self, user):
        """
        Controls whether the given User may log in. This is a policy setting,
        independent of end-user authentication. This default behavior is to
        allow login by active users, and reject login by inactive users.

        If the given user cannot log in, this method should raise a
        ``forms.ValidationError``.

        If the given user may log in, this method should return None.
        """
        if not user.is_active:
            raise forms.ValidationError(
                self.error_messages['inactive'],
                code='inactive',
            )

    def get_user_id(self):
        """Return the authenticated user's pk, or None before authentication."""
        if self.user_cache:
            return self.user_cache.id
        return None

    def get_user(self):
        """Return the authenticated user, or None before authentication."""
        return self.user_cache


class PasswordResetForm(forms.Form):
    """Form that emails a one-time password-reset link to matching users."""
    email = forms.EmailField(label=_("Email"), max_length=254)

    def send_mail(self, subject_template_name, email_template_name,
                  context, from_email, to_email, html_email_template_name=None):
        """
        Sends a django.core.mail.EmailMultiAlternatives to `to_email`.
        """
        subject = loader.render_to_string(subject_template_name, context)
        # Email subject *must not* contain newlines
        subject = ''.join(subject.splitlines())
        body = loader.render_to_string(email_template_name, context)

        email_message = EmailMultiAlternatives(subject, body, from_email, [to_email])
        if html_email_template_name is not None:
            html_email = loader.render_to_string(html_email_template_name, context)
            email_message.attach_alternative(html_email, 'text/html')

        email_message.send()

    def get_users(self, email):
        """Given an email, return matching user(s) who should receive a reset.

        This allows subclasses to more easily customize the default policies
        that prevent inactive users and users with unusable passwords from
        resetting their password.
        """
        active_users = get_user_model()._default_manager.filter(
            email__iexact=email, is_active=True)
        # Users without a usable password (e.g. external auth) get no email.
        return (u for u in active_users if u.has_usable_password())

    def save(self, domain_override=None,
             subject_template_name='registration/password_reset_subject.txt',
             email_template_name='registration/password_reset_email.html',
             use_https=False, token_generator=default_token_generator,
             from_email=None, request=None, html_email_template_name=None,
             extra_email_context=None):
        """
        Generates a one-use only link for resetting password and sends to the
        user.
        """
        email = self.cleaned_data["email"]
        for user in self.get_users(email):
            if not domain_override:
                current_site = get_current_site(request)
                site_name = current_site.name
                domain = current_site.domain
            else:
                site_name = domain = domain_override
            context = {
                'email': user.email,
                'domain': domain,
                'site_name': site_name,
                'uid': urlsafe_base64_encode(force_bytes(user.pk)),
                'user': user,
                'token': token_generator.make_token(user),
                'protocol': 'https' if use_https else 'http',
            }
            if extra_email_context is not None:
                context.update(extra_email_context)
            self.send_mail(subject_template_name, email_template_name,
                           context, from_email, user.email,
                           html_email_template_name=html_email_template_name)


class SetPasswordForm(forms.Form):
    """
    A form that lets a user set their password without entering the old
    password.
    """
    error_messages = {
        'password_mismatch': _("The two password fields didn't match."),
    }
    new_password1 = forms.CharField(label=_("New password"),
                                    widget=forms.PasswordInput,
                                    help_text=password_validation.password_validators_help_text_html())
    new_password2 = forms.CharField(label=_("New password confirmation"),
                                    widget=forms.PasswordInput)

    def __init__(self, user, *args, **kwargs):
        self.user = user
        super(SetPasswordForm, self).__init__(*args, **kwargs)

    def clean_new_password2(self):
        """Check the two entries match and satisfy the password validators."""
        password1 = self.cleaned_data.get('new_password1')
        password2 = self.cleaned_data.get('new_password2')
        if password1 and password2:
            if password1 != password2:
                raise forms.ValidationError(
                    self.error_messages['password_mismatch'],
                    code='password_mismatch',
                )
        password_validation.validate_password(password2, self.user)
        return password2

    def save(self, commit=True):
        """Hash and store the new password on ``self.user``."""
        password = self.cleaned_data["new_password1"]
        self.user.set_password(password)
        if commit:
            self.user.save()
        return self.user


class PasswordChangeForm(SetPasswordForm):
    """
    A form that lets a user change their password by entering their old
    password.
    """
    error_messages = dict(SetPasswordForm.error_messages, **{
        'password_incorrect': _("Your old password was entered incorrectly. "
                                "Please enter it again."),
    })
    old_password = forms.CharField(label=_("Old password"),
                                   widget=forms.PasswordInput)

    # Ensure the old password is cleaned (and therefore checked) first.
    field_order = ['old_password', 'new_password1', 'new_password2']

    def clean_old_password(self):
        """
        Validates that the old_password field is correct.
        """
        old_password = self.cleaned_data["old_password"]
        if not self.user.check_password(old_password):
            raise forms.ValidationError(
                self.error_messages['password_incorrect'],
                code='password_incorrect',
            )
        return old_password


class AdminPasswordChangeForm(forms.Form):
    """
    A form used to change the password of a user in the admin interface.
    """
    error_messages = {
        'password_mismatch': _("The two password fields didn't match."),
    }
    required_css_class = 'required'
    password1 = forms.CharField(
        label=_("Password"),
        widget=forms.PasswordInput,
        help_text=password_validation.password_validators_help_text_html(),
    )
    password2 = forms.CharField(
        label=_("Password (again)"),
        widget=forms.PasswordInput,
        help_text=_("Enter the same password as before, for verification."),
    )

    def __init__(self, user, *args, **kwargs):
        self.user = user
        super(AdminPasswordChangeForm, self).__init__(*args, **kwargs)

    def clean_password2(self):
        """Check the two entries match and satisfy the password validators."""
        password1 = self.cleaned_data.get('password1')
        password2 = self.cleaned_data.get('password2')
        if password1 and password2:
            if password1 != password2:
                raise forms.ValidationError(
                    self.error_messages['password_mismatch'],
                    code='password_mismatch',
                )
        password_validation.validate_password(password2, self.user)
        return password2

    def save(self, commit=True):
        """
        Saves the new password.
        """
        password = self.cleaned_data["password1"]
        self.user.set_password(password)
        if commit:
            self.user.save()
        return self.user

    def _get_changed_data(self):
        # Report only a synthetic 'password' change (e.g. for the admin log),
        # and only when every field on the form changed; otherwise nothing.
        data = super(AdminPasswordChangeForm, self).changed_data
        for name in self.fields.keys():
            if name not in data:
                return []
        return ['password']
    changed_data = property(_get_changed_data)
bsd-3-clause
seem-sky/kbengine
kbe/res/scripts/common/Lib/email/_encoded_words.py
85
7913
"""Helpers for working with RFC 2047 encoded words.

This is currently a package-private API, but will be considered for
promotion to a public API if there is demand.
"""

# An encoded word looks like this:
#
#   =?charset[*lang]?cte?encoded_string?=
#
# 'charset' is (hopefully) one of the preferred MIME charset names; see the
# charset module for details.  'cte' (Content Transfer Encoding) is 'q'
# (Quoted Printable) or 'b' (Base64), compared case-insensitively.  In theory
# other letters could name other encodings, but in practice that (almost?)
# never happens, so there is no public registration API -- YAGNI for now.
# The optional '*lang' part, indicated here by brackets that are not part of
# the syntax, is almost never encountered in practice.
#
# Every CTE decoder takes the encoded_string (as bytes) and returns a tuple
# (decoded_bytes, defects): the original binary that was encoded, plus a list
# of MessageDefect instances describing any problems hit along the way.
# Every CTE encoder maps a bytes object to an ascii-only str, and each CTE
# also supplies a length function that predicts the encoded length of a
# bytes object without actually encoding it.
#
# The module's main entry points are decode(), which dispatches on the cte
# specifier found in the encoded word, and encode(), which wraps the payload
# in the RFC 2047 "chrome" and can automatically pick the shorter of the two
# encodings.  See their docstrings for details.

import re
import base64
import binascii
import functools
from string import ascii_letters, digits

from email import errors

__all__ = ['decode_q',
           'encode_q',
           'decode_b',
           'encode_b',
           'len_q',
           'len_b',
           'decode',
           'encode',
           ]


#
# Quoted Printable
#

# Substitution callable that replaces every =XX escape with the byte it names.
_q_unhex = functools.partial(
    re.compile(br'=([a-fA-F0-9]{2})').sub,
    lambda match: bytes([int(match.group(1), 16)]))


def decode_q(encoded):
    """Undo q-encoding; return (decoded_bytes, defects).

    '_' stands for a space and =XX escapes are expanded; this decoder never
    records defects, so the defect list is always empty.
    """
    return _q_unhex(encoded.replace(b'_', b' ')), []


class _QByteMap(dict):
    """Lazily-populated map from byte value to its q-encoded str form."""

    # Bytes that may pass through without escaping.
    safe = b'-!*+/' + ascii_letters.encode('ascii') + digits.encode('ascii')

    def __missing__(self, key):
        mapped = chr(key) if key in self.safe else "={:02X}".format(key)
        self[key] = mapped
        return mapped


_q_byte_map = _QByteMap()

# In headers spaces are mapped to '_'.
_q_byte_map[ord(' ')] = '_'


def encode_q(bstring):
    """Q-encode a bytes object, returning an ascii-only str."""
    return ''.join(map(_q_byte_map.__getitem__, bstring))


def len_q(bstring):
    """Return the length that encode_q(bstring) would produce."""
    return sum(map(len, map(_q_byte_map.__getitem__, bstring)))


#
# Base64
#

def decode_b(encoded):
    """Undo base64 encoding; return (decoded_bytes, defects).

    Incorrect padding and non-alphabet characters are tolerated; each is
    reported through the returned defect list.
    """
    defects = []
    remainder = len(encoded) % 4
    if remainder:
        defects.append(errors.InvalidBase64PaddingDefect())
        padded = encoded + b'==='[:4 - remainder]
    else:
        padded = encoded
    try:
        return base64.b64decode(padded, validate=True), defects
    except binascii.Error:
        # Padding was correct (possibly after repair), so the failure must be
        # an invalid character.  Non-alphabet characters are ignored as far
        # as padding goes, but we don't know how many there are, so probe
        # each possible padding length until one works.
        defects = [errors.InvalidBase64CharactersDefect()]
        for pad_len in range(4):
            try:
                return (base64.b64decode(encoded + b'=' * pad_len,
                                         validate=False),
                        defects)
            except binascii.Error:
                if pad_len == 0:
                    defects.append(errors.InvalidBase64PaddingDefect())
        # This should never happen.
        raise AssertionError("unexpected binascii.Error")


def encode_b(bstring):
    """Base64-encode a bytes object, returning an ascii-only str."""
    return base64.b64encode(bstring).decode('ascii')


def len_b(bstring):
    """Return the length that encode_b(bstring) would produce."""
    full_groups, trailing = divmod(len(bstring), 3)
    # 4 output chars for every 3 input bytes, or nonzero fraction thereof.
    return full_groups * 4 + (4 if trailing else 0)


_cte_decoders = {
    'q': decode_q,
    'b': decode_b,
    }


def decode(ew):
    """Decode encoded word and return (string, charset, lang, defects) tuple.

    An RFC 2047/2243 encoded word has the form:

        =?charset*lang?cte?encoded_string?=

    where '*lang' may be omitted but the other parts may not be.  This
    function expects exactly such a string (that is, it does not check the
    syntax and may raise errors if the string is not well formed), and
    returns the encoded_string decoded first from its Content Transfer
    Encoding and then from the resulting bytes into unicode using the
    specified charset.  If the cte-decoded string does not successfully
    decode using the specified character set, a defect is added to the
    defects list and the unknown octets are replaced by the unicode
    'unknown' character U+FFFD.

    The specified charset and language are returned.  The default for
    language, which is rarely if ever encountered, is the empty string.
    """
    _, charset, cte, cte_string, _ = ew.split('?')
    charset, _, lang = charset.partition('*')
    cte = cte.lower()
    # Recover the raw bytes, then undo the content-transfer-encoding.
    payload = cte_string.encode('ascii', 'surrogateescape')
    payload, defects = _cte_decoders[cte](payload)
    # Finally decode the bytes into text using the declared charset.
    try:
        text = payload.decode(charset)
    except UnicodeError:
        defects.append(errors.UndecodableBytesDefect(
            "Encoded word contains bytes not decodable using "
            "{} charset".format(charset)))
        text = payload.decode(charset, 'surrogateescape')
    except LookupError:
        text = payload.decode('ascii', 'surrogateescape')
        if charset.lower() != 'unknown-8bit':
            defects.append(errors.CharsetError(
                "Unknown charset {} in encoded word; decoded as "
                "unknown bytes".format(charset)))
    return text, charset, lang, defects


_cte_encoders = {
    'q': encode_q,
    'b': encode_b,
    }

_cte_encode_length = {
    'q': len_q,
    'b': len_b,
    }


def encode(string, charset='utf-8', encoding=None, lang=''):
    """Encode string using the CTE encoding that produces the shorter result.

    Produces an RFC 2047/2243 encoded word of the form:

        =?charset*lang?cte?encoded_string?=

    where '*lang' is omitted unless the 'lang' parameter is given a value.
    Optional argument charset (defaults to utf-8) specifies the charset to
    use to encode the string to binary before CTE encoding it.  Optional
    argument 'encoding' is the cte specifier for the encoding that should be
    used ('q' or 'b'); if it is None (the default) the encoding which
    produces the shortest encoded sequence is used, except that 'q' is
    preferred if it is up to five characters longer.  Optional argument
    'lang' (default '') gives the RFC 2243 language string to specify in the
    encoded word.
    """
    if charset == 'unknown-8bit':
        payload = string.encode('ascii', 'surrogateescape')
    else:
        payload = string.encode(charset)
    if encoding is None:
        qlen = _cte_encode_length['q'](payload)
        blen = _cte_encode_length['b'](payload)
        # Bias toward q; 5 is arbitrary.
        encoding = 'q' if qlen - blen < 5 else 'b'
    token = _cte_encoders[encoding](payload)
    if lang:
        lang = '*' + lang
    return "=?{}{}?{}?{}?=".format(charset, lang, encoding, token)
lgpl-3.0
devoid/nova
nova/virt/xenapi/fake.py
1
35663
# Copyright (c) 2010 Citrix Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # #============================================================================ # # Parts of this file are based upon xmlrpclib.py, the XML-RPC client # interface included in the Python distribution. # # Copyright (c) 1999-2002 by Secret Labs AB # Copyright (c) 1999-2002 by Fredrik Lundh # # By obtaining, using, and/or copying this software and/or its # associated documentation, you agree that you have read, understood, # and will comply with the following terms and conditions: # # Permission to use, copy, modify, and distribute this software and # its associated documentation for any purpose and without fee is # hereby granted, provided that the above copyright notice appears in # all copies, and that both that copyright notice and this permission # notice appear in supporting documentation, and that the name of # Secret Labs AB or the author not be used in advertising or publicity # pertaining to distribution of the software without specific, written # prior permission. # # SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD # TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT- # ABILITY AND FITNESS. 
IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR # BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY # DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, # WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS # ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE # OF THIS SOFTWARE. # -------------------------------------------------------------------- """ A fake XenAPI SDK. """ import base64 import pickle import random import uuid from xml.sax import saxutils import zlib import pprint from nova import exception from nova.openstack.common.gettextutils import _ from nova.openstack.common import jsonutils from nova.openstack.common import log as logging from nova.openstack.common import timeutils from nova.openstack.common import units _CLASSES = ['host', 'network', 'session', 'pool', 'SR', 'VBD', 'PBD', 'VDI', 'VIF', 'PIF', 'VM', 'VLAN', 'task'] _db_content = {} LOG = logging.getLogger(__name__) def log_db_contents(msg=None): text = msg or "" content = pprint.pformat(_db_content) LOG.debug(_("%(text)s: _db_content => %(content)s"), {'text': text, 'content': content}) def reset(): for c in _CLASSES: _db_content[c] = {} host = create_host('fake') create_vm('fake dom 0', 'Running', is_a_template=False, is_control_domain=True, resident_on=host) def reset_table(table): if table not in _CLASSES: return _db_content[table] = {} def _create_pool(name_label): return _create_object('pool', {'name_label': name_label}) def create_host(name_label, hostname='fake_name', address='fake_addr'): host_ref = _create_object('host', {'name_label': name_label, 'hostname': hostname, 'address': address}) host_default_sr_ref = _create_local_srs(host_ref) _create_local_pif(host_ref) # Create a pool if we don't have one already if len(_db_content['pool']) == 0: pool_ref = _create_pool('') _db_content['pool'][pool_ref]['master'] = host_ref _db_content['pool'][pool_ref]['default-SR'] = host_default_sr_ref 
_db_content['pool'][pool_ref]['suspend-image-SR'] = host_default_sr_ref def create_network(name_label, bridge): return _create_object('network', {'name_label': name_label, 'bridge': bridge}) def create_vm(name_label, status, **kwargs): if status == 'Running': domid = random.randrange(1, 1 << 16) resident_on = _db_content['host'].keys()[0] else: domid = -1 resident_on = '' vm_rec = kwargs.copy() vm_rec.update({'name_label': name_label, 'domid': domid, 'power_state': status, 'blocked_operations': {}, 'resident_on': resident_on}) vm_ref = _create_object('VM', vm_rec) after_VM_create(vm_ref, vm_rec) return vm_ref def destroy_vm(vm_ref): vm_rec = _db_content['VM'][vm_ref] vbd_refs = vm_rec['VBDs'] # NOTE(johannes): Shallow copy since destroy_vbd will remove itself # from the list for vbd_ref in vbd_refs[:]: destroy_vbd(vbd_ref) del _db_content['VM'][vm_ref] def destroy_vbd(vbd_ref): vbd_rec = _db_content['VBD'][vbd_ref] vm_ref = vbd_rec['VM'] vm_rec = _db_content['VM'][vm_ref] vm_rec['VBDs'].remove(vbd_ref) vdi_ref = vbd_rec['VDI'] vdi_rec = _db_content['VDI'][vdi_ref] vdi_rec['VBDs'].remove(vbd_ref) del _db_content['VBD'][vbd_ref] def destroy_vdi(vdi_ref): vdi_rec = _db_content['VDI'][vdi_ref] vbd_refs = vdi_rec['VBDs'] # NOTE(johannes): Shallow copy since destroy_vbd will remove itself # from the list for vbd_ref in vbd_refs[:]: destroy_vbd(vbd_ref) del _db_content['VDI'][vdi_ref] def create_vdi(name_label, sr_ref, **kwargs): vdi_rec = { 'SR': sr_ref, 'read_only': False, 'type': '', 'name_label': name_label, 'name_description': '', 'sharable': False, 'other_config': {}, 'location': '', 'xenstore_data': {}, 'sm_config': {'vhd-parent': None}, 'physical_utilisation': '123', 'managed': True, } vdi_rec.update(kwargs) vdi_ref = _create_object('VDI', vdi_rec) after_VDI_create(vdi_ref, vdi_rec) return vdi_ref def after_VDI_create(vdi_ref, vdi_rec): vdi_rec.setdefault('VBDs', []) def create_vbd(vm_ref, vdi_ref, userdevice=0): vbd_rec = {'VM': vm_ref, 'VDI': vdi_ref, 
'userdevice': str(userdevice), 'currently_attached': False} vbd_ref = _create_object('VBD', vbd_rec) after_VBD_create(vbd_ref, vbd_rec) return vbd_ref def after_VBD_create(vbd_ref, vbd_rec): """Create read-only fields and backref from VM and VDI to VBD when VBD is created. """ vbd_rec['currently_attached'] = False vbd_rec['device'] = '' vm_ref = vbd_rec['VM'] vm_rec = _db_content['VM'][vm_ref] vm_rec['VBDs'].append(vbd_ref) vm_name_label = _db_content['VM'][vm_ref]['name_label'] vbd_rec['vm_name_label'] = vm_name_label vdi_ref = vbd_rec['VDI'] if vdi_ref and vdi_ref != "OpaqueRef:NULL": vdi_rec = _db_content['VDI'][vdi_ref] vdi_rec['VBDs'].append(vbd_ref) def after_VM_create(vm_ref, vm_rec): """Create read-only fields in the VM record.""" vm_rec.setdefault('domid', -1) vm_rec.setdefault('is_control_domain', False) vm_rec.setdefault('is_a_template', False) vm_rec.setdefault('memory_static_max', str(8 * units.Gi)) vm_rec.setdefault('memory_dynamic_max', str(8 * units.Gi)) vm_rec.setdefault('VCPUs_max', str(4)) vm_rec.setdefault('VBDs', []) vm_rec.setdefault('resident_on', '') def create_pbd(host_ref, sr_ref, attached): config = {'path': '/var/run/sr-mount/%s' % sr_ref} return _create_object('PBD', {'device_config': config, 'host': host_ref, 'SR': sr_ref, 'currently_attached': attached}) def create_task(name_label): return _create_object('task', {'name_label': name_label, 'status': 'pending'}) def _create_local_srs(host_ref): """Create an SR that looks like the one created on the local disk by default by the XenServer installer. Also, fake the installation of an ISO SR. 
""" create_sr(name_label='Local storage ISO', type='iso', other_config={'i18n-original-value-name_label': 'Local storage ISO', 'i18n-key': 'local-storage-iso'}, physical_size=80000, physical_utilisation=40000, virtual_allocation=80000, host_ref=host_ref) return create_sr(name_label='Local storage', type='ext', other_config={'i18n-original-value-name_label': 'Local storage', 'i18n-key': 'local-storage'}, physical_size=40000, physical_utilisation=20000, virtual_allocation=10000, host_ref=host_ref) def create_sr(**kwargs): sr_ref = _create_object( 'SR', {'name_label': kwargs.get('name_label'), 'type': kwargs.get('type'), 'content_type': kwargs.get('type', 'user'), 'shared': kwargs.get('shared', False), 'physical_size': kwargs.get('physical_size', str(1 << 30)), 'physical_utilisation': str( kwargs.get('physical_utilisation', 0)), 'virtual_allocation': str(kwargs.get('virtual_allocation', 0)), 'other_config': kwargs.get('other_config', {}), 'VDIs': kwargs.get('VDIs', [])}) pbd_ref = create_pbd(kwargs.get('host_ref'), sr_ref, True) _db_content['SR'][sr_ref]['PBDs'] = [pbd_ref] return sr_ref def _create_local_pif(host_ref): pif_ref = _create_object('PIF', {'name-label': 'Fake PIF', 'MAC': '00:11:22:33:44:55', 'physical': True, 'VLAN': -1, 'device': 'fake0', 'host_uuid': host_ref, 'network': '', 'IP': '10.1.1.1', 'IPv6': '', 'uuid': '', 'management': 'true'}) _db_content['PIF'][pif_ref]['uuid'] = pif_ref return pif_ref def _create_object(table, obj): ref = str(uuid.uuid4()) obj['uuid'] = str(uuid.uuid4()) _db_content[table][ref] = obj return ref def _create_sr(table, obj): sr_type = obj[6] # Forces fake to support iscsi only if sr_type != 'iscsi' and sr_type != 'nfs': raise Failure(['SR_UNKNOWN_DRIVER', sr_type]) host_ref = _db_content['host'].keys()[0] sr_ref = _create_object(table, obj[2]) if sr_type == 'iscsi': vdi_ref = create_vdi('', sr_ref) pbd_ref = create_pbd(host_ref, sr_ref, True) _db_content['SR'][sr_ref]['VDIs'] = [vdi_ref] _db_content['SR'][sr_ref]['PBDs'] = 
[pbd_ref] _db_content['VDI'][vdi_ref]['SR'] = sr_ref _db_content['PBD'][pbd_ref]['SR'] = sr_ref return sr_ref def _create_vlan(pif_ref, vlan_num, network_ref): pif_rec = get_record('PIF', pif_ref) vlan_pif_ref = _create_object('PIF', {'name-label': 'Fake VLAN PIF', 'MAC': '00:11:22:33:44:55', 'physical': True, 'VLAN': vlan_num, 'device': pif_rec['device'], 'host_uuid': pif_rec['host_uuid']}) return _create_object('VLAN', {'tagged-pif': pif_ref, 'untagged-pif': vlan_pif_ref, 'tag': vlan_num}) def get_all(table): return _db_content[table].keys() def get_all_records(table): return _db_content[table] def _query_matches(record, query): # Simple support for the XenServer query language: # 'field "host"="<uuid>" and field "SR"="<sr uuid>"' # Tested through existing tests (e.g. calls to find_network_with_bridge) and_clauses = query.split(" and ") if len(and_clauses) > 1: matches = True for clause in and_clauses: matches = matches and _query_matches(record, clause) return matches or_clauses = query.split(" or ") if len(or_clauses) > 1: matches = False for clause in or_clauses: matches = matches or _query_matches(record, clause) return matches if query[:4] == 'not ': return not _query_matches(record, query[4:]) # Now it must be a single field - bad queries never match if query[:5] != 'field': return False (field, value) = query[6:].split('=', 1) # Some fields (e.g. 
name_label, memory_overhead) have double # underscores in the DB, but only single underscores when querying field = field.replace("__", "_").strip(" \"'") value = value.strip(" \"'") # Strings should be directly compared if isinstance(record[field], str): return record[field] == value # But for all other value-checks, convert to a string first # (Notably used for booleans - which can be lower or camel # case and are interpreted/sanitised by XAPI) return str(record[field]).lower() == value.lower() def get_all_records_where(table_name, query): matching_records = {} table = _db_content[table_name] for record in table: if _query_matches(table[record], query): matching_records[record] = table[record] return matching_records def get_record(table, ref): if ref in _db_content[table]: return _db_content[table].get(ref) else: raise Failure(['HANDLE_INVALID', table, ref]) def check_for_session_leaks(): if len(_db_content['session']) > 0: raise exception.NovaException('Sessions have leaked: %s' % _db_content['session']) def as_value(s): """Helper function for simulating XenAPI plugin responses. It escapes and wraps the given argument. """ return '<value>%s</value>' % saxutils.escape(s) def as_json(*args, **kwargs): """Helper function for simulating XenAPI plugin responses for those that are returning JSON. If this function is given plain arguments, then these are rendered as a JSON list. If it's given keyword arguments then these are rendered as a JSON dict. 
""" arg = args or kwargs return jsonutils.dumps(arg) class Failure(Exception): def __init__(self, details): self.details = details def __str__(self): try: return str(self.details) except Exception: return "XenAPI Fake Failure: %s" % str(self.details) def _details_map(self): return dict([(str(i), self.details[i]) for i in range(len(self.details))]) class SessionBase(object): """ Base class for Fake Sessions """ def __init__(self, uri): self._session = None def pool_get_default_SR(self, _1, pool_ref): return _db_content['pool'].values()[0]['default-SR'] def VBD_insert(self, _1, vbd_ref, vdi_ref): vbd_rec = get_record('VBD', vbd_ref) get_record('VDI', vdi_ref) vbd_rec['empty'] = False vbd_rec['VDI'] = vdi_ref def VBD_plug(self, _1, ref): rec = get_record('VBD', ref) if rec['currently_attached']: raise Failure(['DEVICE_ALREADY_ATTACHED', ref]) rec['currently_attached'] = True rec['device'] = rec['userdevice'] def VBD_unplug(self, _1, ref): rec = get_record('VBD', ref) if not rec['currently_attached']: raise Failure(['DEVICE_ALREADY_DETACHED', ref]) rec['currently_attached'] = False rec['device'] = '' def VBD_add_to_other_config(self, _1, vbd_ref, key, value): db_ref = _db_content['VBD'][vbd_ref] if 'other_config' not in db_ref: db_ref['other_config'] = {} if key in db_ref['other_config']: raise Failure(['MAP_DUPLICATE_KEY', 'VBD', 'other_config', vbd_ref, key]) db_ref['other_config'][key] = value def VBD_get_other_config(self, _1, vbd_ref): db_ref = _db_content['VBD'][vbd_ref] if 'other_config' not in db_ref: return {} return db_ref['other_config'] def PBD_create(self, _1, pbd_rec): pbd_ref = _create_object('PBD', pbd_rec) _db_content['PBD'][pbd_ref]['currently_attached'] = False return pbd_ref def PBD_plug(self, _1, pbd_ref): rec = get_record('PBD', pbd_ref) if rec['currently_attached']: raise Failure(['DEVICE_ALREADY_ATTACHED', rec]) rec['currently_attached'] = True sr_ref = rec['SR'] _db_content['SR'][sr_ref]['PBDs'] = [pbd_ref] def PBD_unplug(self, _1, pbd_ref): 
rec = get_record('PBD', pbd_ref) if not rec['currently_attached']: raise Failure(['DEVICE_ALREADY_DETACHED', rec]) rec['currently_attached'] = False sr_ref = rec['SR'] _db_content['SR'][sr_ref]['PBDs'].remove(pbd_ref) def SR_introduce(self, _1, sr_uuid, label, desc, type, content_type, shared, sm_config): ref = None rec = None for ref, rec in _db_content['SR'].iteritems(): if rec.get('uuid') == sr_uuid: # make forgotten = 0 and return ref _db_content['SR'][ref]['forgotten'] = 0 return ref # SR not found in db, so we create one params = {'sr_uuid': sr_uuid, 'label': label, 'desc': desc, 'type': type, 'content_type': content_type, 'shared': shared, 'sm_config': sm_config} sr_ref = _create_object('SR', params) _db_content['SR'][sr_ref]['uuid'] = sr_uuid _db_content['SR'][sr_ref]['forgotten'] = 0 vdi_per_lun = False if type == 'iscsi': # Just to be clear vdi_per_lun = True if vdi_per_lun: # we need to create a vdi because this introduce # is likely meant for a single vdi vdi_ref = create_vdi('', sr_ref) _db_content['SR'][sr_ref]['VDIs'] = [vdi_ref] _db_content['VDI'][vdi_ref]['SR'] = sr_ref return sr_ref def SR_forget(self, _1, sr_ref): _db_content['SR'][sr_ref]['forgotten'] = 1 def SR_scan(self, _1, sr_ref): return def VM_get_xenstore_data(self, _1, vm_ref): return _db_content['VM'][vm_ref].get('xenstore_data', {}) def VM_remove_from_xenstore_data(self, _1, vm_ref, key): db_ref = _db_content['VM'][vm_ref] if 'xenstore_data' not in db_ref: return if key in db_ref['xenstore_data']: del db_ref['xenstore_data'][key] def VM_add_to_xenstore_data(self, _1, vm_ref, key, value): db_ref = _db_content['VM'][vm_ref] if 'xenstore_data' not in db_ref: db_ref['xenstore_data'] = {} db_ref['xenstore_data'][key] = value def VM_pool_migrate(self, _1, vm_ref, host_ref, options): pass def VDI_remove_from_other_config(self, _1, vdi_ref, key): db_ref = _db_content['VDI'][vdi_ref] if 'other_config' not in db_ref: return if key in db_ref['other_config']: del db_ref['other_config'][key] def 
VDI_add_to_other_config(self, _1, vdi_ref, key, value): db_ref = _db_content['VDI'][vdi_ref] if 'other_config' not in db_ref: db_ref['other_config'] = {} if key in db_ref['other_config']: raise Failure(['MAP_DUPLICATE_KEY', 'VDI', 'other_config', vdi_ref, key]) db_ref['other_config'][key] = value def VDI_copy(self, _1, vdi_to_copy_ref, sr_ref): db_ref = _db_content['VDI'][vdi_to_copy_ref] name_label = db_ref['name_label'] read_only = db_ref['read_only'] sharable = db_ref['sharable'] other_config = db_ref['other_config'].copy() return create_vdi(name_label, sr_ref, sharable=sharable, read_only=read_only, other_config=other_config) def VDI_clone(self, _1, vdi_to_clone_ref): db_ref = _db_content['VDI'][vdi_to_clone_ref] sr_ref = db_ref['SR'] return self.VDI_copy(_1, vdi_to_clone_ref, sr_ref) def host_compute_free_memory(self, _1, ref): #Always return 12GB available return 12 * units.Gi def _plugin_agent_version(self, method, args): return as_json(returncode='0', message='1.0\\r\\n') def _plugin_agent_key_init(self, method, args): return as_json(returncode='D0', message='1') def _plugin_agent_password(self, method, args): return as_json(returncode='0', message='success') def _plugin_agent_inject_file(self, method, args): return as_json(returncode='0', message='success') def _plugin_agent_resetnetwork(self, method, args): return as_json(returncode='0', message='success') def _plugin_agent_agentupdate(self, method, args): url = args["url"] md5 = args["md5sum"] message = "success with %(url)s and hash:%(md5)s" % dict(url=url, md5=md5) return as_json(returncode='0', message=message) def _plugin_noop(self, method, args): return '' def _plugin_pickle_noop(self, method, args): return pickle.dumps(None) def _plugin_migration_transfer_vhd(self, method, args): kwargs = pickle.loads(args['params'])['kwargs'] vdi_ref = self.xenapi_request('VDI.get_by_uuid', (kwargs['vdi_uuid'], )) assert vdi_ref return pickle.dumps(None) _plugin_glance_upload_vhd = _plugin_pickle_noop 
_plugin_kernel_copy_vdi = _plugin_noop _plugin_kernel_create_kernel_ramdisk = _plugin_noop _plugin_kernel_remove_kernel_ramdisk = _plugin_noop _plugin_migration_move_vhds_into_sr = _plugin_noop def _plugin_xenhost_host_data(self, method, args): return jsonutils.dumps({'host_memory': {'total': 10, 'overhead': 20, 'free': 30, 'free-computed': 40}, 'host_hostname': 'fake-xenhost', 'host_cpu_info': {'cpu_count': 50}, }) def _plugin_poweraction(self, method, args): return jsonutils.dumps({"power_action": method[5:]}) _plugin_xenhost_host_reboot = _plugin_poweraction _plugin_xenhost_host_startup = _plugin_poweraction _plugin_xenhost_host_shutdown = _plugin_poweraction def _plugin_xenhost_set_host_enabled(self, method, args): enabled = 'enabled' if args.get('enabled') == 'true' else 'disabled' return jsonutils.dumps({"status": enabled}) def _plugin_xenhost_host_uptime(self, method, args): return jsonutils.dumps({"uptime": "fake uptime"}) def _plugin_console_get_console_log(self, method, args): dom_id = args["dom_id"] if dom_id == 0: raise Failure('Guest does not have a console') return base64.b64encode(zlib.compress("dom_id: %s" % dom_id)) def _plugin_nova_plugin_version_get_version(self, method, args): return pickle.dumps("1.1") def _plugin_xenhost_query_gc(self, method, args): return pickle.dumps("False") def host_call_plugin(self, _1, _2, plugin, method, args): func = getattr(self, '_plugin_%s_%s' % (plugin, method), None) if not func: raise Exception('No simulation in host_call_plugin for %s,%s' % (plugin, method)) return func(method, args) def VDI_get_virtual_size(self, *args): return 1 * units.Gi def VDI_resize_online(self, *args): return 'derp' VDI_resize = VDI_resize_online def _VM_reboot(self, session, vm_ref): db_ref = _db_content['VM'][vm_ref] if db_ref['power_state'] != 'Running': raise Failure(['VM_BAD_POWER_STATE', 'fake-opaque-ref', db_ref['power_state'].lower(), 'halted']) db_ref['power_state'] = 'Running' db_ref['domid'] = random.randrange(1, 1 << 16) def 
VM_clean_reboot(self, session, vm_ref): return self._VM_reboot(session, vm_ref) def VM_hard_reboot(self, session, vm_ref): return self._VM_reboot(session, vm_ref) def VM_hard_shutdown(self, session, vm_ref): db_ref = _db_content['VM'][vm_ref] db_ref['power_state'] = 'Halted' db_ref['domid'] = -1 VM_clean_shutdown = VM_hard_shutdown def VM_suspend(self, session, vm_ref): db_ref = _db_content['VM'][vm_ref] db_ref['power_state'] = 'Suspended' def VM_pause(self, session, vm_ref): db_ref = _db_content['VM'][vm_ref] db_ref['power_state'] = 'Paused' def pool_eject(self, session, host_ref): pass def pool_join(self, session, hostname, username, password): pass def pool_set_name_label(self, session, pool_ref, name): pass def host_migrate_receive(self, session, destref, nwref, options): return "fake_migrate_data" def VM_assert_can_migrate(self, session, vmref, migrate_data, live, vdi_map, vif_map, options): pass def VM_migrate_send(self, session, mref, migrate_data, live, vdi_map, vif_map, options): pass def VM_remove_from_blocked_operations(self, session, vm_ref, key): # operation is idempotent, XenServer doesn't care if the key exists _db_content['VM'][vm_ref]['blocked_operations'].pop(key, None) def xenapi_request(self, methodname, params): if methodname.startswith('login'): self._login(methodname, params) return None elif methodname == 'logout' or methodname == 'session.logout': self._logout() return None else: full_params = (self._session,) + params meth = getattr(self, methodname, None) if meth is None: LOG.debug(_('Raising NotImplemented')) raise NotImplementedError( _('xenapi.fake does not have an implementation for %s') % methodname) return meth(*full_params) def _login(self, method, params): self._session = str(uuid.uuid4()) _session_info = {'uuid': str(uuid.uuid4()), 'this_host': _db_content['host'].keys()[0]} _db_content['session'][self._session] = _session_info def _logout(self): s = self._session self._session = None if s not in _db_content['session']: raise 
exception.NovaException( "Logging out a session that is invalid or already logged " "out: %s" % s) del _db_content['session'][s] def __getattr__(self, name): if name == 'handle': return self._session elif name == 'xenapi': return _Dispatcher(self.xenapi_request, None) elif name.startswith('login') or name.startswith('slave_local'): return lambda *params: self._login(name, params) elif name.startswith('Async'): return lambda *params: self._async(name, params) elif '.' in name: impl = getattr(self, name.replace('.', '_')) if impl is not None: def callit(*params): LOG.debug(_('Calling %(name)s %(impl)s'), {'name': name, 'impl': impl}) self._check_session(params) return impl(*params) return callit if self._is_gettersetter(name, True): LOG.debug(_('Calling getter %s'), name) return lambda *params: self._getter(name, params) elif self._is_gettersetter(name, False): LOG.debug(_('Calling setter %s'), name) return lambda *params: self._setter(name, params) elif self._is_create(name): return lambda *params: self._create(name, params) elif self._is_destroy(name): return lambda *params: self._destroy(name, params) elif name == 'XenAPI': return FakeXenAPI() else: return None def _is_gettersetter(self, name, getter): bits = name.split('.') return (len(bits) == 2 and bits[0] in _CLASSES and bits[1].startswith(getter and 'get_' or 'set_')) def _is_create(self, name): return self._is_method(name, 'create') def _is_destroy(self, name): return self._is_method(name, 'destroy') def _is_method(self, name, meth): bits = name.split('.') return (len(bits) == 2 and bits[0] in _CLASSES and bits[1] == meth) def _getter(self, name, params): self._check_session(params) (cls, func) = name.split('.') if func == 'get_all': self._check_arg_count(params, 1) return get_all(cls) if func == 'get_all_records': self._check_arg_count(params, 1) return get_all_records(cls) if func == 'get_all_records_where': self._check_arg_count(params, 2) return get_all_records_where(cls, params[1]) if func == 
'get_record': self._check_arg_count(params, 2) return get_record(cls, params[1]) if func in ('get_by_name_label', 'get_by_uuid'): self._check_arg_count(params, 2) return_singleton = (func == 'get_by_uuid') return self._get_by_field( _db_content[cls], func[len('get_by_'):], params[1], return_singleton=return_singleton) if len(params) == 2: field = func[len('get_'):] ref = params[1] if (ref in _db_content[cls]): if (field in _db_content[cls][ref]): return _db_content[cls][ref][field] else: raise Failure(['HANDLE_INVALID', cls, ref]) LOG.debug(_('Raising NotImplemented')) raise NotImplementedError( _('xenapi.fake does not have an implementation for %s or it has ' 'been called with the wrong number of arguments') % name) def _setter(self, name, params): self._check_session(params) (cls, func) = name.split('.') if len(params) == 3: field = func[len('set_'):] ref = params[1] val = params[2] if (ref in _db_content[cls] and field in _db_content[cls][ref]): _db_content[cls][ref][field] = val return LOG.debug(_('Raising NotImplemented')) raise NotImplementedError( 'xenapi.fake does not have an implementation for %s or it has ' 'been called with the wrong number of arguments or the database ' 'is missing that field' % name) def _create(self, name, params): self._check_session(params) is_sr_create = name == 'SR.create' is_vlan_create = name == 'VLAN.create' # Storage Repositories have a different API expected = is_sr_create and 10 or is_vlan_create and 4 or 2 self._check_arg_count(params, expected) (cls, _) = name.split('.') ref = (is_sr_create and _create_sr(cls, params) or is_vlan_create and _create_vlan(params[1], params[2], params[3]) or _create_object(cls, params[1])) # Call hook to provide any fixups needed (ex. 
creating backrefs) after_hook = 'after_%s_create' % cls if after_hook in globals(): globals()[after_hook](ref, params[1]) obj = get_record(cls, ref) # Add RO fields if cls == 'VM': obj['power_state'] = 'Halted' return ref def _destroy(self, name, params): self._check_session(params) self._check_arg_count(params, 2) table = name.split('.')[0] ref = params[1] if ref not in _db_content[table]: raise Failure(['HANDLE_INVALID', table, ref]) # Call destroy function (if exists) destroy_func = globals().get('destroy_%s' % table.lower()) if destroy_func: destroy_func(ref) else: del _db_content[table][ref] def _async(self, name, params): task_ref = create_task(name) task = _db_content['task'][task_ref] func = name[len('Async.'):] try: result = self.xenapi_request(func, params[1:]) if result: result = as_value(result) task['result'] = result task['status'] = 'success' except Failure as exc: task['error_info'] = exc.details task['status'] = 'failed' task['finished'] = timeutils.utcnow() return task_ref def _check_session(self, params): if (self._session is None or self._session not in _db_content['session']): raise Failure(['HANDLE_INVALID', 'session', self._session]) if len(params) == 0 or params[0] != self._session: LOG.debug(_('Raising NotImplemented')) raise NotImplementedError('Call to XenAPI without using .xenapi') def _check_arg_count(self, params, expected): actual = len(params) if actual != expected: raise Failure(['MESSAGE_PARAMETER_COUNT_MISMATCH', expected, actual]) def _get_by_field(self, recs, k, v, return_singleton): result = [] for ref, rec in recs.iteritems(): if rec.get(k) == v: result.append(ref) if return_singleton: try: return result[0] except IndexError: raise Failure(['UUID_INVALID', v, result, recs, k]) return result class FakeXenAPI(object): def __init__(self): self.Failure = Failure # Based upon _Method from xmlrpclib. 
class _Dispatcher: def __init__(self, send, name): self.__send = send self.__name = name def __repr__(self): if self.__name: return '<xenapi.fake._Dispatcher for %s>' % self.__name else: return '<xenapi.fake._Dispatcher>' def __getattr__(self, name): if self.__name is None: return _Dispatcher(self.__send, name) else: return _Dispatcher(self.__send, "%s.%s" % (self.__name, name)) def __call__(self, *args): return self.__send(self.__name, args)
apache-2.0
xixixao/sublime-javax
javax.py
1
10112
''' Various code generation for Java. See README.md for details.

@author: xixixao <xixixao@seznam.cz>
@license: MIT (http://www.opensource.org/licenses/mit-license.php)
@since: 2014-05-03
'''
import sublime
import sublime_plugin
import re
from collections import namedtuple

# Note: klass == class (naming avoids shadowing the Python keyword)

# Custom data structures
Klass = namedtuple('Klass', 'accessor name')   # a Java class: accessor keyword + name
Field = namedtuple('Field', 'type name')       # a Java field: type + name
SubCommand = namedtuple('SubCommand', 'caption command')  # one palette entry


# Public: Proxy command for Generate<X> subcommands, opens a new command palette
class JavaxGenerateCommand(sublime_plugin.TextCommand):
  def run(self, edit):
    view = self.view
    window = view.window()
    subCommands = self.__class__.subCommands
    showQuickPanelForSubCommands(window, view, subCommands)

  # Stores subcommands registered via JavaxGenerateCommand.add below
  subCommands = []

  @classmethod
  def add(self, subCommand, caption):
    # NOTE(review): first parameter of a classmethod is conventionally `cls`
    self.subCommands.append(SubCommand(caption, subCommand))


# Private: show a quick panel listing the captions of all registered subcommands
def showQuickPanelForSubCommands(window, view, subCommands):
  captions = [command.caption for command in subCommands]
  window.show_quick_panel(captions, runSubCommand(view, subCommands))


# Private: returns the quick-panel callback; `index` is -1 when the panel is
# cancelled, otherwise the position of the chosen caption
def runSubCommand(view, subCommands):
  def onSelected(index):
    if index != -1:
      view.run_command(subCommands[index].command)
  return onSelected


# Public: Generates getters for selected fields
class JavaxGenerateGettersCommand(sublime_plugin.TextCommand):
  def run(self, edit):
    view = self.view
    selections = view.sel()
    instanceFields = getSelectedFields(view, selections)
    getters = gettersDeclaration(instanceFields)
    insertAtLastSelection(getters, view, edit, selections)

JavaxGenerateCommand.add('javax_generate_getters', 'Getters')


# Public: Generates setters for selected fields
class JavaxGenerateSettersCommand(sublime_plugin.TextCommand):
  def run(self, edit):
    view = self.view
    selections = view.sel()
    instanceFields = getSelectedFields(view, selections)
    setters = settersDeclaration(instanceFields)
    insertAtLastSelection(setters, view, edit, selections)

JavaxGenerateCommand.add('javax_generate_setters', 'Setters')


# Public: Generates a constructor for selected fields
class JavaxGenerateConstructorCommand(sublime_plugin.TextCommand):
  def run(self, edit):
    view = self.view
    selections = view.sel()
    klass = findFirstKlass(view)
    instanceFields = getSelectedFields(view, selections)
    constructor = constructorDeclaration(klass, instanceFields)
    insertAtLastSelection(constructor, view, edit, selections)

JavaxGenerateCommand.add('javax_generate_constructor', 'Constructor')


# Public: Generates an inner class Builder for current top-scope class
#
# This is the well-known Builder Design Pattern, having a setter for each
# instance field of outer class.
class JavaxGenerateBuilderCommand(sublime_plugin.TextCommand):
  def run(self, edit):
    view = self.view
    selections = view.sel()
    klass = findFirstKlass(view)
    instanceFields = getSelectedFields(view, selections)
    builder = builderDeclaration(klass, instanceFields)
    insertAtLastSelection(builder, view, edit, selections)

JavaxGenerateCommand.add('javax_generate_builder', 'Builder')


# Private: finds the first declared class in the view
#
# TODO: find the enclosing class of selected fields instead
def findFirstKlass(view):
  fileContent = view.substr(sublime.Region(0, view.size()))
  return getKlass(fileContent)


# Private: returns the top level class with name and accessor
def getKlass(text):
  pattern = r"""
    (^|\n)
    ((?P<accessor>\w+)\s+)?
    class\s+
    (?P<name>\w+)
  """
  found = Klass(**re.search(pattern, text, re.VERBOSE).groupdict())
  # the accessor group is absent for package-private classes -> default to ''
  return Klass(found.accessor or '', found.name)


# Private: returns a list of Fields among the view's selections
def getSelectedFields(view, selections):
  selectedText = getAllSelectedText(view, selections)
  return fieldsIn(selectedText)


# Private: returns all the selections concatenated with a newline
#
# Newline is used to make sure that fields in different selection Regions
# are correctly indentified.
def getAllSelectedText(view, selections):
  return '\n'.join([view.substr(selection) for selection in selections])


# Private: returns a list of Fields with types and names
def fieldsIn(text):
  pattern = r"""
    ^\s*  # from start of the line, with potential indent
    %(accessor)s
    ((transient|volatile)\s+)?
    (final\s+)?
    (?P<type>[\w$\<\>\,\.\s]+)\s+
    (?P<name>[\w$]+)
    \s*(;|=)
  """ % dict(accessor = ACCESSOR_REGEXP)
  flags = re.MULTILINE | re.VERBOSE
  return [Field(**m.groupdict()) for m in re.finditer(pattern, text, flags)]


# Private: insert given content into the view
#
# The content is formatted and placed after the last selection, selected and
# centered in the view.
def insertAtLastSelection(content, view, edit, selections):
  lastSelection = findEndOfLastSelection(view, selections)
  indentSize = inferIndentSize(view.settings())
  generatedCode = content
  contentSize = view.insert(edit, lastSelection,
      formatJava(indentSize, 1, generatedCode))
  view.show_at_center(lastSelection)
  # selections.clear()
  # selections.add(sublime.Region(lastSelection, lastSelection + contentSize))


# Private: find the position after last newline selected
def findEndOfLastSelection(view, selections):
  return view.lines(selections[-1])[-1].end() + 1;


# Private: infer the indentation step size used in the file, default to 2 spaces
# We look at the tab_size setting
def inferIndentSize(settings):
  return settings.get('tab_size', 2)


# Private: Java source of a private constructor assigning every given field
def constructorDeclaration(klass, fields):
  return """\
private %(klassName)s(%(arguments)s) {
%(assignments)s
}
""" % dict(
    klassName = klass.name,
    arguments = ', '.join(map(variableDeclaration, fields)),
    assignments = '\n'.join(map(assignment, fields))
  )


# Private: the whole Builder class
def builderDeclaration(klass, fields):
  return """\
%(accessor)s static class Builder {
%(builderFields)s
%(setters)s
%(accessor)s %(klassName)s build() {
return new %(klassName)s(%(fieldNames)s);
}
}
""" % dict(
    accessor = klass.accessor,
    builderFields = '\n'.join(map(fieldDeclaration, fields)),
    setters = '\n'.join(map(builderSetterDeclaration(klass.accessor), fields)),
    klassName = klass.name,
    fieldNames = ', '.join([field.name for field in fields])
  )


# Private: return a declaration of a Field with private accessor
def fieldDeclaration(field):
  return "private %(variable)s;" % dict(variable = variableDeclaration(field))


# Private: given an accessor, return a function which given a Field
# will return a setter declaration with that accessor
def builderSetterDeclaration(accessor):
  def fn(field):
    return """\
%(accessor)s Builder set%(capitalizedName)s(%(parameter)s) {
%(assignment)s
return this;
}
""" % dict(
      accessor = accessor,
      capitalizedName = capitalize(field.name),
      assignment = assignment(field),
      parameter = variableDeclaration(field)
    )
  return fn


# Private: public setter declarations for all given fields
def settersDeclaration(fields):
  return '\n'.join(map(setterDeclaration('public'), fields))


# Private: given an accessor, return a function which given a Field
# will return a setter declaration with that accessor
def setterDeclaration(accessor):
  def fn(field):
    return """\
%(accessor)s void set%(capitalizedName)s(%(parameter)s) {
%(assignment)s
}
""" % dict(
      accessor = accessor,
      capitalizedName = capitalize(field.name),
      assignment = assignment(field),
      parameter = variableDeclaration(field)
    )
  return fn


# Private: public getter declarations for all given fields
def gettersDeclaration(fields):
  return '\n'.join(map(getterDeclaration('public'), fields))


# Private: given an accessor, return a function which given a Field
# will return a getter declaration with that accessor
def getterDeclaration(accessor):
  def fn(field):
    return """\
%(accessor)s %(type)s get%(capitalizedName)s() {
return %(name)s;
}
""" % dict(
      accessor = accessor,
      capitalizedName = capitalize(field.name),
      name = field.name,
      type = field.type
    )
  return fn


# Private: because Python sucks
# NOTE(review): parameter shadows the builtin `str`; kept for byte-compatibility
def capitalize(str):
  return str[:1].upper() + str[1:]


# Private: return an assignment of a Field with a variable of the same
# name
def assignment(field):
  return "this.%(name)s = %(name)s;" % field._asdict()


# Private: return the simple type name pair used in various declarations of
# variables
def variableDeclaration(field):
  return "%(type)s %(name)s" % field._asdict()


# Private: reformats the code to have proper indentation
# Splits on "{\n", "}\n", ";\n" and bare "\n" delimiters, tracking brace depth
# so each emitted line is prefixed with the right amount of indentation.
def formatJava(indentSize, initialIndent, code):
  indentLevel = initialIndent
  tokens = re.split(r'([\{\};]?\n)', stripIndentation(code))
  indentedTokens = []
  for token in tokens:
    if len(token) > 0:
      if token == '{\n':
        indentLevel += 1
      elif token == '}\n':
        indentLevel -= 1
      if token not in ('{\n', ';\n', '\n'):
        indentedTokens.append(indentationToken(indentLevel, indentSize))
      indentedTokens.append(token)
  return ''.join(indentedTokens)


# Private: first get rid of the bad indentation
def stripIndentation(code):
  return re.sub('(^|\n) *', '\n', code)


# Private: returns just enough spaces
def indentationToken(indentLevel, indentSize):
  return ''.join(' ' * indentLevel * indentSize)


# Private: any accessor, including package-private (interpolated into
# re.VERBOSE patterns, so whitespace inside is ignored by the regex engine)
ACCESSOR_REGEXP = r"""
  (
    ( private
    | protected
    | public
    )\s+
  )?
"""
mit
TiVoMaker/boto
tests/mturk/cleanup_tests.py
136
1463
import itertools from _init_environment import SetHostMTurkConnection from _init_environment import config_environment def description_filter(substring): return lambda hit: substring in hit.Title def disable_hit(hit): return conn.disable_hit(hit.HITId) def dispose_hit(hit): # assignments must be first approved or rejected for assignment in conn.get_assignments(hit.HITId): if assignment.AssignmentStatus == 'Submitted': conn.approve_assignment(assignment.AssignmentId) return conn.dispose_hit(hit.HITId) def cleanup(): """Remove any boto test related HIT's""" config_environment() global conn conn = SetHostMTurkConnection() is_boto = description_filter('Boto') print 'getting hits...' all_hits = list(conn.get_all_hits()) is_reviewable = lambda hit: hit.HITStatus == 'Reviewable' is_not_reviewable = lambda hit: not is_reviewable(hit) hits_to_process = filter(is_boto, all_hits) hits_to_disable = filter(is_not_reviewable, hits_to_process) hits_to_dispose = filter(is_reviewable, hits_to_process) print 'disabling/disposing %d/%d hits' % (len(hits_to_disable), len(hits_to_dispose)) map(disable_hit, hits_to_disable) map(dispose_hit, hits_to_dispose) total_hits = len(all_hits) hits_processed = len(hits_to_process) skipped = total_hits - hits_processed fmt = 'Processed: %(total_hits)d HITs, disabled/disposed: %(hits_processed)d, skipped: %(skipped)d' print fmt % vars() if __name__ == '__main__': cleanup()
mit
cyc805/FTRerouting
.waf-1.7.11-edc6ccb516c5e3f9b892efc9f53a610f/waflib/Tools/ccroot.py
70
12917
#! /usr/bin/env python # encoding: utf-8 # WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file import os,re from waflib import Task,Utils,Node,Errors from waflib.TaskGen import after_method,before_method,feature,taskgen_method,extension from waflib.Tools import c_aliases,c_preproc,c_config,c_osx,c_tests from waflib.Configure import conf SYSTEM_LIB_PATHS=['/usr/lib64','/usr/lib','/usr/local/lib64','/usr/local/lib'] USELIB_VARS=Utils.defaultdict(set) USELIB_VARS['c']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CCDEPS','CFLAGS','ARCH']) USELIB_VARS['cxx']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CXXDEPS','CXXFLAGS','ARCH']) USELIB_VARS['d']=set(['INCLUDES','DFLAGS']) USELIB_VARS['includes']=set(['INCLUDES','FRAMEWORKPATH','ARCH']) USELIB_VARS['cprogram']=USELIB_VARS['cxxprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH']) USELIB_VARS['cshlib']=USELIB_VARS['cxxshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH']) USELIB_VARS['cstlib']=USELIB_VARS['cxxstlib']=set(['ARFLAGS','LINKDEPS']) USELIB_VARS['dprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) USELIB_VARS['dshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) USELIB_VARS['dstlib']=set(['ARFLAGS','LINKDEPS']) USELIB_VARS['asm']=set(['ASFLAGS']) @taskgen_method def create_compiled_task(self,name,node): out='%s.%d.o'%(node.name,self.idx) task=self.create_task(name,node,node.parent.find_or_declare(out)) try: self.compiled_tasks.append(task) except AttributeError: self.compiled_tasks=[task] return task @taskgen_method def to_incnodes(self,inlst): lst=[] seen=set([]) for x in self.to_list(inlst): if x in seen or not x: continue seen.add(x) if isinstance(x,Node.Node): lst.append(x) else: if os.path.isabs(x): lst.append(self.bld.root.make_node(x)or x) else: if 
x[0]=='#': p=self.bld.bldnode.make_node(x[1:]) v=self.bld.srcnode.make_node(x[1:]) else: p=self.path.get_bld().make_node(x) v=self.path.make_node(x) if p.is_child_of(self.bld.bldnode): p.mkdir() lst.append(p) lst.append(v) return lst @feature('c','cxx','d','asm','fc','includes') @after_method('propagate_uselib_vars','process_source') def apply_incpaths(self): lst=self.to_incnodes(self.to_list(getattr(self,'includes',[]))+self.env['INCLUDES']) self.includes_nodes=lst self.env['INCPATHS']=[x.abspath()for x in lst] class link_task(Task.Task): color='YELLOW' inst_to=None chmod=Utils.O755 def add_target(self,target): if isinstance(target,str): pattern=self.env[self.__class__.__name__+'_PATTERN'] if not pattern: pattern='%s' folder,name=os.path.split(target) if self.__class__.__name__.find('shlib')>0: if self.env.DEST_BINFMT=='pe'and getattr(self.generator,'vnum',None): name=name+'-'+self.generator.vnum.split('.')[0] tmp=folder+os.sep+pattern%name target=self.generator.path.find_or_declare(tmp) self.set_outputs(target) class stlink_task(link_task): run_str='${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}' def rm_tgt(cls): old=cls.run def wrap(self): try:os.remove(self.outputs[0].abspath()) except OSError:pass return old(self) setattr(cls,'run',wrap) rm_tgt(stlink_task) @feature('c','cxx','d','fc','asm') @after_method('process_source') def apply_link(self): for x in self.features: if x=='cprogram'and'cxx'in self.features: x='cxxprogram' elif x=='cshlib'and'cxx'in self.features: x='cxxshlib' if x in Task.classes: if issubclass(Task.classes[x],link_task): link=x break else: return objs=[t.outputs[0]for t in getattr(self,'compiled_tasks',[])] self.link_task=self.create_task(link,objs) self.link_task.add_target(self.target) try: inst_to=self.install_path except AttributeError: inst_to=self.link_task.__class__.inst_to if inst_to: self.install_task=self.bld.install_files(inst_to,self.link_task.outputs[:],env=self.env,chmod=self.link_task.chmod) @taskgen_method def 
use_rec(self,name,**kw): if name in self.tmp_use_not or name in self.tmp_use_seen: return try: y=self.bld.get_tgen_by_name(name) except Errors.WafError: self.uselib.append(name) self.tmp_use_not.add(name) return self.tmp_use_seen.append(name) y.post() y.tmp_use_objects=objects=kw.get('objects',True) y.tmp_use_stlib=stlib=kw.get('stlib',True) try: link_task=y.link_task except AttributeError: y.tmp_use_var='' else: objects=False if not isinstance(link_task,stlink_task): stlib=False y.tmp_use_var='LIB' else: y.tmp_use_var='STLIB' p=self.tmp_use_prec for x in self.to_list(getattr(y,'use',[])): try: p[x].append(name) except KeyError: p[x]=[name] self.use_rec(x,objects=objects,stlib=stlib) @feature('c','cxx','d','use','fc') @before_method('apply_incpaths','propagate_uselib_vars') @after_method('apply_link','process_source') def process_use(self): use_not=self.tmp_use_not=set([]) self.tmp_use_seen=[] use_prec=self.tmp_use_prec={} self.uselib=self.to_list(getattr(self,'uselib',[])) self.includes=self.to_list(getattr(self,'includes',[])) names=self.to_list(getattr(self,'use',[])) for x in names: self.use_rec(x) for x in use_not: if x in use_prec: del use_prec[x] out=[] tmp=[] for x in self.tmp_use_seen: for k in use_prec.values(): if x in k: break else: tmp.append(x) while tmp: e=tmp.pop() out.append(e) try: nlst=use_prec[e] except KeyError: pass else: del use_prec[e] for x in nlst: for y in use_prec: if x in use_prec[y]: break else: tmp.append(x) if use_prec: raise Errors.WafError('Cycle detected in the use processing %r'%use_prec) out.reverse() link_task=getattr(self,'link_task',None) for x in out: y=self.bld.get_tgen_by_name(x) var=y.tmp_use_var if var and link_task: if var=='LIB'or y.tmp_use_stlib: self.env.append_value(var,[y.target[y.target.rfind(os.sep)+1:]]) self.link_task.dep_nodes.extend(y.link_task.outputs) tmp_path=y.link_task.outputs[0].parent.path_from(self.bld.bldnode) self.env.append_value(var+'PATH',[tmp_path]) else: if y.tmp_use_objects: 
self.add_objects_from_tgen(y) if getattr(y,'export_includes',None): self.includes.extend(y.to_incnodes(y.export_includes)) if getattr(y,'export_defines',None): self.env.append_value('DEFINES',self.to_list(y.export_defines)) for x in names: try: y=self.bld.get_tgen_by_name(x) except Exception: if not self.env['STLIB_'+x]and not x in self.uselib: self.uselib.append(x) else: for k in self.to_list(getattr(y,'uselib',[])): if not self.env['STLIB_'+k]and not k in self.uselib: self.uselib.append(k) @taskgen_method def accept_node_to_link(self,node): return not node.name.endswith('.pdb') @taskgen_method def add_objects_from_tgen(self,tg): try: link_task=self.link_task except AttributeError: pass else: for tsk in getattr(tg,'compiled_tasks',[]): for x in tsk.outputs: if self.accept_node_to_link(x): link_task.inputs.append(x) @taskgen_method def get_uselib_vars(self): _vars=set([]) for x in self.features: if x in USELIB_VARS: _vars|=USELIB_VARS[x] return _vars @feature('c','cxx','d','fc','javac','cs','uselib','asm') @after_method('process_use') def propagate_uselib_vars(self): _vars=self.get_uselib_vars() env=self.env for x in _vars: y=x.lower() env.append_unique(x,self.to_list(getattr(self,y,[]))) for x in self.features: for var in _vars: compvar='%s_%s'%(var,x) env.append_value(var,env[compvar]) for x in self.to_list(getattr(self,'uselib',[])): for v in _vars: env.append_value(v,env[v+'_'+x]) @feature('cshlib','cxxshlib','fcshlib') @after_method('apply_link') def apply_implib(self): if not self.env.DEST_BINFMT=='pe': return dll=self.link_task.outputs[0] if isinstance(self.target,Node.Node): name=self.target.name else: name=os.path.split(self.target)[1] implib=self.env['implib_PATTERN']%name implib=dll.parent.find_or_declare(implib) self.env.append_value('LINKFLAGS',self.env['IMPLIB_ST']%implib.bldpath()) self.link_task.outputs.append(implib) if getattr(self,'defs',None)and self.env.DEST_BINFMT=='pe': node=self.path.find_resource(self.defs) if not node: raise 
Errors.WafError('invalid def file %r'%self.defs) if'msvc'in(self.env.CC_NAME,self.env.CXX_NAME): self.env.append_value('LINKFLAGS','/def:%s'%node.path_from(self.bld.bldnode)) self.link_task.dep_nodes.append(node) else: self.link_task.inputs.append(node) try: inst_to=self.install_path except AttributeError: inst_to=self.link_task.__class__.inst_to if not inst_to: return self.implib_install_task=self.bld.install_as('${LIBDIR}/%s'%implib.name,implib,self.env) re_vnum=re.compile('^([1-9]\\d*|0)[.]([1-9]\\d*|0)[.]([1-9]\\d*|0)$') @feature('cshlib','cxxshlib','dshlib','fcshlib','vnum') @after_method('apply_link','propagate_uselib_vars') def apply_vnum(self): if not getattr(self,'vnum','')or os.name!='posix'or self.env.DEST_BINFMT not in('elf','mac-o'): return link=self.link_task if not re_vnum.match(self.vnum): raise Errors.WafError('Invalid version %r for %r'%(self.vnum,self)) nums=self.vnum.split('.') node=link.outputs[0] libname=node.name if libname.endswith('.dylib'): name3=libname.replace('.dylib','.%s.dylib'%self.vnum) name2=libname.replace('.dylib','.%s.dylib'%nums[0]) else: name3=libname+'.'+self.vnum name2=libname+'.'+nums[0] if self.env.SONAME_ST: v=self.env.SONAME_ST%name2 self.env.append_value('LINKFLAGS',v.split()) self.create_task('vnum',node,[node.parent.find_or_declare(name2),node.parent.find_or_declare(name3)]) if getattr(self,'install_task',None): self.install_task.hasrun=Task.SKIP_ME bld=self.bld path=self.install_task.dest t1=bld.install_as(path+os.sep+name3,node,env=self.env,chmod=self.link_task.chmod) t2=bld.symlink_as(path+os.sep+name2,name3) t3=bld.symlink_as(path+os.sep+libname,name3) self.vnum_install_task=(t1,t2,t3) if'-dynamiclib'in self.env['LINKFLAGS']: try: inst_to=self.install_path except AttributeError: inst_to=self.link_task.__class__.inst_to if inst_to: p=Utils.subst_vars(inst_to,self.env) path=os.path.join(p,self.link_task.outputs[0].name) self.env.append_value('LINKFLAGS',['-install_name',path]) class vnum(Task.Task): color='CYAN' 
quient=True ext_in=['.bin'] def run(self): for x in self.outputs: path=x.abspath() try: os.remove(path) except OSError: pass try: os.symlink(self.inputs[0].name,path) except OSError: return 1 class fake_shlib(link_task): def runnable_status(self): for t in self.run_after: if not t.hasrun: return Task.ASK_LATER for x in self.outputs: x.sig=Utils.h_file(x.abspath()) return Task.SKIP_ME class fake_stlib(stlink_task): def runnable_status(self): for t in self.run_after: if not t.hasrun: return Task.ASK_LATER for x in self.outputs: x.sig=Utils.h_file(x.abspath()) return Task.SKIP_ME @conf def read_shlib(self,name,paths=[],export_includes=[],export_defines=[]): return self(name=name,features='fake_lib',lib_paths=paths,lib_type='shlib',export_includes=export_includes,export_defines=export_defines) @conf def read_stlib(self,name,paths=[],export_includes=[],export_defines=[]): return self(name=name,features='fake_lib',lib_paths=paths,lib_type='stlib',export_includes=export_includes,export_defines=export_defines) lib_patterns={'shlib':['lib%s.so','%s.so','lib%s.dylib','lib%s.dll','%s.dll'],'stlib':['lib%s.a','%s.a','lib%s.dll','%s.dll','lib%s.lib','%s.lib'],} @feature('fake_lib') def process_lib(self): node=None names=[x%self.name for x in lib_patterns[self.lib_type]] for x in self.lib_paths+[self.path]+SYSTEM_LIB_PATHS: if not isinstance(x,Node.Node): x=self.bld.root.find_node(x)or self.path.find_node(x) if not x: continue for y in names: node=x.find_node(y) if node: node.sig=Utils.h_file(node.abspath()) break else: continue break else: raise Errors.WafError('could not find library %r'%self.name) self.link_task=self.create_task('fake_%s'%self.lib_type,[],[node]) self.target=self.name class fake_o(Task.Task): def runnable_status(self): return Task.SKIP_ME @extension('.o','.obj') def add_those_o_files(self,node): tsk=self.create_task('fake_o',[],node) try: self.compiled_tasks.append(tsk) except AttributeError: self.compiled_tasks=[tsk] @feature('fake_obj') 
@before_method('process_source')
def process_objs(self):
    # Consume the task generator's source list, turning each node into a
    # fake_o task via add_those_o_files (defined earlier in this file) so
    # pre-built object files can be fed straight to the link step.
    for node in self.to_nodes(self.source):
        self.add_those_o_files(node)
    # Sources are fully consumed here; prevent normal source processing.
    self.source = []


@conf
def read_object(self, obj):
    # Wrap a single object file in a task generator with the 'fake_obj'
    # feature. *obj* may be a node or a path string; strings are resolved
    # relative to the current path.
    if not isinstance(obj, self.path.__class__):
        obj = self.path.find_resource(obj)
    return self(features='fake_obj', source=obj, name=obj.name)
gpl-2.0
Hons/troposphere
tests/test_rds.py
4
1301
import unittest

import troposphere.rds as rds


class TestRDS(unittest.TestCase):
    """Tests for the credential/snapshot validation of ``rds.DBInstance``."""

    def test_it_allows_an_rds_instance_created_from_a_snapshot(self):
        """An instance restored from a snapshot needs no master credentials."""
        rds_instance = rds.DBInstance(
            'SomeTitle',
            AllocatedStorage=1,
            DBInstanceClass='db.m1.small',
            Engine='MySQL',
            DBSnapshotIdentifier='SomeSnapshotIdentifier'
        )
        # JSONrepr() runs the resource's validation.
        rds_instance.JSONrepr()

    def test_it_allows_an_rds_instance_with_master_username_and_password(self):
        """An instance with both master credentials validates cleanly."""
        rds_instance = rds.DBInstance(
            'SomeTitle',
            AllocatedStorage=1,
            DBInstanceClass='db.m1.small',
            Engine='MySQL',
            MasterUsername='SomeUsername',
            MasterUserPassword='SomePassword'
        )
        rds_instance.JSONrepr()

    def test_it_rds_instances_require_either_a_snapshot_or_credentials(self):
        """Validation fails when neither a snapshot nor credentials are given."""
        rds_instance = rds.DBInstance(
            'SomeTitle',
            AllocatedStorage=1,
            DBInstanceClass='db.m1.small',
            Engine='MySQL'
        )
        # Raw strings keep the regex backslashes literal: '\(' in a plain
        # string literal is an invalid escape sequence on modern Python
        # (DeprecationWarning since 3.6, SyntaxWarning in 3.12).
        # NOTE(review): assertRaisesRegexp is a deprecated alias removed in
        # Python 3.12; switch to assertRaisesRegex once Python 2 support is
        # confirmed dropped for this test suite.
        with self.assertRaisesRegexp(
                ValueError,
                r'Either \(MasterUsername and MasterUserPassword\) or'
                r' DBSnapshotIdentifier are required'
                ):
            rds_instance.JSONrepr()
bsd-2-clause
tinloaf/home-assistant
homeassistant/components/device_tracker/bluetooth_le_tracker.py
1
3923
""" Tracking for bluetooth low energy devices. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/device_tracker.bluetooth_le_tracker/ """ import logging from homeassistant.helpers.event import track_point_in_utc_time from homeassistant.components.device_tracker import ( YAML_DEVICES, CONF_TRACK_NEW, CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL, load_config, SOURCE_TYPE_BLUETOOTH_LE ) import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) REQUIREMENTS = ['pygatt==3.2.0'] BLE_PREFIX = 'BLE_' MIN_SEEN_NEW = 5 def setup_scanner(hass, config, see, discovery_info=None): """Set up the Bluetooth LE Scanner.""" # pylint: disable=import-error import pygatt new_devices = {} def see_device(address, name, new_device=False): """Mark a device as seen.""" if new_device: if address in new_devices: _LOGGER.debug( "Seen %s %s times", address, new_devices[address]) new_devices[address] += 1 if new_devices[address] >= MIN_SEEN_NEW: _LOGGER.debug("Adding %s to tracked devices", address) devs_to_track.append(address) else: return else: _LOGGER.debug("Seen %s for the first time", address) new_devices[address] = 1 return if name is not None: name = name.strip("\x00") see(mac=BLE_PREFIX + address, host_name=name, source_type=SOURCE_TYPE_BLUETOOTH_LE) def discover_ble_devices(): """Discover Bluetooth LE devices.""" _LOGGER.debug("Discovering Bluetooth LE devices") try: adapter = pygatt.GATTToolBackend() devs = adapter.scan() devices = {x['address']: x['name'] for x in devs} _LOGGER.debug("Bluetooth LE devices discovered = %s", devices) except RuntimeError as error: _LOGGER.error("Error during Bluetooth LE scan: %s", error) return {} return devices yaml_path = hass.config.path(YAML_DEVICES) devs_to_track = [] devs_donot_track = [] # Load all known devices. 
# We just need the devices so set consider_home and home range # to 0 for device in load_config(yaml_path, hass, 0): # check if device is a valid bluetooth device if device.mac and device.mac[:4].upper() == BLE_PREFIX: if device.track: _LOGGER.debug("Adding %s to BLE tracker", device.mac) devs_to_track.append(device.mac[4:]) else: _LOGGER.debug("Adding %s to BLE do not track", device.mac) devs_donot_track.append(device.mac[4:]) # if track new devices is true discover new devices # on every scan. track_new = config.get(CONF_TRACK_NEW) if not devs_to_track and not track_new: _LOGGER.warning("No Bluetooth LE devices to track!") return False interval = config.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL) def update_ble(now): """Lookup Bluetooth LE devices and update status.""" devs = discover_ble_devices() for mac in devs_to_track: if mac not in devs: continue if devs[mac] is None: devs[mac] = mac see_device(mac, devs[mac]) if track_new: for address in devs: if address not in devs_to_track and \ address not in devs_donot_track: _LOGGER.info("Discovered Bluetooth LE device %s", address) see_device(address, devs[address], new_device=True) track_point_in_utc_time(hass, update_ble, dt_util.utcnow() + interval) update_ble(dt_util.utcnow()) return True
apache-2.0
cesargtz/YecoraOdoo
addons/point_of_sale/wizard/pos_details.py
225
2386
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

# NOTE(review): `time` appears unused in this module — verify before removing.
import time

from openerp.osv import osv, fields


class pos_details(osv.osv_memory):
    """Transient wizard collecting a date range (and optional salespeople)
    for the point-of-sale 'Sales Details' report."""

    _name = 'pos.details'
    _description = 'Sales Details'
    _columns = {
        'date_start': fields.date('Date Start', required=True),
        'date_end': fields.date('Date End', required=True),
        'user_ids': fields.many2many('res.users', 'pos_details_report_user_rel',
                                     'user_id', 'wizard_id', 'Salespeople'),
        }
    _defaults = {
        # Both dates default to "today" in the user's timezone.
        'date_start': fields.date.context_today,
        'date_end': fields.date.context_today,
        }

    def print_report(self, cr, uid, ids, context=None):
        """
        Collect the wizard values and launch the sales-details report.
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param context: A standard dictionary
        @return: report action dictionary
        """
        if context is None:
            context = {}
        datas = {'ids': context.get('active_ids', [])}
        # Read the wizard form values to pass along to the report engine.
        res = self.read(cr, uid, ids, ['date_start', 'date_end', 'user_ids'],
                        context=context)
        res = res and res[0] or {}
        datas['form'] = res
        if res.get('id', False):
            datas['ids'] = [res['id']]
        return self.pool['report'].get_action(
            cr, uid, [], 'point_of_sale.report_detailsofsales',
            data=datas, context=context)

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
kishpatel1998/HeatWave-CP8676_I02
tools/perf/scripts/python/syscall-counts-by-pid.py
11180
1927
# system call counts, by pid # (c) 2010, Tom Zanussi <tzanussi@gmail.com> # Licensed under the terms of the GNU GPL License version 2 # # Displays system-wide system call totals, broken down by syscall. # If a [comm] arg is specified, only syscalls called by [comm] are displayed. import os, sys sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from Core import * from Util import syscall_name usage = "perf script -s syscall-counts-by-pid.py [comm]\n"; for_comm = None for_pid = None if len(sys.argv) > 2: sys.exit(usage) if len(sys.argv) > 1: try: for_pid = int(sys.argv[1]) except: for_comm = sys.argv[1] syscalls = autodict() def trace_begin(): print "Press control+C to stop and show the summary" def trace_end(): print_syscall_totals() def raw_syscalls__sys_enter(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, id, args): if (for_comm and common_comm != for_comm) or \ (for_pid and common_pid != for_pid ): return try: syscalls[common_comm][common_pid][id] += 1 except TypeError: syscalls[common_comm][common_pid][id] = 1 def print_syscall_totals(): if for_comm is not None: print "\nsyscall events for %s:\n\n" % (for_comm), else: print "\nsyscall events by comm/pid:\n\n", print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"), print "%-40s %10s\n" % ("----------------------------------------", \ "----------"), comm_keys = syscalls.keys() for comm in comm_keys: pid_keys = syscalls[comm].keys() for pid in pid_keys: print "\n%s [%d]\n" % (comm, pid), id_keys = syscalls[comm][pid].keys() for id, val in sorted(syscalls[comm][pid].iteritems(), \ key = lambda(k, v): (v, k), reverse = True): print " %-38s %10d\n" % (syscall_name(id), val),
gpl-2.0
markgw/pimlico
src/python/pimlico/old_datatypes/modules/features/term_feature_compiler/execute.py
1
1232
# This file is part of Pimlico
# Copyright (C) 2020 Mark Granroth-Wilding
# Licensed under the GNU LGPL v3.0 - https://www.gnu.org/licenses/lgpl-3.0.en.html
from builtins import next

from collections import Counter

from pimlico.core.modules.map.multiproc import multiprocessing_executor_factory


def process_document(worker, archive, filename, doc):
    """
    Turn each data point of *doc* into a ``(term, feature_counts)`` pair.

    A data point is a sequence of ``(key, value)`` pairs.  The value of the
    first key found in the module's ``term_keys`` option supplies the term;
    every other pair contributes one feature.  Data points containing no
    term key are dropped.  When ``include_feature_keys`` is set, features
    are prefixed with their key as ``"key_value"``.
    """
    term_keys = worker.info.options["term_keys"]
    prefix_with_key = worker.info.options["include_feature_keys"]

    compiled = []
    for data_point in doc:
        # Find the term: the value of the first term key in this data point
        term_found = False
        for key, value in data_point:
            if key in term_keys:
                term = value
                term_found = True
                break
        if not term_found:
            # No term key present: skip this data point entirely
            continue

        # All remaining pairs become features, optionally key-prefixed
        if prefix_with_key:
            feature_list = ["%s_%s" % (key, value)
                            for (key, value) in data_point
                            if key not in term_keys]
        else:
            feature_list = [value for (key, value) in data_point
                            if key not in term_keys]

        # Tally how many times each feature occurs for this term
        feature_counts = {}
        for feature in feature_list:
            feature_counts[feature] = feature_counts.get(feature, 0) + 1

        compiled.append((term, feature_counts))
    return compiled


ModuleExecutor = multiprocessing_executor_factory(process_document)
gpl-3.0
ourbest/sns_app
backend/migrations/0092_weizhandownclick.py
1
1326
# Generated by Django 2.0 on 2018-08-14 09:55 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('backend', '0091_auto_20180814_1124'), ] operations = [ migrations.CreateModel( name='WeizhanDownClick', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('app_id', models.IntegerField(db_index=True)), ('item_id', models.BigIntegerField(db_index=True, default=0)), ('type', models.CharField(default='', max_length=30)), ('uid', models.BigIntegerField(default=0)), ('img', models.CharField(default='', max_length=255)), ('href', models.CharField(default='', max_length=255)), ('idx', models.IntegerField(default=0)), ('tid', models.IntegerField(default=0)), ('net', models.CharField(default='WIFI', max_length=10)), ('platform', models.CharField(default='android', max_length=20)), ('ip', models.CharField(max_length=20)), ('uuid', models.CharField(max_length=32)), ('ts', models.DateTimeField(db_index=True)), ], ), ]
lgpl-3.0
marcoitur/FreeCAD
src/Mod/Path/PathScripts/PostUtils.py
14
6640
#***************************************************************************
#*   (c) Yorik van Havre (yorik@uncreated.net) 2014                        *
#*                                                                         *
#*   This file is part of the FreeCAD CAx development system.              *
#*                                                                         *
#*   This program is free software; you can redistribute it and/or modify  *
#*   it under the terms of the GNU Lesser General Public License (LGPL)    *
#*   as published by the Free Software Foundation; either version 2 of     *
#*   the License, or (at your option) any later version.                   *
#*   for detail see the LICENCE text file.                                 *
#*                                                                         *
#*   FreeCAD is distributed in the hope that it will be useful,            *
#*   but WITHOUT ANY WARRANTY; without even the implied warranty of        *
#*   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the         *
#*   GNU Lesser General Public License for more details.                   *
#*                                                                         *
#*   You should have received a copy of the GNU Library General Public     *
#*   License along with FreeCAD; if not, write to the Free Software        *
#*   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  *
#*   USA                                                                   *
#*                                                                         *
#***************************************************************************/
'''
Common functions and classes for creating custom G-code post processors.
'''
from PySide import QtCore, QtGui
import FreeCAD
FreeCADGui = None
if FreeCAD.GuiUp:
    import FreeCADGui


class OldHighlighter(QtGui.QSyntaxHighlighter):
    # NOTE(review): this class looks like dead reference code.  It calls
    # `setFormat` without `self.` and passes a QRegExp to `str.index`,
    # neither of which can work — verify it is unused before relying on it.
    def highlightBlock(self, text):
        myClassFormat = QtGui.QTextCharFormat()
        myClassFormat.setFontWeight(QtGui.QFont.Bold)
        myClassFormat.setForeground(QtCore.Qt.green)
        # the regex pattern to be colored
        pattern = "(G.*?|M.*?)\\s"
        expression = QtCore.QRegExp(pattern)
        index = text.index(expression)
        while index >= 0:
            length = expression.matchedLength()
            setFormat(index, length, myClassFormat)
            index = text.index(expression, index + length)


class GCodeHighlighter(QtGui.QSyntaxHighlighter):
    """Syntax highlighter for G-code: bold G/M words and F (feed) words."""

    def __init__(self, parent=None):
        super(GCodeHighlighter, self).__init__(parent)

        # G- and M-codes: bold cyan.
        keywordFormat = QtGui.QTextCharFormat()
        keywordFormat.setForeground(QtCore.Qt.cyan)
        keywordFormat.setFontWeight(QtGui.QFont.Bold)
        keywordPatterns = ["\\bG[0-9]+\\b", "\\bM[0-9]+\\b"]
        self.highlightingRules = [(QtCore.QRegExp(pattern), keywordFormat)
                                  for pattern in keywordPatterns]
        # Feed-rate words (F...): bold green.
        speedFormat = QtGui.QTextCharFormat()
        speedFormat.setFontWeight(QtGui.QFont.Bold)
        speedFormat.setForeground(QtCore.Qt.green)
        self.highlightingRules.append(
            (QtCore.QRegExp("\\bF[0-9\\.]+\\b"), speedFormat))

    def highlightBlock(self, text):
        # Apply every rule to the block, formatting each non-overlapping match.
        for pattern, format in self.highlightingRules:
            expression = QtCore.QRegExp(pattern)
            index = expression.indexIn(text)
            while index >= 0:
                length = expression.matchedLength()
                self.setFormat(index, length, format)
                index = expression.indexIn(text, index + length)


class GCodeEditorDialog(QtGui.QDialog):
    """Simple modal dialog with a highlighted text editor for G-code,
    persisting its window geometry in FreeCAD's parameter store."""

    def __init__(self, parent=None):
        if parent is None:
            parent = FreeCADGui.getMainWindow()
        QtGui.QDialog.__init__(self, parent)

        layout = QtGui.QVBoxLayout(self)

        # nice text editor widget for editing the gcode
        self.editor = QtGui.QTextEdit()
        font = QtGui.QFont()
        font.setFamily("Courier")
        font.setFixedPitch(True)
        font.setPointSize(10)
        self.editor.setFont(font)
        self.editor.setText("G01 X55 Y4.5 F300.0")
        self.highlighter = GCodeHighlighter(self.editor.document())
        layout.addWidget(self.editor)

        # OK and Cancel buttons
        self.buttons = QtGui.QDialogButtonBox(
            QtGui.QDialogButtonBox.Ok | QtGui.QDialogButtonBox.Cancel,
            QtCore.Qt.Horizontal, self)
        layout.addWidget(self.buttons)

        # restore placement and size from the saved parameters (if any)
        self.paramKey = "User parameter:BaseApp/Values/Mod/Path/GCodeEditor/"
        params = FreeCAD.ParamGet(self.paramKey)
        posX = params.GetInt("posX")
        posY = params.GetInt("posY")
        if posX > 0 and posY > 0:
            self.move(posX, posY)
        width = params.GetInt("width")
        height = params.GetInt("height")
        if width > 0 and height > 0:
            self.resize(width, height)

        self.buttons.accepted.connect(self.accept)
        self.buttons.rejected.connect(self.reject)

    def done(self, *args, **kwargs):
        # Save the current geometry before the dialog closes.
        params = FreeCAD.ParamGet(self.paramKey)
        params.SetInt("posX", self.x())
        params.SetInt("posY", self.y())
        params.SetInt("width", self.size().width())
        params.SetInt("height", self.size().height())
        return QtGui.QDialog.done(self, *args, **kwargs)


def stringsplit(commandline):
    """Split one G-code line into a dict of command and axis/word values.

    A line starting with '(' is treated as a comment: command is set to
    'message' and the raw text stored under 'txt'.  Otherwise the first
    word is the command and each following word is stored under its
    leading letter (e.g. 'X10.5' -> returndict['X'] = '10.5').
    """
    returndict = {'command': None, 'X': None, 'Y': None, 'Z': None,
                  'A': None, 'B': None, 'F': None, 'T': None, 'S': None,
                  'I': None, 'J': None, 'K': None, 'txt': None}
    wordlist = [a.strip() for a in commandline.split(" ")]
    if wordlist[0][0] == '(':
        returndict['command'] = 'message'
        returndict['txt'] = wordlist[0]
    else:
        returndict['command'] = wordlist[0]
        for word in wordlist[1:]:
            returndict[word[0]] = word[1:]
    return returndict


def fmt(num, dec, units):
    ''' used to format axis moves, feedrate, etc for decimal places and units'''
    if units == 'G21':  # metric
        fnum = '%.*f' % (dec, num)
    else:  # inch
        fnum = '%.*f' % (dec, num/25.4)  # since FreeCAD uses metric units internally
    return fnum


def editor(gcode):
    '''pops up a handy little editor to look at the code output '''
    dia = GCodeEditorDialog()
    dia.editor.setText(gcode)
    # Modal; the edited text (if needed) is available via dia.editor.
    result = dia.exec_()


def fcoms(string, commentsym):
    ''' filter and rebuild comments with user preferred comment symbol'''
    # Only single-character comment symbols are rewritten; anything else
    # leaves the line untouched.
    if len(commentsym) == 1:
        s1 = string.replace('(', commentsym)
        comment = s1.replace(')', '')
    else:
        return string
    return comment
lgpl-2.1
Mazecreator/tensorflow
tensorflow/contrib/tpu/python/tpu/tpu_function_test.py
75
5272
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for tpu_function helpers."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.contrib.tpu.python.tpu import tpu_feed
from tensorflow.contrib.tpu.python.tpu import tpu_function
from tensorflow.python.platform import test


class FunctionArgCheckTest(test.TestCase):
  # check_function_argument_count returns None when the argument count is
  # acceptable, or a human-readable fragment describing the requirement
  # otherwise; every assertion below relies on that convention.  When an
  # InfeedQueue is passed, its tuple entries supply additional arguments
  # on top of the explicit count.

  def testSimple(self):
    """Tests that arg checker works for functions with no varargs or defaults.
    """

    def func(x, y, z):
      return x + y + z

    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 3, None))
    self.assertEqual("exactly 3 arguments",
                     tpu_function.check_function_argument_count(func, 2, None))
    # A 2-tuple infeed queue supplies 2 of the 3 required arguments.
    queue = tpu_feed.InfeedQueue(2)
    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 1, queue))
    self.assertEqual("exactly 3 arguments",
                     tpu_function.check_function_argument_count(func, 2, queue))

  def testDefaultArgs(self):
    """Tests that arg checker works for a function with no varargs."""

    def func(x, y, z=17):
      return x + y + z

    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 3, None))
    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 2, None))
    self.assertEqual("at least 2 arguments",
                     tpu_function.check_function_argument_count(func, 1, None))
    self.assertEqual("at most 3 arguments",
                     tpu_function.check_function_argument_count(func, 4, None))
    queue = tpu_feed.InfeedQueue(1)
    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 2, queue))
    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 1, queue))
    self.assertEqual("at least 2 arguments",
                     tpu_function.check_function_argument_count(func, 0, queue))
    self.assertEqual("at most 3 arguments",
                     tpu_function.check_function_argument_count(func, 4, queue))

  def testVarArgs(self):
    """Tests that arg checker works for a function with varargs."""

    def func(x, y, *z):
      return x + y + len(z)

    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 2, None))
    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 3, None))
    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 4, None))
    self.assertEqual("at least 2 arguments",
                     tpu_function.check_function_argument_count(func, 1, None))
    queue = tpu_feed.InfeedQueue(1)
    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 1, queue))
    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 2, queue))
    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 3, queue))
    self.assertEqual("at least 2 arguments",
                     tpu_function.check_function_argument_count(func, 0, queue))

  def testVarArgsAndDefaults(self):
    """Tests that arg checker works for a function with varargs and defaults."""

    def func(x, y, z=17, *q):
      return x + y + z + len(q)

    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 2, None))
    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 3, None))
    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 4, None))
    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 5, None))
    self.assertEqual("at least 2 arguments",
                     tpu_function.check_function_argument_count(func, 1, None))
    queue = tpu_feed.InfeedQueue(1)
    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 1, queue))
    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 2, queue))
    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 3, queue))
    self.assertEqual(None,
                     tpu_function.check_function_argument_count(func, 4, queue))
    self.assertEqual("at least 2 arguments",
                     tpu_function.check_function_argument_count(func, 0, queue))


if __name__ == "__main__":
  test.main()
apache-2.0
averagehuman/mezzanine-invites
invites/forms.py
1
2086
from django import forms
from django.utils.translation import ugettext, ugettext_lazy as _
from django.contrib.auth import authenticate

from captcha.fields import CaptchaField

from mezzanine.core.forms import Html5Mixin
from mezzanine.accounts import forms as base

from .models import InvitationCode, InviteCodeHasExpired, InviteCodeIsOutOfDate


def captcha():
    """Return a freshly configured captcha form field."""
    return CaptchaField(
        label="Enter the letters you see below",
        help_text="Confirm that you are human",
    )


def ProfileForm(*args, **kwargs):
    """Form *factory* (intentionally class-cased so mezzanine can use it in
    place of a form class): returns the stock profile form for an existing
    authenticated user, or the captcha-protected signup form otherwise."""
    user = kwargs.get("instance")
    # NOTE(review): is_authenticated is called as a method, which is the
    # pre-Django-1.10 API — confirm against the Django version in use.
    if user and user.is_authenticated():
        cls = base.ProfileForm
    else:
        cls = SignupForm
    return cls(*args, **kwargs)


class SignupForm(base.ProfileForm):
    # Standard profile form plus a captcha check.
    check = captcha()


class LoginForm(base.LoginForm):
    # Standard login form plus a captcha check.
    check = captcha()


class PasswordResetForm(base.PasswordResetForm):
    # Standard password-reset form plus a captcha check.
    check = captcha()


class QuickLoginForm(Html5Mixin, forms.Form):
    """Log a user in with just their invitation code."""

    key = forms.CharField(label="Code", max_length=12)

    def clean_key(self):
        """Authenticate against the invite-code backend; the resulting user
        (if any) is stashed on the form for ``save()`` to return."""
        key = self.cleaned_data["key"]
        try:
            self._user = authenticate(invite_key=key)
        except InviteCodeIsOutOfDate:
            raise forms.ValidationError(ugettext("That code has expired."))
        except InviteCodeHasExpired:
            # NOTE(review): the OutOfDate/HasExpired exception names and
            # their messages read as swapped — verify against the backend's
            # semantics in .models.
            raise forms.ValidationError(
                ugettext("That code has expired and can no longer be used. You"
                " can set up a password by following the 'Forgotten Password'"
                " link on this page.")
            )
        if self._user is None:
            raise forms.ValidationError(ugettext("Invalid key"))
        elif not self._user.is_active:
            raise forms.ValidationError(ugettext("Your account is inactive"))
        return self.cleaned_data

    def save(self):
        """
        Just return the authenticated user - used for logging in.
        """
        return getattr(self, "_user", None)


class InviteForm(forms.ModelForm):
    # Admin-facing form for issuing an invitation code.
    class Meta:
        model = InvitationCode
        fields = ('registered_to', 'registered_name', 'registered_phone')
bsd-2-clause
gannetson/sportschooldeopenlucht
env/lib/python2.7/site-packages/django/contrib/gis/geoip/prototypes.py
200
3730
from ctypes import c_char_p, c_float, c_int, string_at, Structure, POINTER from django.contrib.gis.geoip.libgeoip import lgeoip, free #### GeoIP C Structure definitions #### class GeoIPRecord(Structure): _fields_ = [('country_code', c_char_p), ('country_code3', c_char_p), ('country_name', c_char_p), ('region', c_char_p), ('city', c_char_p), ('postal_code', c_char_p), ('latitude', c_float), ('longitude', c_float), # TODO: In 1.4.6 this changed from `int dma_code;` to # `union {int metro_code; int dma_code;};`. Change # to a `ctypes.Union` in to accomodate in future when # pre-1.4.6 versions are no longer distributed. ('dma_code', c_int), ('area_code', c_int), ('charset', c_int), ('continent_code', c_char_p), ] geoip_char_fields = [name for name, ctype in GeoIPRecord._fields_ if ctype is c_char_p] geoip_encodings = { 0: 'iso-8859-1', 1: 'utf8', } class GeoIPTag(Structure): pass RECTYPE = POINTER(GeoIPRecord) DBTYPE = POINTER(GeoIPTag) #### ctypes function prototypes #### # GeoIP_lib_version appeared in version 1.4.7. if hasattr(lgeoip, 'GeoIP_lib_version'): GeoIP_lib_version = lgeoip.GeoIP_lib_version GeoIP_lib_version.argtypes = None GeoIP_lib_version.restype = c_char_p else: GeoIP_lib_version = None # For freeing memory allocated within a record GeoIPRecord_delete = lgeoip.GeoIPRecord_delete GeoIPRecord_delete.argtypes = [RECTYPE] GeoIPRecord_delete.restype = None # For retrieving records by name or address. def check_record(result, func, cargs): if bool(result): # Checking the pointer to the C structure, if valid pull out elements # into a dicionary. rec = result.contents record = dict((fld, getattr(rec, fld)) for fld, ctype in rec._fields_) # Now converting the strings to unicode using the proper encoding. encoding = geoip_encodings[record['charset']] for char_field in geoip_char_fields: if record[char_field]: record[char_field] = record[char_field].decode(encoding) # Free the memory allocated for the struct & return. 
GeoIPRecord_delete(result) return record else: return None def record_output(func): func.argtypes = [DBTYPE, c_char_p] func.restype = RECTYPE func.errcheck = check_record return func GeoIP_record_by_addr = record_output(lgeoip.GeoIP_record_by_addr) GeoIP_record_by_name = record_output(lgeoip.GeoIP_record_by_name) # For opening & closing GeoIP database files. GeoIP_open = lgeoip.GeoIP_open GeoIP_open.restype = DBTYPE GeoIP_delete = lgeoip.GeoIP_delete GeoIP_delete.argtypes = [DBTYPE] GeoIP_delete.restype = None # This is so the string pointer can be freed within Python. class geoip_char_p(c_char_p): pass def check_string(result, func, cargs): if result: s = string_at(result) free(result) else: s = '' return s GeoIP_database_info = lgeoip.GeoIP_database_info GeoIP_database_info.restype = geoip_char_p GeoIP_database_info.errcheck = check_string # String output routines. def string_output(func): func.restype = c_char_p return func GeoIP_country_code_by_addr = string_output(lgeoip.GeoIP_country_code_by_addr) GeoIP_country_code_by_name = string_output(lgeoip.GeoIP_country_code_by_name) GeoIP_country_name_by_addr = string_output(lgeoip.GeoIP_country_name_by_addr) GeoIP_country_name_by_name = string_output(lgeoip.GeoIP_country_name_by_name)
bsd-3-clause
mcdeaton13/dynamic
Python/run_small.py
2
3947
''' A 'smoke test' for the dynamic package. Uses a fake data set to run the baseline ''' import cPickle as pickle import os import numpy as np import time import dynamic dynamic.parameters.DATASET = 'SMALL' import dynamic.SS import dynamic.TPI from dynamic import parameters, wealth, labor, demographics, income, SS, TPI globals().update(dynamic.parameters.get_parameters()) def runner(): #Create output directory structure dirs = ["./OUTPUT/Saved_moments", "./OUTPUT/SSinit", "./OUTPUT/TPIinit"] for _dir in dirs: try: os.makedirs(_dir) except OSError as oe: pass # Generate Wealth data moments output_dir = "./OUTPUT" wealth.get_wealth_data(lambdas, J, flag_graphs, output_dir) # Generate labor data moments labor.labor_data_moments(flag_graphs, output_dir) get_baseline = True calibrate_model = True # List of parameter names that will not be changing (unless we decide to # change them for a tax experiment) param_names = ['S', 'J', 'T', 'lambdas', 'starting_age', 'ending_age', 'beta', 'sigma', 'alpha', 'nu', 'Z', 'delta', 'E', 'ltilde', 'g_y', 'maxiter', 'mindist_SS', 'mindist_TPI', 'b_ellipse', 'k_ellipse', 'upsilon', 'a_tax_income', 'chi_b_guess', 'chi_n_guess', 'b_tax_income', 'c_tax_income', 'd_tax_income', 'tau_payroll', 'tau_bq', 'calibrate_model', 'retire', 'mean_income_data', 'g_n_vector', 'h_wealth', 'p_wealth', 'm_wealth', 'get_baseline', 'omega', 'g_n_ss', 'omega_SS', 'surv_rate', 'e', 'rho'] ''' ------------------------------------------------------------------------ Run SS with minimization to fit chi_b and chi_n ------------------------------------------------------------------------ ''' # This is the simulation before getting the replacement rate values sim_params = {} glbs = globals() lcls = locals() for key in param_names: if key in glbs: sim_params[key] = glbs[key] else: sim_params[key] = lcls[key] income_tax_params, wealth_tax_params, ellipse_params, ss_parameters, iterative_params = SS.create_steady_state_parameters(**sim_params) print "got here" before 
= time.time() ss_outputs = SS.run_steady_state(ss_parameters, iterative_params, get_baseline) ''' ------------------------------------------------------------------------ Run the baseline TPI simulation ------------------------------------------------------------------------ ''' ss_outputs['get_baseline'] = get_baseline income_tax_params, wealth_tax_params, ellipse_params, parameters, N_tilde, omega_stationary, K0, b_sinit, \ b_splus1init, L0, Y0, w0, r0, BQ0, T_H_0, tax0, c0, initial_b, initial_n = TPI.create_tpi_params(**sim_params) ss_outputs['income_tax_params'] = income_tax_params ss_outputs['wealth_tax_params'] = wealth_tax_params ss_outputs['ellipse_params'] = ellipse_params ss_outputs['parameters'] = parameters ss_outputs['N_tilde'] = N_tilde ss_outputs['omega_stationary'] = omega_stationary ss_outputs['K0'] = K0 ss_outputs['b_sinit'] = b_sinit ss_outputs['b_splus1init'] = b_splus1init ss_outputs['L0'] = L0 ss_outputs['Y0'] = Y0 ss_outputs['r0'] = r0 ss_outputs['BQ0'] = BQ0 ss_outputs['T_H_0'] = T_H_0 ss_outputs['tax0'] = tax0 ss_outputs['c0'] = c0 ss_outputs['initial_b'] = initial_b ss_outputs['initial_n'] = initial_n ss_outputs['tau_bq'] = tau_bq ss_outputs['g_n_vector'] = g_n_vector with open("ss_outputs.pkl", 'wb') as fp: pickle.dump(ss_outputs, fp) TPI.run_time_path_iteration(**ss_outputs) print "took {0} seconds to get that part done.".format(time.time() - before) if __name__ == "__main__": runner()
mit
JuPeg/tools-artbio
unstable/local_tools/clustering3.py
4
5988
#!/usr/bin/python # script find clusters of small RNA reads in the genome # version 3 - 24-12-2013 evolution to multiprocessing # Usage clustering.py <bowtie input> <output> <bowtie index> <clustering_distance> <minimum read number per cluster to be outputed> <collapse option> <extention value> <average_cluster_size> # <folding> <output format> import sys, subprocess, time from collections import defaultdict # required for some SmRNAwindow attributes (readDic) #from numpy import mean, std # required for some SmRNAwindow methods #from scipy import stats from smRtools import * import multiprocessing def clustering (Instance): def clustermining (cluster, Instance): # cluster argument is a list if Instance.readDict[-cluster[0]]: # test whether the first position in the cluster was reverse reads shift = max(Instance.readDict[-cluster[0]]) upstream_coord = cluster[0] - shift + 1 else: upstream_coord = cluster[0] if Instance.readDict[cluster[-1]]: # test whether the last position in the cluster was forward reads shift = max(Instance.readDict[cluster[-1]]) downstream_coord = cluster[-1] + shift -1 else: downstream_coord = cluster[-1] readcount = Instance.readcount(upstream_coord=upstream_coord, downstream_coord=downstream_coord) mean_size, median_size, stdv_size = Instance.statsizes(upstream_coord=upstream_coord, downstream_coord=downstream_coord) if readcount >= minimum_reads and median_size >= min_median_size: location = [Instance.gene.split()[0], upstream_coord, downstream_coord] if output_format == "intervals": return "%s\t%s\t%s\t%s" % (location[0], location[1], location[2], readcount) cluster_size = downstream_coord - upstream_coord + 1 if folding == "yes" and cluster_size < 151: foldEnergy = Instance.foldEnergy(upstream_coord=upstream_coord, downstream_coord=downstream_coord) ## be careful, test ! else: foldEnergy = "." 
forwardReadcount = Instance.forwardreadcount(upstream_coord=upstream_coord, downstream_coord=downstream_coord) # reverseReadcount = Instance.reversereadcount(upstream_coord=upstream_coord, downstream_coord=downstream_coord) # density = readcount / float(cluster_size) # if output_format == "GFF3": if forwardReadcount >= reverseReadcount: GFFstrand = "+" else: GFFstrand = "-" Attributes = "ID=RC %s : FR %s : RR %s : Dens %s : Med %s : FE %s" % (readcount, forwardReadcount, reverseReadcount, density, median_size, foldEnergy) return "%s\tGalaxy\tRead_Cluster\t%s\t%s\t%s\t%s\t.\t%s" % (location[0], location[1], location[2], readcount, GFFstrand, Attributes) else: Forward_Barycenter, Reverse_Barycenter = Instance.barycenter(upstream_coord=upstream_coord, downstream_coord=downstream_coord) Zsignature = Instance.signature(24,29,24,29,range(1,27), zscore="yes", upstream_coord=upstream_coord, downstream_coord=downstream_coord)[10] # Hsignature = Instance.hannon_signature(24,29,24,29, range(1,27), upstream_coord=upstream_coord, downstream_coord=downstream_coord )[10] * 100 UpiFreq = Instance.Ufreq(range(24,29), upstream_coord=upstream_coord, downstream_coord=downstream_coord) UsiFreq = Instance.Ufreq(range(20,22), upstream_coord=upstream_coord, downstream_coord=downstream_coord) return "%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s" % (location[0], location[1], location[2], cluster_size, readcount, forwardReadcount, reverseReadcount, density, median_size, foldEnergy, Forward_Barycenter, Reverse_Barycenter, Zsignature, Hsignature, UpiFreq, UsiFreq) return False l = Instance.readDict.keys() l=[abs(i) for i in l] l=list(set(l)) l.sort() upstream = 0 cluster_list = [] for i, element in enumerate (l[1:]): if abs(element-l[i]) > dist or i+2==len(l): # the 2nd part of the logical test is to capture the last cluster if it overlaps the end of the list cluster = l[upstream:i+1] upstream = i+1 cluster_list.append(cluster) result_list = [] for i in cluster_list: 
totestresult = clustermining (i, Instance) if totestresult: result_list.append(totestresult) return result_list def logtask (results): global number_of_clusters if results: number_of_clusters += len(results) LOG.append(results) return if __name__ == '__main__': start_time = time.time() fasta_dic = get_fasta (sys.argv[3]) objDic = {} number_of_reads = 0 F = open (sys.argv[1], "r") # F is the bowtie output taken as input for line in F: number_of_reads += 1 fields = line.split() polarity = fields[1] gene = fields[2] offset = int(fields[3]) size = len (fields[4]) try: objDic[gene].addread (polarity, offset, size) except KeyError: objDic[gene] = SmRNAwindow(gene, fasta_dic[gene]) objDic[gene].addread (polarity, offset, size) F.close() OUT = open (sys.argv[2], "w") output_format=sys.argv[8] if output_format == "intervals": print >> OUT, "#chrom\tStart\tEnd\tReadCount" elif output_format == "GFF3": print >> OUT, "##gff-version 3" else: print >> OUT, "#ID\t#chrom\tStart\tEnd\tLength\tReadCount\tForwardReads\tReverseReads\tDensity\tMedian\tFoldEnergy\tForBar\tRevBar\tz-score_signature\tHannon_signature\tUfreq_in_24-28RNAs\tUfreq_in_20-21RNs" dist = int(sys.argv[4]) min_median_size = int(sys.argv[6]) minimum_reads = int(sys.argv[5]) number_of_clusters = 0 Instance_ID = 0 folding=sys.argv[7] pool = multiprocessing.Pool(8) LOG = [] for object in objDic: pool.apply_async(clustering, args=(objDic[object],), callback=logtask) pool.close() pool.join() for lines in LOG: for line in lines: print >> OUT, line OUT.close() elapsed_time = time.time() - start_time print "number of reads: %s\nnumber of clusters: %s\ntime: %s" % (number_of_reads, number_of_clusters, elapsed_time)
mit
marctc/django
tests/m2m_intermediary/tests.py
381
1334
from __future__ import unicode_literals from datetime import datetime from django.test import TestCase from django.utils import six from .models import Article, Reporter, Writer class M2MIntermediaryTests(TestCase): def test_intermeiary(self): r1 = Reporter.objects.create(first_name="John", last_name="Smith") r2 = Reporter.objects.create(first_name="Jane", last_name="Doe") a = Article.objects.create( headline="This is a test", pub_date=datetime(2005, 7, 27) ) w1 = Writer.objects.create(reporter=r1, article=a, position="Main writer") w2 = Writer.objects.create(reporter=r2, article=a, position="Contributor") self.assertQuerysetEqual( a.writer_set.select_related().order_by("-position"), [ ("John Smith", "Main writer"), ("Jane Doe", "Contributor"), ], lambda w: (six.text_type(w.reporter), w.position) ) self.assertEqual(w1.reporter, r1) self.assertEqual(w2.reporter, r2) self.assertEqual(w1.article, a) self.assertEqual(w2.article, a) self.assertQuerysetEqual( r1.writer_set.all(), [ ("John Smith", "Main writer") ], lambda w: (six.text_type(w.reporter), w.position) )
bsd-3-clause
cloudera/hue
desktop/core/ext-py/repoze.who-2.3/repoze/who/tests/test__auth_tkt.py
2
10808
import unittest class AuthTicketTests(unittest.TestCase): def _getTargetClass(self): from .._auth_tkt import AuthTicket return AuthTicket def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_ctor_defaults(self): import hashlib from .. import _auth_tkt with _Monkey(_auth_tkt, time_mod=_Timemod): tkt = self._makeOne('SEEKRIT', 'USERID', '1.2.3.4') self.assertEqual(tkt.secret, 'SEEKRIT') self.assertEqual(tkt.userid, 'USERID') self.assertEqual(tkt.ip, '1.2.3.4') self.assertEqual(tkt.tokens, '') self.assertEqual(tkt.user_data, '') self.assertEqual(tkt.time, _WHEN) self.assertEqual(tkt.cookie_name, 'auth_tkt') self.assertEqual(tkt.secure, False) self.assertEqual(tkt.digest_algo, hashlib.md5) def test_ctor_explicit(self): import hashlib tkt = self._makeOne('SEEKRIT', 'USERID', '1.2.3.4', tokens=('a', 'b'), user_data='DATA', time=_WHEN, cookie_name='oatmeal', secure=True, digest_algo=hashlib.sha512) self.assertEqual(tkt.secret, 'SEEKRIT') self.assertEqual(tkt.userid, 'USERID') self.assertEqual(tkt.ip, '1.2.3.4') self.assertEqual(tkt.tokens, 'a,b') self.assertEqual(tkt.user_data, 'DATA') self.assertEqual(tkt.time, _WHEN) self.assertEqual(tkt.cookie_name, 'oatmeal') self.assertEqual(tkt.secure, True) self.assertEqual(tkt.digest_algo, hashlib.sha512) def test_ctor_string_algorithm(self): import hashlib tkt = self._makeOne('SEEKRIT', 'USERID', '1.2.3.4', tokens=('a', 'b'), user_data='DATA', time=_WHEN, cookie_name='oatmeal', secure=True, digest_algo='sha1') self.assertEqual(tkt.secret, 'SEEKRIT') self.assertEqual(tkt.userid, 'USERID') self.assertEqual(tkt.ip, '1.2.3.4') self.assertEqual(tkt.tokens, 'a,b') self.assertEqual(tkt.user_data, 'DATA') self.assertEqual(tkt.time, _WHEN) self.assertEqual(tkt.cookie_name, 'oatmeal') self.assertEqual(tkt.secure, True) self.assertEqual(tkt.digest_algo, hashlib.sha1) def test_digest(self): from .._auth_tkt import calculate_digest, hashlib tkt = self._makeOne('SEEKRIT', 'USERID', '1.2.3.4', tokens=('a', 'b'), 
user_data='DATA', time=_WHEN, cookie_name='oatmeal', secure=True) digest = calculate_digest('1.2.3.4', _WHEN, 'SEEKRIT', 'USERID', 'a,b', 'DATA', hashlib.md5) self.assertEqual(tkt.digest(), digest) def test_cookie_value_wo_tokens_or_userdata(self): from .._auth_tkt import calculate_digest, hashlib tkt = self._makeOne('SEEKRIT', 'USERID', '1.2.3.4', time=_WHEN) digest = calculate_digest('1.2.3.4', _WHEN, 'SEEKRIT', 'USERID', '', '', hashlib.md5) self.assertEqual(tkt.cookie_value(), '%s%08xUSERID!' % (digest, _WHEN)) def test_cookie_value_w_tokens_and_userdata(self): from .._auth_tkt import calculate_digest, hashlib tkt = self._makeOne('SEEKRIT', 'USERID', '1.2.3.4', tokens=('a', 'b'), user_data='DATA', time=_WHEN) digest = calculate_digest('1.2.3.4', _WHEN, 'SEEKRIT', 'USERID', 'a,b', 'DATA', hashlib.md5) self.assertEqual(tkt.cookie_value(), '%s%08xUSERID!a,b!DATA' % (digest, _WHEN)) def test_cookie_not_secure_wo_tokens_or_userdata(self): from .._auth_tkt import calculate_digest, hashlib from .._compat import encodestring tkt = self._makeOne('SEEKRIT', 'USERID', '1.2.3.4', time=_WHEN, cookie_name='oatmeal') digest = calculate_digest('1.2.3.4', _WHEN, 'SEEKRIT', 'USERID', '', '', hashlib.md5) cookie = tkt.cookie() self.assertEqual(cookie['oatmeal'].value, encodestring('%s%08xUSERID!' 
% (digest, _WHEN) ).strip()) self.assertEqual(cookie['oatmeal']['path'], '/') self.assertEqual(cookie['oatmeal']['secure'], '') def test_cookie_secure_w_tokens_and_userdata(self): from .._auth_tkt import calculate_digest, hashlib from .._compat import encodestring tkt = self._makeOne('SEEKRIT', 'USERID', '1.2.3.4', tokens=('a', 'b'), user_data='DATA', time=_WHEN, cookie_name='oatmeal', secure=True) digest = calculate_digest('1.2.3.4', _WHEN, 'SEEKRIT', 'USERID', 'a,b', 'DATA', hashlib.md5) cookie = tkt.cookie() self.assertEqual(cookie['oatmeal'].value, encodestring('%s%08xUSERID!a,b!DATA' % (digest, _WHEN) ).strip()) self.assertEqual(cookie['oatmeal']['path'], '/') self.assertEqual(cookie['oatmeal']['secure'], 'true') class BadTicketTests(unittest.TestCase): def _getTargetClass(self): from .._auth_tkt import BadTicket return BadTicket def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_wo_expected(self): exc = self._makeOne('message') self.assertEqual(exc.args, ('message',)) self.assertEqual(exc.expected, None) def test_w_expected(self): exc = self._makeOne('message', 'foo') self.assertEqual(exc.args, ('message',)) self.assertEqual(exc.expected, 'foo') class Test_parse_ticket(unittest.TestCase): def _callFUT(self, secret='SEEKRIT', ticket=None, ip='1.2.3.4', digest="md5"): from .._auth_tkt import parse_ticket return parse_ticket(secret, ticket, ip, digest) def test_bad_timestamp(self): from .._auth_tkt import BadTicket TICKET = '12345678901234567890123456789012XXXXXXXXuserid!' 
try: self._callFUT(ticket=TICKET) except BadTicket as e: self.assertTrue(e.args[0].startswith( 'Timestamp is not a hex integer:')) else: # pragma: no cover self.fail('Did not raise') def test_no_bang_after_userid(self): from .._auth_tkt import BadTicket TICKET = '1234567890123456789012345678901201020304userid' try: self._callFUT(ticket=TICKET) except BadTicket as e: self.assertEqual(e.args[0], 'userid is not followed by !') else: # pragma: no cover self.fail('Did not raise') def test_wo_tokens_or_data_bad_digest(self): from .._auth_tkt import BadTicket TICKET = '1234567890123456789012345678901201020304userid!' try: self._callFUT(ticket=TICKET) except BadTicket as e: self.assertEqual(e.args[0], 'Digest signature is not correct') else: # pragma: no cover self.fail('Did not raise') def test_wo_tokens_or_data_ok_digest(self): from .._auth_tkt import calculate_digest, hashlib digest = calculate_digest('1.2.3.4', _WHEN, 'SEEKRIT', 'USERID', '', '', hashlib.md5) TICKET = '%s%08xUSERID!' % (digest, _WHEN) timestamp, userid, tokens, user_data = self._callFUT(ticket=TICKET) self.assertEqual(timestamp, _WHEN) self.assertEqual(userid, 'USERID') self.assertEqual(tokens, ['']) self.assertEqual(user_data, '') def test_w_tokens_and_data_ok_digest(self): from .._auth_tkt import calculate_digest, hashlib digest = calculate_digest('1.2.3.4', _WHEN, 'SEEKRIT', 'USERID', 'a,b', 'DATA', hashlib.md5) TICKET = '%s%08xUSERID!a,b!DATA' % (digest, _WHEN) timestamp, userid, tokens, user_data = self._callFUT(ticket=TICKET) self.assertEqual(timestamp, _WHEN) self.assertEqual(userid, 'USERID') self.assertEqual(tokens, ['a', 'b']) self.assertEqual(user_data, 'DATA') def test_w_tokens_and_data_ok_alternate_digest(self): from .._auth_tkt import calculate_digest, hashlib digest = calculate_digest('1.2.3.4', _WHEN, 'SEEKRIT', 'USERID', 'a,b', 'DATA', hashlib.sha256) TICKET = '%s%08xUSERID!a,b!DATA' % (digest, _WHEN) timestamp, userid, tokens, user_data = self._callFUT( ticket=TICKET, 
digest=hashlib.sha256) self.assertEqual(timestamp, _WHEN) self.assertEqual(userid, 'USERID') self.assertEqual(tokens, ['a', 'b']) self.assertEqual(user_data, 'DATA') class Test_helpers(unittest.TestCase): # calculate_digest is not very testable, fully exercised through callers. def test_ints_to_bytes(self): from struct import pack from .._auth_tkt import ints2bytes self.assertEqual(ints2bytes([1, 2, 3, 4]), pack('>BBBB', 1, 2, 3, 4)) def test_encode_ip_timestamp(self): from struct import pack from .._auth_tkt import encode_ip_timestamp self.assertEqual(encode_ip_timestamp('1.2.3.4', _WHEN), pack('>BBBBL', 1, 2, 3, 4, _WHEN)) def test_maybe_encode_bytes(self): from .._auth_tkt import maybe_encode foo = b'foo' self.assertTrue(maybe_encode(foo) is foo) def test_maybe_encode_native_string(self): from .._auth_tkt import maybe_encode foo = 'foo' self.assertEqual(maybe_encode(foo), b'foo') def test_maybe_encode_unicode(self): from .._auth_tkt import maybe_encode from .._compat import u foo = u('foo') self.assertEqual(maybe_encode(foo), b'foo') _WHEN = 1234567 class _Timemod(object): @staticmethod def time(): return _WHEN class _Monkey(object): def __init__(self, module, **replacements): self.module = module self.orig = {} self.replacements = replacements def __enter__(self): for k, v in self.replacements.items(): orig = getattr(self.module, k, self) if orig is not self: self.orig[k] = orig setattr(self.module, k, v) def __exit__(self, *exc_info): for k, v in self.replacements.items(): if k in self.orig: setattr(self.module, k, self.orig[k]) else: #pragma NO COVERSGE delattr(self.module, k)
apache-2.0
yrizk/django-blog
blogvenv/lib/python3.4/site-packages/django/conf/locale/cs/formats.py
115
1702
# -*- encoding: utf-8 -*- # This file is distributed under the same license as the Django package. # from __future__ import unicode_literals # The *_FORMAT strings use the Django date format syntax, # see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date DATE_FORMAT = 'j. E Y' TIME_FORMAT = 'G:i' DATETIME_FORMAT = 'j. E Y G:i' YEAR_MONTH_FORMAT = 'F Y' MONTH_DAY_FORMAT = 'j. F' SHORT_DATE_FORMAT = 'd.m.Y' SHORT_DATETIME_FORMAT = 'd.m.Y G:i' FIRST_DAY_OF_WEEK = 1 # Monday # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior DATE_INPUT_FORMATS = ( '%d.%m.%Y', '%d.%m.%y', # '05.01.2006', '05.01.06' '%d. %m. %Y', '%d. %m. %y', # '5. 1. 2006', '5. 1. 06' # '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006' ) # Kept ISO formats as one is in first position TIME_INPUT_FORMATS = ( '%H:%M:%S', # '04:30:59' '%H.%M', # '04.30' '%H:%M', # '04:30' ) DATETIME_INPUT_FORMATS = ( '%d.%m.%Y %H:%M:%S', # '05.01.2006 04:30:59' '%d.%m.%Y %H:%M:%S.%f', # '05.01.2006 04:30:59.000200' '%d.%m.%Y %H.%M', # '05.01.2006 04.30' '%d.%m.%Y %H:%M', # '05.01.2006 04:30' '%d.%m.%Y', # '05.01.2006' '%d. %m. %Y %H:%M:%S', # '05. 01. 2006 04:30:59' '%d. %m. %Y %H:%M:%S.%f', # '05. 01. 2006 04:30:59.000200' '%d. %m. %Y %H.%M', # '05. 01. 2006 04.30' '%d. %m. %Y %H:%M', # '05. 01. 2006 04:30' '%d. %m. %Y', # '05. 01. 2006' '%Y-%m-%d %H.%M', # '2006-01-05 04.30' ) DECIMAL_SEPARATOR = ',' THOUSAND_SEPARATOR = '\xa0' # non-breaking space NUMBER_GROUPING = 3
apache-2.0
popazerty/beyonwiz-4.1
lib/python/Screens/InstallWizard.py
6
5714
from Screens.Screen import Screen from Components.ConfigList import ConfigListScreen from Components.Sources.StaticText import StaticText from Components.config import config, ConfigSubsection, ConfigBoolean, getConfigListEntry, ConfigSelection, ConfigYesNo, ConfigIP from Components.Network import iNetwork from Components.Ipkg import IpkgComponent from enigma import eDVBDB config.misc.installwizard = ConfigSubsection() config.misc.installwizard.hasnetwork = ConfigBoolean(default = False) config.misc.installwizard.ipkgloaded = ConfigBoolean(default = False) config.misc.installwizard.channellistdownloaded = ConfigBoolean(default = False) class InstallWizard(Screen, ConfigListScreen): STATE_UPDATE = 0 STATE_CHOISE_CHANNELLIST = 1 # STATE_CHOISE_SOFTCAM = 2 def __init__(self, session, args = None): Screen.__init__(self, session) self.index = args self.list = [] ConfigListScreen.__init__(self, self.list) if self.index == self.STATE_UPDATE: config.misc.installwizard.hasnetwork.value = False config.misc.installwizard.ipkgloaded.value = False modes = {0: " "} self.enabled = ConfigSelection(choices = modes, default = 0) self.adapters = [(iNetwork.getFriendlyAdapterName(x),x) for x in iNetwork.getAdapterList()] is_found = False for x in self.adapters: if x[1] == 'eth0' or x[1] == 'eth1' or x[1] == 'wlan0' or x[1] == 'ra0': if iNetwork.getAdapterAttribute(x[1], 'up'): self.ipConfigEntry = ConfigIP(default = iNetwork.getAdapterAttribute(x[1], "ip")) iNetwork.checkNetworkState(self.checkNetworkCB) if_found = True else: iNetwork.restartNetwork(self.checkNetworkLinkCB) break if is_found is False: self.createMenu() elif self.index == self.STATE_CHOISE_CHANNELLIST: self.enabled = ConfigYesNo(default = True) modes = {"henksat-19e": "Astra 1", "henksat-23e": "Astra 3", "henksat-19e-23e": "Astra 1 Astra 3", "henksat-19e-23e-28e": "Astra 1 Astra 2 Astra 3", "henksat-13e-19e-23e-28e": "Astra 1 Astra 2 Astra 3 Hotbird"} self.channellist_type = ConfigSelection(choices = modes, default = 
"henksat-13e-19e-23e-28e") self.createMenu() # elif self.index == self.STATE_CHOISE_SOFTCAM: # self.enabled = ConfigYesNo(default = True) # modes = {"cccam": _("default") + " (CCcam)", "scam": "scam"} # self.softcam_type = ConfigSelection(choices = modes, default = "cccam") # self.createMenu() def checkNetworkCB(self, data): if data < 3: config.misc.installwizard.hasnetwork.value = True self.createMenu() def checkNetworkLinkCB(self, retval): if retval: iNetwork.checkNetworkState(self.checkNetworkCB) else: self.createMenu() def createMenu(self): try: test = self.index except: return self.list = [] if self.index == self.STATE_UPDATE: if config.misc.installwizard.hasnetwork.value: self.list.append(getConfigListEntry(_("Your internet connection is working (ip: %s)") % (self.ipConfigEntry.getText()), self.enabled)) else: self.list.append(getConfigListEntry(_("Your receiver does not have an internet connection"), self.enabled)) elif self.index == self.STATE_CHOISE_CHANNELLIST: self.list.append(getConfigListEntry(_("Install channel list"), self.enabled)) if self.enabled.value: self.list.append(getConfigListEntry(_("Channel list type"), self.channellist_type)) # elif self.index == self.STATE_CHOISE_SOFTCAM: # self.list.append(getConfigListEntry(_("Install softcam"), self.enabled)) # if self.enabled.value: # self.list.append(getConfigListEntry(_("Softcam type"), self.softcam_type)) self["config"].list = self.list self["config"].l.setList(self.list) def keyLeft(self): if self.index == 0: return ConfigListScreen.keyLeft(self) self.createMenu() def keyRight(self): if self.index == 0: return ConfigListScreen.keyRight(self) self.createMenu() def run(self): if self.index == self.STATE_UPDATE: if config.misc.installwizard.hasnetwork.value: self.session.open(InstallWizardIpkgUpdater, self.index, _('Please wait (updating packages)'), IpkgComponent.CMD_UPDATE) elif self.index == self.STATE_CHOISE_CHANNELLIST and self.enabled.value and self.channellist_type.value != "ATV": 
self.session.open(InstallWizardIpkgUpdater, self.index, _('Please wait (downloading channel list)'), IpkgComponent.CMD_REMOVE, {'package': 'enigma2-plugin-settings-' + self.channellist_type.value}) # elif self.index == self.STATE_CHOISE_SOFTCAM and self.enabled.value: # self.session.open(InstallWizardIpkgUpdater, self.index, _('Please wait (downloading softcam)'), IpkgComponent.CMD_INSTALL, {'package': 'enigma2-plugin-softcams-' + self.softcam_type.value}) return class InstallWizardIpkgUpdater(Screen): def __init__(self, session, index, info, cmd, pkg = None): Screen.__init__(self, session) self["statusbar"] = StaticText(info) self.pkg = pkg self.index = index self.state = 0 self.ipkg = IpkgComponent() self.ipkg.addCallback(self.ipkgCallback) if self.index == InstallWizard.STATE_CHOISE_CHANNELLIST: self.ipkg.startCmd(cmd, {'package': 'enigma2-plugin-settings-*'}) else: self.ipkg.startCmd(cmd, pkg) def ipkgCallback(self, event, param): if event == IpkgComponent.EVENT_DONE: if self.index == InstallWizard.STATE_UPDATE: config.misc.installwizard.ipkgloaded.value = True elif self.index == InstallWizard.STATE_CHOISE_CHANNELLIST: if self.state == 0: self.ipkg.startCmd(IpkgComponent.CMD_INSTALL, self.pkg) self.state = 1 return else: config.misc.installwizard.channellistdownloaded.value = True eDVBDB.getInstance().reloadBouquets() eDVBDB.getInstance().reloadServicelist() self.close()
gpl-2.0
fsherratt/custom_pixhawk
Tools/ardupilotwaf/toolchain.py
44
4505
""" WAF Tool to select the correct toolchain based on the target archtecture. This tool loads compiler_c and compiler_cxx, so you don't need to load them (and you must not load them before this tool). Use the environment variable TOOLCHAIN to define the toolchain. Example:: def configure(cfg): cfg.env.TOOLCHAIN = 'arm-linux-gnueabihf' cfg.load('toolchain') """ from waflib import Errors, Context, Utils from waflib.Configure import conf from waflib.Tools import compiler_c, compiler_cxx from waflib.Tools import clang, clangxx, gcc, gxx import os import re @conf def find_gxx(conf): names = ['g++', 'c++'] if conf.env.TOOLCHAIN != 'native': names = ['%s-%s' % (conf.env.TOOLCHAIN, n) for n in names] cxx = conf.find_program(names, var='CXX') conf.get_cc_version(cxx, gcc=True) conf.env.CXX_NAME = 'gcc' @conf def find_gcc(conf): names = ['gcc', 'cc'] if conf.env.TOOLCHAIN != 'native': names = ['%s-%s' % (conf.env.TOOLCHAIN, n) for n in names] cc = conf.find_program(names, var='CC') conf.get_cc_version(cc, gcc=True) conf.env.CC_NAME = 'gcc' def _clang_cross_support(cfg): if _clang_cross_support.called: return prefix = cfg.env.TOOLCHAIN + '-' try: cfg.find_program(prefix + 'gcc', var='CROSS_GCC') except Errors.ConfigurationError as e: cfg.fatal('toolchain: clang: couldn\'t find cross GCC', ex=e) environ = dict(os.environ) if 'TOOLCHAIN_CROSS_AR' in environ: # avoid OS's environment to mess up toolchain path finding del environ['TOOLCHAIN_CROSS_AR'] try: cfg.find_program( prefix + 'ar', var='TOOLCHAIN_CROSS_AR', environ=environ, ) except Errors.ConfigurationError as e: cfg.fatal('toolchain: clang: couldn\'t find toolchain path', ex=e) toolchain_path = os.path.join(cfg.env.TOOLCHAIN_CROSS_AR[0], '..', '..') toolchain_path = os.path.abspath(toolchain_path) cfg.msg('Using toolchain path for clang', toolchain_path) sysroot = cfg.cmd_and_log( [cfg.env.CROSS_GCC[0], '--print-sysroot'], quiet=Context.BOTH, ).strip() cfg.env.CLANG_FLAGS = [ '--target=' + cfg.env.TOOLCHAIN, 
'--gcc-toolchain=' + toolchain_path, '--sysroot=' + sysroot, '-B' + os.path.join(toolchain_path, 'bin') ] _clang_cross_support.called = False def _set_clang_crosscompilation_wrapper(tool_module): original_configure = tool_module.configure def new_configure(cfg): if cfg.env.TOOLCHAIN == 'native': original_configure(cfg) return cfg.env.stash() try: _clang_cross_support(cfg) original_configure(cfg) except Errors.ConfigurationError as e: cfg.env.revert() raise else: cfg.env.commit() tool_module.configure = new_configure _set_clang_crosscompilation_wrapper(clang) _set_clang_crosscompilation_wrapper(clangxx) def _filter_supported_c_compilers(*compilers): for k in compiler_c.c_compiler: l = compiler_c.c_compiler[k] compiler_c.c_compiler[k] = [c for c in compilers if c in l] def _filter_supported_cxx_compilers(*compilers): for k in compiler_cxx.cxx_compiler: l = compiler_cxx.cxx_compiler[k] compiler_cxx.cxx_compiler[k] = [c for c in compilers if c in l] @conf def find_toolchain_program(cfg, filename, **kw): filename = Utils.to_list(filename) if not kw.get('var', ''): # just copy from the original implementation kw['var'] = re.sub(r'[-.]', '_', filename[0].upper()) if cfg.env.TOOLCHAIN != 'native': for i, name in enumerate(filename): filename[i] = '%s-%s' % (cfg.env.TOOLCHAIN, name) return cfg.find_program(filename, **kw) def configure(cfg): if cfg.env.TOOLCHAIN == 'native': cfg.load('compiler_cxx compiler_c') return _filter_supported_c_compilers('gcc', 'clang') _filter_supported_cxx_compilers('g++', 'clang++') cfg.find_toolchain_program('ar') cfg.msg('Using toolchain', cfg.env.TOOLCHAIN) cfg.load('compiler_cxx compiler_c') if cfg.env.COMPILER_CC == 'clang': cfg.env.CFLAGS += cfg.env.CLANG_FLAGS cfg.env.LINKFLAGS_cprogram += cfg.env.CLANG_FLAGS if cfg.env.COMPILER_CXX == 'clang++': cfg.env.CXXFLAGS += cfg.env.CLANG_FLAGS cfg.env.LINKFLAGS_cxxprogram += cfg.env.CLANG_FLAGS
gpl-3.0
bitmazk/django-aps-production
aps_production/admin.py
1
2429
"""Admin classes for the aps_production app.""" from django.contrib import admin from . import models # Inlines ==================================================================== class OrderLineInlineAdmin(admin.TabularInline): model = models.OrderLine class OrderRunInlineAdmin(admin.TabularInline): model = models.OrderRun class ShipmentInlineAdmin(admin.TabularInline): model = models.Shipment # Admins ===================================================================== class ErrorAdmin(admin.ModelAdmin): """Custom admin for the ``Error`` model.""" list_display = ('order_run', 'error_bin', 'quantity', 'comment') raw_id_fields = ['order_run', 'error_bin'] class ErrorBinAdmin(admin.ModelAdmin): """Custom admin for the ``ErrorBin`` model.""" list_display = ('technology', 'error_code') search_fields = ['technology__identifier', 'error_code'] class OrderAdmin(admin.ModelAdmin): """Custom admin for the ``Order`` model.""" list_display = ('order_number', 'company', 'date_created', 'customer_po_number', 'customer_po_date') search_fields = ['order_number'] inlines = [OrderLineInlineAdmin] class OrderLineAdmin(admin.ModelAdmin): """Custom admin for the ``OrderLine`` model.""" list_display = ('order', 'line_no', 'product', 'quantity_ordered', 'date_requested', 'date_shipped', 'date_delivered') search_fields = ['order__order_number'] raw_id_fields = ['order'] inlines = [OrderRunInlineAdmin] class OrderRunAdmin(admin.ModelAdmin): """Custom admin for the ```` model.""" list_display = ('order_line', 'run_number', 'parent', 'ipn', 'quantity_started', 'quantity_dest_out', 'quantity_out', 'is_open', 'comment') list_filter = ['is_open'] search_fields = ['run_number'] raw_id_fields = ['order_line'] inlines = [ShipmentInlineAdmin] class ShipmentAdmin(admin.ModelAdmin): """Custom admin for the ``Shipment`` model.""" list_display = ('order_run', 'quantity', 'date_shipped') raw_id_fields = ['order_run'] admin.site.register(models.Error, ErrorAdmin) admin.site.register(models.ErrorBin, 
ErrorBinAdmin) admin.site.register(models.Order, OrderAdmin) admin.site.register(models.OrderLine, OrderLineAdmin) admin.site.register(models.OrderRun, OrderRunAdmin) admin.site.register(models.Shipment, ShipmentAdmin)
mit
mozilla/socorro
webapp-django/crashstats/api/templatetags/jinja_helpers.py
1
2206
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import datetime
from urllib.parse import quote
import warnings

from django_jinja import library
import jinja2

from django.forms.widgets import RadioSelect


@library.global_function
def describe_friendly_type(type_):
    """Return a human-friendly label for a Python type used in API docs.

    Unknown types trigger a warning and are returned unchanged.
    """
    # Identity comparison (``is``) is deliberate: it distinguishes bool from
    # int and date from datetime, which ``isinstance`` would conflate.
    friendly_labels = (
        (str, "String"),
        (int, "Integer"),
        (list, "List of strings"),
        (datetime.date, "Date"),
        (datetime.datetime, "Date and time"),
        (bool, "Boolean"),
    )
    for known_type, label in friendly_labels:
        if type_ is known_type:
            return label
    warnings.warn("Don't know how to describe type %r" % type_)
    return type_


@library.global_function
def make_test_input(parameter, defaults):
    """Render an HTML form widget for one API parameter description.

    Booleans become a radio group; everything else becomes an ``<input>``
    whose type, CSS classes and default value are derived from the parameter
    metadata. Returns markup-safe HTML.
    """
    if parameter["type"] is bool:
        # If it's optional, make it possible to select "Not set",
        if parameter["required"]:
            raise NotImplementedError("required booleans are not supported")
        widget = RadioSelect(
            choices=(("", "Not set"), ("false", "False"), ("true", "True"))
        )
        return widget.render(parameter["name"], "")

    input_type = "date" if parameter["type"] is datetime.date else "text"

    css_classes = []
    if parameter["required"]:
        css_classes.append("required")
    if parameter["type"] is not str:
        css_classes.append("validate-%s" % parameter["type"].__name__)

    default = defaults.get(parameter["name"])
    value = quote(str(default)) if default else ""

    # Build the tag incrementally; optional attributes are only emitted when
    # they carry content, matching the legacy output byte-for-byte.
    html = '<input type="%s" name="%s"' % (input_type, parameter["name"])
    if css_classes:
        html += ' class="%s"' % " ".join(css_classes)
    if value:
        html += ' value="%s"' % value
    html += ">"
    return jinja2.Markup(html)


@library.filter
def pluralize(count, multiple="s", single=""):
    """Return *single* when ``count == 1``, otherwise *multiple*."""
    return single if count == 1 else multiple
mpl-2.0
UXE/edx-configuration
util/import_xml_courses.py
68
1763
# Import XML Courses from git repos into the CMS. # Run with sudo and make sure the user can clone # the course repos. # Output Has per course #{ # repo_url: # repo_name: # org: # course: # run: # disposition: # version: #} import argparse from os.path import basename import yaml if __name__ == '__main__': parser = argparse.ArgumentParser( description="Import XML courses from git repos.") parser.add_argument("-c", "--courses-csv", required=True, help="A CSV of xml courses to import.") args = parser.parse_args() courses = open(args.courses_csv, 'r') all_course_data = [] all_xml_mappings = {} for line in courses: cols = line.strip().split(',') slug = cols[0] author_format = cols[1] disposition = cols[2] repo_url = cols[4] version = cols[5] if author_format.lower() != 'xml' \ or disposition.lower() == "don't import": continue # Checkout w/tilde org, course, run = slug.split("/") repo_name = "{}~{}".format(basename(repo_url).rstrip('.git'), run) course_info = { "repo_url": repo_url, "repo_name": repo_name, "org": org, "course": course, "run": run, "disposition": disposition.lower(), "version": version, } all_course_data.append(course_info) if disposition.lower() == "on disk": all_xml_mappings[slug] = 'xml' edxapp_xml_courses = { "EDXAPP_XML_COURSES": all_course_data, "EDXAPP_XML_MAPPINGS": all_xml_mappings, "EDXAPP_XML_FROM_GIT": True } print yaml.safe_dump(edxapp_xml_courses, default_flow_style=False)
agpl-3.0
zhuyue1314/Empire
lib/modules/credentials/mimikatz/purge.py
22
2133
from lib.common import helpers


class Module:
    """Empire module: run Invoke-Mimikatz to purge all kerberos tickets."""

    def __init__(self, mainMenu, params=None):
        # Static metadata describing the module to the Empire framework.
        self.info = {
            'Name': 'Invoke-Mimikatz Golden Ticket',

            'Author': ['@JosephBialek', '@gentilkiwi'],

            'Description': ("Runs PowerSploit's Invoke-Mimikatz function "
                            "to purge all current kerberos tickets from memory."),

            'Background' : True,

            'OutputExtension' : None,

            'NeedsAdmin' : False,

            'OpsecSafe' : True,

            'MinPSVersion' : '2',

            'Comments': [
                'http://clymb3r.wordpress.com/',
                'http://blog.gentilkiwi.com',
                "https://github.com/gentilkiwi/mimikatz/wiki/module-~-kerberos"
            ]
        }

        # any options needed by the module, settable during runtime
        self.options = {
            # format:
            #   value_name : {description, required, default_value}
            'Agent' : {
                'Description'   :   'Agent to run module on.',
                'Required'      :   True,
                'Value'         :   ''
            }
        }

        # save off a copy of the mainMenu object to access external functionality
        #   like listeners/agent handlers/etc.
        self.mainMenu = mainMenu

        # BUG FIX: the default was the mutable literal [] which is shared
        # across every Module instance; use None as the sentinel instead.
        if params is None:
            params = []
        for param in params:
            # parameter format is [Name, Value]
            option, value = param
            if option in self.options:
                self.options[option]['Value'] = value

    def generate(self):
        """Build and return the PowerShell script for this module.

        Returns "" (and prints a colored error) if the Invoke-Mimikatz
        source file cannot be read.
        """
        # read in the common module source code
        moduleSource = self.mainMenu.installPath + "/data/module_source/credentials/Invoke-Mimikatz.ps1"

        try:
            # with-block guarantees the handle is closed even on read errors;
            # IOError is the narrow failure mode for a missing/unreadable file.
            with open(moduleSource, 'r') as f:
                moduleCode = f.read()
        except IOError:
            # Parenthesized print works on both Python 2 and 3 for one arg.
            print(helpers.color("[!] Could not read module source path at: " + str(moduleSource)))
            return ""

        script = moduleCode

        # set the purge command
        script += "Invoke-Mimikatz -Command '\"kerberos::purge\"'"

        return script
bsd-3-clause
nmercier/linux-cross-gcc
linux/lib/python2.7/encodings/charmap.py
860
2084
""" Generic Python Character Mapping Codec. Use this codec directly rather than through the automatic conversion mechanisms supplied by unicode() and .encode(). Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. """#" import codecs ### Codec APIs class Codec(codecs.Codec): # Note: Binding these as C functions will result in the class not # converting them to methods. This is intended. encode = codecs.charmap_encode decode = codecs.charmap_decode class IncrementalEncoder(codecs.IncrementalEncoder): def __init__(self, errors='strict', mapping=None): codecs.IncrementalEncoder.__init__(self, errors) self.mapping = mapping def encode(self, input, final=False): return codecs.charmap_encode(input, self.errors, self.mapping)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def __init__(self, errors='strict', mapping=None): codecs.IncrementalDecoder.__init__(self, errors) self.mapping = mapping def decode(self, input, final=False): return codecs.charmap_decode(input, self.errors, self.mapping)[0] class StreamWriter(Codec,codecs.StreamWriter): def __init__(self,stream,errors='strict',mapping=None): codecs.StreamWriter.__init__(self,stream,errors) self.mapping = mapping def encode(self,input,errors='strict'): return Codec.encode(input,errors,self.mapping) class StreamReader(Codec,codecs.StreamReader): def __init__(self,stream,errors='strict',mapping=None): codecs.StreamReader.__init__(self,stream,errors) self.mapping = mapping def decode(self,input,errors='strict'): return Codec.decode(input,errors,self.mapping) ### encodings module API def getregentry(): return codecs.CodecInfo( name='charmap', encode=Codec.encode, decode=Codec.decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamwriter=StreamWriter, streamreader=StreamReader, )
bsd-3-clause
khagler/boto
tests/integration/cloudsearch/__init__.py
645
1175
# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/ # Copyright (c) 2012 Amazon.com, Inc. or its affiliates. # All Rights Reserved # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE.
mit
whummer/moto
tests/test_core/test_instance_metadata.py
12
1382
from __future__ import unicode_literals
import sure  # noqa
from nose.tools import assert_raises
import requests

from moto import mock_ec2, settings

# In server mode the metadata endpoint is served by the local moto server;
# otherwise requests to the link-local instance-metadata address
# (169.254.169.254) are intercepted by the @mock_ec2 decorator.
if settings.TEST_SERVER_MODE:
    BASE_URL = 'http://localhost:5000'
else:
    BASE_URL = 'http://169.254.169.254'


@mock_ec2
def test_latest_meta_data():
    # The top-level listing advertises only the 'iam' subtree.
    res = requests.get("{0}/latest/meta-data/".format(BASE_URL))
    res.content.should.equal(b"iam")


@mock_ec2
def test_meta_data_iam():
    # The iam document nests temporary credentials under the default role.
    res = requests.get("{0}/latest/meta-data/iam".format(BASE_URL))
    json_response = res.json()
    default_role = json_response['security-credentials']['default-role']
    default_role.should.contain('AccessKeyId')
    default_role.should.contain('SecretAccessKey')
    default_role.should.contain('Token')
    default_role.should.contain('Expiration')


@mock_ec2
def test_meta_data_security_credentials():
    # Listing the security-credentials path yields the single role name.
    res = requests.get(
        "{0}/latest/meta-data/iam/security-credentials/".format(BASE_URL))
    res.content.should.equal(b"default-role")


@mock_ec2
def test_meta_data_default_role():
    # Fetching the role directly returns the credential document itself.
    res = requests.get(
        "{0}/latest/meta-data/iam/security-credentials/default-role".format(BASE_URL))
    json_response = res.json()
    json_response.should.contain('AccessKeyId')
    json_response.should.contain('SecretAccessKey')
    json_response.should.contain('Token')
    json_response.should.contain('Expiration')
apache-2.0
EDUlib/edx-ora2
openassessment/xblock/test/test_opaque_keys.py
9
1246
# -*- coding: utf-8 -*- """ Tests for opaque key transition in the LMS runtime. See https://github.com/edx/edx-platform/wiki/Opaque-Keys """ import mock from .base import XBlockHandlerTestCase, scenario class TestOpaqueKeys(XBlockHandlerTestCase): """ Test that the XBlock handles the opaque key transition gracefully. """ @scenario('data/basic_scenario.xml', user_id='Bob') def test_opaque_key_deprecated_string(self, xblock): # Simulate the opaque key changeover by # providing a mock `to_deprecated_string()` method. usage_key = mock.MagicMock() usage_key.to_deprecated_string.return_value = u"เՇє๓ เ๔" course_key = mock.MagicMock() course_key.to_deprecated_string.return_value = u"¢συяѕє ι∂" xblock.scope_ids = mock.MagicMock() xblock.scope_ids.usage_id = usage_key xblock.xmodule_runtime = mock.MagicMock() xblock.xmodule_runtime.course_id = course_key student_item = xblock.get_student_item_dict() # Expect that we correctly serialize the opaque keys self.assertEqual(student_item['item_id'], u"เՇє๓ เ๔") self.assertEqual(student_item['course_id'], u"¢συяѕє ι∂")
agpl-3.0
anupcshan/bazel
tools/android/build_incremental_dexmanifest.py
26
3862
# Copyright 2015 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Construct a dex manifest from a set of input .dex.zip files.

Usage: %s <output manifest> <input zip file>*
       %s @<params file>

Input files must be either .zip files containing one or more .dex files or
.dex files.

A manifest file is written that contains one line for each input dex in the
following form:

<input zip> <path in input zip> <path in output zip> <SHA-256 checksum>

or

<input dex> - <path in output zip> <SHA-256 checksum>
"""
# NOTE: the docstring previously claimed an MD5 checksum for the zip form,
# but Checksum() has always computed SHA-256 for both forms.

import hashlib
import os
import shutil
import sys
import tempfile
import zipfile


class DexmanifestBuilder(object):
  """Implementation of the dex manifest builder.

  Use as a context manager; the temporary extraction directory is created
  on __enter__ and removed on __exit__.
  """

  def __init__(self):
    self.manifest_lines = []      # accumulated output manifest lines
    self.dir_counter = 1          # per-zip extraction subdirectory counter
    self.output_dex_counter = 1   # sequence number for output dex names
    self.checksums = set()        # SHA-256 digests already emitted (dedup)
    self.tmpdir = None

  def __enter__(self):
    self.tmpdir = tempfile.mkdtemp()
    return self

  def __exit__(self, unused_type, unused_value, unused_traceback):
    shutil.rmtree(self.tmpdir, True)

  def Checksum(self, filename):
    """Compute the SHA-256 checksum of a file."""
    h = hashlib.sha256()
    # BUG FIX: open in binary mode -- dex files are binary, and text mode
    # corrupts the digest on Windows and breaks entirely on Python 3
    # (hashlib requires bytes). Also use a with-block so the handle is
    # always closed.
    with open(filename, "rb") as f:
      while True:
        data = f.read(65536)
        if not data:
          break

        h.update(data)

    return h.hexdigest()

  def AddDex(self, input_dex_or_zip, zippath, dex):
    """Adds a dex file to the output.

    Args:
      input_dex_or_zip: the input file written to the manifest
      zippath: the zip path written to the manifest or None if the input
          file is not a .zip .
      dex: the dex file to be added

    Returns:
      None.
    """
    fs_checksum = self.Checksum(dex)
    if fs_checksum in self.checksums:
      # Identical content was already added under an earlier name.
      return

    self.checksums.add(fs_checksum)
    zip_dex = "incremental_classes%d.dex" % self.output_dex_counter
    self.output_dex_counter += 1
    self.manifest_lines.append("%s %s %s %s" % (
        input_dex_or_zip, zippath if zippath else "-", zip_dex, fs_checksum))

  def Run(self, argv):
    """Creates a dex manifest.

    argv[0] is the output manifest path (or "@<file>" holding the real
    argument list, one per line); the rest are input .zip / .dex files.
    """
    if len(argv) < 1:
      raise Exception("At least one argument expected")

    if argv[0][0] == "@":
      if len(argv) != 1:
        raise IOError("A parameter file should be the only argument")
      # The params file replaces the whole argument vector.
      with open(argv[0][1:]) as param_file:
        argv = [a.strip() for a in param_file.readlines()]

    for input_filename in argv[1:]:
      input_filename = input_filename.strip()
      if input_filename.endswith(".zip"):
        with zipfile.ZipFile(input_filename, "r") as input_dex_zip:
          # Extract each zip into its own numbered scratch directory so
          # same-named entries from different zips cannot collide.
          input_dex_dir = os.path.join(self.tmpdir, str(self.dir_counter))
          os.makedirs(input_dex_dir)
          self.dir_counter += 1

          for input_dex_dex in input_dex_zip.namelist():
            if not input_dex_dex.endswith(".dex"):
              continue

            input_dex_zip.extract(input_dex_dex, input_dex_dir)
            fs_dex = input_dex_dir + "/" + input_dex_dex
            self.AddDex(input_filename, input_dex_dex, fs_dex)
      elif input_filename.endswith(".dex"):
        self.AddDex(input_filename, None, input_filename)

    with open(argv[0], "w") as manifest:
      manifest.write("\n".join(self.manifest_lines))


def main(argv):
  with DexmanifestBuilder() as b:
    b.Run(argv[1:])


if __name__ == "__main__":
  main(sys.argv)
apache-2.0
elitak/pexpect
fdpexpect.py
2
2443
"""This is like pexpect, but will work on any file descriptor that you pass it. So you are reponsible for opening and close the file descriptor. $Id$ """ from pexpect import * import os __all__ = ['fdspawn'] class fdspawn (spawn): """This is like pexpect.spawn but allows you to supply your own open file descriptor. For example, you could use it to read through a file looking for patterns, or to control a modem or serial device. """ def __init__ (self, fd, args=[], timeout=30, maxread=2000, searchwindowsize=None, logfile=None): """This takes a file descriptor (an int) or an object that support the fileno() method (returning an int). All Python file-like objects support fileno(). """ ### TODO: Add better handling of trying to use fdspawn in place of spawn ### TODO: (overload to allow fdspawn to also handle commands as spawn does. if type(fd) != type(0) and hasattr(fd, 'fileno'): fd = fd.fileno() if type(fd) != type(0): raise ExceptionPexpect ('The fd argument is not an int. If this is a command string then maybe you want to use pexpect.spawn.') try: # make sure fd is a valid file descriptor os.fstat(fd) except OSError: raise ExceptionPexpect, 'The fd argument is not a valid file descriptor.' self.args = None self.command = None spawn.__init__(self, None, args, timeout, maxread, searchwindowsize, logfile) self.child_fd = fd self.own_fd = False self.closed = False self.name = '<file descriptor %d>' % fd def __del__ (self): return def close (self): if self.child_fd == -1: return if self.own_fd: self.close (self) else: self.flush() os.close(self.child_fd) self.child_fd = -1 self.closed = True def isalive (self): """This checks if the file descriptor is still valid. If os.fstat() does not raise an exception then we assume it is alive. """ if self.child_fd == -1: return False try: os.fstat(self.child_fd) return True except: return False def terminate (self, force=False): raise ExceptionPexpect ('This method is not valid for file descriptors.') def kill (self, sig): return
mit
ff94315/hiwifi-openwrt-HC5661-HC5761
staging_dir/target-mipsel_r2_uClibc-0.9.33.2/root-ralink/usr/lib/python2.7/encodings/iso8859_3.py
593
13345
""" Python Character Mapping Codec iso8859_3 generated from 'MAPPINGS/ISO8859/8859-3.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='iso8859-3', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Table decoding_table = ( u'\x00' # 0x00 -> NULL u'\x01' # 0x01 -> START OF HEADING u'\x02' # 0x02 -> START OF TEXT u'\x03' # 0x03 -> END OF TEXT u'\x04' # 0x04 -> END OF TRANSMISSION u'\x05' # 0x05 -> ENQUIRY u'\x06' # 0x06 -> ACKNOWLEDGE u'\x07' # 0x07 -> BELL u'\x08' # 0x08 -> BACKSPACE u'\t' # 0x09 -> HORIZONTAL TABULATION u'\n' # 0x0A -> LINE FEED u'\x0b' # 0x0B -> VERTICAL TABULATION u'\x0c' # 0x0C -> FORM FEED u'\r' # 0x0D -> CARRIAGE RETURN u'\x0e' # 0x0E -> SHIFT OUT u'\x0f' # 0x0F -> SHIFT IN u'\x10' # 0x10 -> DATA LINK ESCAPE u'\x11' # 0x11 -> DEVICE CONTROL ONE u'\x12' # 0x12 -> DEVICE CONTROL TWO u'\x13' # 0x13 -> DEVICE CONTROL THREE u'\x14' # 0x14 -> DEVICE CONTROL FOUR u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE u'\x16' # 0x16 -> SYNCHRONOUS IDLE u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK u'\x18' # 0x18 -> CANCEL u'\x19' # 0x19 -> END OF MEDIUM u'\x1a' # 0x1A -> SUBSTITUTE u'\x1b' # 0x1B -> ESCAPE u'\x1c' # 
0x1C -> FILE SEPARATOR u'\x1d' # 0x1D -> GROUP SEPARATOR u'\x1e' # 0x1E -> RECORD SEPARATOR u'\x1f' # 0x1F -> UNIT SEPARATOR u' ' # 0x20 -> SPACE u'!' # 0x21 -> EXCLAMATION MARK u'"' # 0x22 -> QUOTATION MARK u'#' # 0x23 -> NUMBER SIGN u'$' # 0x24 -> DOLLAR SIGN u'%' # 0x25 -> PERCENT SIGN u'&' # 0x26 -> AMPERSAND u"'" # 0x27 -> APOSTROPHE u'(' # 0x28 -> LEFT PARENTHESIS u')' # 0x29 -> RIGHT PARENTHESIS u'*' # 0x2A -> ASTERISK u'+' # 0x2B -> PLUS SIGN u',' # 0x2C -> COMMA u'-' # 0x2D -> HYPHEN-MINUS u'.' # 0x2E -> FULL STOP u'/' # 0x2F -> SOLIDUS u'0' # 0x30 -> DIGIT ZERO u'1' # 0x31 -> DIGIT ONE u'2' # 0x32 -> DIGIT TWO u'3' # 0x33 -> DIGIT THREE u'4' # 0x34 -> DIGIT FOUR u'5' # 0x35 -> DIGIT FIVE u'6' # 0x36 -> DIGIT SIX u'7' # 0x37 -> DIGIT SEVEN u'8' # 0x38 -> DIGIT EIGHT u'9' # 0x39 -> DIGIT NINE u':' # 0x3A -> COLON u';' # 0x3B -> SEMICOLON u'<' # 0x3C -> LESS-THAN SIGN u'=' # 0x3D -> EQUALS SIGN u'>' # 0x3E -> GREATER-THAN SIGN u'?' # 0x3F -> QUESTION MARK u'@' # 0x40 -> COMMERCIAL AT u'A' # 0x41 -> LATIN CAPITAL LETTER A u'B' # 0x42 -> LATIN CAPITAL LETTER B u'C' # 0x43 -> LATIN CAPITAL LETTER C u'D' # 0x44 -> LATIN CAPITAL LETTER D u'E' # 0x45 -> LATIN CAPITAL LETTER E u'F' # 0x46 -> LATIN CAPITAL LETTER F u'G' # 0x47 -> LATIN CAPITAL LETTER G u'H' # 0x48 -> LATIN CAPITAL LETTER H u'I' # 0x49 -> LATIN CAPITAL LETTER I u'J' # 0x4A -> LATIN CAPITAL LETTER J u'K' # 0x4B -> LATIN CAPITAL LETTER K u'L' # 0x4C -> LATIN CAPITAL LETTER L u'M' # 0x4D -> LATIN CAPITAL LETTER M u'N' # 0x4E -> LATIN CAPITAL LETTER N u'O' # 0x4F -> LATIN CAPITAL LETTER O u'P' # 0x50 -> LATIN CAPITAL LETTER P u'Q' # 0x51 -> LATIN CAPITAL LETTER Q u'R' # 0x52 -> LATIN CAPITAL LETTER R u'S' # 0x53 -> LATIN CAPITAL LETTER S u'T' # 0x54 -> LATIN CAPITAL LETTER T u'U' # 0x55 -> LATIN CAPITAL LETTER U u'V' # 0x56 -> LATIN CAPITAL LETTER V u'W' # 0x57 -> LATIN CAPITAL LETTER W u'X' # 0x58 -> LATIN CAPITAL LETTER X u'Y' # 0x59 -> LATIN CAPITAL LETTER Y u'Z' # 0x5A -> LATIN CAPITAL LETTER Z u'[' 
# 0x5B -> LEFT SQUARE BRACKET u'\\' # 0x5C -> REVERSE SOLIDUS u']' # 0x5D -> RIGHT SQUARE BRACKET u'^' # 0x5E -> CIRCUMFLEX ACCENT u'_' # 0x5F -> LOW LINE u'`' # 0x60 -> GRAVE ACCENT u'a' # 0x61 -> LATIN SMALL LETTER A u'b' # 0x62 -> LATIN SMALL LETTER B u'c' # 0x63 -> LATIN SMALL LETTER C u'd' # 0x64 -> LATIN SMALL LETTER D u'e' # 0x65 -> LATIN SMALL LETTER E u'f' # 0x66 -> LATIN SMALL LETTER F u'g' # 0x67 -> LATIN SMALL LETTER G u'h' # 0x68 -> LATIN SMALL LETTER H u'i' # 0x69 -> LATIN SMALL LETTER I u'j' # 0x6A -> LATIN SMALL LETTER J u'k' # 0x6B -> LATIN SMALL LETTER K u'l' # 0x6C -> LATIN SMALL LETTER L u'm' # 0x6D -> LATIN SMALL LETTER M u'n' # 0x6E -> LATIN SMALL LETTER N u'o' # 0x6F -> LATIN SMALL LETTER O u'p' # 0x70 -> LATIN SMALL LETTER P u'q' # 0x71 -> LATIN SMALL LETTER Q u'r' # 0x72 -> LATIN SMALL LETTER R u's' # 0x73 -> LATIN SMALL LETTER S u't' # 0x74 -> LATIN SMALL LETTER T u'u' # 0x75 -> LATIN SMALL LETTER U u'v' # 0x76 -> LATIN SMALL LETTER V u'w' # 0x77 -> LATIN SMALL LETTER W u'x' # 0x78 -> LATIN SMALL LETTER X u'y' # 0x79 -> LATIN SMALL LETTER Y u'z' # 0x7A -> LATIN SMALL LETTER Z u'{' # 0x7B -> LEFT CURLY BRACKET u'|' # 0x7C -> VERTICAL LINE u'}' # 0x7D -> RIGHT CURLY BRACKET u'~' # 0x7E -> TILDE u'\x7f' # 0x7F -> DELETE u'\x80' # 0x80 -> <control> u'\x81' # 0x81 -> <control> u'\x82' # 0x82 -> <control> u'\x83' # 0x83 -> <control> u'\x84' # 0x84 -> <control> u'\x85' # 0x85 -> <control> u'\x86' # 0x86 -> <control> u'\x87' # 0x87 -> <control> u'\x88' # 0x88 -> <control> u'\x89' # 0x89 -> <control> u'\x8a' # 0x8A -> <control> u'\x8b' # 0x8B -> <control> u'\x8c' # 0x8C -> <control> u'\x8d' # 0x8D -> <control> u'\x8e' # 0x8E -> <control> u'\x8f' # 0x8F -> <control> u'\x90' # 0x90 -> <control> u'\x91' # 0x91 -> <control> u'\x92' # 0x92 -> <control> u'\x93' # 0x93 -> <control> u'\x94' # 0x94 -> <control> u'\x95' # 0x95 -> <control> u'\x96' # 0x96 -> <control> u'\x97' # 0x97 -> <control> u'\x98' # 0x98 -> <control> u'\x99' # 0x99 -> <control> u'\x9a' 
# 0x9A -> <control> u'\x9b' # 0x9B -> <control> u'\x9c' # 0x9C -> <control> u'\x9d' # 0x9D -> <control> u'\x9e' # 0x9E -> <control> u'\x9f' # 0x9F -> <control> u'\xa0' # 0xA0 -> NO-BREAK SPACE u'\u0126' # 0xA1 -> LATIN CAPITAL LETTER H WITH STROKE u'\u02d8' # 0xA2 -> BREVE u'\xa3' # 0xA3 -> POUND SIGN u'\xa4' # 0xA4 -> CURRENCY SIGN u'\ufffe' u'\u0124' # 0xA6 -> LATIN CAPITAL LETTER H WITH CIRCUMFLEX u'\xa7' # 0xA7 -> SECTION SIGN u'\xa8' # 0xA8 -> DIAERESIS u'\u0130' # 0xA9 -> LATIN CAPITAL LETTER I WITH DOT ABOVE u'\u015e' # 0xAA -> LATIN CAPITAL LETTER S WITH CEDILLA u'\u011e' # 0xAB -> LATIN CAPITAL LETTER G WITH BREVE u'\u0134' # 0xAC -> LATIN CAPITAL LETTER J WITH CIRCUMFLEX u'\xad' # 0xAD -> SOFT HYPHEN u'\ufffe' u'\u017b' # 0xAF -> LATIN CAPITAL LETTER Z WITH DOT ABOVE u'\xb0' # 0xB0 -> DEGREE SIGN u'\u0127' # 0xB1 -> LATIN SMALL LETTER H WITH STROKE u'\xb2' # 0xB2 -> SUPERSCRIPT TWO u'\xb3' # 0xB3 -> SUPERSCRIPT THREE u'\xb4' # 0xB4 -> ACUTE ACCENT u'\xb5' # 0xB5 -> MICRO SIGN u'\u0125' # 0xB6 -> LATIN SMALL LETTER H WITH CIRCUMFLEX u'\xb7' # 0xB7 -> MIDDLE DOT u'\xb8' # 0xB8 -> CEDILLA u'\u0131' # 0xB9 -> LATIN SMALL LETTER DOTLESS I u'\u015f' # 0xBA -> LATIN SMALL LETTER S WITH CEDILLA u'\u011f' # 0xBB -> LATIN SMALL LETTER G WITH BREVE u'\u0135' # 0xBC -> LATIN SMALL LETTER J WITH CIRCUMFLEX u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF u'\ufffe' u'\u017c' # 0xBF -> LATIN SMALL LETTER Z WITH DOT ABOVE u'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE u'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE u'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX u'\ufffe' u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS u'\u010a' # 0xC5 -> LATIN CAPITAL LETTER C WITH DOT ABOVE u'\u0108' # 0xC6 -> LATIN CAPITAL LETTER C WITH CIRCUMFLEX u'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA u'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE u'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX 
u'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS u'\xcc' # 0xCC -> LATIN CAPITAL LETTER I WITH GRAVE u'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE u'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX u'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS u'\ufffe' u'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE u'\xd2' # 0xD2 -> LATIN CAPITAL LETTER O WITH GRAVE u'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE u'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX u'\u0120' # 0xD5 -> LATIN CAPITAL LETTER G WITH DOT ABOVE u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS u'\xd7' # 0xD7 -> MULTIPLICATION SIGN u'\u011c' # 0xD8 -> LATIN CAPITAL LETTER G WITH CIRCUMFLEX u'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE u'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE u'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS u'\u016c' # 0xDD -> LATIN CAPITAL LETTER U WITH BREVE u'\u015c' # 0xDE -> LATIN CAPITAL LETTER S WITH CIRCUMFLEX u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S u'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE u'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE u'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX u'\ufffe' u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS u'\u010b' # 0xE5 -> LATIN SMALL LETTER C WITH DOT ABOVE u'\u0109' # 0xE6 -> LATIN SMALL LETTER C WITH CIRCUMFLEX u'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA u'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE u'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX u'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS u'\xec' # 0xEC -> LATIN SMALL LETTER I WITH GRAVE u'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE u'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX u'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS u'\ufffe' u'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE u'\xf2' # 0xF2 -> LATIN SMALL LETTER O WITH GRAVE 
u'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE u'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX u'\u0121' # 0xF5 -> LATIN SMALL LETTER G WITH DOT ABOVE u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS u'\xf7' # 0xF7 -> DIVISION SIGN u'\u011d' # 0xF8 -> LATIN SMALL LETTER G WITH CIRCUMFLEX u'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE u'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE u'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS u'\u016d' # 0xFD -> LATIN SMALL LETTER U WITH BREVE u'\u015d' # 0xFE -> LATIN SMALL LETTER S WITH CIRCUMFLEX u'\u02d9' # 0xFF -> DOT ABOVE ) ### Encoding table encoding_table=codecs.charmap_build(decoding_table)
gpl-2.0
beni55/viewfinder
backend/op/build_archive_op.py
13
27303
# Copyright 2013 Viewfinder Inc. All Rights Reserved.

"""Viewfinder BuildArchiveOperation.

This operation builds an archive of content for a given user and sends the user an email
with a link that can be used to retrieve a zip file of their content.  The zip contains all
source needed to invoke the web client and display the user's conversations.
The link is an S3 signed URL with a limited lifetime (see _S3_ZIP_FILE_ACCESS_EXPIRATION
and the GenerateUrl call in _BuildArchive for the actual expiry used).

Note: This operation runs as user 0 so that only one will be active at any given time.
This works as a throttling mechanism.
"""

__authors__ = ['mike@emailscrubbed.com (Mike Purtell)']

import calendar
import datetime
import json
import logging
import os
import random
import shutil
import string

from tornado import gen, httpclient, options, process
from viewfinder.backend.base import constants, util
from viewfinder.backend.base.environ import ServerEnvironment
from viewfinder.backend.base.exceptions import ServiceUnavailableError, NotFoundError
from viewfinder.backend.base.secrets import GetSecret
from viewfinder.backend.db import db_client
from viewfinder.backend.db.episode import Episode
from viewfinder.backend.db.followed import Followed
from viewfinder.backend.db.follower import Follower
from viewfinder.backend.db.photo import Photo
from viewfinder.backend.db.post import Post
from viewfinder.backend.db.user import User
from viewfinder.backend.db.user_photo import UserPhoto
from viewfinder.backend.db.user_post import UserPost
from viewfinder.backend.db.viewpoint import Viewpoint
from viewfinder.backend.op.viewfinder_op import ViewfinderOperation
from viewfinder.backend.resources.message.error_messages import SERVICE_UNAVAILABLE
from viewfinder.backend.resources.resources_mgr import ResourcesManager
from viewfinder.backend.services.email_mgr import EmailManager
from viewfinder.backend.storage.object_store import ObjectStore
from viewfinder.backend.www import base, www_util, photo_store

# Name of the folder inside the archive under which per-viewpoint folders are created.
CONVO_FOLDER_NAME = 'conversations'


def _CanViewViewpointContent(viewpoint, follower):
  """Returns true if the given follower is allowed to view the viewpoint's content:
  1. Follower must exist
  2. Viewpoint must not be removed by the follower
  """
  if viewpoint is None or follower is None or not follower.CanViewContent():
    return False
  return True


def _MakeViewpointMetadataDict(viewpoint, follower):
  """Returns a viewpoint metadata dictionary appropriate for a service query response.

  The response dictionary contains valid photo urls for the viewpoint's cover photo.
  Also adds a 'folder_name' key giving the archive-relative folder for this viewpoint.
  """
  def _GetNormalizedViewpointTitle(vp_dict):
    """Normalize the viewpoint title so that it can be used as a directory name in the archive.
    This will strip anything except for upper/lower case letters, digits and the space character.
    It will also truncate it to 100 characters to avoid file path limitations.
    """
    norm_title = ''
    if vp_dict['type'] == Viewpoint.DEFAULT:
      norm_title = 'Personal Collection'
    elif vp_dict.get('title') is not None:
      for c in vp_dict['title']:
        if c in BuildArchiveOperation._PATH_WHITELIST:
          norm_title += c
      # Avoid creating a folder path that's too long.
      norm_title = norm_title[:100]
    return norm_title

  vp_dict = viewpoint.MakeMetadataDict(follower)
  norm_vp_title = _GetNormalizedViewpointTitle(vp_dict)
  # Append the viewpoint id to the path to ensure uniqueness.
  vp_dict['folder_name'] = ('%s/%s %s' % (CONVO_FOLDER_NAME, norm_vp_title, vp_dict['viewpoint_id'])).strip()
  if 'cover_photo' in vp_dict:
    # Rewrite the cover photo url to point at the archived jpg instead of S3.
    vp_dict['cover_photo']['full_get_url'] = \
      os.path.join(vp_dict['folder_name'], vp_dict['cover_photo']['photo_id'] + '.f.jpg')
  return vp_dict


@gen.coroutine
def _QueryFollowedForArchive(client, user_id):
  """Queries all viewpoints followed by the requested user (excluding the default/personal viewpoint)."""
  followed = yield gen.Task(Followed.RangeQuery,
                            client,
                            hash_key=user_id,
                            range_desc=None,
                            limit=None,
                            col_names=['viewpoint_id'],
                            excl_start_key=None)

  # Get the viewpoint associated with each follower object.
  viewpoint_keys = [db_client.DBKey(f.viewpoint_id, None) for f in followed]
  follower_keys = [db_client.DBKey(user_id, f.viewpoint_id) for f in followed]
  viewpoints, followers = yield [gen.Task(Viewpoint.BatchQuery, client, viewpoint_keys, None, must_exist=False),
                                 gen.Task(Follower.BatchQuery, client, follower_keys, None, must_exist=False)]

  # Formulate the viewpoints list into a dict for JSON output.
  response = {'viewpoints': [_MakeViewpointMetadataDict(v, f)
                             for v, f in zip(viewpoints, followers)
                             if v is not None and not v.IsDefault()]}

  raise gen.Return(response)


@gen.coroutine
def _QueryViewpointsForArchive(client, user_id, viewpoint_ids,
                               get_followers=False, get_activities=False,
                               get_episodes=False, get_comments=False,
                               get_attributes=False):
  """Queries viewpoint metadata, as well as associated followers and episodes.

  Each get_* flag independently controls whether that section is queried and
  included in the per-viewpoint response dict.  Returns {'viewpoints': [...]}.
  """
  @gen.coroutine
  def _QueryFollowers():
    """Produces list of (followers, last_key) tuples, one for each viewpoint in the request."""
    tasks = []
    for vp_id in viewpoint_ids:
      if get_followers:
        tasks.append(Viewpoint.QueryFollowers(client, vp_id))
      else:
        tasks.append(util.GenConstant(None))

    follower_results = yield tasks
    raise gen.Return(follower_results)

  @gen.coroutine
  def _QueryActivities():
    """Produces list of (activities, last_key) tuples, one for each viewpoint in the request."""
    tasks = []
    for vp_id in viewpoint_ids:
      if get_activities:
        tasks.append(gen.Task(Viewpoint.QueryActivities, client, vp_id))
      else:
        tasks.append(util.GenConstant(None))

    activity_results = yield tasks
    raise gen.Return(activity_results)

  @gen.coroutine
  def _QueryEpisodes():
    """Produces list of (episodes, last_key) tuples, one for each viewpoint in the request."""
    tasks = []
    for vp_id in viewpoint_ids:
      if get_episodes:
        tasks.append(gen.Task(Viewpoint.QueryEpisodes, client, vp_id))
      else:
        tasks.append(util.GenConstant(None))

    episode_results = yield tasks
    raise gen.Return(episode_results)

  @gen.coroutine
  def _QueryComments():
    """Produces list of (comments, last_key) tuples, one for each viewpoint in the request."""
    tasks = []
    for vp_id in viewpoint_ids:
      if get_comments:
        tasks.append(gen.Task(Viewpoint.QueryComments, client, vp_id))
      else:
        tasks.append(util.GenConstant(None))

    comment_results = yield tasks
    raise gen.Return(comment_results)

  viewpoint_keys = [db_client.DBKey(vp_id, None) for vp_id in viewpoint_ids]
  follower_keys = [db_client.DBKey(user_id, vp_id) for vp_id in viewpoint_ids]
  # All six queries run concurrently; results come back positionally aligned
  # with viewpoint_ids.
  results = yield [gen.Task(Viewpoint.BatchQuery, client, viewpoint_keys, None, must_exist=False),
                   gen.Task(Follower.BatchQuery, client, follower_keys, None, must_exist=False),
                   _QueryFollowers(),
                   _QueryActivities(),
                   _QueryEpisodes(),
                   _QueryComments()]

  viewpoints, followers, follower_id_results, activity_results, episode_results, comment_results = results

  zip_list = zip(viewpoints, followers, follower_id_results, activity_results, episode_results, comment_results)

  response_vp_dicts = []
  for viewpoint, follower, follower_result, activity_result, episode_result, comment_result in zip_list:
    # Only return the viewpoint metadata if the caller is a follower of the viewpoint.
    if follower is not None and not follower.IsRemoved():
      response_vp_dict = {'viewpoint_id': viewpoint.viewpoint_id}

      if get_attributes:
        response_vp_dict.update(_MakeViewpointMetadataDict(viewpoint, follower))

      if get_followers:
        # NOTE: this rebinds the outer 'followers' list from the BatchQuery above;
        # harmless because the zip over it was already materialized in zip_list.
        followers, last_key = follower_result
        response_vp_dict['followers'] = [foll.MakeFriendMetadataDict() for foll in followers]
        if last_key is not None:
          response_vp_dict['follower_last_key'] = www_util.FormatIntegralLastKey(last_key)

      # Content sections are withheld when the follower may not view content
      # (e.g. viewpoint removed), even if they were requested.
      if _CanViewViewpointContent(viewpoint, follower):
        if get_activities:
          activities, last_key = activity_result
          response_vp_dict['activities'] = [act.MakeMetadataDict() for act in activities]
          if last_key is not None:
            response_vp_dict['activity_last_key'] = last_key

        if get_episodes:
          episodes, last_key = episode_result
          response_vp_dict['episodes'] = [ep._asdict() for ep in episodes]
          if last_key is not None:
            response_vp_dict['episode_last_key'] = last_key

        if get_comments:
          comments, last_key = comment_result
          response_vp_dict['comments'] = [co._asdict() for co in comments]
          if last_key is not None:
            response_vp_dict['comment_last_key'] = last_key

      response_vp_dicts.append(response_vp_dict)

  raise gen.Return({'viewpoints': response_vp_dicts})


@gen.coroutine
def _QueryUsersForArchive(client, requesting_user_id, user_ids):
  """Queries users by user id, filtering by friendships."""
  user_friend_list = yield gen.Task(User.QueryUsers, client, requesting_user_id, user_ids)

  user_dicts = yield [gen.Task(user.MakeUserMetadataDict, client, requesting_user_id, forward_friend, reverse_friend)
                      for user, forward_friend, reverse_friend in user_friend_list]

  response = {'users': user_dicts}

  raise gen.Return(response)


@gen.coroutine
def _QueryEpisodesForArchive(client, obj_store, user_id, episode_ids):
  """Queries posts from the specified episodes.

  Returns {'episodes': [...]} containing only episodes whose viewpoint the
  given user may view.  Photo urls point at S3 (rewritten to archive-relative
  paths later by _ProcessViewpoint).
  """
  def _MakePhotoDict(post, photo, user_post, user_photo):
    ph_dict = photo.MakeMetadataDict(post, user_post, user_photo)

    # Do not return access URLs for posts which have been removed.
    if not post.IsRemoved():
      ph_dict['full_get_url'] = photo_store.GeneratePhotoUrl(obj_store, ph_dict['photo_id'], '.f')

    return ph_dict

  # Get all requested episodes, along with posts for each episode.
  episode_keys = [db_client.DBKey(ep_id, None) for ep_id in episode_ids]

  post_tasks = []
  for ep_id in episode_ids:
    post_tasks.append(gen.Task(Post.RangeQuery, client, ep_id, None, None, None, excl_start_key=None))

  episodes, posts_list = yield [gen.Task(Episode.BatchQuery, client, episode_keys, None, must_exist=False),
                                gen.Multi(post_tasks)]

  # Get viewpoint records for all viewpoints containing episodes.
  viewpoint_keys = [db_client.DBKey(viewpoint_id, None)
                    for viewpoint_id in set(ep.viewpoint_id for ep in episodes if ep is not None)]

  # Get follower records for all viewpoints containing episodes, along with photo and user post objects.
  follower_keys = [db_client.DBKey(user_id, db_key.hash_key) for db_key in viewpoint_keys]

  all_posts = [post for posts in posts_list if posts is not None for post in posts]
  photo_keys = [db_client.DBKey(post.photo_id, None) for post in all_posts]
  user_post_keys = [db_client.DBKey(user_id, Post.ConstructPostId(post.episode_id, post.photo_id))
                    for post in all_posts]
  if user_id:
    # TODO(ben): we can probably skip this for the web view
    user_photo_task = gen.Task(UserPhoto.BatchQuery, client,
                               [db_client.DBKey(user_id, post.photo_id) for post in all_posts],
                               None, must_exist=False)
  else:
    user_photo_task = util.GenConstant(None)

  viewpoints, followers, photos, user_posts, user_photos = yield [
    gen.Task(Viewpoint.BatchQuery, client, viewpoint_keys, None, must_exist=False),
    gen.Task(Follower.BatchQuery, client, follower_keys, None, must_exist=False),
    gen.Task(Photo.BatchQuery, client, photo_keys, None),
    gen.Task(UserPost.BatchQuery, client, user_post_keys, None, must_exist=False),
    user_photo_task,
  ]

  # Get set of viewpoint ids to which the current user has access.
  viewable_viewpoint_ids = set(viewpoint.viewpoint_id for viewpoint, follower in zip(viewpoints, followers)
                               if _CanViewViewpointContent(viewpoint, follower))

  response_dict = {'episodes': []}

  # photos/user_posts/user_photos were batch-queried over all_posts in order,
  # so popping the head inside the nested loop keeps them in lockstep with
  # each post.  Do not reorder these pops.
  for ep_id, episode, posts in zip(episode_ids, episodes, posts_list):
    # Gather list of (post, photo, user_post) tuples for this episode.
    photo_info_list = []
    for post in posts:
      photo = photos.pop(0)
      user_post = user_posts.pop(0)
      user_photo = user_photos.pop(0) if user_photos is not None else None
      assert photo.photo_id == post.photo_id, (episode, post, photo)
      if user_photo:
        assert user_photo.photo_id == photo.photo_id
        assert user_photo.user_id == user_id
      photo_info_list.append((post, photo, user_post, user_photo))

    if episode is not None and episode.viewpoint_id in viewable_viewpoint_ids:
      response_ep_dict = {'episode_id': ep_id}

      response_ep_dict.update(episode._asdict())

      # NOTE(review): tuples are stored as (post, photo, ...) but unpacked here
      # as (photo, post, ...); the double swap cancels out so _MakePhotoDict
      # receives (post, photo, user_post, user_photo) as declared.  The local
      # names in this comprehension are misleading but the call is correct.
      response_ep_dict['photos'] = [_MakePhotoDict(photo, post, user_post, user_photo)
                                    for photo, post, user_post, user_photo in photo_info_list]
      if len(photo_info_list) > 0:
        response_ep_dict['last_key'] = photo_info_list[-1][0].photo_id

      response_dict['episodes'].append(response_ep_dict)

  raise gen.Return(response_dict)


class BuildArchiveOperation(ViewfinderOperation):
  """ Operation to:
  1) Clear temporary directory used to construct zip file content.
  2) Collect a given user's content into a temporary directory.
  3) Copy web client code into the same temporary directory.
  4) Zip the temp directory up.
  5) Put the zip file into S3.
  6) Generate a signed URL referencing the zip file in S3.
  7) Email the signed URL to the user.
  """
  # Characters allowed in archive folder names derived from viewpoint titles.
  _PATH_WHITELIST = ' ' + string.ascii_letters + string.digits

  _OFFBOARDING_DIR_NAME = 'offboarding'
  _ZIP_FILE_NAME = 'vf.zip'
  _CONTENT_DIR_NAME = 'viewfinder'

  # 3 days for user to retrieve their zip file.
  # NOTE(review): _BuildArchive passes expires_in=3 * this value (9 days) to
  # GenerateUrl, while the module docstring says 24 hours — confirm intended.
  _S3_ZIP_FILE_ACCESS_EXPIRATION = 3 * constants.SECONDS_PER_DAY

  def __init__(self, client, user_id, email):
    """Captures the target user/email and precomputes all temp/archive paths."""
    super(BuildArchiveOperation, self).__init__(client)
    self._user_id = user_id
    self._email = email
    self._notify_timestamp = self._op.timestamp
    self._photo_obj_store = ObjectStore.GetInstance(ObjectStore.PHOTO)
    self._user_zips_obj_store = ObjectStore.GetInstance(ObjectStore.USER_ZIPS)
    self._offboarding_assets_dir_path = ResourcesManager.Instance().GetOffboardingPath()
    self._temp_dir_path = os.path.join(ServerEnvironment.GetViewfinderTempDirPath(),
                                       BuildArchiveOperation._OFFBOARDING_DIR_NAME)
    self._zip_file_path = os.path.join(self._temp_dir_path, BuildArchiveOperation._ZIP_FILE_NAME)
    self._content_dir_path = os.path.join(self._temp_dir_path, BuildArchiveOperation._CONTENT_DIR_NAME)
    self._data_dir_path = os.path.join(self._content_dir_path, CONVO_FOLDER_NAME)

  @classmethod
  @gen.coroutine
  def Execute(cls, client, user_id, email):
    """Entry point called by the operation framework."""
    yield BuildArchiveOperation(client, user_id, email)._BuildArchive()

  def _ResetArchiveDir(self):
    """Get our temp directory into a known clean state."""
    # Make sure certain directories already exists.
    if not os.path.exists(ServerEnvironment.GetViewfinderTempDirPath()):
      os.mkdir(ServerEnvironment.GetViewfinderTempDirPath())
    if not os.path.exists(self._temp_dir_path):
      os.mkdir(self._temp_dir_path)

    # Blow away any previously existing content.
    if os.path.exists(self._content_dir_path):
      shutil.rmtree(self._content_dir_path)
    assert not os.path.exists(self._content_dir_path)

    # Blow away any previous zip file.
    if os.path.exists(self._zip_file_path):
      os.remove(self._zip_file_path)
    assert not os.path.exists(self._zip_file_path)

    # Recreate the content directory.
    os.mkdir(self._content_dir_path)
    os.mkdir(self._data_dir_path)

  @gen.coroutine
  def _ProcessPhoto(self, folder_path, photo_id, url):
    """Fetches one full-size photo from the given signed url and writes it to
    <folder_path>/<photo_id>.f.jpg.  A 404 is logged and skipped (photo may
    have been deleted); other HTTP errors surface as ServiceUnavailableError.
    """
    http_client = httpclient.AsyncHTTPClient()
    try:
      response = yield http_client.fetch(url,
                                         method='GET',
                                         validate_cert=options.options.validate_cert)
    except httpclient.HTTPError as e:
      if e.code == 404:
        logging.warning('Photo not found for users(%d) archive: %s' % (self._user_id, photo_id + '.f'))
        return
      else:
        logging.warning('Photo store S3 GET request error: [%s] %s' % (type(e).__name__, e.message))
        raise ServiceUnavailableError(SERVICE_UNAVAILABLE)

    if response.code != 200:
      raise AssertionError('failure on GET request for photo %s: %s' % (photo_id + '.f', response))

    # Write the image to the jpg file.
    # TODO(mike): Consider moving this IO to thread pool to avoid blocking on main thread.
    with open(os.path.join(folder_path, photo_id + '.f.jpg'), mode='wb') as f:
      f.write(response.body)

  @gen.coroutine
  def _VerifyPhotoExists(self, folder_path, photo_id):
    """The file for this photo should already exist."""
    assert os.path.exists(os.path.join(folder_path, photo_id + '.f.jpg'))

  @gen.coroutine
  def _ProcessViewpoint(self, vp_dict):
    """Writes one viewpoint's metadata.jsn and its full-size photos into the
    viewpoint's archive folder.  If the folder already exists (retry case),
    only verifies the expected files are present instead of re-downloading.
    """
    results_dict = yield _QueryViewpointsForArchive(self._client,
                                                    self._user_id,
                                                    [vp_dict['viewpoint_id']],
                                                    get_activities=True,
                                                    get_attributes=True,
                                                    get_comments=True,
                                                    get_episodes=True)

    viewpoint_folder_path = os.path.join(self._content_dir_path, vp_dict['folder_name'])

    # Now, grab the photos!
    episode_ids = [ep_dict['episode_id'] for ep_dict in results_dict['viewpoints'][0]['episodes']]
    episodes_dict = yield _QueryEpisodesForArchive(self._client, self._photo_obj_store, self._user_id, episode_ids)
    photos_to_fetch = dict()
    photos_to_merge = dict()

    # Gather photo URL's to request and replace URL's with archive paths.
    for ep_dict in episodes_dict['episodes']:
      for photo_dict in ep_dict['photos']:
        if photo_dict.get('full_get_url') is not None:
          photos_to_fetch[photo_dict['photo_id']] = photo_dict['full_get_url']
          photo_dict['full_get_url'] = os.path.join(vp_dict['folder_name'], photo_dict['photo_id'] + '.f.jpg')
      photos_to_merge[ep_dict['episode_id']] = ep_dict['photos']

    # Merge the photo metadata from query_episodes into the query_viewpoint response.
    for ep_dict in results_dict['viewpoints'][0]['episodes']:
      ep_dict['photos'] = photos_to_merge[ep_dict['episode_id']]

    if os.path.exists(viewpoint_folder_path):
      # Because the viewpoint folder already exists, let's just verify that everything else exists.
      assert os.path.exists(os.path.join(viewpoint_folder_path,'metadata.jsn'))
      for photo_id,url in photos_to_fetch.items():
        yield self._VerifyPhotoExists(viewpoint_folder_path, photo_id)
    else:
      # TODO(mike): Consider moving this IO to thread pool to avoid blocking on main thread.
      os.mkdir(viewpoint_folder_path)
      with open(os.path.join(viewpoint_folder_path,'metadata.jsn'), mode='wb') as f:
        # Prefix makes the file loadable as a JS assignment by the web client.
        f.write("viewfinder.jsonp_data =")
        json.dump(results_dict['viewpoints'][0], f)
      # Now, fetch all of the photos for this episode.
      # We'll do this serially since writing the files will be done with blocking-IO and we don't want to
      # overwhelm the server with the blocking-IO.
      for photo_id,url in photos_to_fetch.items():
        yield self._ProcessPhoto(viewpoint_folder_path, photo_id, url)

  @gen.coroutine
  def _BuildArchive(self):
    """Drive overall archive process as outlined in class header comment."""
    logging.info('building archive for user: %d' % self._user_id)

    # Prepare temporary destination folder (delete existing.  We'll always start from scratch).
    self._ResetArchiveDir()

    # Copy in base assets and javascript which will drive browser experience of content for users.
    proc = process.Subprocess(['cp', '-R',
                               os.path.join(self._offboarding_assets_dir_path, 'web_code'),
                               self._content_dir_path])
    code = yield gen.Task(proc.set_exit_callback)
    if code != 0:
      logging.error('Error copying offboarding assets: %d' % code)
      raise IOError()

    # Top level iteration is over viewpoints.
    #   For each viewpoint,
    #     iterate over activities and collect photos/episodes as needed.
    #     Build various 'tables' in json format:
    #       Activity, Comment, Episode, Photo, ...
    #
    viewpoints_dict = yield _QueryFollowedForArchive(self._client, self._user_id)
    viewpoint_ids = [viewpoint['viewpoint_id'] for viewpoint in viewpoints_dict['viewpoints']]
    followers_dict = yield _QueryViewpointsForArchive(self._client, self._user_id, viewpoint_ids, get_followers=True)
    for viewpoint, followers in zip(viewpoints_dict['viewpoints'], followers_dict['viewpoints']):
      viewpoint['followers'] = followers

    # Query user info for all users referenced by any of the viewpoints.
    users_to_query = list({f['follower_id'] for vp in followers_dict['viewpoints'] for f in vp['followers']})
    users_dict = yield _QueryUsersForArchive(self._client, self._user_id, users_to_query)

    # Python 2 idiom: merge the two response dicts ('viewpoints' + 'users').
    top_level_metadata_dict = dict(viewpoints_dict.items() + users_dict.items())

    # Write the top level metadata to the root of the archive.
    # TODO(mike): Consider moving this IO to thread pool to avoid blocking on main thread.
    with open(os.path.join(self._content_dir_path, 'viewpoints.jsn'), mode='wb') as f:
      # Need to set metadata as variable for JS code.
      f.write("viewfinder.jsonp_data =")
      json.dump(top_level_metadata_dict, f)

    # Now, process each viewpoint.
    for vp_dict in top_level_metadata_dict['viewpoints']:
      if Follower.REMOVED not in vp_dict['labels']:
        yield self._ProcessViewpoint(vp_dict)

    # Now, generate user specific view file: index.html.
    # This is the file that the user will open to launch the web client view of their data.
    recipient_user = yield gen.Task(User.Query, self._client, self._user_id, None)
    user_info = {'user_id' : recipient_user.user_id,
                 'name' : recipient_user.name,
                 'email' : recipient_user.email,
                 'phone' : recipient_user.phone,
                 'default_viewpoint_id' : recipient_user.private_vp_id
                 }
    # NOTE(review): ResourcesManager().Instance() constructs an instance and
    # then calls Instance() on it; elsewhere this file uses
    # ResourcesManager.Instance() — confirm which form is intended.
    view_local = ResourcesManager().Instance().GenerateTemplate('view_local.html',
                                                                user_info=user_info,
                                                                viewpoint_id=None)
    with open(os.path.join(self._content_dir_path, 'index.html'), mode='wb') as f:
      f.write(view_local)

    with open(os.path.join(self._content_dir_path, 'README.txt'), mode='wb') as f:
      f.write("This Viewfinder archive contains both a readable local HTML file " +
              "and backup folders including all photos included in those conversations.\n")

    # Exec zip command relative to the parent of content dir so that paths in zip are relative to that.
    proc = process.Subprocess(['zip', '-r', BuildArchiveOperation._ZIP_FILE_NAME, BuildArchiveOperation._CONTENT_DIR_NAME],
                              cwd=self._temp_dir_path)
    code = yield gen.Task(proc.set_exit_callback)
    if code != 0:
      logging.error('Error creating offboarding zip file: %d' % code)
      raise IOError()

    # Key is:  "{user_id}/{timestamp}_{random}/Viewfinder.zip"
    # timestamp is utc unix timestamp.
    s3_key = '%d/%d_%d/Viewfinder.zip' % (self._user_id,
                                          calendar.timegm(datetime.datetime.utcnow().utctimetuple()),
                                          int(random.random() * 1000000))

    if options.options.fileobjstore:
      # Next, upload this to S3 (really fileobjstore in this case).
      with open(self._zip_file_path, mode='rb') as f:
        s3_data = f.read()
      yield gen.Task(self._user_zips_obj_store.Put, s3_key, s3_data)
    else:
      # Running against AWS S3, so use awscli to upload zip file into S3.
      s3_path = 's3://' + ObjectStore.USER_ZIPS_BUCKET + '/' + s3_key

      # Use awscli to copy file into S3.
      proc = process.Subprocess(['aws', 's3', 'cp', self._zip_file_path, s3_path, '--region', 'us-east-1'],
                                stdout=process.Subprocess.STREAM,
                                stderr=process.Subprocess.STREAM,
                                env={'AWS_ACCESS_KEY_ID': GetSecret('aws_access_key_id'),
                                     'AWS_SECRET_ACCESS_KEY': GetSecret('aws_secret_access_key')})

      result, error, code = yield [
        gen.Task(proc.stdout.read_until_close),
        gen.Task(proc.stderr.read_until_close),
        gen.Task(proc.set_exit_callback)
      ]
      if code != 0:
        logging.error("%d = 'aws s3 cp %s %s': %s" % (code, self._zip_file_path, s3_path, error))
        if result and len(result) > 0:
          logging.info("aws result: %s" % result)
        raise IOError()

    # Generate signed URL to S3 for given user zip.  Only allow link to live for 3 days.
    # NOTE(review): expires_in is 3 * _S3_ZIP_FILE_ACCESS_EXPIRATION (i.e. 9 days),
    # which disagrees with this comment — confirm the intended lifetime.
    s3_url = self._user_zips_obj_store.GenerateUrl(s3_key,
                                                   cache_control='private,max-age=%d' %
                                                                 self._S3_ZIP_FILE_ACCESS_EXPIRATION,
                                                   expires_in=3 * self._S3_ZIP_FILE_ACCESS_EXPIRATION)
    logging.info('user zip uploaded: %s' % s3_url)

    # Finally, send the user an email with the link to download the zip files just uploaded to s3.
    email_args = {'from': EmailManager.Instance().GetInfoAddress(),
                  'to': self._email,
                  'subject': 'Your Viewfinder archive download is ready'}
    fmt_args = {'archive_url': s3_url,
                'hello_name': recipient_user.given_name or recipient_user.name}
    email_args['text'] = ResourcesManager.Instance().GenerateTemplate('user_zip.email', is_html=False, **fmt_args)
    yield gen.Task(EmailManager.Instance().SendEmail, description='user archive zip', **email_args)
apache-2.0
PersianWikipedia/pywikibot-core
tests/ui_options_tests.py
2
5713
# -*- coding: utf-8 -*-
"""Bot tests for input_choice options."""
#
# (C) Pywikibot team, 2015-2018
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, division, unicode_literals

from pywikibot import bot, bot_choice

from tests.aspects import unittest, TestCase

# Shorthand: Option.formatted renders a question plus its options for display.
message = bot.Option.formatted


class TestChoiceOptions(TestCase):

    """Test cases for input_choice Option."""

    # Regex fragments matched against expected exception messages below.
    TEST_RE = "'int' object has no attribute 'lower'"
    SEQ_EMPTY_RE = 'The sequence is empty.'
    # No network access is needed for these tests.
    net = False

    def test_formatted(self):
        """Test static method Option.formatted."""
        self.assertEqual(message('Question:', [], None), 'Question: ()')

    def test_output(self):
        """Test OutputOption."""
        # OutputOption is abstract: formatting it must raise NotImplementedError.
        option = bot_choice.OutputOption()
        with self.assertRaisesRegex(NotImplementedError, ''):
            message('?', [option], None)

    def test_standard(self):
        """Test StandardOption."""
        option = bot.StandardOption('Test', 'T')
        self.assertEqual(option.option, 'Test')
        # Shortcut is normalized to lower case.
        self.assertEqual(option.shortcut, 't')
        self.assertEqual(option.shortcut, option.result(None))
        self.assertEqual(option.format(None), '[t]est')
        self.assertEqual(option.format(), '[t]est')
        self.assertEqual(option.format(default=None), '[t]est')
        # When this option is the default, its shortcut is shown upper case.
        self.assertEqual(option.format('t'), '[T]est')
        self.assertEqual(option.format(default='t'), '[T]est')
        self.assertTrue(option.test('Test'))
        self.assertTrue(option.test('t'))
        self.assertTrue(option.test('T'))
        self.assertFalse(option.test('?'))
        self.assertIs(option.handled('T'), option)
        self.assertIsNone(option.handled('?'))
        self.assertEqual(message('?', [option], None), '? ([t]est)')
        self.assertEqual(message('?', [option]), '? ([t]est)')
        self.assertEqual(message('?', [option], 't'), '? ([T]est)')
        self.assertEqual(message('?', [option], default='t'), '? ([T]est)')

    def test_Nested(self):
        """Test NestedOption."""
        standard = bot.StandardOption('Test', 'T')
        option = bot.NestedOption('Next', 'x', 'Nested:', [standard])
        self.assertEqual(option.format('x'), 'Ne[X]t')
        self.assertEqual(option.format(), 'Ne[x]t')
        self.assertEqual(option._output, 'Nested: ([t]est)')
        self.assertEqual(message('?', [option], 't'), '? (Ne[x]t)')
        self.assertIs(standard.handled('t'), standard)
        self.assertIs(option.handled('x'), option)
        # A nested option delegates unknown shortcuts to its sub-options.
        self.assertIs(option.handled('t'), standard)

    def test_Integer(self):
        """Test IntegerOption."""
        option = bot.IntegerOption(maximum=5, prefix='r')
        self.assertEqual(option.format('2'), 'r<number> [1-5]')
        self.assertEqual(option.format('r2'), 'r<number> [1-[2]-5]')
        self.assertEqual(option.format(default='r2'), 'r<number> [1-[2]-5]')
        self.assertEqual(option.format(), 'r<number> [1-5]')
        self.assertEqual(message('?', [option], None), '? (r<number> [1-5])')
        self.assertEqual(message('?', [option], 'r3'), '? (r<number> [1-[3]-5])')
        # A non-string value cannot be tested (no .lower()).
        self.assertRaisesRegex(AttributeError, self.TEST_RE, option.test, 1)
        self.assertFalse(option.test('0'))
        self.assertFalse(option.test('r0'))
        self.assertFalse(option.test('r6'))
        self.assertIsNone(option.handled('r6'))
        for i in range(1, 6):
            self.assertTrue(option.test('r{}'.format(i)))
            self.assertEqual(option.handled('r{}'.format(i)), option)
            self.assertEqual(option.result('r{}'.format(i)), ('r', i))

    def test_List(self):
        """Test ListOption."""
        self.assertRaisesRegex(ValueError, self.SEQ_EMPTY_RE, bot.ListOption,
                               [])
        options = ['foo', 'bar']
        option = bot.ListOption(options)
        self.assertEqual(message('?', [option], None), '? (<number> [1-2])')
        self.assertEqual(message('?', [option]), '? (<number> [1-2])')
        self.assertEqual(message('?', [option], '2'), '? (<number> [1-[2]])')
        self.assertEqual(message('?', [option], default='2'),
                         '? (<number> [1-[2]])')
        # ListOption reflects live mutations of the underlying sequence.
        options.pop()
        self.assertEqual(message('?', [option], None), '? (<number> [1])')
        self.assertEqual(message('?', [option], '1'), '? (<number> [[1]])')
        options.pop()
        # An emptied sequence can no longer be formatted.
        self.assertRaisesRegex(ValueError, self.SEQ_EMPTY_RE, option.format,
                               None)
        self.assertRaisesRegex(ValueError, self.SEQ_EMPTY_RE, option.format)
        self.assertFalse(option.test('0'))
        options += ['baz', 'quux', 'norf']
        self.assertEqual(message('?', [option], None), '? (<number> [1-3])')
        for prefix in ('', 'r', 'st'):
            option = bot.ListOption(options, prefix=prefix)
            self.assertEqual(message('?', [option]),
                             '? ({}<number> [1-3])'.format(prefix))
            for i, elem in enumerate(options, 1):
                self.assertTrue(option.test('{}{}'.format(prefix, i)))
                self.assertIs(option.handled('{}{}'
                                             .format(prefix, i)), option)
                self.assertEqual(option.result('{}{}'.format(prefix, i)),
                                 (prefix, elem))
            # One past the end of the sequence is rejected.
            self.assertFalse(option.test('{}{}'
                                         .format(prefix, len(options) + 1)))
            self.assertIsNone(option.handled('{}{}'.format(
                prefix, len(options) + 1)))


if __name__ == '__main__':  # pragma: no cover
    try:
        unittest.main()
    except SystemExit:
        pass
mit
axinging/sky_engine
build/download_sdk_extras.py
34
2628
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Script to download sdk/extras packages on the bots from google storage.

The script expects arguments that specify zips file in the google storage
bucket named: <dir in SDK extras>_<package name>_<version>.zip. The file will
be extracted in the android_tools/sdk/extras directory on the test bots. This
script will not do anything for developers.

TODO(navabi): Move this script (crbug.com/459819).
"""

import json
import os
import shutil
import subprocess
import sys
import zipfile

SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
CHROME_SRC = os.path.abspath(
    os.path.join(SCRIPT_DIR, os.pardir))
sys.path.insert(0, os.path.join(SCRIPT_DIR, 'android'))
sys.path.insert(1, os.path.join(CHROME_SRC, 'tools'))

from pylib import constants
import find_depot_tools

DEPOT_PATH = find_depot_tools.add_depot_tools_to_path()
GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py')
SDK_EXTRAS_BUCKET = 'gs://chrome-sdk-extras'
SDK_EXTRAS_PATH = os.path.join(constants.ANDROID_SDK_ROOT, 'extras')
SDK_EXTRAS_JSON_FILE = os.path.join(os.path.dirname(__file__),
                                    'android_sdk_extras.json')


def clean_and_extract(dir_name, package_name, zip_file):
  """Removes any previously extracted copy of the package, then re-extracts
  the downloaded zip into the SDK extras directory.
  """
  local_dir = '%s/%s/%s' % (SDK_EXTRAS_PATH, dir_name, package_name)
  if os.path.exists(local_dir):
    shutil.rmtree(local_dir)
  local_zip = '%s/%s' % (SDK_EXTRAS_PATH, zip_file)
  with zipfile.ZipFile(local_zip) as z:
    z.extractall(path=SDK_EXTRAS_PATH)


def main():
  if not os.environ.get('CHROME_HEADLESS'):
    # This is not a buildbot checkout.
    return 0
  # Update the android_sdk_extras.json file to update downloaded packages.
  with open(SDK_EXTRAS_JSON_FILE) as json_file:
    packages = json.load(json_file)
  for package in packages:
    local_zip = '%s/%s' % (SDK_EXTRAS_PATH, package['zip'])
    if not os.path.exists(local_zip):
      package_zip = '%s/%s' % (SDK_EXTRAS_BUCKET, package['zip'])
      try:
        subprocess.check_call(['python', GSUTIL_PATH, '--force-version', '4.7',
                               'cp', package_zip, local_zip])
      except subprocess.CalledProcessError:
        # Deliberately best-effort: a failed download is only a warning and
        # the script still exits 0 so the bot run is not failed here.
        print ('WARNING: Failed to download SDK packages. If this bot compiles '
               'for Android, it may have errors.')
        return 0
    # Always clean dir and extract zip to ensure correct contents.
    clean_and_extract(package['dir_name'], package['package'], package['zip'])


if __name__ == '__main__':
  sys.exit(main())
bsd-3-clause
sammerry/ansible
lib/ansible/plugins/action/raw.py
106
1665
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. from __future__ import (absolute_import, division, print_function) __metaclass__ = type from ansible.plugins.action import ActionBase import re class ActionModule(ActionBase): TRANSFERS_FILES = False def run(self, tmp=None, task_vars=dict()): if self._play_context.check_mode: # in --check mode, always skip this module execution return dict(skipped=True) executable = self._task.args.get('executable') result = self._low_level_execute_command(self._task.args.get('_raw_params'), tmp=tmp, executable=executable) # for some modules (script, raw), the sudo success key # may leak into the stdout due to the way the sudo/su # command is constructed, so we filter that out here if result.get('stdout','').strip().startswith('BECOME-SUCCESS-'): result['stdout'] = re.sub(r'^((\r)?\n)?BECOME-SUCCESS.*(\r)?\n', '', result['stdout']) return result
gpl-3.0
nexedi/dream
dream/simulation/reformat.py
1
4677
# ===========================================================================
# Copyright 2013  Nexedi SA
#
# This file is part of DREAM.
#
# DREAM is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DREAM is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DREAM.  If not, see <http://www.gnu.org/licenses/>.
# ===========================================================================
# NOTE: Python 2 only script (uses print statements); rewrites the JSON file
# given as argv[1] in place.
import json
import sys


def get_name(node):
  # pydot quotes node names; strip the surrounding quotes if present.
  name = node.get_name()
  if name.startswith('"') and name.endswith('"'):
    name = name[1:-1]
  return name


def positionGraph(g):
  """Lay out the graph with graphviz (via pydot) and return a dict of
  normalized {node: {top, left}} positions in [0, 1].
  """
  import pydot
  graph = pydot.Dot()
  for node in g['nodes']:
    graph.add_node(pydot.Node(node))
  for edge, (source, destination, data) in g['edges'].items():
    graph.add_edge(pydot.Edge(source, destination))
  new_graph = pydot.graph_from_dot_data(graph.create_dot())
  # calulate the ratio from the size of the bounding box
  ratio = new_graph.get_bb()
  origin_left, origin_top, max_left, max_top = [float(p) for p in
    new_graph.get_bb()[1:-1].split(',')]
  ratio_top = max_top - origin_top
  ratio_left = max_left - origin_left
  preference_dict = dict()
  for node in new_graph.get_nodes():
    # skip technical nodes
    if node.get_name() in ('graph', 'node', 'edge'):
      continue
    left, top = [float(p) for p in node.get_pos()[1:-1].split(",")]
    preference_dict[get_name(node)] = dict(
      top=1-(top/ratio_top),
      left=1-(left/ratio_left),)
  return preference_dict


def format(m):
  # NOTE(review): shadows the builtin 'format'; coerces every 'mean' found in
  # node / wip-route time dicts to float and normalizes entity class names.
  for node in m['nodes'].values():
    if 'setupTime' in node:
      setupTime = node['setupTime']
      if 'mean' in setupTime:
        setupTime['mean'] = float(setupTime['mean'])
    if 'loadTime' in node:
      loadTime = node['loadTime']
      if 'mean' in loadTime:
        loadTime['mean'] = float(loadTime['mean'])
    if 'processingTime' in node:
      processingTime = node['processingTime']
      if 'mean' in processingTime:
        processingTime['mean'] = float(processingTime['mean'])
    if 'entity' in node:
      entity = node['entity']
      if not entity.startswith('Dream.'):
        node['entity'] = 'Dream.%s' % entity
    if 'wip' in node:
      for job in node['wip']:
        if 'route' in job:
          for r in job['route']:
            print r
            r.pop("stepNumber", None)
            if 'processingTime' in r:
              processingTime = r['processingTime']
              if 'mean' in processingTime:
                processingTime['mean'] = float(processingTime['mean'])
            if 'loadTime' in r:
              loadTime = r['loadTime']
              if 'mean' in loadTime:
                loadTime['mean'] = float(loadTime['mean'])
            if 'setupTime' in r:
              setupTime = r['setupTime']
              if 'mean' in setupTime:
                setupTime['mean'] = float(setupTime['mean'])
        for component in job.get('componentsList', []):
          for r in component['route']:
            r.pop("stepNumber", None)
  return m


def migrate_to_new_format(graph):
  """Convert the legacy {nodes, edges, preference} layout into the newer
  {'graph': {'node': ..., 'edge': ...}} structure.
  """
  new_graph = {'graph': {'edge': {}}}
  for node_id, node_data in graph['nodes'].items():
    # some cleanups
    if node_data.get('capacity'):
      node_data['capacity'] = float(node_data['capacity'])
    node_data.pop('isDummy', None)
    node_data.pop('id', None)
    node_data.pop('element_id', None)
    coordinate = graph['preference']['coordinates'].get(node_id)
    node_data['coordinate'] = coordinate
    print node_data['_class']
  # TODO: discuss the new processing time data structure !
  new_graph['graph']['node'] = graph['nodes']
  for edge_id, (source, destination, data) in graph['edges'].items():
    data['_class'] = 'Dream.Edge'
    data['source'] = source
    data['destination'] = destination
    new_graph['graph']['edge'][edge_id] = data
  return new_graph


# Script body: strip the 'TraceFile' element from every result list and
# rewrite the input file in place (migration helpers above are disabled).
with open(sys.argv[1]) as infile:
  m = json.load(infile)

#m = migrate_to_new_format(m)
#m.update(preferences=positionGraph(m))
for result in m['result']['result_list']:
  result['elementList'] = [e for e in result['elementList'] if e['id'] != 'TraceFile']

with open(sys.argv[1], "w") as outfile:
  json.dump(m, outfile, sort_keys=True, indent=4, separators=(',', ': '))
gpl-3.0
levilucio/SyVOLT
GM2AUTOSAR_MM/merge_inter_layer_rules/Himesis/HMoveOneTraceLinkDiffRulesLHS.py
2
15516
from core.himesis import Himesis, HimesisPreConditionPatternLHS import cPickle as pickle from uuid import UUID class HMoveOneTraceLinkDiffRulesLHS(HimesisPreConditionPatternLHS): def __init__(self): """ Creates the himesis graph representing the AToM3 model HMoveOneTraceLinkDiffRulesLHS. """ # Flag this instance as compiled now self.is_compiled = True super(HMoveOneTraceLinkDiffRulesLHS, self).__init__(name='HMoveOneTraceLinkDiffRulesLHS', num_nodes=4, edges=[]) # Add the edges self.add_edges([(0, 1), (1, 3)]) # Set the graph attributes self["mm__"] = pickle.loads("""(lp1 S'MT_pre__GM2AUTOSAR_MM' p2 aS'MoTifRule' p3 a.""") self["MT_constraint__"] = """#if len([i for i in graph.neighbors(PreNode('9').index) if graph.vs[i]['mm__'] == 'backward_link']) == 0: # return True #return False return True """ self["name"] = """""" self["GUID__"] = UUID('85c38c61-ac11-4277-8746-7fefff84aba4') # Set the node attributes self.vs[0]["MT_subtypeMatching__"] = True self.vs[0]["MT_pre__classtype"] = """ #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. 
#=============================================================================== return True """ self.vs[0]["MT_label__"] = """9""" self.vs[0]["MT_subtypes__"] = pickle.loads("""(lp1 S'MT_pre__EcuInstance' p2 aS'MT_pre__System' p3 aS'MT_pre__SystemMapping' p4 aS'MT_pre__ComponentPrototype' p5 aS'MT_pre__SwCompToEcuMapping_component' p6 aS'MT_pre__CompositionType' p7 aS'MT_pre__PPortPrototype' p8 aS'MT_pre__SwcToEcuMapping' p9 aS'MT_pre__SoftwareComposition' p10 aS'MT_pre__RPortPrototype' p11 aS'MT_pre__PortPrototype' p12 aS'MT_pre__ComponentType' p13 a.""") self.vs[0]["MT_dirty__"] = False self.vs[0]["mm__"] = """MT_pre__MetaModelElement_T""" self.vs[0]["MT_pre__cardinality"] = """ #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True """ self.vs[0]["MT_pre__name"] = """ #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. 
#=============================================================================== return True """ self.vs[0]["GUID__"] = UUID('327ee70a-3b6f-4f54-9f42-399f82ca4e81') self.vs[1]["MT_subtypeMatching__"] = False self.vs[1]["MT_label__"] = """10""" self.vs[1]["MT_subtypes__"] = pickle.loads("""(lp1 .""") self.vs[1]["MT_dirty__"] = False self.vs[1]["mm__"] = """MT_pre__trace_link""" self.vs[1]["GUID__"] = UUID('07192f99-68ae-4da4-bc7b-f56f6c969da3') self.vs[2]["MT_pivotOut__"] = """element1""" self.vs[2]["MT_subtypeMatching__"] = True self.vs[2]["MT_pre__classtype"] = """ #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True """ self.vs[2]["MT_pivotIn__"] = """element1""" self.vs[2]["MT_label__"] = """7""" self.vs[2]["MT_subtypes__"] = pickle.loads("""(lp1 S'MT_pre__VirtualDevice' p2 aS'MT_pre__Distributable' p3 aS'MT_pre__Signal' p4 aS'MT_pre__ExecFrame' p5 aS'MT_pre__ECU' p6 a.""") self.vs[2]["MT_dirty__"] = False self.vs[2]["mm__"] = """MT_pre__MetaModelElement_S""" self.vs[2]["MT_pre__cardinality"] = """ #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. 
#=============================================================================== return True """ self.vs[2]["MT_pre__name"] = """ #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True """ self.vs[2]["GUID__"] = UUID('e9dfc43d-745a-40f7-bd25-b87773154dcb') self.vs[3]["MT_pivotOut__"] = """element2""" self.vs[3]["MT_subtypeMatching__"] = True self.vs[3]["MT_pre__classtype"] = """ #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. 
#=============================================================================== return True """ self.vs[3]["MT_pivotIn__"] = """element2""" self.vs[3]["MT_label__"] = """8""" self.vs[3]["MT_subtypes__"] = pickle.loads("""(lp1 S'MT_pre__VirtualDevice' p2 aS'MT_pre__Distributable' p3 aS'MT_pre__Signal' p4 aS'MT_pre__ExecFrame' p5 aS'MT_pre__ECU' p6 a.""") self.vs[3]["MT_dirty__"] = False self.vs[3]["mm__"] = """MT_pre__MetaModelElement_S""" self.vs[3]["MT_pre__cardinality"] = """ #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True """ self.vs[3]["MT_pre__name"] = """ #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True """ self.vs[3]["GUID__"] = UUID('86af9b3d-cac8-46b4-92ab-5cf9f5196b73') def eval_classtype9(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. 
# You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_cardinality9(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_name9(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_classtype7(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. 
# If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_cardinality7(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_name7(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_classtype8(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. 
#=============================================================================== return True def eval_cardinality8(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_name8(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def constraint(self, PreNode, graph): """ Executable constraint code. @param PreNode: Function taking an integer as parameter and returns the node corresponding to that label. """ #if len([i for i in graph.neighbors(PreNode('9').index) if graph.vs[i]['mm__'] == 'backward_link']) == 0: # return True #return False return True
mit
daineseh/linkit-smart-7688-duo-example
control_relay_and_sensor_and_servo_via_Flask/control_relay_and_sensor_and_servo_via_Flask.py
1
1915
#!/usr/bin/env python from flask import Flask, request, abort import json import os import serial s = None #f = os.popen('ifconfig br-lan | grep "inet\ addr" | cut -d: -f2 | cut -d" " -f1') # AP model f = os.popen('ifconfig apcli0 | grep "inet\ addr" | cut -d: -f2 | cut -d" " -f1') # Station model inet_addr = f.read() app = Flask(__name__) enum_values = ['0', '1'] def setup(): global s # open serial COM port to /dev/ttyS0, which maps to UART0(D0/D1) # the baudrate is set to 57600 and should be the same as the one # specified in the Arduino sketch uploaded to ATMega32U4. s = serial.Serial("/dev/ttyS0", 57600) @app.route("/api/switchServo") def servo(): status = request.args.get("status") if not status: abort(404) status = status.encode("ascii") if status not in enum_values: abort(404) if (status == '1'): s.write("servo1") elif (status == '0'): s.write("servo0") return json.dumps({"status": 200, "message": s.readline().strip()}) @app.route("/api/switchRelay") def relay(): status = request.args.get("status") if not status: abort(404) status = status.encode("ascii") if status not in enum_values: abort(404) if (status == '1'): s.write("relay1") elif (status == '0'): s.write("relay0") return json.dumps({"status": 200, "message": s.readline().strip()}) @app.route("/api/getSensor") def sensor(): status = request.args.get("status") if not status: abort(404) status = status.encode("ascii") if status not in enum_values: abort(404) s.write("sensor") return json.dumps({"status": 200, "message": s.readline().strip()}) @app.route('/') def main(): return json.dumps({"status":200, "message": "OK"}) if __name__ == '__main__': setup() app.debug = True app.run(host = inet_addr, port=5000)
mit