repo_name
stringlengths
5
100
path
stringlengths
4
375
copies
stringclasses
991 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
spinicist/ITK
Wrapping/Generators/Python/Tests/wrappingCoverage.py
8
2909
#========================================================================== # # Copyright Insight Software Consortium # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0.txt # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # #==========================================================================*/ from __future__ import print_function import sys import re import itk import os from optparse import OptionParser parser = OptionParser(usage='wrappingCoverage.py paths') parser.add_option( "-b", "--base", dest="base", default="Filter", help="Base string used to search for the classes (default: Filter).") parser.add_option( "-e", "--exclude", dest="exclude", default=None, help="Path of a file with one class to exclude per line (default: None).") parser.add_option( "-E", "--no-error", action="store_true", dest="noError", help="Don't generate an error code if all the classes are not wrapped.") opts, args = parser.parse_args() # declares classes which will not be wrapped excluded = set([]) if opts.exclude: with open(opts.exclude, 'r') as fp: to_exclude = [c.strip() for c in fp.readlines()] excluded.update(set(to_exclude)) # get classes from sources headers = [] for d in args: headers += sum([f for p, d, f in os.walk(d) if "Deprecated" not in p and "TestKernel" not in p], []) classes = set([f[len('itk'):-len('.h')] for f in headers if f.startswith("itk") and not f.startswith("itkv3") and f.endswith(opts.base + ".h")]) - excluded # get filter from wrapper files # remove classes which are not in the toolkit (external projects, # 
PyImageFilter, ...) wrapped = set([a for a in dir(itk) if a.endswith(opts.base)] ).intersection(classes) nonWrapped = classes - wrapped # print non wrapped classes without much text to stdout, so they can be # easily reused for f in sorted(nonWrapped): print(f) # and print stats in stderr to avoid poluting the list above print("", file=sys.stderr) print('%i %s' % (len(classes), opts.base), file=sys.stderr) print('%i wrapped %s' % (len(wrapped), opts.base), file=sys.stderr) print('%i non wrapped %s' % (len(nonWrapped), opts.base), file=sys.stderr) covered = len(wrapped) / float(len(classes)) print('%f%% covered' % (covered * 100), file=sys.stderr) print("", file=sys.stderr) if not opts.noError: sys.exit(len(nonWrapped))
apache-2.0
solarjoe/numpy
tools/npy_tempita/compat3.py
42
1094
from __future__ import absolute_import, division, print_function import sys __all__ = ['PY3', 'b', 'basestring_', 'bytes', 'next', 'is_unicode', 'iteritems'] PY3 = True if sys.version_info[0] == 3 else False if sys.version_info[0] < 3: def next(obj): return obj.next() def iteritems(d, **kw): return d.iteritems(**kw) b = bytes = str basestring_ = basestring else: def b(s): if isinstance(s, str): return s.encode('latin1') return bytes(s) def iteritems(d, **kw): return iter(d.items(**kw)) next = next basestring_ = (bytes, str) bytes = bytes text = str def is_unicode(obj): if sys.version_info[0] < 3: return isinstance(obj, unicode) else: return isinstance(obj, str) def coerce_text(v): if not isinstance(v, basestring_): if sys.version_info[0] < 3: attr = '__unicode__' else: attr = '__str__' if hasattr(v, attr): return unicode(v) else: return bytes(v) return v
bsd-3-clause
wulczer/ansible
lib/ansible/inventory/__init__.py
13
25475
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ############################################# import fnmatch import os import sys import re import subprocess import ansible.constants as C from ansible.inventory.ini import InventoryParser from ansible.inventory.script import InventoryScript from ansible.inventory.dir import InventoryDirectory from ansible.inventory.group import Group from ansible.inventory.host import Host from ansible import errors from ansible import utils class Inventory(object): """ Host inventory for ansible. """ __slots__ = [ 'host_list', 'groups', '_restriction', '_also_restriction', '_subset', 'parser', '_vars_per_host', '_vars_per_group', '_hosts_cache', '_groups_list', '_pattern_cache', '_vault_password', '_vars_plugins', '_playbook_basedir'] def __init__(self, host_list=C.DEFAULT_HOST_LIST, vault_password=None): # the host file file, or script path, or list of hosts # if a list, inventory data will NOT be loaded self.host_list = host_list self._vault_password=vault_password # caching to avoid repeated calculations, particularly with # external inventory scripts. 
self._vars_per_host = {} self._vars_per_group = {} self._hosts_cache = {} self._groups_list = {} self._pattern_cache = {} # to be set by calling set_playbook_basedir by playbook code self._playbook_basedir = None # the inventory object holds a list of groups self.groups = [] # a list of host(names) to contain current inquiries to self._restriction = None self._also_restriction = None self._subset = None if isinstance(host_list, basestring): if "," in host_list: host_list = host_list.split(",") host_list = [ h for h in host_list if h and h.strip() ] if host_list is None: self.parser = None elif isinstance(host_list, list): self.parser = None all = Group('all') self.groups = [ all ] ipv6_re = re.compile('\[([a-f:A-F0-9]*[%[0-z]+]?)\](?::(\d+))?') for x in host_list: m = ipv6_re.match(x) if m: all.add_host(Host(m.groups()[0], m.groups()[1])) else: if ":" in x: tokens = x.rsplit(":", 1) # if there is ':' in the address, then this is an ipv6 if ':' in tokens[0]: all.add_host(Host(x)) else: all.add_host(Host(tokens[0], tokens[1])) else: all.add_host(Host(x)) elif os.path.exists(host_list): if os.path.isdir(host_list): # Ensure basedir is inside the directory self.host_list = os.path.join(self.host_list, "") self.parser = InventoryDirectory(filename=host_list) self.groups = self.parser.groups.values() else: # check to see if the specified file starts with a # shebang (#!/), so if an error is raised by the parser # class we can show a more apropos error shebang_present = False try: inv_file = open(host_list) first_line = inv_file.readlines()[0] inv_file.close() if first_line.startswith('#!'): shebang_present = True except: pass if utils.is_executable(host_list): try: self.parser = InventoryScript(filename=host_list) self.groups = self.parser.groups.values() except: if not shebang_present: raise errors.AnsibleError("The file %s is marked as executable, but failed to execute correctly. 
" % host_list + \ "If this is not supposed to be an executable script, correct this with `chmod -x %s`." % host_list) else: raise else: try: self.parser = InventoryParser(filename=host_list) self.groups = self.parser.groups.values() except: if shebang_present: raise errors.AnsibleError("The file %s looks like it should be an executable inventory script, but is not marked executable. " % host_list + \ "Perhaps you want to correct this with `chmod +x %s`?" % host_list) else: raise utils.plugins.vars_loader.add_directory(self.basedir(), with_subdir=True) else: raise errors.AnsibleError("Unable to find an inventory file, specify one with -i ?") self._vars_plugins = [ x for x in utils.plugins.vars_loader.all(self) ] # get group vars from group_vars/ files and vars plugins for group in self.groups: group.vars = utils.combine_vars(group.vars, self.get_group_variables(group.name, vault_password=self._vault_password)) # get host vars from host_vars/ files and vars plugins for host in self.get_hosts(): host.vars = utils.combine_vars(host.vars, self.get_host_variables(host.name, vault_password=self._vault_password)) def _match(self, str, pattern_str): try: if pattern_str.startswith('~'): return re.search(pattern_str[1:], str) else: return fnmatch.fnmatch(str, pattern_str) except Exception, e: raise errors.AnsibleError('invalid host pattern: %s' % pattern_str) def _match_list(self, items, item_attr, pattern_str): results = [] try: if not pattern_str.startswith('~'): pattern = re.compile(fnmatch.translate(pattern_str)) else: pattern = re.compile(pattern_str[1:]) except Exception, e: raise errors.AnsibleError('invalid host pattern: %s' % pattern_str) for item in items: if pattern.match(getattr(item, item_attr)): results.append(item) return results def get_hosts(self, pattern="all"): """ find all host names matching a pattern string, taking into account any inventory restrictions or applied subsets. 
""" # process patterns if isinstance(pattern, list): pattern = ';'.join(pattern) patterns = pattern.replace(";",":").split(":") hosts = self._get_hosts(patterns) # exclude hosts not in a subset, if defined if self._subset: subset = self._get_hosts(self._subset) hosts = [ h for h in hosts if h in subset ] # exclude hosts mentioned in any restriction (ex: failed hosts) if self._restriction is not None: hosts = [ h for h in hosts if h.name in self._restriction ] if self._also_restriction is not None: hosts = [ h for h in hosts if h.name in self._also_restriction ] return hosts def _get_hosts(self, patterns): """ finds hosts that match a list of patterns. Handles negative matches as well as intersection matches. """ # Host specifiers should be sorted to ensure consistent behavior pattern_regular = [] pattern_intersection = [] pattern_exclude = [] for p in patterns: if p.startswith("!"): pattern_exclude.append(p) elif p.startswith("&"): pattern_intersection.append(p) elif p: pattern_regular.append(p) # if no regular pattern was given, hence only exclude and/or intersection # make that magically work if pattern_regular == []: pattern_regular = ['all'] # when applying the host selectors, run those without the "&" or "!" # first, then the &s, then the !s. patterns = pattern_regular + pattern_intersection + pattern_exclude hosts = [] for p in patterns: # avoid resolving a pattern that is a plain host if p in self._hosts_cache: hosts.append(self.get_host(p)) else: that = self.__get_hosts(p) if p.startswith("!"): hosts = [ h for h in hosts if h not in that ] elif p.startswith("&"): hosts = [ h for h in hosts if h in that ] else: to_append = [ h for h in that if h.name not in [ y.name for y in hosts ] ] hosts.extend(to_append) return hosts def __get_hosts(self, pattern): """ finds hosts that positively match a particular pattern. Does not take into account negative matches. 
""" if pattern in self._pattern_cache: return self._pattern_cache[pattern] (name, enumeration_details) = self._enumeration_info(pattern) hpat = self._hosts_in_unenumerated_pattern(name) result = self._apply_ranges(pattern, hpat) self._pattern_cache[pattern] = result return result def _enumeration_info(self, pattern): """ returns (pattern, limits) taking a regular pattern and finding out which parts of it correspond to start/stop offsets. limits is a tuple of (start, stop) or None """ # Do not parse regexes for enumeration info if pattern.startswith('~'): return (pattern, None) # The regex used to match on the range, which can be [x] or [x-y]. pattern_re = re.compile("^(.*)\[([-]?[0-9]+)(?:(?:-)([0-9]+))?\](.*)$") m = pattern_re.match(pattern) if m: (target, first, last, rest) = m.groups() first = int(first) if last: if first < 0: raise errors.AnsibleError("invalid range: negative indices cannot be used as the first item in a range") last = int(last) else: last = first return (target, (first, last)) else: return (pattern, None) def _apply_ranges(self, pat, hosts): """ given a pattern like foo, that matches hosts, return all of hosts given a pattern like foo[0:5], where foo matches hosts, return the first 6 hosts """ # If there are no hosts to select from, just return the # empty set. This prevents trying to do selections on an empty set. 
# issue#6258 if not hosts: return hosts (loose_pattern, limits) = self._enumeration_info(pat) if not limits: return hosts (left, right) = limits if left == '': left = 0 if right == '': right = 0 left=int(left) right=int(right) try: if left != right: return hosts[left:right] else: return [ hosts[left] ] except IndexError: raise errors.AnsibleError("no hosts matching the pattern '%s' were found" % pat) def _create_implicit_localhost(self, pattern): new_host = Host(pattern) new_host.set_variable("ansible_python_interpreter", sys.executable) new_host.set_variable("ansible_connection", "local") ungrouped = self.get_group("ungrouped") if ungrouped is None: self.add_group(Group('ungrouped')) ungrouped = self.get_group('ungrouped') self.get_group('all').add_child_group(ungrouped) ungrouped.add_host(new_host) return new_host def _hosts_in_unenumerated_pattern(self, pattern): """ Get all host names matching the pattern """ results = [] hosts = [] hostnames = set() # ignore any negative checks here, this is handled elsewhere pattern = pattern.replace("!","").replace("&", "") def __append_host_to_results(host): if host not in results and host.name not in hostnames: hostnames.add(host.name) results.append(host) groups = self.get_groups() for group in groups: if pattern == 'all': for host in group.get_hosts(): __append_host_to_results(host) else: if self._match(group.name, pattern): for host in group.get_hosts(): __append_host_to_results(host) else: matching_hosts = self._match_list(group.get_hosts(), 'name', pattern) for host in matching_hosts: __append_host_to_results(host) if pattern in ["localhost", "127.0.0.1"] and len(results) == 0: new_host = self._create_implicit_localhost(pattern) results.append(new_host) return results def clear_pattern_cache(self): ''' called exclusively by the add_host plugin to allow patterns to be recalculated ''' self._pattern_cache = {} def groups_for_host(self, host): if host in self._hosts_cache: return self._hosts_cache[host].get_groups() 
else: return [] def groups_list(self): if not self._groups_list: groups = {} for g in self.groups: groups[g.name] = [h.name for h in g.get_hosts()] ancestors = g.get_ancestors() for a in ancestors: if a.name not in groups: groups[a.name] = [h.name for h in a.get_hosts()] self._groups_list = groups return self._groups_list def get_groups(self): return self.groups def get_host(self, hostname): if hostname not in self._hosts_cache: self._hosts_cache[hostname] = self._get_host(hostname) return self._hosts_cache[hostname] def _get_host(self, hostname): if hostname in ['localhost','127.0.0.1']: for host in self.get_group('all').get_hosts(): if host.name in ['localhost', '127.0.0.1']: return host return self._create_implicit_localhost(hostname) else: for group in self.groups: for host in group.get_hosts(): if hostname == host.name: return host return None def get_group(self, groupname): for group in self.groups: if group.name == groupname: return group return None def get_group_variables(self, groupname, update_cached=False, vault_password=None): if groupname not in self._vars_per_group or update_cached: self._vars_per_group[groupname] = self._get_group_variables(groupname, vault_password=vault_password) return self._vars_per_group[groupname] def _get_group_variables(self, groupname, vault_password=None): group = self.get_group(groupname) if group is None: raise Exception("group not found: %s" % groupname) vars = {} # plugin.get_group_vars retrieves just vars for specific group vars_results = [ plugin.get_group_vars(group, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'get_group_vars')] for updated in vars_results: if updated is not None: vars = utils.combine_vars(vars, updated) # Read group_vars/ files vars = utils.combine_vars(vars, self.get_group_vars(group)) return vars def get_variables(self, hostname, update_cached=False, vault_password=None): host = self.get_host(hostname) if not host: raise Exception("host not found: %s" % 
hostname) return host.get_variables() def get_host_variables(self, hostname, update_cached=False, vault_password=None): if hostname not in self._vars_per_host or update_cached: self._vars_per_host[hostname] = self._get_host_variables(hostname, vault_password=vault_password) return self._vars_per_host[hostname] def _get_host_variables(self, hostname, vault_password=None): host = self.get_host(hostname) if host is None: raise errors.AnsibleError("host not found: %s" % hostname) vars = {} # plugin.run retrieves all vars (also from groups) for host vars_results = [ plugin.run(host, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'run')] for updated in vars_results: if updated is not None: vars = utils.combine_vars(vars, updated) # plugin.get_host_vars retrieves just vars for specific host vars_results = [ plugin.get_host_vars(host, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'get_host_vars')] for updated in vars_results: if updated is not None: vars = utils.combine_vars(vars, updated) # still need to check InventoryParser per host vars # which actually means InventoryScript per host, # which is not performant if self.parser is not None: vars = utils.combine_vars(vars, self.parser.get_host_variables(host)) # Read host_vars/ files vars = utils.combine_vars(vars, self.get_host_vars(host)) return vars def add_group(self, group): if group.name not in self.groups_list(): self.groups.append(group) self._groups_list = None # invalidate internal cache else: raise errors.AnsibleError("group already in inventory: %s" % group.name) def list_hosts(self, pattern="all"): """ return a list of hostnames for a pattern """ result = [ h.name for h in self.get_hosts(pattern) ] if len(result) == 0 and pattern in ["localhost", "127.0.0.1"]: result = [pattern] return result def list_groups(self): return sorted([ g.name for g in self.groups ], key=lambda x: x) # TODO: remove this function def get_restriction(self): 
return self._restriction def restrict_to(self, restriction): """ Restrict list operations to the hosts given in restriction. This is used to exclude failed hosts in main playbook code, don't use this for other reasons. """ if not isinstance(restriction, list): restriction = [ restriction ] self._restriction = restriction def also_restrict_to(self, restriction): """ Works like restict_to but offers an additional restriction. Playbooks use this to implement serial behavior. """ if not isinstance(restriction, list): restriction = [ restriction ] self._also_restriction = restriction def subset(self, subset_pattern): """ Limits inventory results to a subset of inventory that matches a given pattern, such as to select a given geographic of numeric slice amongst a previous 'hosts' selection that only select roles, or vice versa. Corresponds to --limit parameter to ansible-playbook """ if subset_pattern is None: self._subset = None else: subset_pattern = subset_pattern.replace(',',':') subset_pattern = subset_pattern.replace(";",":").split(":") results = [] # allow Unix style @filename data for x in subset_pattern: if x.startswith("@"): fd = open(x[1:]) results.extend(fd.read().split("\n")) fd.close() else: results.append(x) self._subset = results def lift_restriction(self): """ Do not restrict list operations """ self._restriction = None def lift_also_restriction(self): """ Clears the also restriction """ self._also_restriction = None def is_file(self): """ did inventory come from a file? """ if not isinstance(self.host_list, basestring): return False return os.path.exists(self.host_list) def basedir(self): """ if inventory came from a file, what's the directory? """ if not self.is_file(): return None dname = os.path.dirname(self.host_list) if dname is None or dname == '' or dname == '.': cwd = os.getcwd() return os.path.abspath(cwd) return os.path.abspath(dname) def src(self): """ if inventory came from a file, what's the directory and file name? 
""" if not self.is_file(): return None return self.host_list def playbook_basedir(self): """ returns the directory of the current playbook """ return self._playbook_basedir def set_playbook_basedir(self, dir): """ sets the base directory of the playbook so inventory can use it as a basedir for host_ and group_vars, and other things. """ # Only update things if dir is a different playbook basedir if dir != self._playbook_basedir: self._playbook_basedir = dir # get group vars from group_vars/ files for group in self.groups: group.vars = utils.combine_vars(group.vars, self.get_group_vars(group, new_pb_basedir=True)) # get host vars from host_vars/ files for host in self.get_hosts(): host.vars = utils.combine_vars(host.vars, self.get_host_vars(host, new_pb_basedir=True)) # invalidate cache self._vars_per_host = {} self._vars_per_group = {} def get_host_vars(self, host, new_pb_basedir=False): """ Read host_vars/ files """ return self._get_hostgroup_vars(host=host, group=None, new_pb_basedir=new_pb_basedir) def get_group_vars(self, group, new_pb_basedir=False): """ Read group_vars/ files """ return self._get_hostgroup_vars(host=None, group=group, new_pb_basedir=new_pb_basedir) def _get_hostgroup_vars(self, host=None, group=None, new_pb_basedir=False): """ Loads variables from group_vars/<groupname> and host_vars/<hostname> in directories parallel to the inventory base directory or in the same directory as the playbook. Variables in the playbook dir will win over the inventory dir if files are in both. 
""" results = {} scan_pass = 0 _basedir = self.basedir() # look in both the inventory base directory and the playbook base directory # unless we do an update for a new playbook base dir if not new_pb_basedir: basedirs = [_basedir, self._playbook_basedir] else: basedirs = [self._playbook_basedir] for basedir in basedirs: # this can happen from particular API usages, particularly if not run # from /usr/bin/ansible-playbook if basedir is None: continue scan_pass = scan_pass + 1 # it's not an eror if the directory does not exist, keep moving if not os.path.exists(basedir): continue # save work of second scan if the directories are the same if _basedir == self._playbook_basedir and scan_pass != 1: continue if group and host is None: # load vars in dir/group_vars/name_of_group base_path = os.path.join(basedir, "group_vars/%s" % group.name) results = utils.load_vars(base_path, results, vault_password=self._vault_password) elif host and group is None: # same for hostvars in dir/host_vars/name_of_host base_path = os.path.join(basedir, "host_vars/%s" % host.name) results = utils.load_vars(base_path, results, vault_password=self._vault_password) # all done, results is a dictionary of variables for this particular host. return results
gpl-3.0
tensorflow/models
official/nlp/nhnet/configs.py
1
3203
# Copyright 2021 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Common NHNet/Bert2Bert configuration.""" from typing import List, Text import dataclasses from official.modeling.hyperparams import base_config @dataclasses.dataclass class BERT2BERTConfig(base_config.Config): """High-level configurations for BERT2BERT model. These include parameters that are not directly related to the experiment, e.g. encoder, decoder, prediction, training, etc. """ vocab_size: int = 30522 hidden_size: int = 768 num_hidden_layers: int = 12 num_attention_heads: int = 12 intermediate_size: int = 3072 hidden_act: str = "gelu" hidden_dropout_prob: float = 0.1 attention_probs_dropout_prob: float = 0.1 max_position_embeddings: int = 512 type_vocab_size: int = 2 initializer_range: float = 0.02 decoder_intermediate_size: int = 3072 num_decoder_attn_heads: int = 12 num_decoder_layers: int = 12 label_smoothing: float = 0.1 learning_rate: float = 0.05 learning_rate_warmup_steps: int = 20000 optimizer: str = "Adam" adam_beta1: float = 0.9 adam_beta2: float = 0.997 adam_epsilon: float = 1e-09 # predict params beam_size: int = 5 alpha: float = 0.6 initializer_gain: float = 1.0 use_cache: bool = True # input params input_sharding: bool = False input_data_not_padded: bool = False pad_token_id: int = 0 end_token_id: int = 102 start_token_id: int = 101 @dataclasses.dataclass class NHNetConfig(BERT2BERTConfig): """High-level configurations for NHNet model. 
These include parameters that are not directly related to the experiment, e.g. encoder, decoder, prediction, training, etc. """ multi_channel_cross_attention: bool = True passage_list: List[Text] = dataclasses.field( default_factory=lambda: [chr(ord("b") + i) for i in range(5)]) # Initialization method. # If init_from_bert2bert is false, we assume the checkpoint is from BERT # pretraining and only encoder and self-attention variables are initialized. init_from_bert2bert: bool = True UNITTEST_CONFIG = { "attention_probs_dropout_prob": 0.0, "hidden_act": "gelu", "hidden_dropout_prob": 0.0, "hidden_size": 16, "initializer_range": 0.02, "intermediate_size": 32, "max_position_embeddings": 128, "num_attention_heads": 2, "num_hidden_layers": 1, "type_vocab_size": 2, "vocab_size": 30522, "initializer_gain": 1.0, "decoder_intermediate_size": 32, "num_decoder_attn_heads": 2, "num_decoder_layers": 1, "use_cache": True, "input_data_not_padded": False, "pad_token_id": 0, "end_token_id": 102, "start_token_id": 101, }
apache-2.0
spezi77/android_kernel_htc_qsd8k_3.0
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py
12980
5411
# SchedGui.py - Python extension for perf script, basic GUI code for # traces drawing and overview. # # Copyright (C) 2010 by Frederic Weisbecker <fweisbec@gmail.com> # # This software is distributed under the terms of the GNU General # Public License ("GPL") version 2 as published by the Free Software # Foundation. try: import wx except ImportError: raise ImportError, "You need to install the wxpython lib for this script" class RootFrame(wx.Frame): Y_OFFSET = 100 RECT_HEIGHT = 100 RECT_SPACE = 50 EVENT_MARKING_WIDTH = 5 def __init__(self, sched_tracer, title, parent = None, id = -1): wx.Frame.__init__(self, parent, id, title) (self.screen_width, self.screen_height) = wx.GetDisplaySize() self.screen_width -= 10 self.screen_height -= 10 self.zoom = 0.5 self.scroll_scale = 20 self.sched_tracer = sched_tracer self.sched_tracer.set_root_win(self) (self.ts_start, self.ts_end) = sched_tracer.interval() self.update_width_virtual() self.nr_rects = sched_tracer.nr_rectangles() + 1 self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)) # whole window panel self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height)) # scrollable container self.scroll = wx.ScrolledWindow(self.panel) self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale) self.scroll.EnableScrolling(True, True) self.scroll.SetFocus() # scrollable drawing area self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2)) self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint) self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press) self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down) self.scroll.Bind(wx.EVT_PAINT, self.on_paint) self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press) self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down) self.scroll.Fit() self.Fit() self.scroll_panel.SetDimensions(-1, -1, 
self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING) self.txt = None self.Show(True) def us_to_px(self, val): return val / (10 ** 3) * self.zoom def px_to_us(self, val): return (val / self.zoom) * (10 ** 3) def scroll_start(self): (x, y) = self.scroll.GetViewStart() return (x * self.scroll_scale, y * self.scroll_scale) def scroll_start_us(self): (x, y) = self.scroll_start() return self.px_to_us(x) def paint_rectangle_zone(self, nr, color, top_color, start, end): offset_px = self.us_to_px(start - self.ts_start) width_px = self.us_to_px(end - self.ts_start) offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)) width_py = RootFrame.RECT_HEIGHT dc = self.dc if top_color is not None: (r, g, b) = top_color top_color = wx.Colour(r, g, b) brush = wx.Brush(top_color, wx.SOLID) dc.SetBrush(brush) dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH) width_py -= RootFrame.EVENT_MARKING_WIDTH offset_py += RootFrame.EVENT_MARKING_WIDTH (r ,g, b) = color color = wx.Colour(r, g, b) brush = wx.Brush(color, wx.SOLID) dc.SetBrush(brush) dc.DrawRectangle(offset_px, offset_py, width_px, width_py) def update_rectangles(self, dc, start, end): start += self.ts_start end += self.ts_start self.sched_tracer.fill_zone(start, end) def on_paint(self, event): dc = wx.PaintDC(self.scroll_panel) self.dc = dc width = min(self.width_virtual, self.screen_width) (x, y) = self.scroll_start() start = self.px_to_us(x) end = self.px_to_us(x + width) self.update_rectangles(dc, start, end) def rect_from_ypixel(self, y): y -= RootFrame.Y_OFFSET rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE) height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE) if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT: return -1 return rect def update_summary(self, txt): if self.txt: self.txt.Destroy() self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50)) def on_mouse_down(self, event): (x, y) = 
event.GetPositionTuple() rect = self.rect_from_ypixel(y) if rect == -1: return t = self.px_to_us(x) + self.ts_start self.sched_tracer.mouse_down(rect, t) def update_width_virtual(self): self.width_virtual = self.us_to_px(self.ts_end - self.ts_start) def __zoom(self, x): self.update_width_virtual() (xpos, ypos) = self.scroll.GetViewStart() xpos = self.us_to_px(x) / self.scroll_scale self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos) self.Refresh() def zoom_in(self): x = self.scroll_start_us() self.zoom *= 2 self.__zoom(x) def zoom_out(self): x = self.scroll_start_us() self.zoom /= 2 self.__zoom(x) def on_key_press(self, event): key = event.GetRawKeyCode() if key == ord("+"): self.zoom_in() return if key == ord("-"): self.zoom_out() return key = event.GetKeyCode() (x, y) = self.scroll.GetViewStart() if key == wx.WXK_RIGHT: self.scroll.Scroll(x + 1, y) elif key == wx.WXK_LEFT: self.scroll.Scroll(x - 1, y) elif key == wx.WXK_DOWN: self.scroll.Scroll(x, y + 1) elif key == wx.WXK_UP: self.scroll.Scroll(x, y - 1)
gpl-2.0
bertucho/epic-movie-quotes-quiz
dialogos/build/scrapy/tests/test_utils_url.py
15
10500
import unittest

from scrapy.spiders import Spider
from scrapy.utils.url import url_is_from_any_domain, url_is_from_spider, canonicalize_url

# modules whose doctests should be collected alongside this test module
__doctests__ = ['scrapy.utils.url']


class UrlUtilsTest(unittest.TestCase):
    """Unit tests for the URL helper functions in ``scrapy.utils.url``."""

    def test_url_is_from_any_domain(self):
        """Exact and www-prefixed matches, case-insensitivity, explicit ports."""
        url = 'http://www.wheele-bin-art.co.uk/get/product/123'
        self.assertTrue(url_is_from_any_domain(url, ['wheele-bin-art.co.uk']))
        # a mere suffix of the registered domain must NOT match
        self.assertFalse(url_is_from_any_domain(url, ['art.co.uk']))

        url = 'http://wheele-bin-art.co.uk/get/product/123'
        self.assertTrue(url_is_from_any_domain(url, ['wheele-bin-art.co.uk']))
        self.assertFalse(url_is_from_any_domain(url, ['art.co.uk']))

        # domain comparison is case-insensitive in both directions
        url = 'http://www.Wheele-Bin-Art.co.uk/get/product/123'
        self.assertTrue(url_is_from_any_domain(url, ['wheele-bin-art.CO.UK']))
        self.assertTrue(url_is_from_any_domain(url, ['WHEELE-BIN-ART.CO.UK']))

        # an explicit port is part of the value being matched
        url = 'http://192.169.0.15:8080/mypage.html'
        self.assertTrue(url_is_from_any_domain(url, ['192.169.0.15:8080']))
        self.assertFalse(url_is_from_any_domain(url, ['192.169.0.15']))

        # javascript: pseudo-URLs never belong to any domain
        url = 'javascript:%20document.orderform_2581_1190810811.mode.value=%27add%27;%20javascript:%20document.orderform_2581_1190810811.submit%28%29'
        self.assertFalse(url_is_from_any_domain(url, ['testdomain.com']))
        self.assertFalse(url_is_from_any_domain(url+'.testdomain.com', ['testdomain.com']))

    def test_url_is_from_spider(self):
        """A spider named like a domain matches that domain and its subdomains."""
        spider = Spider(name='example.com')
        self.assertTrue(url_is_from_spider('http://www.example.com/some/page.html', spider))
        self.assertTrue(url_is_from_spider('http://sub.example.com/some/page.html', spider))
        self.assertFalse(url_is_from_spider('http://www.example.org/some/page.html', spider))
        self.assertFalse(url_is_from_spider('http://www.example.net/some/page.html', spider))

    def test_url_is_from_spider_class_attributes(self):
        """Same as above, but passing the spider class instead of an instance."""
        class MySpider(Spider):
            name = 'example.com'
        self.assertTrue(url_is_from_spider('http://www.example.com/some/page.html', MySpider))
        self.assertTrue(url_is_from_spider('http://sub.example.com/some/page.html', MySpider))
        self.assertFalse(url_is_from_spider('http://www.example.org/some/page.html', MySpider))
        self.assertFalse(url_is_from_spider('http://www.example.net/some/page.html', MySpider))

    def test_url_is_from_spider_with_allowed_domains(self):
        """``allowed_domains`` (list, set or tuple) extend what a spider matches."""
        spider = Spider(name='example.com', allowed_domains=['example.org', 'example.net'])
        self.assertTrue(url_is_from_spider('http://www.example.com/some/page.html', spider))
        self.assertTrue(url_is_from_spider('http://sub.example.com/some/page.html', spider))
        self.assertTrue(url_is_from_spider('http://example.com/some/page.html', spider))
        self.assertTrue(url_is_from_spider('http://www.example.org/some/page.html', spider))
        self.assertTrue(url_is_from_spider('http://www.example.net/some/page.html', spider))
        self.assertFalse(url_is_from_spider('http://www.example.us/some/page.html', spider))

        # allowed_domains may be given as a set ...
        spider = Spider(name='example.com', allowed_domains=set(('example.com', 'example.net')))
        self.assertTrue(url_is_from_spider('http://www.example.com/some/page.html', spider))

        # ... or as a tuple
        spider = Spider(name='example.com', allowed_domains=('example.com', 'example.net'))
        self.assertTrue(url_is_from_spider('http://www.example.com/some/page.html', spider))

    def test_url_is_from_spider_with_allowed_domains_class_attributes(self):
        """``allowed_domains`` also works when declared as a class attribute."""
        class MySpider(Spider):
            name = 'example.com'
            allowed_domains = ('example.org', 'example.net')
        self.assertTrue(url_is_from_spider('http://www.example.com/some/page.html', MySpider))
        self.assertTrue(url_is_from_spider('http://sub.example.com/some/page.html', MySpider))
        self.assertTrue(url_is_from_spider('http://example.com/some/page.html', MySpider))
        self.assertTrue(url_is_from_spider('http://www.example.org/some/page.html', MySpider))
        self.assertTrue(url_is_from_spider('http://www.example.net/some/page.html', MySpider))
        self.assertFalse(url_is_from_spider('http://www.example.us/some/page.html', MySpider))

    def test_canonicalize_url(self):
        """canonicalize_url: query sorting, quoting, fragments, case handling."""
        # simplest case
        self.assertEqual(canonicalize_url("http://www.example.com/"),
                         "http://www.example.com/")

        # always return a str
        assert isinstance(canonicalize_url(u"http://www.example.com"), str)

        # append missing path
        self.assertEqual(canonicalize_url("http://www.example.com"),
                         "http://www.example.com/")
        # typical usage
        self.assertEqual(canonicalize_url("http://www.example.com/do?a=1&b=2&c=3"),
                         "http://www.example.com/do?a=1&b=2&c=3")
        self.assertEqual(canonicalize_url("http://www.example.com/do?c=1&b=2&a=3"),
                         "http://www.example.com/do?a=3&b=2&c=1")
        self.assertEqual(canonicalize_url("http://www.example.com/do?&a=1"),
                         "http://www.example.com/do?a=1")

        # sorting by argument values
        self.assertEqual(canonicalize_url("http://www.example.com/do?c=3&b=5&b=2&a=50"),
                         "http://www.example.com/do?a=50&b=2&b=5&c=3")

        # using keep_blank_values
        self.assertEqual(canonicalize_url("http://www.example.com/do?b=&a=2", keep_blank_values=False),
                         "http://www.example.com/do?a=2")
        self.assertEqual(canonicalize_url("http://www.example.com/do?b=&a=2"),
                         "http://www.example.com/do?a=2&b=")
        self.assertEqual(canonicalize_url("http://www.example.com/do?b=&c&a=2", keep_blank_values=False),
                         "http://www.example.com/do?a=2")
        self.assertEqual(canonicalize_url("http://www.example.com/do?b=&c&a=2"),
                         "http://www.example.com/do?a=2&b=&c=")
        self.assertEqual(canonicalize_url(u'http://www.example.com/do?1750,4'),
                         'http://www.example.com/do?1750%2C4=')

        # spaces
        self.assertEqual(canonicalize_url("http://www.example.com/do?q=a space&a=1"),
                         "http://www.example.com/do?a=1&q=a+space")
        self.assertEqual(canonicalize_url("http://www.example.com/do?q=a+space&a=1"),
                         "http://www.example.com/do?a=1&q=a+space")
        self.assertEqual(canonicalize_url("http://www.example.com/do?q=a%20space&a=1"),
                         "http://www.example.com/do?a=1&q=a+space")

        # normalize percent-encoding case (in paths)
        self.assertEqual(canonicalize_url("http://www.example.com/a%a3do"),
                         "http://www.example.com/a%A3do"),
        # normalize percent-encoding case (in query arguments)
        self.assertEqual(canonicalize_url("http://www.example.com/do?k=b%a3"),
                         "http://www.example.com/do?k=b%A3")

        # non-ASCII percent-encoding in paths
        self.assertEqual(canonicalize_url("http://www.example.com/a do?a=1"),
                         "http://www.example.com/a%20do?a=1"),
        self.assertEqual(canonicalize_url("http://www.example.com/a %20do?a=1"),
                         "http://www.example.com/a%20%20do?a=1"),
        self.assertEqual(canonicalize_url("http://www.example.com/a do\xc2\xa3.html?a=1"),
                         "http://www.example.com/a%20do%C2%A3.html?a=1")
        # non-ASCII percent-encoding in query arguments
        self.assertEqual(canonicalize_url(u"http://www.example.com/do?price=\xa3500&a=5&z=3"),
                         u"http://www.example.com/do?a=5&price=%C2%A3500&z=3")
        self.assertEqual(canonicalize_url("http://www.example.com/do?price=\xc2\xa3500&a=5&z=3"),
                         "http://www.example.com/do?a=5&price=%C2%A3500&z=3")
        self.assertEqual(canonicalize_url("http://www.example.com/do?price(\xc2\xa3)=500&a=1"),
                         "http://www.example.com/do?a=1&price%28%C2%A3%29=500")

        # urls containing auth and ports
        self.assertEqual(canonicalize_url(u"http://user:pass@www.example.com:81/do?now=1"),
                         u"http://user:pass@www.example.com:81/do?now=1")

        # remove fragments
        self.assertEqual(canonicalize_url(u"http://user:pass@www.example.com/do?a=1#frag"),
                         u"http://user:pass@www.example.com/do?a=1")
        self.assertEqual(canonicalize_url(u"http://user:pass@www.example.com/do?a=1#frag", keep_fragments=True),
                         u"http://user:pass@www.example.com/do?a=1#frag")

        # dont convert safe characters to percent encoding representation
        self.assertEqual(canonicalize_url(
            "http://www.simplybedrooms.com/White-Bedroom-Furniture/Bedroom-Mirror:-Josephine-Cheval-Mirror.html"),
            "http://www.simplybedrooms.com/White-Bedroom-Furniture/Bedroom-Mirror:-Josephine-Cheval-Mirror.html")

        # urllib.quote uses a mapping cache of encoded characters. when parsing
        # an already percent-encoded url, it will fail if that url was not
        # percent-encoded as utf-8, that's why canonicalize_url must always
        # convert the urls to string. the following test asserts that
        # functionality.
        self.assertEqual(canonicalize_url(u'http://www.example.com/caf%E9-con-leche.htm'),
                         'http://www.example.com/caf%E9-con-leche.htm')

        # domains are case insensitive
        self.assertEqual(canonicalize_url("http://www.EXAMPLE.com/"),
                         "http://www.example.com/")

        # quoted slash and question sign
        self.assertEqual(canonicalize_url("http://foo.com/AC%2FDC+rocks%3f/?yeah=1"),
                         "http://foo.com/AC%2FDC+rocks%3F/?yeah=1")
        self.assertEqual(canonicalize_url("http://foo.com/AC%2FDC/"),
                         "http://foo.com/AC%2FDC/")


if __name__ == "__main__":
    unittest.main()
mit
mbaijal/incubator-mxnet
python/mxnet/lr_scheduler.py
18
11542
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

"""Scheduling learning rate."""
import logging
from math import cos, pi


class LRScheduler(object):
    """Base class of a learning rate scheduler.

    A scheduler returns a new learning rate based on the
    number of updates that have been performed.

    Parameters
    ----------
    base_lr : float, optional
        The initial learning rate.
    warmup_steps: int
        number of warmup steps used before this scheduler starts decay
    warmup_begin_lr: float
        if using warmup, the learning rate from which it starts warming up
    warmup_mode: string
        warmup can be done in two modes.
        'linear' mode gradually increases lr with each step in equal increments
        'constant' mode keeps lr at warmup_begin_lr for warmup_steps
    """
    def __init__(self, base_lr=0.01,
                 warmup_steps=0, warmup_begin_lr=0, warmup_mode='linear'):
        self.base_lr = base_lr
        assert isinstance(warmup_steps, int)
        self.warmup_steps = warmup_steps

        # warmup ends at the configured base learning rate
        self.warmup_final_lr = base_lr
        self.warmup_begin_lr = warmup_begin_lr
        if self.warmup_begin_lr > self.warmup_final_lr:
            raise ValueError("Base lr has to be higher than warmup_begin_lr")
        if self.warmup_steps < 0:
            raise ValueError("Warmup steps has to be positive or 0")
        if warmup_mode not in ['linear', 'constant']:
            raise ValueError("Supports only linear and constant modes of warmup")
        self.warmup_mode = warmup_mode

    def get_warmup_lr(self, num_update):
        # Return the warm-up learning rate for num_update; only valid while
        # still inside the warm-up window.
        assert num_update < self.warmup_steps
        if self.warmup_mode == 'linear':
            # linear ramp from warmup_begin_lr up to warmup_final_lr
            increase = (self.warmup_final_lr - self.warmup_begin_lr) \
                       * float(num_update) / float(self.warmup_steps)
            return self.warmup_begin_lr + increase
        elif self.warmup_mode == 'constant':
            return self.warmup_begin_lr
        else:
            raise ValueError("Invalid warmup mode %s"%self.warmup_mode)

    def __call__(self, num_update):
        """Return a new learning rate.

        The ``num_update`` is the upper bound of the number of updates applied to
        every weight.

        Assume the optimizer has updated *i*-th weight by *k_i* times, namely
        ``optimizer.update(i, weight_i)`` is called by *k_i* times. Then::

            num_update = max([k_i for all i])

        Parameters
        ----------
        num_update: int
            the maximal number of updates applied to a weight.
        """
        raise NotImplementedError("must override this")


class FactorScheduler(LRScheduler):
    """Reduce the learning rate by a factor for every *n* steps.

    It returns a new learning rate by::

        base_lr * pow(factor, floor(num_update/step))

    Parameters
    ----------
    step : int
        Changes the learning rate for every n updates.
    factor : float, optional
        The factor to change the learning rate.
    stop_factor_lr : float, optional
        Stop updating the learning rate if it is less than this value.
    """
    def __init__(self, step, factor=1, stop_factor_lr=1e-8, base_lr=0.01,
                 warmup_steps=0, warmup_begin_lr=0, warmup_mode='linear'):
        super(FactorScheduler, self).__init__(base_lr, warmup_steps, warmup_begin_lr, warmup_mode)
        if step < 1:
            raise ValueError("Schedule step must be greater or equal than 1 round")
        if factor > 1.0:
            raise ValueError("Factor must be no more than 1 to make lr reduce")
        self.step = step
        self.factor = factor
        self.stop_factor_lr = stop_factor_lr
        self.count = 0  # number of updates already accounted for by past decays

    def __call__(self, num_update):
        # warm-up phase takes precedence over the decay schedule
        if num_update < self.warmup_steps:
            return self.get_warmup_lr(num_update)

        # NOTE: use while rather than if  (for continuing training via load_epoch)
        while num_update > self.count + self.step:
            self.count += self.step
            self.base_lr *= self.factor
            if self.base_lr < self.stop_factor_lr:
                # clamp to the floor and stop decaying further
                self.base_lr = self.stop_factor_lr
                logging.info("Update[%d]: now learning rate arrived at %0.5e, will not "
                             "change in the future", num_update, self.base_lr)
            else:
                logging.info("Update[%d]: Change learning rate to %0.5e",
                             num_update, self.base_lr)
        return self.base_lr


class MultiFactorScheduler(LRScheduler):
    """Reduce the learning rate by given a list of steps.

    Assume there exists *k* such that::

       step[k] <= num_update and num_update < step[k+1]

    Then calculate the new learning rate by::

       base_lr * pow(factor, k+1)

    Parameters
    ----------
    step: list of int
        The list of steps to schedule a change
    factor: float
        The factor to change the learning rate.
    warmup_steps: int
        number of warmup steps used before this scheduler starts decay
    warmup_begin_lr: float
        if using warmup, the learning rate from which it starts warming up
    warmup_mode: string
        warmup can be done in two modes.
        'linear' mode gradually increases lr with each step in equal increments
        'constant' mode keeps lr at warmup_begin_lr for warmup_steps
    """
    def __init__(self, step, factor=1, base_lr=0.01, warmup_steps=0, warmup_begin_lr=0,
                 warmup_mode='linear'):
        super(MultiFactorScheduler, self).__init__(base_lr, warmup_steps,
                                                   warmup_begin_lr, warmup_mode)
        assert isinstance(step, list) and len(step) >= 1
        for i, _step in enumerate(step):
            # steps must be strictly increasing positive integers
            if i != 0 and step[i] <= step[i-1]:
                raise ValueError("Schedule step must be an increasing integer list")
            if _step < 1:
                raise ValueError("Schedule step must be greater or equal than 1 round")
        if factor > 1.0:
            raise ValueError("Factor must be no more than 1 to make lr reduce")
        self.step = step
        self.cur_step_ind = 0  # index of the next schedule boundary to cross
        self.factor = factor
        self.count = 0

    def __call__(self, num_update):
        if num_update < self.warmup_steps:
            return self.get_warmup_lr(num_update)

        # NOTE: use while rather than if  (for continuing training via load_epoch)
        while self.cur_step_ind <= len(self.step)-1:
            if num_update > self.step[self.cur_step_ind]:
                self.count = self.step[self.cur_step_ind]
                self.cur_step_ind += 1
                self.base_lr *= self.factor
                logging.info("Update[%d]: Change learning rate to %0.5e",
                             num_update, self.base_lr)
            else:
                return self.base_lr
        return self.base_lr


class PolyScheduler(LRScheduler):
    """ Reduce the learning rate according to a polynomial of given power.

    Calculate the new learning rate, after warmup if any, by::

       final_lr + (start_lr - final_lr) * (1-nup/max_nup)^pwr
       if nup < max_nup, 0 otherwise.

    Parameters
    ----------
    max_update: int
        maximum number of updates before the decay reaches final learning rate.
    base_lr: float
        base learning rate to start from
    pwr:   int
        power of the decay term as a function of the current number of updates.
    final_lr:   float
        final learning rate after all steps
    warmup_steps: int
        number of warmup steps used before this scheduler starts decay
    warmup_begin_lr: float
        if using warmup, the learning rate from which it starts warming up
    warmup_mode: string
        warmup can be done in two modes.
        'linear' mode gradually increases lr with each step in equal increments
        'constant' mode keeps lr at warmup_begin_lr for warmup_steps
    """

    def __init__(self, max_update, base_lr=0.01, pwr=2, final_lr=0,
                 warmup_steps=0, warmup_begin_lr=0, warmup_mode='linear'):
        super(PolyScheduler, self).__init__(base_lr, warmup_steps, warmup_begin_lr, warmup_mode)
        assert isinstance(max_update, int)
        if max_update < 1:
            raise ValueError("maximum number of updates must be strictly positive")
        self.power = pwr
        self.base_lr_orig = self.base_lr  # keep original: base_lr itself is mutated per call
        self.max_update = max_update
        self.final_lr = final_lr
        # number of decay steps after the warm-up window ends
        self.max_steps = self.max_update - self.warmup_steps

    def __call__(self, num_update):
        if num_update < self.warmup_steps:
            return self.get_warmup_lr(num_update)
        if num_update <= self.max_update:
            self.base_lr = self.final_lr + (self.base_lr_orig - self.final_lr) * \
                pow(1 - float(num_update - self.warmup_steps) / float(self.max_steps),
                    self.power)
        # past max_update, the last computed value (final_lr) is returned unchanged
        return self.base_lr


class CosineScheduler(LRScheduler):
    """ Reduce the learning rate according to a cosine function

    Calculate the new learning rate by::

       final_lr + (start_lr - final_lr) * (1+cos(pi * nup/max_nup))/2
       if nup < max_nup, 0 otherwise.

    Parameters
    ----------
    max_update: int
        maximum number of updates before the decay reaches 0
    base_lr: float
        base learning rate
    final_lr: float
        final learning rate after all steps
    warmup_steps: int
        number of warmup steps used before this scheduler starts decay
    warmup_begin_lr: float
        if using warmup, the learning rate from which it starts warming up
    warmup_mode: string
        warmup can be done in two modes.
        'linear' mode gradually increases lr with each step in equal increments
        'constant' mode keeps lr at warmup_begin_lr for warmup_steps
    """

    def __init__(self, max_update, base_lr=0.01, final_lr=0,
                 warmup_steps=0, warmup_begin_lr=0, warmup_mode='linear'):
        super(CosineScheduler, self).__init__(base_lr, warmup_steps, warmup_begin_lr, warmup_mode)
        assert isinstance(max_update, int)
        if max_update < 1:
            raise ValueError("maximum number of updates must be strictly positive")
        self.base_lr_orig = base_lr
        self.max_update = max_update
        self.final_lr = final_lr
        self.max_steps = self.max_update - self.warmup_steps

    def __call__(self, num_update):
        if num_update < self.warmup_steps:
            return self.get_warmup_lr(num_update)
        if num_update <= self.max_update:
            # half-cosine from base_lr_orig down to final_lr over max_steps updates
            self.base_lr = self.final_lr + (self.base_lr_orig - self.final_lr) * \
                (1 + cos(pi * (num_update - self.warmup_steps) / self.max_steps)) / 2
        return self.base_lr
apache-2.0
mick-d/nipype_source
nipype/interfaces/freesurfer/tests/test_auto_MRIMarchingCubes.py
5
1293
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from nipype.testing import assert_equal from nipype.interfaces.freesurfer.utils import MRIMarchingCubes def test_MRIMarchingCubes_inputs(): input_map = dict(args=dict(argstr='%s', ), connectivity_value=dict(argstr='%d', position=-1, usedefault=True, ), environ=dict(nohash=True, usedefault=True, ), ignore_exception=dict(nohash=True, usedefault=True, ), in_file=dict(argstr='%s', mandatory=True, position=1, ), label_value=dict(argstr='%d', mandatory=True, position=2, ), out_file=dict(argstr='./%s', genfile=True, position=-2, ), subjects_dir=dict(), terminal_output=dict(mandatory=True, nohash=True, ), ) inputs = MRIMarchingCubes.input_spec() for key, metadata in input_map.items(): for metakey, value in metadata.items(): yield assert_equal, getattr(inputs.traits()[key], metakey), value def test_MRIMarchingCubes_outputs(): output_map = dict(surface=dict(), ) outputs = MRIMarchingCubes.output_spec() for key, metadata in output_map.items(): for metakey, value in metadata.items(): yield assert_equal, getattr(outputs.traits()[key], metakey), value
bsd-3-clause
firebitsbr/discover
parsers/parse-nmap.py
7
4345
#!/usr/bin/env python
#
# by Saviour Emmanuel

from xml.dom import minidom


class NMAP_XMLParser(object):
    """Parse an nmap XML report and export host/port details to a CSV file.

    Typical usage::

        parser = NMAP_XMLParser("nmap.xml")
        parser.setCSVPath("nmap.csv")
        parser.dumpCSV()
    """

    def __init__(self, file_path):
        self._xml_object = object()   # placeholder; replaced by the parsed DOM below
        self._xml_path = file_path    # path of the nmap XML input file
        self._output_path = str()     # CSV output path, set via setCSVPath()
        self._csv_string = str()      # accumulated CSV text
        self._open_xml()

    def _open_xml(self):
        '''Open and parse the XML file on class construction'''
        self._xml_object = minidom.parse(self._xml_path)

    def setCSVPath(self, output_path):
        '''Set the path to dump the CSV file (a ".csv" suffix is enforced)'''
        if not output_path.lower().endswith(".csv"):
            output_path = output_path + ".csv"
        self._output_path = output_path

    def _iter_hosts(self):
        '''Yield each <host> element of the report'''
        for host_node in self._xml_object.getElementsByTagName("host"):
            yield(host_node)

    def _get_IP_Address(self, info):
        '''Return the host's first IPv4 address, or "" if none is present'''
        for address in info.getElementsByTagName("address"):
            if(address.getAttribute("addrtype") == "ipv4"):
                return(address.getAttribute("addr"))
        return(str())

    def _get_FQDN(self, info):
        '''Return the first non-empty hostname (FQDN), or "" if none'''
        for hostname in info.getElementsByTagName("hostname"):
            if(hostname.getAttribute("name")):   # thanks to Kevin for the bug fix
                return(hostname.getAttribute("name"))
        return(str())

    def _get_OS(self, info):
        '''Determine the OS by the greatest percentage in accuracy'''
        os_hash = dict()
        for os_detail in info.getElementsByTagName("osmatch"):
            guessed_os = os_detail.getAttribute("name")
            accuracy = os_detail.getAttribute("accuracy")
            if(guessed_os and accuracy):
                os_hash[float(accuracy)] = guessed_os
        # pick the guess with the highest accuracy; "" when no <osmatch> exists
        if(os_hash):
            return(os_hash[max(os_hash)])
        return(str())

    def _get_iter_Port_Information(self, info):
        '''Yield (port, protocol, service, product, version) for each open port'''
        for port_details in info.getElementsByTagName("port"):
            protocol = port_details.getAttribute("protocol")
            port_number = port_details.getAttribute("portid")
            for port_services in port_details.getElementsByTagName("state"):
                # only report ports whose state is "open"
                if(port_services.getAttribute("state") == "open"):
                    for service_details in port_details.getElementsByTagName("service"):
                        service = service_details.getAttribute("name")
                        product = service_details.getAttribute("product")
                        version = service_details.getAttribute("version")
                        yield(port_number, protocol, service, product, version)

    def _parse_XML_details(self):
        '''Initiate parsing of nmap XML file and create CSV string object'''
        csv_header = "IP Address,FQDN,OS,Port,Protocol,Service,Name,Version\n"
        csv_format = '{0},"{1}","{2}",{3},{4},"{5}","{6}","{7}"\n'
        self._csv_string += csv_header
        for info in self._iter_hosts():
            ip = self._get_IP_Address(info)
            fqdn = self._get_FQDN(info)
            os = self._get_OS(info)
            for port, protocol, service, product, version in self._get_iter_Port_Information(info):
                self._csv_string += csv_format.format(ip, fqdn, os, port,
                                                      protocol, service, product, version)

    def dumpCSV(self):
        '''Write CSV output file to disk'''
        self._parse_XML_details()
        # context manager guarantees the handle is closed even if the write fails
        with open(self._output_path, "w") as csv_output:
            csv_output.write(self._csv_string)


if __name__ == "__main__":
    nmap_xml = NMAP_XMLParser("nmap.xml")   # Input file
    nmap_xml.setCSVPath("nmap.csv")         # Output file
    nmap_xml.dumpCSV()
bsd-3-clause
Crach1015/plugin.video.superpack
zip/plugin.video.Robinhood2.0/youtubedl.py
255
1840
# -*- coding: utf-8 -*- import xbmc,xbmcgui try: from YDStreamExtractor import getVideoInfo from YDStreamExtractor import handleDownload except Exception: print 'importing Error. You need youtubedl module which is in official xbmc.org' xbmc.executebuiltin("XBMC.Notification(LiveStreamsPro,Please [COLOR yellow]install Youtube-dl[/COLOR] module ,10000,"")") def single_YD(url,download=False,dl_info=False,audio=False): if dl_info: handleDownload(dl_info,bg=True) return else: info = getVideoInfo(url,quality=3,resolve_redirects=True) if info is None: print 'Fail to extract' return None elif info and download : if audio: try: for s in info.streams(): print 'len(s[',len(s['ytdl_format']['formats']) for i in range(len(s['ytdl_format']['formats'])): if s['ytdl_format']['formats'][i]['format_id'] == '140': print 'm4a found' audio_url = s['ytdl_format']['formats'][i]['url'].encode('utf-8','ignore') title = s['title'].encode('utf-8','ignore') info = {'url':audio_url,'title':title,'media_type':'audio'} break except Exception: print 'audio download failed' return handleDownload(info,bg=True) else: for s in info.streams(): try: stream_url = s['xbmc_url'].encode('utf-8','ignore') print stream_url return stream_url except Exception: return None
gpl-2.0
kmoocdev2/edx-platform
lms/djangoapps/verify_student/management/commands/manual_verifications.py
13
3261
""" Django admin commands related to verify_student """ import logging import os from pprint import pformat from django.core.management.base import BaseCommand, CommandError from django.contrib.auth.models import User from lms.djangoapps.verify_student.models import ManualVerification from lms.djangoapps.verify_student.utils import earliest_allowed_verification_date log = logging.getLogger(__name__) class Command(BaseCommand): """ This method attempts to manually verify users. Example usage: $ ./manage.py lms manual_verifications --email-ids-file <absolute path of file with email ids (one per line)> """ help = 'Manually verifies one or more users passed as an argument list.' def add_arguments(self, parser): parser.add_argument( '--email-ids-file', action='store', dest='email_ids_file', default=None, help='Path of the file to read email id from.', type=str, required=True ) def handle(self, *args, **options): email_ids_file = options['email_ids_file'] if email_ids_file: if not os.path.exists(email_ids_file): raise CommandError(u'Pass the correct absolute path to email ids file as --email-ids-file argument.') total_emails, failed_emails = self._generate_manual_verification_from_file(email_ids_file) if failed_emails: log.error(u'Completed manual verification. {} of {} failed.'.format( len(failed_emails), total_emails )) log.error('Failed emails:{}'.format(pformat(failed_emails))) else: log.info('Successfully generated manual verification for {} emails.'.format(total_emails)) def _generate_manual_verification_from_file(self, email_ids_file): """ Generate manual verification for the emails provided in the email ids file. Arguments: email_ids_file (str): path of the file containing email ids. Returns: (total_emails, failed_emails): a tuple containing count of emails processed and a list containing emails whose verifications could not be processed. 
""" failed_emails = [] with open(email_ids_file, 'r') as file_handler: email_ids = file_handler.readlines() total_emails = len(email_ids) log.info(u'Creating manual verification for {} emails.'.format(total_emails)) for email_id in email_ids: try: email_id = email_id.strip() user = User.objects.get(email=email_id) ManualVerification.objects.get_or_create( user=user, status='approved', created_at__gte=earliest_allowed_verification_date(), defaults={'name': user.profile.name}, ) except User.DoesNotExist: failed_emails.append(email_id) err_msg = u'Tried to verify email {}, but user not found' log.error(err_msg.format(email_id)) return total_emails, failed_emails
agpl-3.0
bansallab/roundup
_214_scrape.py
1
6944
import csv from urllib.request import Request, urlopen import dateutil.parser import re from os import system from sys import argv from bs4 import BeautifulSoup from datetime import date import scrape_util default_sale, base_url, prefix = scrape_util.get_market(argv) default_sale = default_sale[0] report_path = 'market-report.php' temp_raw = scrape_util.ReportRaw(argv, prefix) sale_pattern = [ re.compile( r'(?P<name>[^,]+),' r'(?P<city>[^\d,]+),?\s+' r'(?P<head>\d+)\s*' r'(?P<cattle>.+?)[\s_]{2,}' r'(?P<weight>[\d,\.]*)\s+' r'\$(?P<price>[\d,\.]+)\s*' r'(?P<price_type>/Hd|/Cwt)?', re.IGNORECASE ), re.compile( r'(?P<name>.+?)\s{2,}' r'(?P<city>)' r'(?P<head>\d+)\s+' r'(?P<cattle>.+?)\s{2,}' r'(?P<weight>[\d,\.]*)\s+' r'\$(?P<price>[\d,\.]+)\s*' r'(?P<price_type>/Hd|/Cwt)?', re.IGNORECASE ), re.compile( r'(?P<name>[^,]+),' r'(?P<city>.+?)\s{2,}' r'(?P<head>)' r'(?P<cattle>.+?)\s{2,}' r'(?P<weight>[\d,\.]*)\s+' r'\$(?P<price>[\d,\.]+)\s*' r'(?P<price_type>/Hd|/Cwt)?', re.IGNORECASE ), ] not_cattle_pattern = re.compile(r'goat|hog|ewe|buck|lamb|kid|sow|mare', re.IGNORECASE) head_pattern = re.compile(r'([,\d]+)\s+he?a?d', re.IGNORECASE) def get_sale_head(line): """Return the total number of head sold at the sale. If present, the number is usually at the top of the market report.""" for this_line in line: match = head_pattern.search(this_line) if match: return match.group(1).replace(',','') def get_sale_date(this_report): """Return the date of the sale.""" date_string = this_report.get_text().replace('.pdf', '') sale_date = dateutil.parser.parse(date_string, fuzzy=True).date() if sale_date > date.today(): sale_date = None return sale_date def is_sale(this_line): """Determine whether a given line describes a sale of cattle.""" is_not_succinct = len(this_line.split()) > 3 has_price = '$' in this_line return has_price and is_not_succinct def get_sale(line): """Convert the input into a dictionary, with keys matching the CSV column headers in the scrape_util module. 
""" for p in sale_pattern: match = p.search(line) if match: break if not_cattle_pattern.search(match.group('cattle')): return {} sale = { 'consignor_name': match.group('name'), 'consignor_city': match.group('city'), 'cattle_head': match.group('head'), 'cattle_cattle': match.group('cattle'), 'cattle_avg_weight': match.group('weight').replace(',', '').replace('.', ''), } price = match.group('price').replace(',', '') if match.group('price_type') == '/Hd': sale['cattle_price'] = price else: sale['cattle_price_cwt'] = price sale = {k: v.strip() for k, v in sale.items() if v.strip()} return sale def write_sale(line, this_default_sale, writer): """Extract sales from a list of report lines and write them to a CSV file.""" for this_line in line: if is_sale(this_line): sale = this_default_sale.copy() sale.update(get_sale(this_line)) if sale != this_default_sale: writer.writerow(sale) def main(): # Collect individual reports into a list request = Request( base_url + report_path, headers = scrape_util.url_header, ) with urlopen(request) as io: soup = BeautifulSoup(io.read(), 'lxml') content = soup.find('div', id = 'content') report = content.find_all('a') # Locate existing CSV files archive = scrape_util.ArchiveFolder(argv, prefix) # Write a CSV file for each report not in the archive for this_report in report: if 'horse' in this_report.get_text().lower(): continue sale_date = get_sale_date(this_report) io_name = archive.new_csv(sale_date) # Stop iteration if this report is already archived if not io_name: continue # Initialize the default sale dictionary this_default_sale = default_sale.copy() this_default_sale.update({ 'sale_year': sale_date.year, 'sale_month': sale_date.month, 'sale_day': sale_date.day, }) # create temporary text file from downloaded pdf pdf_url = base_url + this_report['href'] request = Request( pdf_url, headers = scrape_util.url_header, ) with urlopen(request) as io: response = io.read() with temp_raw.open('wb') as io: io.write(response) 
system(scrape_util.pdftotext.format(str(temp_raw))) # read sale text into line list temp_txt = temp_raw.with_suffix('.txt') with temp_txt.open('r') as io: original_line = [this_line.strip() for this_line in io.readlines() if this_line.strip()] if not original_line: temp_img = temp_raw.with_suffix('.tiff') system(scrape_util.convert.format("-density 400x400", str(temp_raw), str(temp_img))) system(scrape_util.tesseract.format("-c preserve_interword_spaces=1", str(temp_img), str(temp_txt.with_suffix('')))) with temp_txt.open('r') as io: original_line = [this_line.strip() for this_line in io.readlines() if this_line.strip()] temp_raw.clean() # # Default split index set at 120 to handle Jan 22, 2015 report with one column of sale # split_index = 120 # # Look for line with two sales and the index to split the line into two columns # for this_line in original_line: # if re.search(r'([0-9,]+\.[0-9]{2}).+?([0-9,]+\.[0-9]{2})', this_line): # match = re.search(r'(/cwt|/he?a?d?)', this_line, re.IGNORECASE) # if match: # split_index = this_line.find(match.group(1)) + len(match.group()) # break # column1 = list(this_line[0:split_index].strip() for this_line in original_line) # column2 = list(this_line[split_index+1:].strip() for this_line in original_line) # line = column1 + column2 line = list(filter(bool, original_line)) if not line: continue sale_head = get_sale_head(line) this_default_sale['sale_head'] = sale_head # Open a new CSV file and write each sale with io_name.open('w', encoding='utf-8') as io: writer = csv.DictWriter(io, scrape_util.header, lineterminator='\n') writer.writeheader() write_sale(line, this_default_sale, writer) if __name__ == '__main__': main()
mit
kwrl/trondheim.kodeklubben.no
backend/wsgi/usermanagement/forms.py
2
1601
from django import forms from django.contrib.auth.models import User from django.contrib.auth.forms import UserCreationForm, AuthenticationForm from django.forms import ModelForm class RegistrationForm(UserCreationForm): email = forms.EmailField(required=True) first_name = forms.CharField(required=True) last_name = forms.CharField(required=True) def __init__(self, request=None, *args, **kwargs): super(RegistrationForm, self).__init__(*args, **kwargs) self.fields['username'].label = "Brukernavn" self.fields['password1'].label = "Passord" self.fields['password2'].label = "Gjenta passord" self.fields['email'].label = "Epost" self.fields['first_name'].label = "Fornavn" self.fields['last_name'].label = "Etternavn" self.fields['username'].help_text = "" self.fields['password2'].help_text= "" class Meta: model = User fields = ("username", "email", "password1", "password2", "first_name","last_name") def save(self, commit=True): user = super(RegistrationForm, self).save(commit=False) user.email = self.cleaned_data["email"] if commit: user.save() return user class LoginForm(AuthenticationForm): def __init__(self, request=None, *args, **kwargs): super(LoginForm, self).__init__(*args, **kwargs) self.fields['username'].label = "Brukernavn" self.fields['password'].label = "Passord" class UserEditForm(ModelForm): class Meta: model = User fields = ["first_name","last_name"]
gpl-3.0
houshengbo/nova_vmware_compute_driver
nova/scheduler/manager.py
2
11613
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright (c) 2010 OpenStack, LLC. # Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Scheduler Service """ import sys from nova.compute import rpcapi as compute_rpcapi from nova.compute import utils as compute_utils from nova.compute import vm_states import nova.context from nova import db from nova import exception from nova import manager from nova import notifications from nova.openstack.common import cfg from nova.openstack.common import excutils from nova.openstack.common import importutils from nova.openstack.common import log as logging from nova.openstack.common.notifier import api as notifier from nova import quota LOG = logging.getLogger(__name__) scheduler_driver_opt = cfg.StrOpt('scheduler_driver', default='nova.scheduler.filter_scheduler.FilterScheduler', help='Default driver to use for the scheduler') CONF = cfg.CONF CONF.register_opt(scheduler_driver_opt) QUOTAS = quota.QUOTAS class SchedulerManager(manager.Manager): """Chooses a host to run instances on.""" RPC_API_VERSION = '2.5' def __init__(self, scheduler_driver=None, *args, **kwargs): if not scheduler_driver: scheduler_driver = CONF.scheduler_driver self.driver = importutils.import_object(scheduler_driver) super(SchedulerManager, self).__init__(*args, **kwargs) def post_start_hook(self): """After we start up 
and can receive messages via RPC, tell all compute nodes to send us their capabilities. """ ctxt = nova.context.get_admin_context() compute_rpcapi.ComputeAPI().publish_service_capabilities(ctxt) def update_service_capabilities(self, context, service_name, host, capabilities): """Process a capability update from a service node.""" if not isinstance(capabilities, list): capabilities = [capabilities] for capability in capabilities: if capability is None: capability = {} self.driver.update_service_capabilities(service_name, host, capability) def create_volume(self, context, volume_id, snapshot_id, reservations=None, image_id=None): #function removed in RPC API 2.3 pass def live_migration(self, context, instance, dest, block_migration, disk_over_commit): try: return self.driver.schedule_live_migration( context, instance, dest, block_migration, disk_over_commit) except Exception as ex: with excutils.save_and_reraise_exception(): self._set_vm_state_and_notify('live_migration', {'vm_state': vm_states.ERROR}, context, ex, {}) def run_instance(self, context, request_spec, admin_password, injected_files, requested_networks, is_first_time, filter_properties): """Tries to call schedule_run_instance on the driver. Sets instance vm_state to ERROR on exceptions """ try: return self.driver.schedule_run_instance(context, request_spec, admin_password, injected_files, requested_networks, is_first_time, filter_properties) except exception.NoValidHost as ex: # don't re-raise self._set_vm_state_and_notify('run_instance', {'vm_state': vm_states.ERROR, 'task_state': None}, context, ex, request_spec) except Exception as ex: with excutils.save_and_reraise_exception(): self._set_vm_state_and_notify('run_instance', {'vm_state': vm_states.ERROR, 'task_state': None}, context, ex, request_spec) def prep_resize(self, context, image, request_spec, filter_properties, instance, instance_type, reservations): """Tries to call schedule_prep_resize on the driver. 
Sets instance vm_state to ACTIVE on NoHostFound Sets vm_state to ERROR on other exceptions """ try: kwargs = { 'context': context, 'image': image, 'request_spec': request_spec, 'filter_properties': filter_properties, 'instance': instance, 'instance_type': instance_type, 'reservations': reservations, } return self.driver.schedule_prep_resize(**kwargs) except exception.NoValidHost as ex: self._set_vm_state_and_notify('prep_resize', {'vm_state': vm_states.ACTIVE, 'task_state': None}, context, ex, request_spec) if reservations: QUOTAS.rollback(context, reservations) except Exception as ex: with excutils.save_and_reraise_exception(): self._set_vm_state_and_notify('prep_resize', {'vm_state': vm_states.ERROR, 'task_state': None}, context, ex, request_spec) if reservations: QUOTAS.rollback(context, reservations) def _set_vm_state_and_notify(self, method, updates, context, ex, request_spec): """changes VM state and notifies""" # FIXME(comstud): Re-factor this somehow. Not sure this belongs in the # scheduler manager like this. We should make this easier. # run_instance only sends a request_spec, and an instance may or may # not have been created in the API (or scheduler) already. If it was # created, there's a 'uuid' set in the instance_properties of the # request_spec. # (littleidea): I refactored this a bit, and I agree # it should be easier :) # The refactoring could go further but trying to minimize changes # for essex timeframe LOG.warning(_("Failed to schedule_%(method)s: %(ex)s") % locals()) vm_state = updates['vm_state'] properties = request_spec.get('instance_properties', {}) # NOTE(vish): We shouldn't get here unless we have a catastrophic # failure, so just set all instances to error. if uuid # is not set, instance_uuids will be set to [None], this # is solely to preserve existing behavior and can # be removed along with the 'if instance_uuid:' if we can # verify that uuid is always set. 
uuids = [properties.get('uuid')] for instance_uuid in request_spec.get('instance_uuids') or uuids: if instance_uuid: compute_utils.add_instance_fault_from_exc(context, instance_uuid, ex, sys.exc_info()) state = vm_state.upper() LOG.warning(_('Setting instance to %(state)s state.'), locals(), instance_uuid=instance_uuid) # update instance state and notify on the transition (old_ref, new_ref) = db.instance_update_and_get_original( context, instance_uuid, updates) notifications.send_update(context, old_ref, new_ref, service="scheduler") payload = dict(request_spec=request_spec, instance_properties=properties, instance_id=instance_uuid, state=vm_state, method=method, reason=ex) notifier.notify(context, notifier.publisher_id("scheduler"), 'scheduler.' + method, notifier.ERROR, payload) # NOTE (masumotok) : This method should be moved to nova.api.ec2.admin. # Based on bexar design summit discussion, # just put this here for bexar release. def show_host_resources(self, context, host): """Shows the physical/usage resource given by hosts. 
:param context: security context :param host: hostname :returns: example format is below:: {'resource':D, 'usage':{proj_id1:D, proj_id2:D}} D: {'vcpus': 3, 'memory_mb': 2048, 'local_gb': 2048, 'vcpus_used': 12, 'memory_mb_used': 10240, 'local_gb_used': 64} """ # Getting compute node info and related instances info compute_ref = db.service_get_all_compute_by_host(context, host) compute_ref = compute_ref[0] instance_refs = db.instance_get_all_by_host(context, compute_ref['host']) # Getting total available/used resource compute_ref = compute_ref['compute_node'][0] resource = {'vcpus': compute_ref['vcpus'], 'memory_mb': compute_ref['memory_mb'], 'local_gb': compute_ref['local_gb'], 'vcpus_used': compute_ref['vcpus_used'], 'memory_mb_used': compute_ref['memory_mb_used'], 'local_gb_used': compute_ref['local_gb_used']} usage = dict() if not instance_refs: return {'resource': resource, 'usage': usage} # Getting usage resource per project project_ids = [i['project_id'] for i in instance_refs] project_ids = list(set(project_ids)) for project_id in project_ids: vcpus = [i['vcpus'] for i in instance_refs if i['project_id'] == project_id] mem = [i['memory_mb'] for i in instance_refs if i['project_id'] == project_id] root = [i['root_gb'] for i in instance_refs if i['project_id'] == project_id] ephemeral = [i['ephemeral_gb'] for i in instance_refs if i['project_id'] == project_id] usage[project_id] = {'vcpus': sum(vcpus), 'memory_mb': sum(mem), 'root_gb': sum(root), 'ephemeral_gb': sum(ephemeral)} return {'resource': resource, 'usage': usage} @manager.periodic_task def _expire_reservations(self, context): QUOTAS.expire(context) def get_backdoor_port(self, context): return self.backdoor_port
apache-2.0
JavaRabbit/CS496_capstone
language/api/analyze.py
8
2746
#!/usr/bin/env python # Copyright 2016 Google, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Analyzes text using the Google Cloud Natural Language API.""" import argparse import json import sys import googleapiclient.discovery def get_native_encoding_type(): """Returns the encoding type that matches Python's native strings.""" if sys.maxunicode == 65535: return 'UTF16' else: return 'UTF32' def analyze_entities(text, encoding='UTF32'): body = { 'document': { 'type': 'PLAIN_TEXT', 'content': text, }, 'encoding_type': encoding, } service = googleapiclient.discovery.build('language', 'v1') request = service.documents().analyzeEntities(body=body) response = request.execute() return response def analyze_sentiment(text, encoding='UTF32'): body = { 'document': { 'type': 'PLAIN_TEXT', 'content': text, }, 'encoding_type': encoding } service = googleapiclient.discovery.build('language', 'v1') request = service.documents().analyzeSentiment(body=body) response = request.execute() return response def analyze_syntax(text, encoding='UTF32'): body = { 'document': { 'type': 'PLAIN_TEXT', 'content': text, }, 'encoding_type': encoding } service = googleapiclient.discovery.build('language', 'v1') request = service.documents().analyzeSyntax(body=body) response = request.execute() return response if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('command', choices=[ 'entities', 'sentiment', 
'syntax']) parser.add_argument('text') args = parser.parse_args() if args.command == 'entities': result = analyze_entities(args.text, get_native_encoding_type()) elif args.command == 'sentiment': result = analyze_sentiment(args.text, get_native_encoding_type()) elif args.command == 'syntax': result = analyze_syntax(args.text, get_native_encoding_type()) print(json.dumps(result, indent=2))
apache-2.0
cedi4155476/QGIS
python/ext-libs/pygments/formatters/svg.py
362
5867
# -*- coding: utf-8 -*- """ pygments.formatters.svg ~~~~~~~~~~~~~~~~~~~~~~~ Formatter for SVG output. :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from pygments.formatter import Formatter from pygments.util import get_bool_opt, get_int_opt __all__ = ['SvgFormatter'] def escape_html(text): """Escape &, <, > as well as single and double quotes for HTML.""" return text.replace('&', '&amp;'). \ replace('<', '&lt;'). \ replace('>', '&gt;'). \ replace('"', '&quot;'). \ replace("'", '&#39;') class2style = {} class SvgFormatter(Formatter): """ Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ``<text>`` element with explicit ``x`` and ``y`` coordinates containing ``<tspan>`` elements with the individual token styles. By default, this formatter outputs a full SVG document including doctype declaration and the ``<svg>`` root element. *New in Pygments 0.9.* Additional options accepted: `nowrap` Don't wrap the SVG ``<text>`` elements in ``<svg><g>`` elements and don't add a XML declaration and a doctype. If true, the `fontfamily` and `fontsize` options are ignored. Defaults to ``False``. `fontfamily` The value to give the wrapping ``<g>`` element's ``font-family`` attribute, defaults to ``"monospace"``. `fontsize` The value to give the wrapping ``<g>`` element's ``font-size`` attribute, defaults to ``"14px"``. `xoffset` Starting offset in X direction, defaults to ``0``. `yoffset` Starting offset in Y direction, defaults to the font size if it is given in pixels, or ``20`` else. (This is necessary since text coordinates refer to the text baseline, not the top edge.) `ystep` Offset to add to the Y coordinate for each subsequent line. This should roughly be the text size plus 5. It defaults to that value if the text size is given in pixels, or ``25`` else. `spacehack` Convert spaces in the source to ``&#160;``, which are non-breaking spaces. 
SVG provides the ``xml:space`` attribute to control how whitespace inside tags is handled, in theory, the ``preserve`` value could be used to keep all whitespace as-is. However, many current SVG viewers don't obey that rule, so this option is provided as a workaround and defaults to ``True``. """ name = 'SVG' aliases = ['svg'] filenames = ['*.svg'] def __init__(self, **options): # XXX outencoding Formatter.__init__(self, **options) self.nowrap = get_bool_opt(options, 'nowrap', False) self.fontfamily = options.get('fontfamily', 'monospace') self.fontsize = options.get('fontsize', '14px') self.xoffset = get_int_opt(options, 'xoffset', 0) fs = self.fontsize.strip() if fs.endswith('px'): fs = fs[:-2].strip() try: int_fs = int(fs) except: int_fs = 20 self.yoffset = get_int_opt(options, 'yoffset', int_fs) self.ystep = get_int_opt(options, 'ystep', int_fs + 5) self.spacehack = get_bool_opt(options, 'spacehack', True) self._stylecache = {} def format_unencoded(self, tokensource, outfile): """ Format ``tokensource``, an iterable of ``(tokentype, tokenstring)`` tuples and write it into ``outfile``. For our implementation we put all lines in their own 'line group'. 
""" x = self.xoffset y = self.yoffset if not self.nowrap: if self.encoding: outfile.write('<?xml version="1.0" encoding="%s"?>\n' % self.encoding) else: outfile.write('<?xml version="1.0"?>\n') outfile.write('<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" ' '"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/' 'svg10.dtd">\n') outfile.write('<svg xmlns="http://www.w3.org/2000/svg">\n') outfile.write('<g font-family="%s" font-size="%s">\n' % (self.fontfamily, self.fontsize)) outfile.write('<text x="%s" y="%s" xml:space="preserve">' % (x, y)) for ttype, value in tokensource: style = self._get_style(ttype) tspan = style and '<tspan' + style + '>' or '' tspanend = tspan and '</tspan>' or '' value = escape_html(value) if self.spacehack: value = value.expandtabs().replace(' ', '&#160;') parts = value.split('\n') for part in parts[:-1]: outfile.write(tspan + part + tspanend) y += self.ystep outfile.write('</text>\n<text x="%s" y="%s" ' 'xml:space="preserve">' % (x, y)) outfile.write(tspan + parts[-1] + tspanend) outfile.write('</text>') if not self.nowrap: outfile.write('</g></svg>\n') def _get_style(self, tokentype): if tokentype in self._stylecache: return self._stylecache[tokentype] otokentype = tokentype while not self.style.styles_token(tokentype): tokentype = tokentype.parent value = self.style.style_for_token(tokentype) result = '' if value['color']: result = ' fill="#' + value['color'] + '"' if value['bold']: result += ' font-weight="bold"' if value['italic']: result += ' font-style="italic"' self._stylecache[otokentype] = result return result
gpl-2.0
bbc/kamaelia
Sketches/AB/backup/Bookmarks/LiveAnalysis.py
3
37390
#! /usr/bin/python ''' Analyses saved data in DB to give something more useful. Saves to output DB ready for display in web interface Any looking at natural language engines / subtitles should be done here or in components following this Need to ensure one rogue user can't cause a trend - things must be mentioned by several ''' # Having added this as a component, the printed output is a bit confusing, so 'Analysis component: ' has been added to everything. from datetime import datetime from datetime import timedelta import math import re import time from Axon.Component import component from Axon.Ipc import producerFinished from Axon.Ipc import shutdownMicroprocess from Axon.ThreadedComponent import threadedcomponent import MySQLdb import cjson import nltk from nltk import FreqDist class LiveAnalysis(threadedcomponent): Inboxes = { "inbox" : "Unused", "nltk" : "Receives data back from the NLTK component", "nltkfinal" : "Receives data back from the final NLTK analysis component", "control" : "" } Outboxes = { "outbox" : "Unused", "nltk" : "Sends data out to the NLTK component", "nltkfinal" : "Sends data out to the final NLTK analysis component", "signal" : "" } def __init__(self, dbuser, dbpass): super(LiveAnalysis, self).__init__() self.dbuser = dbuser self.dbpass = dbpass # List of 'common' words so they can be labelled as such when the data is stored self.exclusions = ["a","able","about","across","after","all","almost","also","am",\ "among","an","and","any","are","as","at","be","because","been","but",\ "by","can","cannot","could","dear","did","do","does","either","else",\ "ever","every","for","from","get","got","had","has","have","he","her",\ "hers","him","his","how","however","i","if","in","into","is","it",\ "its","just","least","let","like","likely","may","me","might","most",\ "must","my","neither","no","nor","not","of","off","often","on","only",\ "or","other","our","own","rather","said","say","says","she","should",\ 
"since","so","some","than","that","the","their","them","then","there",\ "these","they","this","tis","to","too","twas","up","us","wants","was","we",\ "were","what","when","where","which","while","who","whom","why","will",\ "with","would","yet","you","your","via","rt"] def dbConnect(self,dbuser,dbpass): db = MySQLdb.connect(user=dbuser,passwd=dbpass,db="twitter_bookmarks",use_unicode=True,charset="utf8") cursor = db.cursor() return cursor def finished(self): while self.dataReady("control"): msg = self.recv("control") if isinstance(msg, producerFinished) or isinstance(msg, shutdownMicroprocess): self.send(msg, "signal") return True return False def main(self): # Calculate running total and mean etc cursor = self.dbConnect(self.dbuser,self.dbpass) while not self.finished(): # The below does LIVE and FINAL analysis - do NOT run DataAnalyser at the same time print "Analysis component: Checking for new data..." # Stage 1: Live analysis - could do with a better way to do the first query (indexed field 'analsed' to speed up for now) # Could move this into the main app to take a copy of tweets on arrival, but would rather solve separately if poss cursor.execute("""SELECT tid,pid,timestamp,text,tweet_id,programme_position FROM rawdata WHERE analysed = 0 ORDER BY tid LIMIT 5000""") data = cursor.fetchall() # Cycle through all the as yet unanalysed tweets for result in data: tid = result[0] pid = result[1] tweettime = result[2] # Timestamp based on the tweet's created_at field tweettext = result[3] tweetid = result[4] # This is the real tweet ID, tid just makes a unique identifier as each tweet can be stored against several pids progpos = result[5] # Position through the programme that the tweet was made dbtime = datetime.utcfromtimestamp(tweettime) # Each tweet will be grouped into chunks of one minute to make display better, so set the seconds to zero # This particular time is only used for console display now as a more accurate one calculated from programme position is found 
later dbtime = dbtime.replace(second=0) print "Analysis component: Analysing new tweet for pid", pid, "(" + str(dbtime) + "):" print "Analysis component: '" + tweettext + "'" cursor.execute("""SELECT duration FROM programmes_unique WHERE pid = %s""",(pid)) progdata = cursor.fetchone() duration = progdata[0] cursor.execute("""SELECT totaltweets,meantweets,mediantweets,modetweets,stdevtweets,timediff,timestamp,utcoffset FROM programmes WHERE pid = %s ORDER BY timestamp DESC""",(pid)) progdata2 = cursor.fetchone() totaltweets = progdata2[0] # Increment the total tweets recorded for this programme's broadcast totaltweets += 1 meantweets = progdata2[1] mediantweets = progdata2[2] modetweets = progdata2[3] stdevtweets = progdata2[4] timediff = progdata2[5] timestamp = progdata2[6] utcoffset = progdata2[7] # Need to work out the timestamp to assign to the entry in analysed data progstart = timestamp - timediff progmins = int(progpos / 60) analysedstamp = int(progstart + (progmins * 60)) # Ensure that this tweet occurs within the length of the programme, otherwise for the purposes of this program it's useless if progpos > 0 and progpos <= duration: cursor.execute("""SELECT did,totaltweets,wordfreqexpected,wordfrequnexpected FROM analyseddata WHERE pid = %s AND timestamp = %s""",(pid,analysedstamp)) analyseddata = cursor.fetchone() # Just in case of a missing raw json object (ie. 
programme terminated before it was stored - allow it to be skipped if not found after 30 secs) failcounter = 0 # Pass this tweet to the NLTK analysis component self.send([pid,tweetid],"nltk") while not self.dataReady("nltk"): # if failcounter >= 3000: # nltkdata = list() # break time.sleep(0.01) # failcounter += 1 #if failcounter < 3000: # Receive back a list of words and their frequency for this tweet, including whether or not they are common, an entity etc if 1: nltkdata = self.recv("nltk") if analyseddata == None: # No tweets yet recorded for this minute minutetweets = 1 cursor.execute("""INSERT INTO analyseddata (pid,totaltweets,timestamp) VALUES (%s,%s,%s)""", (pid,minutetweets,analysedstamp)) for word in nltkdata: # Check if we're storing a word or phrase here if nltkdata[word][0] == 1: cursor.execute("""INSERT INTO wordanalysis (pid,timestamp,phrase,count,is_keyword,is_entity,is_common) VALUES (%s,%s,%s,%s,%s,%s,%s)""", (pid,analysedstamp,word,nltkdata[word][1],nltkdata[word][2],nltkdata[word][3],nltkdata[word][4])) else: cursor.execute("""INSERT INTO wordanalysis (pid,timestamp,word,count,is_keyword,is_entity,is_common) VALUES (%s,%s,%s,%s,%s,%s,%s)""", (pid,analysedstamp,word,nltkdata[word][1],nltkdata[word][2],nltkdata[word][3],nltkdata[word][4])) else: did = analyseddata[0] minutetweets = analyseddata[1] # Get current number of tweets for this minute minutetweets += 1 # Add one to it for this tweet cursor.execute("""UPDATE analyseddata SET totaltweets = %s WHERE did = %s""",(minutetweets,did)) for word in nltkdata: # Check if we're storing a word or phrase if nltkdata[word][0] == 1: cursor.execute("""SELECT wid,count FROM wordanalysis WHERE pid = %s AND timestamp = %s AND phrase LIKE %s""",(pid,analysedstamp,word)) # Check if this phrase has already been stored for this minute - if so, increment the count wordcheck = cursor.fetchone() if wordcheck == None: cursor.execute("""INSERT INTO wordanalysis 
(pid,timestamp,phrase,count,is_keyword,is_entity,is_common) VALUES (%s,%s,%s,%s,%s,%s,%s)""", (pid,analysedstamp,word,nltkdata[word][1],nltkdata[word][2],nltkdata[word][3],nltkdata[word][4])) else: cursor.execute("""UPDATE wordanalysis SET count = %s WHERE wid = %s""",(nltkdata[word][1] + wordcheck[1],wordcheck[0])) else: cursor.execute("""SELECT wid,count FROM wordanalysis WHERE pid = %s AND timestamp = %s AND word LIKE %s""",(pid,analysedstamp,word)) # Check if this word has already been stored for this minute - if so, increment the count wordcheck = cursor.fetchone() if wordcheck == None: cursor.execute("""INSERT INTO wordanalysis (pid,timestamp,word,count,is_keyword,is_entity,is_common) VALUES (%s,%s,%s,%s,%s,%s,%s)""", (pid,analysedstamp,word,nltkdata[word][1],nltkdata[word][2],nltkdata[word][3],nltkdata[word][4])) else: cursor.execute("""UPDATE wordanalysis SET count = %s WHERE wid = %s""",(nltkdata[word][1] + wordcheck[1],wordcheck[0])) # Averages / stdev are calculated roughly based on the programme's running time at this point progdate = datetime.utcfromtimestamp(timestamp) + timedelta(seconds=utcoffset) actualstart = progdate - timedelta(seconds=timediff) actualtweettime = datetime.utcfromtimestamp(tweettime + utcoffset) # Calculate how far through the programme this tweet occurred runningtime = actualtweettime - actualstart runningtime = runningtime.seconds if runningtime < 0: runningtime = 0 else: runningtime = float(runningtime) / 60 try: meantweets = totaltweets / runningtime except ZeroDivisionError, e: meantweets = 0 cursor.execute("""SELECT totaltweets FROM analyseddata WHERE pid = %s AND timestamp >= %s AND timestamp < %s""",(pid,progstart,analysedstamp+duration)) analyseddata = cursor.fetchall() runningtime = int(runningtime) tweetlist = list() for result in analyseddata: totaltweetsmin = result[0] # Create a list of each minute and the total tweets for that minute in the programme tweetlist.append(int(totaltweetsmin)) # Ensure tweetlist has 
enough entries # If a minute has no tweets, it won't have a database record, so this has to be added if len(tweetlist) < runningtime: additions = runningtime - len(tweetlist) while additions > 0: tweetlist.append(0) additions -= 1 # Order by programme position 0,1,2, mins etc tweetlist.sort() mediantweets = tweetlist[int(len(tweetlist)/2)] modes = dict() stdevlist = list() for tweet in tweetlist: modes[tweet] = tweetlist.count(tweet) stdevlist.append((tweet - meantweets)*(tweet - meantweets)) modeitems = [[v, k] for k, v in modes.items()] modeitems.sort(reverse=True) modetweets = int(modeitems[0][1]) stdevtweets = 0 for val in stdevlist: stdevtweets += val try: stdevtweets = math.sqrt(stdevtweets / runningtime) except ZeroDivisionError, e: stdevtweets = 0 # Finished analysis - update DB cursor.execute("""UPDATE programmes SET totaltweets = %s, meantweets = %s, mediantweets = %s, modetweets = %s, stdevtweets = %s WHERE pid = %s AND timestamp = %s""",(totaltweets,meantweets,mediantweets,modetweets,stdevtweets,pid,timestamp)) else: print "Analysis component: Skipping tweet - falls outside the programme's running time" # Mark the tweet as analysed cursor.execute("""UPDATE rawdata SET analysed = 1 WHERE tid = %s""",(tid)) print "Analysis component: Done!" # Stage 2: If all raw tweets analysed and imported = 1 (all data for this programme stored and programme finished), finalise the analysis - could do bookmark identification here too? 
cursor.execute("""SELECT pid,totaltweets,meantweets,mediantweets,modetweets,stdevtweets,timestamp,timediff FROM programmes WHERE imported = 1 AND analysed = 0 LIMIT 5000""") data = cursor.fetchall() # Cycle through each programme that's ready for final analysis for result in data: pid = result[0] cursor.execute("""SELECT duration,title FROM programmes_unique WHERE pid = %s""",(pid)) data2 = cursor.fetchone() duration = data2[0] totaltweets = result[1] meantweets = result[2] mediantweets = result[3] modetweets = result[4] stdevtweets = result[5] title = data2[1] timestamp = result[6] timediff = result[7] # Cycle through checking if all tweets for this programme have been analysed - if so finalise the stats cursor.execute("""SELECT tid FROM rawdata WHERE analysed = 0 AND pid = %s""", (pid)) if cursor.fetchone() == None: # OK to finalise stats here print "Analysis component: Finalising stats for pid:", pid, "(" + title + ")" meantweets = float(totaltweets) / (duration / 60) # Mean tweets per minute cursor.execute("""SELECT totaltweets FROM analyseddata WHERE pid = %s AND timestamp >= %s AND timestamp < %s""",(pid,timestamp-timediff,timestamp+duration-timediff)) analyseddata = cursor.fetchall() runningtime = duration / 60 tweetlist = list() for result in analyseddata: totaltweetsmin = result[0] tweetlist.append(int(totaltweetsmin)) # Ensure tweetlist has enough entries - as above, if no tweets are recorded for a minute it won't be present in the DB if len(tweetlist) < runningtime: additions = runningtime - len(tweetlist) while additions > 0: tweetlist.append(0) additions -= 1 tweetlist.sort() mediantweets = tweetlist[int(len(tweetlist)/2)] modes = dict() stdevlist = list() for tweet in tweetlist: modes[tweet] = tweetlist.count(tweet) stdevlist.append((tweet - meantweets)*(tweet - meantweets)) modeitems = [[v, k] for k, v in modes.items()] modeitems.sort(reverse=True) modetweets = int(modeitems[0][1]) stdevtweets = 0 for val in stdevlist: stdevtweets += val try: 
stdevtweets = math.sqrt(stdevtweets / runningtime) except ZeroDivisionError, e: stdevtweets = 0 if 1: # This data is purely a readout to the terminal at the moment associated with word and phrase frequency, and retweets sqltimestamp1 = timestamp - timediff sqltimestamp2 = timestamp + duration - timediff cursor.execute("""SELECT tweet_id FROM rawdata WHERE pid = %s AND timestamp >= %s AND timestamp < %s""", (pid,sqltimestamp1,sqltimestamp2)) rawtweetids = cursor.fetchall() tweetids = list() for tweet in rawtweetids: tweetids.append(tweet[0]) if len(tweetids) > 0: # Just in case of a missing raw json object (ie. programme terminated before it was stored - allow it to be skipped if not found after 30 secs) failcounter = 0 self.send([pid,tweetids],"nltkfinal") while not self.dataReady("nltkfinal"): # if failcounter >= 3000: # nltkdata = list() # break time.sleep(0.01) # failcounter += 1 #if failcounter < 3000: if 1: nltkdata = self.recv("nltkfinal") cursor.execute("""UPDATE programmes SET meantweets = %s, mediantweets = %s, modetweets = %s, stdevtweets = %s, analysed = 1 WHERE pid = %s AND timestamp = %s""",(meantweets,mediantweets,modetweets,stdevtweets,pid,timestamp)) print "Analysis component: Done!" # Sleep here until more data is available to analyse print "Analysis component: Sleeping for 10 seconds..." 
time.sleep(10) class LiveAnalysisNLTK(component): Inboxes = { "inbox" : "Receives a tweet ID and its related PID for NLTK analysis [pid,tweetid]", "tweetfixer" : "Received data back from the tweet fixing components (tweet json)", "control" : "" } Outboxes = { "outbox" : "Sends out analysed words/phrases in the format {'word/phrase' : [is_phrase,count,is_keyword,is_entity,is_common]}", "tweetfixer" : "Sends out data to the tweet fixing components (tweet json)", "signal" : "" } def __init__(self, dbuser, dbpass): super(LiveAnalysisNLTK, self).__init__() self.dbuser = dbuser self.dbpass = dbpass self.exclusions = ["a","able","about","across","after","all","almost","also","am",\ "among","an","and","any","are","as","at","be","because","been","but",\ "by","can","cannot","could","dear","did","do","does","either","else",\ "ever","every","for","from","get","got","had","has","have","he","her",\ "hers","him","his","how","however","i","if","in","into","is","it",\ "its","just","least","let","like","likely","may","me","might","most",\ "must","my","neither","no","nor","not","of","off","often","on","only",\ "or","other","our","own","rather","said","say","says","she","should",\ "since","so","some","than","that","the","their","them","then","there",\ "these","they","this","tis","to","too","twas","up","us","wants","was","we",\ "were","what","when","where","which","while","who","whom","why","will",\ "with","would","yet","you","your","via","rt"] def dbConnect(self,dbuser,dbpass): db = MySQLdb.connect(user=dbuser,passwd=dbpass,db="twitter_bookmarks",use_unicode=True,charset="utf8") cursor = db.cursor() return cursor def finished(self): while self.dataReady("control"): msg = self.recv("control") if isinstance(msg, producerFinished) or isinstance(msg, shutdownMicroprocess): self.send(msg, "signal") return True return False def spellingFixer(self,text): # This function attempts to normalise some common Twitter mis-spellings and accentuations # Fix ahahahahahaha and hahahahaha # Doesn't 
catch bahahahaha TODO # Also seem to be missing HAHAHAHA - case issue? TODO # Some sort of issue with Nooooooo too - it's just getting erased? #TODO text = re.sub("\S{0,}(ha){2,}\S{0,}","haha",text,re.I) # fix looooooool and haaaaaaaaaaa - fails for some words at the mo, for example welllll will be converted to wel, and hmmm to hm etc # Perhaps we could define both 'lol' and 'lool' as words, then avoid the above problem by reducing repeats to a max of 2 x = re.findall(r'((\D)\2*)',text,re.I) for entry in sorted(x,reverse=True): if len(entry[0])>2: text = text.replace(entry[0],entry[1]).strip() if len(text) == 1: text += text return text def main(self): cursor = self.dbConnect(self.dbuser,self.dbpass) while not self.finished(): if self.dataReady("inbox"): data = self.recv("inbox") pid = data[0] tweetid = data[1] # There is a possibility at this point that the tweet won't yet be in the DB. # We'll have to stall for now if that happens but eventually it should be ensured tweets will be in the DB first # Issue #TODO - Words that appear as part of a keyword but not the whole thing won't get marked as being a keyword (e.g. 
Blue Peter - two diff words) # Need to check for each word if it forms part of a phrase which is also a keyword # If so, don't count is as a word, count the whole thing as a phrase and remember not to count it more than once # May actually store phrases AS WELL AS keywords tweetdata = None while tweetdata == None: # Retrieve the tweet json corresponding to the ID receieved cursor.execute("""SELECT tweet_json FROM rawtweets WHERE tweet_id = %s""",(tweetid)) tweetdata = cursor.fetchone() if tweetdata == None: self.pause() yield 1 tweetjson = cjson.decode(tweetdata[0]) keywords = dict() # Find the keywords relating to the PID received cursor.execute("""SELECT keyword,type FROM keywords WHERE pid = %s""",(pid)) keyworddata = cursor.fetchall() for word in keyworddata: wordname = word[0].lower() keywords[wordname] = word[1] # Send the tweet off to have retweets fixed, links analysed etc self.send(tweetjson,"tweetfixer") while not self.dataReady("tweetfixer"): self.pause() yield 1 tweetjson = self.recv("tweetfixer") # Format: {"word" : [is_phrase,count,is_keyword,is_entity,is_common]} wordfreqdata = dict() for item in tweetjson['entities']['user_mentions']: if wordfreqdata.has_key("@" + item['screen_name']): wordfreqdata["@" + item['screen_name']][1] += 1 else: if item['screen_name'].lower() in keywords or "@" + item['screen_name'].lower() in keywords: wordfreqdata["@" + item['screen_name']] = [0,1,1,1,0] else: wordfreqdata["@" + item['screen_name']] = [0,1,0,1,0] for item in tweetjson['entities']['urls']: if wordfreqdata.has_key(item['url']): wordfreqdata[item['url']][1] += 1 else: wordfreqdata[item['url']] = [0,1,0,1,0] for item in tweetjson['entities']['hashtags']: if wordfreqdata.has_key("#" + item['text']): wordfreqdata["#" + item['text']][1] += 1 else: if item['text'].lower() in keywords or "#" + item['text'].lower() in keywords: wordfreqdata["#" + item['text']] = [0,1,1,1,0] else: wordfreqdata["#" + item['text']] = [0,1,0,1,0] tweettext = 
self.spellingFixer(tweetjson['filtered_text']).split() for word in tweettext: if word[0] in """!"#$%&()*+,-./:;<=>?@~[\\]?_'`{|}?""" and not (len(word) <= 3 and (word[0] == ":" or word[0] == ";")): word = word[1:] if word != "": # Done twice to capture things like 'this is a "quote".' if len(word) >= 2: if word[len(word)-1] in """!"#$%&()*+,-./:;<=>?@~[\\]?_'`{|}?""" and word[len(word)-2:len(word)] != "s'" and not (len(word) <= 3 and (word[0] == ":" or word[0] == ";")): word = word[:len(word)-1] if word[len(word)-1] in """!"#$%&()*+,-./:;<=>?@~[\\]?_'`{|}?""" and word[len(word)-2:len(word)] != "s'" and not (len(word) <= 3 and (word[0] == ":" or word[0] == ";")): word = word[:len(word)-1] elif word[len(word)-1] in """!"#$%&()*+,-./:;<=>?@~[\\]?_'`{|}?""" and not (len(word) <= 3 and (word[0] == ":" or word[0] == ";")): word = word[:len(word)-1] if word != "": if word[len(word)-1] in """!"#$%&()*+,-./:;<=>?@~[\\]?_'`{|}?""" and not (len(word) <= 3 and (word[0] == ":" or word[0] == ";")): word = word[:len(word)-1] if word != "": if word in """!"#$%&()*+,-./:;<=>?@~[\\]?_'`{|}?""": word = "" if word != "": if wordfreqdata.has_key(word): wordfreqdata[word][1] += 1 else: if word.lower() in self.exclusions: exclude = 1 else: exclude = 0 for row in keywords: if word.lower() in row: wordfreqdata[word] = [0,1,1,0,exclude] break else: wordfreqdata[word] = [0,1,0,0,exclude] self.send(wordfreqdata,"outbox") self.pause() yield 1 class FinalAnalysisNLTK(component): Inboxes = { "inbox" : "Receives a list of tweet IDs and their related PID for NLTK analysis [pid,[tweetid,tweetid]]", "tweetfixer" : "Received data back from the tweet fixing components (tweet json)", "control" : "" } Outboxes = { "outbox" : "urrently sends nothing out, just prints to screen - needs work", #TODO "tweetfixer" : "Sends out data to the tweet fixing components (tweet json)", "signal" : "" } def __init__(self, dbuser, dbpass): super(FinalAnalysisNLTK, self).__init__() self.dbuser = dbuser self.dbpass = 
dbpass self.exclusions = ["a","able","about","across","after","all","almost","also","am",\ "among","an","and","any","are","as","at","be","because","been","but",\ "by","can","cannot","could","dear","did","do","does","either","else",\ "ever","every","for","from","get","got","had","has","have","he","her",\ "hers","him","his","how","however","i","if","in","into","is","it",\ "its","just","least","let","like","likely","may","me","might","most",\ "must","my","neither","no","nor","not","of","off","often","on","only",\ "or","other","our","own","rather","said","say","says","she","should",\ "since","so","some","than","that","the","their","them","then","there",\ "these","they","this","tis","to","too","twas","up","us","wants","was","we",\ "were","what","when","where","which","while","who","whom","why","will",\ "with","would","yet","you","your","via","rt"] def dbConnect(self,dbuser,dbpass): db = MySQLdb.connect(user=dbuser,passwd=dbpass,db="twitter_bookmarks",use_unicode=True,charset="utf8") cursor = db.cursor() return cursor def finished(self): while self.dataReady("control"): msg = self.recv("control") if isinstance(msg, producerFinished) or isinstance(msg, shutdownMicroprocess): self.send(msg, "signal") return True return False def spellingFixer(self,text): # Fix ahahahahahaha and hahahahaha text = re.sub("\S{0,}(ha){2,}\S{0,}","haha",text,re.I) # E-mail filter text = re.sub("\S{1,}@\S{1,}.\S{1,}","",text,re.I) # fix looooooool and haaaaaaaaaaa - fails for some words at the mo, for example welllll will be converted to wel, and hmmm to hm etc # Perhaps we could define both 'lol' and 'lool' as words, then avoid the above problem by reducing repeats to a max of 2 x = re.findall(r'((\D)\2*)',text,re.I) for entry in sorted(x,reverse=True): if len(entry[0])>2: text = text.replace(entry[0],entry[1]).strip() if len(text) == 1: text += text return text def main(self): # Calculate running total and mean etc cursor = self.dbConnect(self.dbuser,self.dbpass) while not self.finished(): if 
self.dataReady("inbox"): data = self.recv("inbox") pid = data[0] tweetids = data[1] retweetcache = dict() # Issue #TODO - Words that appear as part of a keyword but not the whole thing won't get marked as being a keyword (e.g. Blue Peter - two diff words) # Need to check for each word if it forms part of a phrase which is also a keyword # If so, don't count is as a word, count the whole thing as a phrase and remember not to count it more than once # May actually store phrases AS WELL AS keywords keywords = dict() # Find keywords for this PID cursor.execute("""SELECT keyword,type FROM keywords WHERE pid = %s""",(pid)) keyworddata = cursor.fetchall() for word in keyworddata: wordname = word[0].lower() if "^" in wordname: wordbits = wordname.split("^") wordname = wordbits[0] wordbits = wordname.split() # Only looking at phrases here (more than one word) if len(wordbits) > 1: keywords[wordname] = word[1] filteredtext = list() for tweetid in tweetids: # Cycle through each tweet and find its JSON tweetdata = None while tweetdata == None: cursor.execute("""SELECT tweet_json FROM rawtweets WHERE tweet_id = %s""",(tweetid)) tweetdata = cursor.fetchone() if tweetdata != None: tweetjson = cjson.decode(tweetdata[0]) self.send(tweetjson,"tweetfixer") while not self.dataReady("tweetfixer"): self.pause() yield 1 tweetjson = self.recv("tweetfixer") # Identify retweets if tweetjson.has_key('retweeted_status'): if tweetjson['retweeted_status'].has_key('id'): statusid = tweetjson['retweeted_status']['id'] if retweetcache.has_key(statusid): retweetcache[statusid][0] += 1 else: retweetcache[statusid] = [1,tweetjson['retweeted_status']['text']] tweettext = self.spellingFixer(tweetjson['filtered_text']).split() for word in tweettext: if word[0] in """!"#$%&()*+,-./:;<=>?@~[\\]?_'`{|}?""" and not (len(word) <= 3 and (word[0] == ":" or word[0] == ";")): word = word[1:] if word != "": # Done twice to capture things like 'this is a "quote".' 
if len(word) >= 2: if word[len(word)-1] in """!"#$%&()*+,-./:;<=>?@~[\\]?_'`{|}?""" and word[len(word)-2:len(word)] != "s'" and not (len(word) <= 3 and (word[0] == ":" or word[0] == ";")): word = word[:len(word)-1] if word[len(word)-1] in """!"#$%&()*+,-./:;<=>?@~[\\]?_'`{|}?""" and word[len(word)-2:len(word)] != "s'" and not (len(word) <= 3 and (word[0] == ":" or word[0] == ";")): word = word[:len(word)-1] elif word[len(word)-1] in """!"#$%&()*+,-./:;<=>?@~[\\]?_'`{|}?""" and not (len(word) <= 3 and (word[0] == ":" or word[0] == ";")): word = word[:len(word)-1] if word != "": if word[len(word)-1] in """!"#$%&()*+,-./:;<=>?@~[\\]?_'`{|}?""" and not (len(word) <= 3 and (word[0] == ":" or word[0] == ";")): word = word[:len(word)-1] if word != "": if word in """!"#$%&()*+,-./:;<=>?@~[\\]?_'`{|}?""": word = "" if word != "": filteredtext.append(word) # Format: {"word" : [is_phrase,count,is_keyword,is_entity,is_common]} # Need to change this for retweets as they should include all the text content if truncated - need some clever merging FIXME TODO wordfreqdata = dict() # Look for phrases - very limited bigram_fd = FreqDist(nltk.bigrams(filteredtext)) print bigram_fd for entry in bigram_fd: if entry[0] not in """!"#$%&()*+,-./:;<=>?@~[\\]?_'`{|}?""" and entry[1] not in """!"#$%&()*+,-./:;<=>?@~[\\]?_'`{|}?""": if entry[0] not in self.exclusions and entry[1] not in self.exclusions: for word in keywords: print word if entry[0] in word and entry[1] in word: print "Keyword Match! " + str([entry[0],entry[1]]) break else: print [entry[0],entry[1]] print "Retweet data: " + str(retweetcache) self.send(None,"outbox") self.pause() yield 1
apache-2.0
Alecto3-D/testable-greeter
bb-master/sandbox/lib/python3.5/site-packages/buildbot_worker/commands/transfer.py
3
12816
# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function import os import tarfile import tempfile from twisted.internet import defer from twisted.python import log from buildbot_worker.commands.base import Command class TransferCommand(Command): def finished(self, res): if self.debug: log.msg('finished: stderr=%r, rc=%r' % (self.stderr, self.rc)) # don't use self.sendStatus here, since we may no longer be running # if we have been interrupted upd = {'rc': self.rc} if self.stderr: upd['stderr'] = self.stderr self.builder.sendUpdate(upd) return res def interrupt(self): if self.debug: log.msg('interrupted') if self.interrupted: return self.rc = 1 self.interrupted = True # now we wait for the next trip around the loop. It abandon the file # when it sees self.interrupted set. 
class WorkerFileUploadCommand(TransferCommand):

    """
    Upload a file from worker to build master

    Arguments:

        - ['workdir']:   base directory to use
        - ['workersrc']: name of the worker-side file to read from
        - ['writer']:    RemoteReference to a
                         buildbot_worker.protocols.base.FileWriterProxy object
        - ['maxsize']:   max size (in bytes) of file to write (None = no limit)
        - ['blocksize']: max size for each data block
        - ['keepstamp']: whether to preserve file modified and accessed times
    """
    debug = False
    requiredArgs = ['workdir', 'workersrc', 'writer', 'blocksize']

    def setup(self, args):
        """Stash the command arguments on the instance and reset result state."""
        self.workdir = args['workdir']
        self.filename = args['workersrc']
        self.writer = args['writer']
        self.remaining = args['maxsize']
        self.blocksize = args['blocksize']
        self.keepstamp = args.get('keepstamp', False)
        self.stderr = None
        self.rc = 0
        self.fp = None

    def start(self):
        """Open the source file and drive the chunked upload loop.

        Returns a Deferred that fires once the remote writer has been
        closed (and, with keepstamp, its utime set).  An open failure is
        not fatal here: fp stays None and _writeBlock ends immediately,
        with rc/stderr already recording the error.
        """
        if self.debug:
            log.msg('WorkerFileUploadCommand started')

        # Open file
        self.path = os.path.join(self.builder.basedir,
                                 self.workdir,
                                 os.path.expanduser(self.filename))
        accessed_modified = None
        try:
            if self.keepstamp:
                # capture (atime, mtime) before reading, to replay on the master
                accessed_modified = (os.path.getatime(self.path),
                                     os.path.getmtime(self.path))

            self.fp = open(self.path, 'rb')
            if self.debug:
                log.msg("Opened '%s' for upload" % self.path)
        except Exception:
            self.fp = None
            self.stderr = "Cannot open file '%s' for upload" % self.path
            self.rc = 1
            if self.debug:
                log.msg("Cannot open file '%s' for upload" % self.path)

        self.sendStatus({'header': "sending %s" % self.path})

        d = defer.Deferred()
        self._reactor.callLater(0, self._loop, d)

        def _close_ok(res):
            # success path: close local file, then remote writer
            if self.fp:
                self.fp.close()
            self.fp = None
            d1 = self.writer.callRemote("close")

            def _utime_ok(res):
                return self.writer.callRemote("utime", accessed_modified)
            if self.keepstamp:
                d1.addCallback(_utime_ok)
            return d1

        def _close_err(f):
            self.rc = 1
            if self.fp:
                self.fp.close()
            self.fp = None
            # call remote's close(), but keep the existing failure
            d1 = self.writer.callRemote("close")

            def eb(f2):
                log.msg("ignoring error from remote close():")
                log.err(f2)
            d1.addErrback(eb)
            d1.addBoth(lambda _: f)  # always return _loop failure
            return d1

        d.addCallbacks(_close_ok, _close_err)
        d.addBoth(self.finished)
        return d

    def _loop(self, fire_when_done):
        """Write one block, then either recurse for the next block or
        fire *fire_when_done* when _writeBlock reports completion."""
        d = defer.maybeDeferred(self._writeBlock)

        def _done(finished):
            if finished:
                fire_when_done.callback(None)
            else:
                self._loop(fire_when_done)

        def _err(why):
            fire_when_done.errback(why)
        d.addCallbacks(_done, _err)
        return None

    def _writeBlock(self):
        """Write a block of data to the remote writer.

        Returns True when the transfer is finished (EOF, interruption, or
        maxsize reached), otherwise a Deferred that fires False after the
        remote write completes.
        """
        if self.interrupted or self.fp is None:
            if self.debug:
                log.msg('WorkerFileUploadCommand._writeBlock(): end')
            return True

        length = self.blocksize
        if self.remaining is not None and length > self.remaining:
            length = self.remaining

        if length <= 0:
            # maxsize exhausted: record truncation once, then force EOF below
            if self.stderr is None:
                self.stderr = 'Maximum filesize reached, truncating file \'%s\'' \
                    % self.path
                self.rc = 1
            data = ''
        else:
            data = self.fp.read(length)

        if self.debug:
            log.msg('WorkerFileUploadCommand._writeBlock(): ' +
                    'allowed=%d readlen=%d' % (length, len(data)))
        if not data:
            log.msg("EOF: callRemote(close)")
            return True

        if self.remaining is not None:
            self.remaining = self.remaining - len(data)
            assert self.remaining >= 0
        d = self.writer.callRemote('write', data)
        d.addCallback(lambda res: False)
        return d


class WorkerDirectoryUploadCommand(WorkerFileUploadCommand):
    # Same arguments as WorkerFileUploadCommand, plus 'compress'
    # (None, 'gz' or 'bz2'); the directory is tarred into a temp file
    # which is then streamed via the inherited _loop/_writeBlock.
    debug = False
    requiredArgs = ['workdir', 'workersrc', 'writer', 'blocksize']

    def setup(self, args):
        """Stash the command arguments on the instance and reset result state."""
        self.workdir = args['workdir']
        self.dirname = args['workersrc']
        self.writer = args['writer']
        self.remaining = args['maxsize']
        self.blocksize = args['blocksize']
        self.compress = args['compress']
        self.stderr = None
        self.rc = 0

    def start(self):
        """Archive the directory into a temporary tarball, then upload it
        using the parent class's block loop; ask the master to unpack it
        once the transfer completes."""
        if self.debug:
            log.msg('WorkerDirectoryUploadCommand started')

        self.path = os.path.join(self.builder.basedir,
                                 self.workdir,
                                 os.path.expanduser(self.dirname))
        if self.debug:
            log.msg("path: %r" % self.path)

        # Create temporary archive
        fd, self.tarname = tempfile.mkstemp()
        self.fp = os.fdopen(fd, "rb+")
        if self.compress == 'bz2':
            mode = 'w|bz2'
        elif self.compress == 'gz':
            mode = 'w|gz'
        else:
            mode = 'w'
        # TODO: Use 'with' when depending on Python 2.7
        # Not possible with older versions:
        # exceptions.AttributeError: 'TarFile' object has no attribute '__exit__'
        archive = tarfile.open(mode=mode, fileobj=self.fp)
        archive.add(self.path, '')
        archive.close()

        # Transfer it
        self.fp.seek(0)

        self.sendStatus({'header': "sending %s" % self.path})

        d = defer.Deferred()
        self._reactor.callLater(0, self._loop, d)

        def unpack(res):
            # ask the master-side writer to expand the tarball
            d1 = self.writer.callRemote("unpack")

            def unpack_err(f):
                self.rc = 1
                return f
            d1.addErrback(unpack_err)
            d1.addCallback(lambda ignored: res)
            return d1
        d.addCallback(unpack)
        d.addBoth(self.finished)
        return d

    def finished(self, res):
        """Close and delete the temporary tarball, then report as usual."""
        self.fp.close()
        self.fp = None
        os.remove(self.tarname)
        return TransferCommand.finished(self, res)


class WorkerFileDownloadCommand(TransferCommand):

    """
    Download a file from master to worker

    Arguments:

        - ['workdir']:    base directory to use
        - ['workerdest']: name of the worker-side file to be created
        - ['reader']:     RemoteReference to a
                          buildbot_worker.protocols.base.FileReaderProxy object
        - ['maxsize']:    max size (in bytes) of file to write (None = no limit)
        - ['blocksize']:  max size for each data block
        - ['mode']:       access mode for the new file
    """
    debug = False
    requiredArgs = ['workdir', 'workerdest', 'reader', 'blocksize']

    def setup(self, args):
        """Stash the command arguments on the instance and reset result state."""
        self.workdir = args['workdir']
        self.filename = args['workerdest']
        self.reader = args['reader']
        self.bytes_remaining = args['maxsize']
        self.blocksize = args['blocksize']
        self.mode = args['mode']
        self.stderr = None
        self.rc = 0
        self.fp = None

    def start(self):
        """Create the destination file (and any missing parent dirs) and
        drive the chunked download loop; returns a Deferred that fires
        after the remote reader has been closed."""
        if self.debug:
            log.msg('WorkerFileDownloadCommand starting')

        # Open file
        self.path = os.path.join(self.builder.basedir,
                                 self.workdir,
                                 os.path.expanduser(self.filename))

        dirname = os.path.dirname(self.path)
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        try:
            self.fp = open(self.path, 'wb')
            if self.debug:
                log.msg("Opened '%s' for download" % self.path)
            if self.mode is not None:
                # note: there is a brief window during which the new file
                # will have the worker's default (umask) mode before we
                # set the new one. Don't use this mode= feature to keep files
                # private: use the worker's umask for that instead. (it
                # is possible to call os.umask() before and after the open()
                # call, but cleaning up from exceptions properly is more of a
                # nuisance that way).
                os.chmod(self.path, self.mode)
        except IOError:
            # TODO: this still needs cleanup
            if self.fp:
                self.fp.close()
            self.fp = None
            self.stderr = "Cannot open file '%s' for download" % self.path
            self.rc = 1
            if self.debug:
                log.msg("Cannot open file '%s' for download" % self.path)

        d = defer.Deferred()
        self._reactor.callLater(0, self._loop, d)

        def _close(res):
            # close the file, but pass through any errors from _loop
            d1 = self.reader.callRemote('close')
            d1.addErrback(log.err, 'while trying to close reader')
            d1.addCallback(lambda ignored: res)
            return d1
        d.addBoth(_close)
        d.addBoth(self.finished)
        return d

    def _loop(self, fire_when_done):
        """Read one block, then either recurse for the next block or
        fire *fire_when_done* when _readBlock reports completion."""
        d = defer.maybeDeferred(self._readBlock)

        def _done(finished):
            if finished:
                fire_when_done.callback(None)
            else:
                self._loop(fire_when_done)

        def _err(why):
            fire_when_done.errback(why)
        d.addCallbacks(_done, _err)
        return None

    def _readBlock(self):
        """Read a block of data from the remote reader.

        Returns True when the download is finished (interruption or
        maxsize reached), otherwise a Deferred whose result is handled
        by _writeData.
        """
        if self.interrupted or self.fp is None:
            if self.debug:
                log.msg('WorkerFileDownloadCommand._readBlock(): end')
            return True

        length = self.blocksize
        if self.bytes_remaining is not None and length > self.bytes_remaining:
            length = self.bytes_remaining

        if length <= 0:
            # maxsize exhausted: record truncation once and stop the loop
            if self.stderr is None:
                self.stderr = "Maximum filesize reached, truncating file '%s'" \
                    % self.path
                self.rc = 1
            return True
        else:
            d = self.reader.callRemote('read', length)
            d.addCallback(self._writeData)
            return d

    def _writeData(self, data):
        """Append one block to the local file; True ends the loop (EOF)."""
        if self.debug:
            log.msg('WorkerFileDownloadCommand._readBlock(): readlen=%d' %
                    len(data))
        if not data:
            return True

        if self.bytes_remaining is not None:
            self.bytes_remaining = self.bytes_remaining - len(data)
            assert self.bytes_remaining >= 0
        self.fp.write(data)
        return False

    def finished(self, res):
        """Close the destination file before the usual result reporting."""
        if self.fp:
            self.fp.close()
        self.fp = None

        return TransferCommand.finished(self, res)
mit
lisong/protobuf-ios-and-android-ndk
python/google/protobuf/service_reflection.py
601
11010
# Protocol Buffers - Google's data interchange format # Copyright 2008 Google Inc. All rights reserved. # http://code.google.com/p/protobuf/ # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Contains metaclasses used to create protocol service and service stub classes from ServiceDescriptor objects at runtime. The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to inject all useful functionality into the classes output by the protocol compiler at compile-time. 
"""

__author__ = 'petar@google.com (Petar Petrov)'


class GeneratedServiceType(type):

  """Metaclass for service classes created at runtime from ServiceDescriptors.

  Implementations for all methods described in the Service class are added here
  by this class. We also create properties to allow getting/setting all fields
  in the protocol message.

  The protocol compiler currently uses this metaclass to create protocol
  service classes at runtime.  Clients can also manually create their own
  classes at runtime, as in this example:

  mydescriptor = ServiceDescriptor(.....)
  class MyProtoService(service.Service):
    __metaclass__ = GeneratedServiceType
    DESCRIPTOR = mydescriptor
  myservice_instance = MyProtoService()
  ...
  """

  # Class-dictionary key under which the ServiceDescriptor must be stored.
  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __init__(cls, name, bases, dictionary):
    """Creates a message service class.

    Args:
      name: Name of the class (ignored, but required by the metaclass
        protocol).
      bases: Base classes of the class being constructed.
      dictionary: The class dictionary of the class being constructed.
        dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
        describing this protocol service type.
    """
    # Don't do anything if this class doesn't have a descriptor. This happens
    # when a service class is subclassed.
    if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary:
      return
    descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY]
    # The builder mutates cls in place, attaching CallMethod, descriptor
    # accessors, and one stub method per service method.
    service_builder = _ServiceBuilder(descriptor)
    service_builder.BuildService(cls)


class GeneratedServiceStubType(GeneratedServiceType):

  """Metaclass for service stubs created at runtime from ServiceDescriptors.

  This class has similar responsibilities as GeneratedServiceType, except that
  it creates the service stub classes.
  """

  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __init__(cls, name, bases, dictionary):
    """Creates a message service stub class.

    Args:
      name: Name of the class (ignored, here).
      bases: Base classes of the class being constructed.
      dictionary: The class dictionary of the class being constructed.
        dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
        describing this protocol service type.
    """
    # Run the GeneratedServiceType initialization first so the stub class
    # also gets the plain service machinery before stub methods are added.
    super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary)
    # Don't do anything if this class doesn't have a descriptor. This happens
    # when a service stub is subclassed.
    if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary:
      return
    descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY]
    service_stub_builder = _ServiceStubBuilder(descriptor)
    service_stub_builder.BuildServiceStub(cls)


class _ServiceBuilder(object):

  """This class constructs a protocol service class using a service descriptor.

  Given a service descriptor, this class constructs a class that represents
  the specified service descriptor. One service builder instance constructs
  exactly one service class. That means all instances of that class share the
  same builder.
  """

  def __init__(self, service_descriptor):
    """Initializes an instance of the service class builder.

    Args:
      service_descriptor: ServiceDescriptor to use when constructing the
        service class.
    """
    self.descriptor = service_descriptor

  def BuildService(self, cls):
    """Constructs the service class.

    Args:
      cls: The class that will be constructed.
    """

    # CallMethod needs to operate with an instance of the Service class. This
    # internal wrapper function exists only to be able to pass the service
    # instance to the method that does the real CallMethod work.
    def _WrapCallMethod(srvc, method_descriptor,
                        rpc_controller, request, callback):
      return self._CallMethod(srvc, method_descriptor,
                              rpc_controller, request, callback)
    self.cls = cls
    cls.CallMethod = _WrapCallMethod
    # GetDescriptor is a staticmethod closing over this builder's descriptor,
    # so all instances of the generated class share one descriptor object.
    cls.GetDescriptor = staticmethod(lambda: self.descriptor)
    cls.GetDescriptor.__doc__ = "Returns the service descriptor."
    cls.GetRequestClass = self._GetRequestClass
    cls.GetResponseClass = self._GetResponseClass
    # Each declared service method starts as a not-implemented placeholder;
    # concrete subclasses are expected to override them.
    for method in self.descriptor.methods:
      setattr(cls, method.name, self._GenerateNonImplementedMethod(method))

  def _CallMethod(self, srvc, method_descriptor,
                  rpc_controller, request, callback):
    """Calls the method described by a given method descriptor.

    Args:
      srvc: Instance of the service for which this method is called.
      method_descriptor: Descriptor that represent the method to call.
      rpc_controller: RPC controller to use for this method's execution.
      request: Request protocol message.
      callback: A callback to invoke after the method has completed.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'CallMethod() given method descriptor for wrong service type.')
    method = getattr(srvc, method_descriptor.name)
    return method(rpc_controller, request, callback)

  def _GetRequestClass(self, method_descriptor):
    """Returns the class of the request protocol message.

    Args:
      method_descriptor: Descriptor of the method for which to return the
        request protocol message class.

    Returns:
      A class that represents the input protocol message of the specified
      method.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'GetRequestClass() given method descriptor for wrong service type.')
    return method_descriptor.input_type._concrete_class

  def _GetResponseClass(self, method_descriptor):
    """Returns the class of the response protocol message.

    Args:
      method_descriptor: Descriptor of the method for which to return the
        response protocol message class.

    Returns:
      A class that represents the output protocol message of the specified
      method.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'GetResponseClass() given method descriptor for wrong service type.')
    return method_descriptor.output_type._concrete_class

  def _GenerateNonImplementedMethod(self, method):
    """Generates and returns a method that can be set for a service methods.

    Args:
      method: Descriptor of the service method for which a method is to be
        generated.

    Returns:
      A method that can be added to the service class.
    """
    # Generating the function through this factory binds "method" eagerly,
    # avoiding the late-binding-closure pitfall in the caller's loop.
    return lambda inst, rpc_controller, request, callback: (
        self._NonImplementedMethod(method.name, rpc_controller, callback))

  def _NonImplementedMethod(self, method_name, rpc_controller, callback):
    """The body of all methods in the generated service class.

    Args:
      method_name: Name of the method being executed.
      rpc_controller: RPC controller used to execute this method.
      callback: A callback which will be invoked when the method finishes.
    """
    rpc_controller.SetFailed('Method %s not implemented.' % method_name)
    callback(None)


class _ServiceStubBuilder(object):

  """Constructs a protocol service stub class using a service descriptor.

  Given a service descriptor, this class constructs a suitable stub class.
  A stub is just a type-safe wrapper around an RpcChannel which emulates a
  local implementation of the service.

  One service stub builder instance constructs exactly one class. It means all
  instances of that class share the same service stub builder.
  """

  def __init__(self, service_descriptor):
    """Initializes an instance of the service stub class builder.

    Args:
      service_descriptor: ServiceDescriptor to use when constructing the
        stub class.
    """
    self.descriptor = service_descriptor

  def BuildServiceStub(self, cls):
    """Constructs the stub class.

    Args:
      cls: The class that will be constructed.
    """

    def _ServiceStubInit(stub, rpc_channel):
      # Stub instances carry only the channel used to transport calls.
      stub.rpc_channel = rpc_channel
    self.cls = cls
    cls.__init__ = _ServiceStubInit
    for method in self.descriptor.methods:
      setattr(cls, method.name, self._GenerateStubMethod(method))

  def _GenerateStubMethod(self, method):
    # Factory function: binds "method" eagerly for the per-method lambda.
    return (lambda inst, rpc_controller, request, callback=None:
        self._StubMethod(inst, method, rpc_controller, request, callback))

  def _StubMethod(self, stub, method_descriptor,
                  rpc_controller, request, callback):
    """The body of all service methods in the generated stub class.

    Args:
      stub: Stub instance.
      method_descriptor: Descriptor of the invoked method.
      rpc_controller: Rpc controller to execute the method.
      request: Request protocol message.
      callback: A callback to execute when the method finishes.

    Returns:
      Response message (in case of blocking call).
    """
    return stub.rpc_channel.CallMethod(
        method_descriptor, rpc_controller, request,
        method_descriptor.output_type._concrete_class, callback)
bsd-3-clause
40223119/2015w13
static/Brython3.1.1-20150328-091302/Lib/unittest/test/test_assertions.py
738
15398
import datetime import warnings import unittest from itertools import product class Test_Assertions(unittest.TestCase): def test_AlmostEqual(self): self.assertAlmostEqual(1.00000001, 1.0) self.assertNotAlmostEqual(1.0000001, 1.0) self.assertRaises(self.failureException, self.assertAlmostEqual, 1.0000001, 1.0) self.assertRaises(self.failureException, self.assertNotAlmostEqual, 1.00000001, 1.0) self.assertAlmostEqual(1.1, 1.0, places=0) self.assertRaises(self.failureException, self.assertAlmostEqual, 1.1, 1.0, places=1) self.assertAlmostEqual(0, .1+.1j, places=0) self.assertNotAlmostEqual(0, .1+.1j, places=1) self.assertRaises(self.failureException, self.assertAlmostEqual, 0, .1+.1j, places=1) self.assertRaises(self.failureException, self.assertNotAlmostEqual, 0, .1+.1j, places=0) self.assertAlmostEqual(float('inf'), float('inf')) self.assertRaises(self.failureException, self.assertNotAlmostEqual, float('inf'), float('inf')) def test_AmostEqualWithDelta(self): self.assertAlmostEqual(1.1, 1.0, delta=0.5) self.assertAlmostEqual(1.0, 1.1, delta=0.5) self.assertNotAlmostEqual(1.1, 1.0, delta=0.05) self.assertNotAlmostEqual(1.0, 1.1, delta=0.05) self.assertRaises(self.failureException, self.assertAlmostEqual, 1.1, 1.0, delta=0.05) self.assertRaises(self.failureException, self.assertNotAlmostEqual, 1.1, 1.0, delta=0.5) self.assertRaises(TypeError, self.assertAlmostEqual, 1.1, 1.0, places=2, delta=2) self.assertRaises(TypeError, self.assertNotAlmostEqual, 1.1, 1.0, places=2, delta=2) first = datetime.datetime.now() second = first + datetime.timedelta(seconds=10) self.assertAlmostEqual(first, second, delta=datetime.timedelta(seconds=20)) self.assertNotAlmostEqual(first, second, delta=datetime.timedelta(seconds=5)) def test_assertRaises(self): def _raise(e): raise e self.assertRaises(KeyError, _raise, KeyError) self.assertRaises(KeyError, _raise, KeyError("key")) try: self.assertRaises(KeyError, lambda: None) except self.failureException as e: self.assertIn("KeyError not 
raised", str(e)) else: self.fail("assertRaises() didn't fail") try: self.assertRaises(KeyError, _raise, ValueError) except ValueError: pass else: self.fail("assertRaises() didn't let exception pass through") with self.assertRaises(KeyError) as cm: try: raise KeyError except Exception as e: exc = e raise self.assertIs(cm.exception, exc) with self.assertRaises(KeyError): raise KeyError("key") try: with self.assertRaises(KeyError): pass except self.failureException as e: self.assertIn("KeyError not raised", str(e)) else: self.fail("assertRaises() didn't fail") try: with self.assertRaises(KeyError): raise ValueError except ValueError: pass else: self.fail("assertRaises() didn't let exception pass through") def testAssertNotRegex(self): self.assertNotRegex('Ala ma kota', r'r+') try: self.assertNotRegex('Ala ma kota', r'k.t', 'Message') except self.failureException as e: self.assertIn("'kot'", e.args[0]) self.assertIn('Message', e.args[0]) else: self.fail('assertNotRegex should have failed.') class TestLongMessage(unittest.TestCase): """Test that the individual asserts honour longMessage. 
This actually tests all the message behaviour for asserts that use longMessage.""" def setUp(self): class TestableTestFalse(unittest.TestCase): longMessage = False failureException = self.failureException def testTest(self): pass class TestableTestTrue(unittest.TestCase): longMessage = True failureException = self.failureException def testTest(self): pass self.testableTrue = TestableTestTrue('testTest') self.testableFalse = TestableTestFalse('testTest') def testDefault(self): self.assertTrue(unittest.TestCase.longMessage) def test_formatMsg(self): self.assertEqual(self.testableFalse._formatMessage(None, "foo"), "foo") self.assertEqual(self.testableFalse._formatMessage("foo", "bar"), "foo") self.assertEqual(self.testableTrue._formatMessage(None, "foo"), "foo") self.assertEqual(self.testableTrue._formatMessage("foo", "bar"), "bar : foo") # This blows up if _formatMessage uses string concatenation self.testableTrue._formatMessage(object(), 'foo') def test_formatMessage_unicode_error(self): one = ''.join(chr(i) for i in range(255)) # this used to cause a UnicodeDecodeError constructing msg self.testableTrue._formatMessage(one, '\uFFFD') def assertMessages(self, methodName, args, errors): """ Check that methodName(*args) raises the correct error messages. 
errors should be a list of 4 regex that match the error when: 1) longMessage = False and no msg passed; 2) longMessage = False and msg passed; 3) longMessage = True and no msg passed; 4) longMessage = True and msg passed; """ def getMethod(i): useTestableFalse = i < 2 if useTestableFalse: test = self.testableFalse else: test = self.testableTrue return getattr(test, methodName) for i, expected_regex in enumerate(errors): testMethod = getMethod(i) kwargs = {} withMsg = i % 2 if withMsg: kwargs = {"msg": "oops"} with self.assertRaisesRegex(self.failureException, expected_regex=expected_regex): testMethod(*args, **kwargs) def testAssertTrue(self): self.assertMessages('assertTrue', (False,), ["^False is not true$", "^oops$", "^False is not true$", "^False is not true : oops$"]) def testAssertFalse(self): self.assertMessages('assertFalse', (True,), ["^True is not false$", "^oops$", "^True is not false$", "^True is not false : oops$"]) def testNotEqual(self): self.assertMessages('assertNotEqual', (1, 1), ["^1 == 1$", "^oops$", "^1 == 1$", "^1 == 1 : oops$"]) def testAlmostEqual(self): self.assertMessages('assertAlmostEqual', (1, 2), ["^1 != 2 within 7 places$", "^oops$", "^1 != 2 within 7 places$", "^1 != 2 within 7 places : oops$"]) def testNotAlmostEqual(self): self.assertMessages('assertNotAlmostEqual', (1, 1), ["^1 == 1 within 7 places$", "^oops$", "^1 == 1 within 7 places$", "^1 == 1 within 7 places : oops$"]) def test_baseAssertEqual(self): self.assertMessages('_baseAssertEqual', (1, 2), ["^1 != 2$", "^oops$", "^1 != 2$", "^1 != 2 : oops$"]) def testAssertSequenceEqual(self): # Error messages are multiline so not testing on full message # assertTupleEqual and assertListEqual delegate to this method self.assertMessages('assertSequenceEqual', ([], [None]), ["\+ \[None\]$", "^oops$", r"\+ \[None\]$", r"\+ \[None\] : oops$"]) def testAssertSetEqual(self): self.assertMessages('assertSetEqual', (set(), set([None])), ["None$", "^oops$", "None$", "None : oops$"]) def 
testAssertIn(self): self.assertMessages('assertIn', (None, []), ['^None not found in \[\]$', "^oops$", '^None not found in \[\]$', '^None not found in \[\] : oops$']) def testAssertNotIn(self): self.assertMessages('assertNotIn', (None, [None]), ['^None unexpectedly found in \[None\]$', "^oops$", '^None unexpectedly found in \[None\]$', '^None unexpectedly found in \[None\] : oops$']) def testAssertDictEqual(self): self.assertMessages('assertDictEqual', ({}, {'key': 'value'}), [r"\+ \{'key': 'value'\}$", "^oops$", "\+ \{'key': 'value'\}$", "\+ \{'key': 'value'\} : oops$"]) def testAssertDictContainsSubset(self): with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) self.assertMessages('assertDictContainsSubset', ({'key': 'value'}, {}), ["^Missing: 'key'$", "^oops$", "^Missing: 'key'$", "^Missing: 'key' : oops$"]) def testAssertMultiLineEqual(self): self.assertMessages('assertMultiLineEqual', ("", "foo"), [r"\+ foo$", "^oops$", r"\+ foo$", r"\+ foo : oops$"]) def testAssertLess(self): self.assertMessages('assertLess', (2, 1), ["^2 not less than 1$", "^oops$", "^2 not less than 1$", "^2 not less than 1 : oops$"]) def testAssertLessEqual(self): self.assertMessages('assertLessEqual', (2, 1), ["^2 not less than or equal to 1$", "^oops$", "^2 not less than or equal to 1$", "^2 not less than or equal to 1 : oops$"]) def testAssertGreater(self): self.assertMessages('assertGreater', (1, 2), ["^1 not greater than 2$", "^oops$", "^1 not greater than 2$", "^1 not greater than 2 : oops$"]) def testAssertGreaterEqual(self): self.assertMessages('assertGreaterEqual', (1, 2), ["^1 not greater than or equal to 2$", "^oops$", "^1 not greater than or equal to 2$", "^1 not greater than or equal to 2 : oops$"]) def testAssertIsNone(self): self.assertMessages('assertIsNone', ('not None',), ["^'not None' is not None$", "^oops$", "^'not None' is not None$", "^'not None' is not None : oops$"]) def testAssertIsNotNone(self): self.assertMessages('assertIsNotNone', 
(None,), ["^unexpectedly None$", "^oops$", "^unexpectedly None$", "^unexpectedly None : oops$"]) def testAssertIs(self): self.assertMessages('assertIs', (None, 'foo'), ["^None is not 'foo'$", "^oops$", "^None is not 'foo'$", "^None is not 'foo' : oops$"]) def testAssertIsNot(self): self.assertMessages('assertIsNot', (None, None), ["^unexpectedly identical: None$", "^oops$", "^unexpectedly identical: None$", "^unexpectedly identical: None : oops$"]) def assertMessagesCM(self, methodName, args, func, errors): """ Check that the correct error messages are raised while executing: with method(*args): func() *errors* should be a list of 4 regex that match the error when: 1) longMessage = False and no msg passed; 2) longMessage = False and msg passed; 3) longMessage = True and no msg passed; 4) longMessage = True and msg passed; """ p = product((self.testableFalse, self.testableTrue), ({}, {"msg": "oops"})) for (cls, kwargs), err in zip(p, errors): method = getattr(cls, methodName) with self.assertRaisesRegex(cls.failureException, err): with method(*args, **kwargs) as cm: func() def testAssertRaises(self): self.assertMessagesCM('assertRaises', (TypeError,), lambda: None, ['^TypeError not raised$', '^oops$', '^TypeError not raised$', '^TypeError not raised : oops$']) def testAssertRaisesRegex(self): # test error not raised self.assertMessagesCM('assertRaisesRegex', (TypeError, 'unused regex'), lambda: None, ['^TypeError not raised$', '^oops$', '^TypeError not raised$', '^TypeError not raised : oops$']) # test error raised but with wrong message def raise_wrong_message(): raise TypeError('foo') self.assertMessagesCM('assertRaisesRegex', (TypeError, 'regex'), raise_wrong_message, ['^"regex" does not match "foo"$', '^oops$', '^"regex" does not match "foo"$', '^"regex" does not match "foo" : oops$']) def testAssertWarns(self): self.assertMessagesCM('assertWarns', (UserWarning,), lambda: None, ['^UserWarning not triggered$', '^oops$', '^UserWarning not triggered$', 
'^UserWarning not triggered : oops$']) def testAssertWarnsRegex(self): # test error not raised self.assertMessagesCM('assertWarnsRegex', (UserWarning, 'unused regex'), lambda: None, ['^UserWarning not triggered$', '^oops$', '^UserWarning not triggered$', '^UserWarning not triggered : oops$']) # test warning raised but with wrong message def raise_wrong_message(): warnings.warn('foo') self.assertMessagesCM('assertWarnsRegex', (UserWarning, 'regex'), raise_wrong_message, ['^"regex" does not match "foo"$', '^oops$', '^"regex" does not match "foo"$', '^"regex" does not match "foo" : oops$'])
gpl-3.0
ms-iot/python
cpython/Lib/plat-linux/CDROM.py
330
5035
# Generated by h2py from /usr/include/linux/cdrom.h
#
# Constants mirroring the Linux CD-ROM ioctl interface. Names and values
# must track the kernel header exactly; do not hand-edit individual values.

# ioctl request codes (the 0x53xx "S" ioctl group).
CDROMPAUSE = 0x5301
CDROMRESUME = 0x5302
CDROMPLAYMSF = 0x5303
CDROMPLAYTRKIND = 0x5304
CDROMREADTOCHDR = 0x5305
CDROMREADTOCENTRY = 0x5306
CDROMSTOP = 0x5307
CDROMSTART = 0x5308
CDROMEJECT = 0x5309
CDROMVOLCTRL = 0x530a
CDROMSUBCHNL = 0x530b
CDROMREADMODE2 = 0x530c
CDROMREADMODE1 = 0x530d
CDROMREADAUDIO = 0x530e
CDROMEJECT_SW = 0x530f
CDROMMULTISESSION = 0x5310
CDROM_GET_MCN = 0x5311
# "UPC" is the historical name for the Media Catalog Number ioctl.
CDROM_GET_UPC = CDROM_GET_MCN
CDROMRESET = 0x5312
CDROMVOLREAD = 0x5313
CDROMREADRAW = 0x5314
CDROMREADCOOKED = 0x5315
CDROMSEEK = 0x5316
CDROMPLAYBLK = 0x5317
CDROMREADALL = 0x5318
CDROMGETSPINDOWN = 0x531d
CDROMSETSPINDOWN = 0x531e
CDROMCLOSETRAY = 0x5319
CDROM_SET_OPTIONS = 0x5320
CDROM_CLEAR_OPTIONS = 0x5321
CDROM_SELECT_SPEED = 0x5322
CDROM_SELECT_DISC = 0x5323
CDROM_MEDIA_CHANGED = 0x5325
CDROM_DRIVE_STATUS = 0x5326
CDROM_DISC_STATUS = 0x5327
CDROM_CHANGER_NSLOTS = 0x5328
CDROM_LOCKDOOR = 0x5329
CDROM_DEBUG = 0x5330
CDROM_GET_CAPABILITY = 0x5331
CDROMAUDIOBUFSIZ = 0x5382

# DVD-specific ioctls.
DVD_READ_STRUCT = 0x5390
DVD_WRITE_STRUCT = 0x5391
DVD_AUTH = 0x5392
CDROM_SEND_PACKET = 0x5393
CDROM_NEXT_WRITABLE = 0x5394
CDROM_LAST_WRITTEN = 0x5395

# Generic packet command (CGC) plumbing.
CDROM_PACKET_SIZE = 12
CGC_DATA_UNKNOWN = 0
CGC_DATA_WRITE = 1
CGC_DATA_READ = 2
CGC_DATA_NONE = 3

# Physical CD geometry / frame layout sizes (bytes unless noted).
CD_MINS = 74
CD_SECS = 60
CD_FRAMES = 75
CD_SYNC_SIZE = 12
CD_MSF_OFFSET = 150
CD_CHUNK_SIZE = 24
CD_NUM_OF_CHUNKS = 98
CD_FRAMESIZE_SUB = 96
CD_HEAD_SIZE = 4
CD_SUBHEAD_SIZE = 8
CD_EDC_SIZE = 4
CD_ZERO_SIZE = 8
CD_ECC_SIZE = 276
CD_FRAMESIZE = 2048
CD_FRAMESIZE_RAW = 2352
CD_FRAMESIZE_RAWER = 2646
# Derived layout sizes; keep the arithmetic, not precomputed literals.
CD_FRAMESIZE_RAW1 = (CD_FRAMESIZE_RAW-CD_SYNC_SIZE)
CD_FRAMESIZE_RAW0 = (CD_FRAMESIZE_RAW-CD_SYNC_SIZE-CD_HEAD_SIZE)
CD_XA_HEAD = (CD_HEAD_SIZE+CD_SUBHEAD_SIZE)
CD_XA_TAIL = (CD_EDC_SIZE+CD_ECC_SIZE)
CD_XA_SYNC_HEAD = (CD_SYNC_SIZE+CD_XA_HEAD)

# Address formats / TOC flags.
CDROM_LBA = 0x01
CDROM_MSF = 0x02
CDROM_DATA_TRACK = 0x04
CDROM_LEADOUT = 0xAA

# Audio status values (from the subchannel Q data).
CDROM_AUDIO_INVALID = 0x00
CDROM_AUDIO_PLAY = 0x11
CDROM_AUDIO_PAUSED = 0x12
CDROM_AUDIO_COMPLETED = 0x13
CDROM_AUDIO_ERROR = 0x14
CDROM_AUDIO_NO_STATUS = 0x15

# Capability bit flags returned by CDROM_GET_CAPABILITY.
CDC_CLOSE_TRAY = 0x1
CDC_OPEN_TRAY = 0x2
CDC_LOCK = 0x4
CDC_SELECT_SPEED = 0x8
CDC_SELECT_DISC = 0x10
CDC_MULTI_SESSION = 0x20
CDC_MCN = 0x40
CDC_MEDIA_CHANGED = 0x80
CDC_PLAY_AUDIO = 0x100
CDC_RESET = 0x200
CDC_IOCTLS = 0x400
CDC_DRIVE_STATUS = 0x800
CDC_GENERIC_PACKET = 0x1000
CDC_CD_R = 0x2000
CDC_CD_RW = 0x4000
CDC_DVD = 0x8000
CDC_DVD_R = 0x10000
CDC_DVD_RAM = 0x20000

# Drive/disc status values (CDROM_DRIVE_STATUS / CDROM_DISC_STATUS).
CDS_NO_INFO = 0
CDS_NO_DISC = 1
CDS_TRAY_OPEN = 2
CDS_DRIVE_NOT_READY = 3
CDS_DISC_OK = 4
CDS_AUDIO = 100
CDS_DATA_1 = 101
CDS_DATA_2 = 102
CDS_XA_2_1 = 103
CDS_XA_2_2 = 104
CDS_MIXED = 105

# Behaviour option bits (CDROM_SET_OPTIONS / CDROM_CLEAR_OPTIONS).
CDO_AUTO_CLOSE = 0x1
CDO_AUTO_EJECT = 0x2
CDO_USE_FFLAGS = 0x4
CDO_LOCK = 0x8
CDO_CHECK_TYPE = 0x10

CD_PART_MAX = 64
CD_PART_MASK = (CD_PART_MAX - 1)

# Generic Packet Commands (MMC SCSI opcodes).
GPCMD_BLANK = 0xa1
GPCMD_CLOSE_TRACK = 0x5b
GPCMD_FLUSH_CACHE = 0x35
GPCMD_FORMAT_UNIT = 0x04
GPCMD_GET_CONFIGURATION = 0x46
GPCMD_GET_EVENT_STATUS_NOTIFICATION = 0x4a
GPCMD_GET_PERFORMANCE = 0xac
GPCMD_INQUIRY = 0x12
GPCMD_LOAD_UNLOAD = 0xa6
GPCMD_MECHANISM_STATUS = 0xbd
GPCMD_MODE_SELECT_10 = 0x55
GPCMD_MODE_SENSE_10 = 0x5a
GPCMD_PAUSE_RESUME = 0x4b
GPCMD_PLAY_AUDIO_10 = 0x45
GPCMD_PLAY_AUDIO_MSF = 0x47
GPCMD_PLAY_AUDIO_TI = 0x48
GPCMD_PLAY_CD = 0xbc
GPCMD_PREVENT_ALLOW_MEDIUM_REMOVAL = 0x1e
GPCMD_READ_10 = 0x28
GPCMD_READ_12 = 0xa8
GPCMD_READ_CDVD_CAPACITY = 0x25
GPCMD_READ_CD = 0xbe
GPCMD_READ_CD_MSF = 0xb9
GPCMD_READ_DISC_INFO = 0x51
GPCMD_READ_DVD_STRUCTURE = 0xad
GPCMD_READ_FORMAT_CAPACITIES = 0x23
GPCMD_READ_HEADER = 0x44
GPCMD_READ_TRACK_RZONE_INFO = 0x52
GPCMD_READ_SUBCHANNEL = 0x42
GPCMD_READ_TOC_PMA_ATIP = 0x43
GPCMD_REPAIR_RZONE_TRACK = 0x58
GPCMD_REPORT_KEY = 0xa4
GPCMD_REQUEST_SENSE = 0x03
GPCMD_RESERVE_RZONE_TRACK = 0x53
GPCMD_SCAN = 0xba
GPCMD_SEEK = 0x2b
# NOTE(review): same opcode as GPCMD_READ_DVD_STRUCTURE in the kernel header.
GPCMD_SEND_DVD_STRUCTURE = 0xad
GPCMD_SEND_EVENT = 0xa2
GPCMD_SEND_KEY = 0xa3
GPCMD_SEND_OPC = 0x54
GPCMD_SET_READ_AHEAD = 0xa7
GPCMD_SET_STREAMING = 0xb6
GPCMD_START_STOP_UNIT = 0x1b
GPCMD_STOP_PLAY_SCAN = 0x4e
GPCMD_TEST_UNIT_READY = 0x00
GPCMD_VERIFY_10 = 0x2f
GPCMD_WRITE_10 = 0x2a
GPCMD_WRITE_AND_VERIFY_10 = 0x2e
GPCMD_SET_SPEED = 0xbb
# Legacy alias spelling kept for compatibility with the header.
GPCMD_PLAYAUDIO_TI = 0x48
GPCMD_GET_MEDIA_STATUS = 0xda

# Mode page codes for MODE SENSE/SELECT.
GPMODE_R_W_ERROR_PAGE = 0x01
GPMODE_WRITE_PARMS_PAGE = 0x05
GPMODE_AUDIO_CTL_PAGE = 0x0e
GPMODE_POWER_PAGE = 0x1a
GPMODE_FAULT_FAIL_PAGE = 0x1c
GPMODE_TO_PROTECT_PAGE = 0x1d
GPMODE_CAPABILITIES_PAGE = 0x2a
GPMODE_ALL_PAGES = 0x3f
GPMODE_CDROM_PAGE = 0x0d

# DVD structure-read types and CSS authentication states.
DVD_STRUCT_PHYSICAL = 0x00
DVD_STRUCT_COPYRIGHT = 0x01
DVD_STRUCT_DISCKEY = 0x02
DVD_STRUCT_BCA = 0x03
DVD_STRUCT_MANUFACT = 0x04
DVD_LAYERS = 4
DVD_LU_SEND_AGID = 0
DVD_HOST_SEND_CHALLENGE = 1
DVD_LU_SEND_KEY1 = 2
DVD_LU_SEND_CHALLENGE = 3
DVD_HOST_SEND_KEY2 = 4
DVD_AUTH_ESTABLISHED = 5
DVD_AUTH_FAILURE = 6
DVD_LU_SEND_TITLE_KEY = 7
DVD_LU_SEND_ASF = 8
DVD_INVALIDATE_AGID = 9
DVD_LU_SEND_RPC_STATE = 10
DVD_HOST_SEND_RPC_STATE = 11

# Copy-protection / CGMS flags.
DVD_CPM_NO_COPYRIGHT = 0
DVD_CPM_COPYRIGHTED = 1
DVD_CP_SEC_NONE = 0
DVD_CP_SEC_EXIST = 1
DVD_CGMS_UNRESTRICTED = 0
DVD_CGMS_SINGLE = 2
DVD_CGMS_RESTRICTED = 3

CDROM_MAX_SLOTS = 256
bsd-3-clause
heytcass/homeassistant-config
deps/sqlalchemy/sql/operators.py
34
23013
# sql/operators.py # Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Defines operators used in SQL expressions.""" from .. import util from operator import ( and_, or_, inv, add, mul, sub, mod, truediv, lt, le, ne, gt, ge, eq, neg, getitem, lshift, rshift, contains ) if util.py2k: from operator import div else: div = truediv class Operators(object): """Base of comparison and logical operators. Implements base methods :meth:`~sqlalchemy.sql.operators.Operators.operate` and :meth:`~sqlalchemy.sql.operators.Operators.reverse_operate`, as well as :meth:`~sqlalchemy.sql.operators.Operators.__and__`, :meth:`~sqlalchemy.sql.operators.Operators.__or__`, :meth:`~sqlalchemy.sql.operators.Operators.__invert__`. Usually is used via its most common subclass :class:`.ColumnOperators`. """ __slots__ = () def __and__(self, other): """Implement the ``&`` operator. When used with SQL expressions, results in an AND operation, equivalent to :func:`~.expression.and_`, that is:: a & b is equivalent to:: from sqlalchemy import and_ and_(a, b) Care should be taken when using ``&`` regarding operator precedence; the ``&`` operator has the highest precedence. The operands should be enclosed in parenthesis if they contain further sub expressions:: (a == 2) & (b == 4) """ return self.operate(and_, other) def __or__(self, other): """Implement the ``|`` operator. When used with SQL expressions, results in an OR operation, equivalent to :func:`~.expression.or_`, that is:: a | b is equivalent to:: from sqlalchemy import or_ or_(a, b) Care should be taken when using ``|`` regarding operator precedence; the ``|`` operator has the highest precedence. 
The operands should be enclosed in parenthesis if they contain further sub expressions:: (a == 2) | (b == 4) """ return self.operate(or_, other) def __invert__(self): """Implement the ``~`` operator. When used with SQL expressions, results in a NOT operation, equivalent to :func:`~.expression.not_`, that is:: ~a is equivalent to:: from sqlalchemy import not_ not_(a) """ return self.operate(inv) def op(self, opstring, precedence=0, is_comparison=False): """produce a generic operator function. e.g.:: somecolumn.op("*")(5) produces:: somecolumn * 5 This function can also be used to make bitwise operators explicit. For example:: somecolumn.op('&')(0xff) is a bitwise AND of the value in ``somecolumn``. :param operator: a string which will be output as the infix operator between this element and the expression passed to the generated function. :param precedence: precedence to apply to the operator, when parenthesizing expressions. A lower number will cause the expression to be parenthesized when applied against another operator with higher precedence. The default value of ``0`` is lower than all operators except for the comma (``,``) and ``AS`` operators. A value of 100 will be higher or equal to all operators, and -100 will be lower than or equal to all operators. .. versionadded:: 0.8 - added the 'precedence' argument. :param is_comparison: if True, the operator will be considered as a "comparison" operator, that is which evaluates to a boolean true/false value, like ``==``, ``>``, etc. This flag should be set so that ORM relationships can establish that the operator is a comparison operator when used in a custom join condition. .. versionadded:: 0.9.2 - added the :paramref:`.Operators.op.is_comparison` flag. .. 
seealso:: :ref:`types_operators` :ref:`relationship_custom_operator` """ operator = custom_op(opstring, precedence, is_comparison) def against(other): return operator(self, other) return against def operate(self, op, *other, **kwargs): """Operate on an argument. This is the lowest level of operation, raises :class:`NotImplementedError` by default. Overriding this on a subclass can allow common behavior to be applied to all operations. For example, overriding :class:`.ColumnOperators` to apply ``func.lower()`` to the left and right side:: class MyComparator(ColumnOperators): def operate(self, op, other): return op(func.lower(self), func.lower(other)) :param op: Operator callable. :param \*other: the 'other' side of the operation. Will be a single scalar for most operations. :param \**kwargs: modifiers. These may be passed by special operators such as :meth:`ColumnOperators.contains`. """ raise NotImplementedError(str(op)) def reverse_operate(self, op, other, **kwargs): """Reverse operate on an argument. Usage is the same as :meth:`operate`. """ raise NotImplementedError(str(op)) class custom_op(object): """Represent a 'custom' operator. :class:`.custom_op` is normally instantitated when the :meth:`.ColumnOperators.op` method is used to create a custom operator callable. The class can also be used directly when programmatically constructing expressions. E.g. 
to represent the "factorial" operation:: from sqlalchemy.sql import UnaryExpression from sqlalchemy.sql import operators from sqlalchemy import Numeric unary = UnaryExpression(table.c.somecolumn, modifier=operators.custom_op("!"), type_=Numeric) """ __name__ = 'custom_op' def __init__(self, opstring, precedence=0, is_comparison=False): self.opstring = opstring self.precedence = precedence self.is_comparison = is_comparison def __eq__(self, other): return isinstance(other, custom_op) and \ other.opstring == self.opstring def __hash__(self): return id(self) def __call__(self, left, right, **kw): return left.operate(self, right, **kw) class ColumnOperators(Operators): """Defines boolean, comparison, and other operators for :class:`.ColumnElement` expressions. By default, all methods call down to :meth:`.operate` or :meth:`.reverse_operate`, passing in the appropriate operator function from the Python builtin ``operator`` module or a SQLAlchemy-specific operator function from :mod:`sqlalchemy.expression.operators`. For example the ``__eq__`` function:: def __eq__(self, other): return self.operate(operators.eq, other) Where ``operators.eq`` is essentially:: def eq(a, b): return a == b The core column expression unit :class:`.ColumnElement` overrides :meth:`.Operators.operate` and others to return further :class:`.ColumnElement` constructs, so that the ``==`` operation above is replaced by a clause construct. See also: :ref:`types_operators` :attr:`.TypeEngine.comparator_factory` :class:`.ColumnOperators` :class:`.PropComparator` """ __slots__ = () timetuple = None """Hack, allows datetime objects to be compared on the LHS.""" def __lt__(self, other): """Implement the ``<`` operator. In a column context, produces the clause ``a < b``. """ return self.operate(lt, other) def __le__(self, other): """Implement the ``<=`` operator. In a column context, produces the clause ``a <= b``. 
""" return self.operate(le, other) __hash__ = Operators.__hash__ def __eq__(self, other): """Implement the ``==`` operator. In a column context, produces the clause ``a = b``. If the target is ``None``, produces ``a IS NULL``. """ return self.operate(eq, other) def __ne__(self, other): """Implement the ``!=`` operator. In a column context, produces the clause ``a != b``. If the target is ``None``, produces ``a IS NOT NULL``. """ return self.operate(ne, other) def __gt__(self, other): """Implement the ``>`` operator. In a column context, produces the clause ``a > b``. """ return self.operate(gt, other) def __ge__(self, other): """Implement the ``>=`` operator. In a column context, produces the clause ``a >= b``. """ return self.operate(ge, other) def __neg__(self): """Implement the ``-`` operator. In a column context, produces the clause ``-a``. """ return self.operate(neg) def __contains__(self, other): return self.operate(contains, other) def __getitem__(self, index): """Implement the [] operator. This can be used by some database-specific types such as Postgresql ARRAY and HSTORE. """ return self.operate(getitem, index) def __lshift__(self, other): """implement the << operator. Not used by SQLAlchemy core, this is provided for custom operator systems which want to use << as an extension point. """ return self.operate(lshift, other) def __rshift__(self, other): """implement the >> operator. Not used by SQLAlchemy core, this is provided for custom operator systems which want to use >> as an extension point. """ return self.operate(rshift, other) def concat(self, other): """Implement the 'concat' operator. In a column context, produces the clause ``a || b``, or uses the ``concat()`` operator on MySQL. """ return self.operate(concat_op, other) def like(self, other, escape=None): """Implement the ``like`` operator. In a column context, produces the clause ``a LIKE other``. 
E.g.:: select([sometable]).where(sometable.c.column.like("%foobar%")) :param other: expression to be compared :param escape: optional escape character, renders the ``ESCAPE`` keyword, e.g.:: somecolumn.like("foo/%bar", escape="/") .. seealso:: :meth:`.ColumnOperators.ilike` """ return self.operate(like_op, other, escape=escape) def ilike(self, other, escape=None): """Implement the ``ilike`` operator. In a column context, produces the clause ``a ILIKE other``. E.g.:: select([sometable]).where(sometable.c.column.ilike("%foobar%")) :param other: expression to be compared :param escape: optional escape character, renders the ``ESCAPE`` keyword, e.g.:: somecolumn.ilike("foo/%bar", escape="/") .. seealso:: :meth:`.ColumnOperators.like` """ return self.operate(ilike_op, other, escape=escape) def in_(self, other): """Implement the ``in`` operator. In a column context, produces the clause ``a IN other``. "other" may be a tuple/list of column expressions, or a :func:`~.expression.select` construct. """ return self.operate(in_op, other) def notin_(self, other): """implement the ``NOT IN`` operator. This is equivalent to using negation with :meth:`.ColumnOperators.in_`, i.e. ``~x.in_(y)``. .. versionadded:: 0.8 .. seealso:: :meth:`.ColumnOperators.in_` """ return self.operate(notin_op, other) def notlike(self, other, escape=None): """implement the ``NOT LIKE`` operator. This is equivalent to using negation with :meth:`.ColumnOperators.like`, i.e. ``~x.like(y)``. .. versionadded:: 0.8 .. seealso:: :meth:`.ColumnOperators.like` """ return self.operate(notlike_op, other, escape=escape) def notilike(self, other, escape=None): """implement the ``NOT ILIKE`` operator. This is equivalent to using negation with :meth:`.ColumnOperators.ilike`, i.e. ``~x.ilike(y)``. .. versionadded:: 0.8 .. seealso:: :meth:`.ColumnOperators.ilike` """ return self.operate(notilike_op, other, escape=escape) def is_(self, other): """Implement the ``IS`` operator. 
Normally, ``IS`` is generated automatically when comparing to a value of ``None``, which resolves to ``NULL``. However, explicit usage of ``IS`` may be desirable if comparing to boolean values on certain platforms. .. versionadded:: 0.7.9 .. seealso:: :meth:`.ColumnOperators.isnot` """ return self.operate(is_, other) def isnot(self, other): """Implement the ``IS NOT`` operator. Normally, ``IS NOT`` is generated automatically when comparing to a value of ``None``, which resolves to ``NULL``. However, explicit usage of ``IS NOT`` may be desirable if comparing to boolean values on certain platforms. .. versionadded:: 0.7.9 .. seealso:: :meth:`.ColumnOperators.is_` """ return self.operate(isnot, other) def startswith(self, other, **kwargs): """Implement the ``startwith`` operator. In a column context, produces the clause ``LIKE '<other>%'`` """ return self.operate(startswith_op, other, **kwargs) def endswith(self, other, **kwargs): """Implement the 'endswith' operator. In a column context, produces the clause ``LIKE '%<other>'`` """ return self.operate(endswith_op, other, **kwargs) def contains(self, other, **kwargs): """Implement the 'contains' operator. In a column context, produces the clause ``LIKE '%<other>%'`` """ return self.operate(contains_op, other, **kwargs) def match(self, other, **kwargs): """Implements a database-specific 'match' operator. :meth:`~.ColumnOperators.match` attempts to resolve to a MATCH-like function or operator provided by the backend. Examples include: * Postgresql - renders ``x @@ to_tsquery(y)`` * MySQL - renders ``MATCH (x) AGAINST (y IN BOOLEAN MODE)`` * Oracle - renders ``CONTAINS(x, y)`` * other backends may provide special implementations. * Backends without any special implementation will emit the operator as "MATCH". This is compatible with SQlite, for example. 
""" return self.operate(match_op, other, **kwargs) def desc(self): """Produce a :func:`~.expression.desc` clause against the parent object.""" return self.operate(desc_op) def asc(self): """Produce a :func:`~.expression.asc` clause against the parent object.""" return self.operate(asc_op) def nullsfirst(self): """Produce a :func:`~.expression.nullsfirst` clause against the parent object.""" return self.operate(nullsfirst_op) def nullslast(self): """Produce a :func:`~.expression.nullslast` clause against the parent object.""" return self.operate(nullslast_op) def collate(self, collation): """Produce a :func:`~.expression.collate` clause against the parent object, given the collation string.""" return self.operate(collate, collation) def __radd__(self, other): """Implement the ``+`` operator in reverse. See :meth:`.ColumnOperators.__add__`. """ return self.reverse_operate(add, other) def __rsub__(self, other): """Implement the ``-`` operator in reverse. See :meth:`.ColumnOperators.__sub__`. """ return self.reverse_operate(sub, other) def __rmul__(self, other): """Implement the ``*`` operator in reverse. See :meth:`.ColumnOperators.__mul__`. """ return self.reverse_operate(mul, other) def __rdiv__(self, other): """Implement the ``/`` operator in reverse. See :meth:`.ColumnOperators.__div__`. """ return self.reverse_operate(div, other) def __rmod__(self, other): """Implement the ``%`` operator in reverse. See :meth:`.ColumnOperators.__mod__`. """ return self.reverse_operate(mod, other) def between(self, cleft, cright, symmetric=False): """Produce a :func:`~.expression.between` clause against the parent object, given the lower and upper range. """ return self.operate(between_op, cleft, cright, symmetric=symmetric) def distinct(self): """Produce a :func:`~.expression.distinct` clause against the parent object. """ return self.operate(distinct_op) def __add__(self, other): """Implement the ``+`` operator. 
In a column context, produces the clause ``a + b`` if the parent object has non-string affinity. If the parent object has a string affinity, produces the concatenation operator, ``a || b`` - see :meth:`.ColumnOperators.concat`. """ return self.operate(add, other) def __sub__(self, other): """Implement the ``-`` operator. In a column context, produces the clause ``a - b``. """ return self.operate(sub, other) def __mul__(self, other): """Implement the ``*`` operator. In a column context, produces the clause ``a * b``. """ return self.operate(mul, other) def __div__(self, other): """Implement the ``/`` operator. In a column context, produces the clause ``a / b``. """ return self.operate(div, other) def __mod__(self, other): """Implement the ``%`` operator. In a column context, produces the clause ``a % b``. """ return self.operate(mod, other) def __truediv__(self, other): """Implement the ``//`` operator. In a column context, produces the clause ``a / b``. """ return self.operate(truediv, other) def __rtruediv__(self, other): """Implement the ``//`` operator in reverse. See :meth:`.ColumnOperators.__truediv__`. 
""" return self.reverse_operate(truediv, other) def from_(): raise NotImplementedError() def as_(): raise NotImplementedError() def exists(): raise NotImplementedError() def istrue(a): raise NotImplementedError() def isfalse(a): raise NotImplementedError() def is_(a, b): return a.is_(b) def isnot(a, b): return a.isnot(b) def collate(a, b): return a.collate(b) def op(a, opstring, b): return a.op(opstring)(b) def like_op(a, b, escape=None): return a.like(b, escape=escape) def notlike_op(a, b, escape=None): return a.notlike(b, escape=escape) def ilike_op(a, b, escape=None): return a.ilike(b, escape=escape) def notilike_op(a, b, escape=None): return a.notilike(b, escape=escape) def between_op(a, b, c, symmetric=False): return a.between(b, c, symmetric=symmetric) def notbetween_op(a, b, c, symmetric=False): return a.notbetween(b, c, symmetric=symmetric) def in_op(a, b): return a.in_(b) def notin_op(a, b): return a.notin_(b) def distinct_op(a): return a.distinct() def startswith_op(a, b, escape=None): return a.startswith(b, escape=escape) def notstartswith_op(a, b, escape=None): return ~a.startswith(b, escape=escape) def endswith_op(a, b, escape=None): return a.endswith(b, escape=escape) def notendswith_op(a, b, escape=None): return ~a.endswith(b, escape=escape) def contains_op(a, b, escape=None): return a.contains(b, escape=escape) def notcontains_op(a, b, escape=None): return ~a.contains(b, escape=escape) def match_op(a, b, **kw): return a.match(b, **kw) def notmatch_op(a, b, **kw): return a.notmatch(b, **kw) def comma_op(a, b): raise NotImplementedError() def concat_op(a, b): return a.concat(b) def desc_op(a): return a.desc() def asc_op(a): return a.asc() def nullsfirst_op(a): return a.nullsfirst() def nullslast_op(a): return a.nullslast() _commutative = set([eq, ne, add, mul]) _comparison = set([eq, ne, lt, gt, ge, le, between_op, like_op]) def is_comparison(op): return op in _comparison or \ isinstance(op, custom_op) and op.is_comparison def is_commutative(op): 
return op in _commutative def is_ordering_modifier(op): return op in (asc_op, desc_op, nullsfirst_op, nullslast_op) _associative = _commutative.union([concat_op, and_, or_]) _natural_self_precedent = _associative.union([getitem]) """Operators where if we have (a op b) op c, we don't want to parenthesize (a op b). """ _asbool = util.symbol('_asbool', canonical=-10) _smallest = util.symbol('_smallest', canonical=-100) _largest = util.symbol('_largest', canonical=100) _PRECEDENCE = { from_: 15, getitem: 15, mul: 8, truediv: 8, div: 8, mod: 8, neg: 8, add: 7, sub: 7, concat_op: 6, match_op: 6, notmatch_op: 6, ilike_op: 6, notilike_op: 6, like_op: 6, notlike_op: 6, in_op: 6, notin_op: 6, is_: 6, isnot: 6, eq: 5, ne: 5, gt: 5, lt: 5, ge: 5, le: 5, between_op: 5, notbetween_op: 5, distinct_op: 5, inv: 5, istrue: 5, isfalse: 5, and_: 3, or_: 2, comma_op: -1, desc_op: 3, asc_op: 3, collate: 4, as_: -1, exists: 0, _asbool: -10, _smallest: _smallest, _largest: _largest } def is_precedent(operator, against): if operator is against and operator in _natural_self_precedent: return False else: return (_PRECEDENCE.get(operator, getattr(operator, 'precedence', _smallest)) <= _PRECEDENCE.get(against, getattr(against, 'precedence', _largest)))
mit
xiangel/hue
desktop/core/ext-py/pysaml2-2.4.0/example/idp2/idp_uwsgi.py
29
36330
#!/usr/bin/env python import importlib import argparse import base64 import re import logging import time from hashlib import sha1 from urlparse import parse_qs from Cookie import SimpleCookie import os from saml2.authn import is_equal from saml2.profile import ecp from saml2 import server from saml2 import BINDING_HTTP_ARTIFACT from saml2 import BINDING_URI from saml2 import BINDING_PAOS from saml2 import BINDING_SOAP from saml2 import BINDING_HTTP_REDIRECT from saml2 import BINDING_HTTP_POST from saml2 import time_util from saml2.authn_context import AuthnBroker from saml2.authn_context import PASSWORD from saml2.authn_context import UNSPECIFIED from saml2.authn_context import authn_context_class_ref from saml2.httputil import Response from saml2.httputil import NotFound from saml2.httputil import geturl from saml2.httputil import get_post from saml2.httputil import Redirect from saml2.httputil import Unauthorized from saml2.httputil import BadRequest from saml2.httputil import ServiceError from saml2.ident import Unknown from saml2.metadata import create_metadata_string from saml2.s_utils import rndstr from saml2.s_utils import exception_trace from saml2.s_utils import UnknownPrincipal from saml2.s_utils import UnsupportedBinding from saml2.s_utils import PolicyError from saml2.sigver import verify_redirect_signature from saml2.sigver import encrypt_cert_from_item logger = logging.getLogger("saml2.idp") class Cache(object): def __init__(self): self.user2uid = {} self.uid2user = {} def _expiration(timeout, tformat="%a, %d-%b-%Y %H:%M:%S GMT"): """ :param timeout: :param tformat: :return: """ if timeout == "now": return time_util.instant(tformat) elif timeout == "dawn": return time.strftime(tformat, time.gmtime(0)) else: # validity time should match lifetime of assertions return time_util.in_a_while(minutes=timeout, format=tformat) def get_eptid(idp, req_info, session): return idp.eptid.get(idp.config.entityid, req_info.sender(), session["permanent_id"], 
session["authn_auth"]) # ----------------------------------------------------------------------------- def dict2list_of_tuples(d): return [(k, v) for k, v in d.items()] # ----------------------------------------------------------------------------- class Service(object): def __init__(self, environ, start_response, user=None): self.environ = environ logger.debug("ENVIRON: %s" % environ) self.start_response = start_response self.user = user def unpack_redirect(self): if "QUERY_STRING" in self.environ: _qs = self.environ["QUERY_STRING"] return dict([(k, v[0]) for k, v in parse_qs(_qs).items()]) else: return None def unpack_post(self): _dict = parse_qs(get_post(self.environ)) logger.debug("unpack_post:: %s" % _dict) try: return dict([(k, v[0]) for k, v in _dict.items()]) except Exception: return None def unpack_soap(self): try: query = get_post(self.environ) return {"SAMLRequest": query, "RelayState": ""} except Exception: return None def unpack_either(self): if self.environ["REQUEST_METHOD"] == "GET": _dict = self.unpack_redirect() elif self.environ["REQUEST_METHOD"] == "POST": _dict = self.unpack_post() else: _dict = None logger.debug("_dict: %s" % _dict) return _dict def operation(self, saml_msg, binding): logger.debug("_operation: %s" % saml_msg) if not saml_msg or not 'SAMLRequest' in saml_msg: resp = BadRequest('Error parsing request or no request') return resp(self.environ, self.start_response) else: try: _encrypt_cert = encrypt_cert_from_item( saml_msg["req_info"].message) return self.do(saml_msg["SAMLRequest"], binding, saml_msg["RelayState"], encrypt_cert=_encrypt_cert) except KeyError: # Can live with no relay state return self.do(saml_msg["SAMLRequest"], binding) def artifact_operation(self, saml_msg): if not saml_msg: resp = BadRequest("Missing query") return resp(self.environ, self.start_response) else: # exchange artifact for request request = IDP.artifact2message(saml_msg["SAMLart"], "spsso") try: return self.do(request, BINDING_HTTP_ARTIFACT, 
saml_msg["RelayState"]) except KeyError: return self.do(request, BINDING_HTTP_ARTIFACT) def response(self, binding, http_args): if binding == BINDING_HTTP_ARTIFACT: resp = Redirect() else: resp = Response(http_args["data"], headers=http_args["headers"]) return resp(self.environ, self.start_response) def do(self, query, binding, relay_state="", encrypt_cert=None): pass def redirect(self): """ Expects a HTTP-redirect request """ _dict = self.unpack_redirect() return self.operation(_dict, BINDING_HTTP_REDIRECT) def post(self): """ Expects a HTTP-POST request """ _dict = self.unpack_post() return self.operation(_dict, BINDING_HTTP_POST) def artifact(self): # Can be either by HTTP_Redirect or HTTP_POST _dict = self.unpack_either() return self.artifact_operation(_dict) def soap(self): """ Single log out using HTTP_SOAP binding """ logger.debug("- SOAP -") _dict = self.unpack_soap() logger.debug("_dict: %s" % _dict) return self.operation(_dict, BINDING_SOAP) def uri(self): _dict = self.unpack_either() return self.operation(_dict, BINDING_SOAP) def not_authn(self, key, requested_authn_context): ruri = geturl(self.environ, query=False) return do_authentication(self.environ, self.start_response, authn_context=requested_authn_context, key=key, redirect_uri=ruri) # ----------------------------------------------------------------------------- REPOZE_ID_EQUIVALENT = "uid" FORM_SPEC = """<form name="myform" method="post" action="%s"> <input type="hidden" name="SAMLResponse" value="%s" /> <input type="hidden" name="RelayState" value="%s" /> </form>""" # ----------------------------------------------------------------------------- # === Single log in ==== # ----------------------------------------------------------------------------- class AuthenticationNeeded(Exception): def __init__(self, authn_context=None, *args, **kwargs): Exception.__init__(*args, **kwargs) self.authn_context = authn_context class SSO(Service): def __init__(self, environ, start_response, user=None): 
Service.__init__(self, environ, start_response, user) self.binding = "" self.response_bindings = None self.resp_args = {} self.binding_out = None self.destination = None self.req_info = None self.op_type = "" def verify_request(self, query, binding): """ :param query: The SAML query, transport encoded :param binding: Which binding the query came in over """ resp_args = {} if not query: logger.info("Missing QUERY") resp = Unauthorized('Unknown user') return resp_args, resp(self.environ, self.start_response) if not self.req_info: self.req_info = IDP.parse_authn_request(query, binding) logger.info("parsed OK") _authn_req = self.req_info.message logger.debug("%s" % _authn_req) try: self.binding_out, self.destination = IDP.pick_binding( "assertion_consumer_service", bindings=self.response_bindings, entity_id=_authn_req.issuer.text) except Exception as err: logger.error("Couldn't find receiver endpoint: %s" % err) raise logger.debug("Binding: %s, destination: %s" % (self.binding_out, self.destination)) resp_args = {} try: resp_args = IDP.response_args(_authn_req) _resp = None except UnknownPrincipal as excp: _resp = IDP.create_error_response(_authn_req.id, self.destination, excp) except UnsupportedBinding as excp: _resp = IDP.create_error_response(_authn_req.id, self.destination, excp) return resp_args, _resp def do(self, query, binding_in, relay_state="", encrypt_cert=None): """ :param query: The request :param binding_in: Which binding was used when receiving the query :param relay_state: The relay state provided by the SP :param encrypt_cert: Cert to use for encryption :return: A response """ try: resp_args, _resp = self.verify_request(query, binding_in) except UnknownPrincipal as excp: logger.error("UnknownPrincipal: %s" % (excp,)) resp = ServiceError("UnknownPrincipal: %s" % (excp,)) return resp(self.environ, self.start_response) except UnsupportedBinding as excp: logger.error("UnsupportedBinding: %s" % (excp,)) resp = ServiceError("UnsupportedBinding: %s" % 
(excp,)) return resp(self.environ, self.start_response) if not _resp: identity = USERS[self.user].copy() # identity["eduPersonTargetedID"] = get_eptid(IDP, query, session) logger.info("Identity: %s" % (identity,)) if REPOZE_ID_EQUIVALENT: identity[REPOZE_ID_EQUIVALENT] = self.user try: try: metod = self.environ["idp.authn"] except KeyError: pass else: resp_args["authn"] = metod _resp = IDP.create_authn_response( identity, userid=self.user, encrypt_cert=encrypt_cert, **resp_args) except Exception as excp: logging.error(exception_trace(excp)) resp = ServiceError("Exception: %s" % (excp,)) return resp(self.environ, self.start_response) logger.info("AuthNResponse: %s" % _resp) if self.op_type == "ecp": kwargs = {"soap_headers": [ ecp.Response( assertion_consumer_service_url=self.destination)]} else: kwargs = {} http_args = IDP.apply_binding(self.binding_out, "%s" % _resp, self.destination, relay_state, response=True, **kwargs) logger.debug("HTTPargs: %s" % http_args) return self.response(self.binding_out, http_args) def _store_request(self, saml_msg): logger.debug("_store_request: %s" % saml_msg) key = sha1(saml_msg["SAMLRequest"]).hexdigest() # store the AuthnRequest IDP.ticket[key] = saml_msg return key def redirect(self): """ This is the HTTP-redirect endpoint """ logger.info("--- In SSO Redirect ---") saml_msg = self.unpack_redirect() try: _key = saml_msg["key"] saml_msg = IDP.ticket[_key] self.req_info = saml_msg["req_info"] del IDP.ticket[_key] except KeyError: try: self.req_info = IDP.parse_authn_request(saml_msg["SAMLRequest"], BINDING_HTTP_REDIRECT) except KeyError: resp = BadRequest("Message signature verification failure") return resp(self.environ, self.start_response) _req = self.req_info.message if "SigAlg" in saml_msg and "Signature" in saml_msg: # Signed # request issuer = _req.issuer.text _certs = IDP.metadata.certs(issuer, "any", "signing") verified_ok = False for cert in _certs: if verify_redirect_signature(saml_msg, cert): verified_ok = True break if 
not verified_ok: resp = BadRequest("Message signature verification failure") return resp(self.environ, self.start_response) if self.user: if _req.force_authn: saml_msg["req_info"] = self.req_info key = self._store_request(saml_msg) return self.not_authn(key, _req.requested_authn_context) else: return self.operation(saml_msg, BINDING_HTTP_REDIRECT) else: saml_msg["req_info"] = self.req_info key = self._store_request(saml_msg) return self.not_authn(key, _req.requested_authn_context) else: return self.operation(saml_msg, BINDING_HTTP_REDIRECT) def post(self): """ The HTTP-Post endpoint """ logger.info("--- In SSO POST ---") saml_msg = self.unpack_either() self.req_info = IDP.parse_authn_request( saml_msg["SAMLRequest"], BINDING_HTTP_POST) _req = self.req_info.message if self.user: if _req.force_authn: saml_msg["req_info"] = self.req_info key = self._store_request(saml_msg) return self.not_authn(key, _req.requested_authn_context) else: return self.operation(saml_msg, BINDING_HTTP_POST) else: saml_msg["req_info"] = self.req_info key = self._store_request(saml_msg) return self.not_authn(key, _req.requested_authn_context) # def artifact(self): # # Can be either by HTTP_Redirect or HTTP_POST # _req = self._store_request(self.unpack_either()) # if isinstance(_req, basestring): # return self.not_authn(_req) # return self.artifact_operation(_req) def ecp(self): # The ECP interface logger.info("--- ECP SSO ---") resp = None try: authz_info = self.environ["HTTP_AUTHORIZATION"] if authz_info.startswith("Basic "): try: _info = base64.b64decode(authz_info[6:]) except TypeError: resp = Unauthorized() else: try: (user, passwd) = _info.split(":") if is_equal(PASSWD[user], passwd): resp = Unauthorized() self.user = user self.environ[ "idp.authn"] = AUTHN_BROKER.get_authn_by_accr( PASSWORD) except ValueError: resp = Unauthorized() else: resp = Unauthorized() except KeyError: resp = Unauthorized() if resp: return resp(self.environ, self.start_response) _dict = self.unpack_soap() 
self.response_bindings = [BINDING_PAOS] # Basic auth ?! self.op_type = "ecp" return self.operation(_dict, BINDING_SOAP) # ----------------------------------------------------------------------------- # === Authentication ==== # ----------------------------------------------------------------------------- def do_authentication(environ, start_response, authn_context, key, redirect_uri): """ Display the login form """ logger.debug("Do authentication") auth_info = AUTHN_BROKER.pick(authn_context) if len(auth_info): method, reference = auth_info[0] logger.debug("Authn chosen: %s (ref=%s)" % (method, reference)) return method(environ, start_response, reference, key, redirect_uri) else: resp = Unauthorized("No usable authentication method") return resp(environ, start_response) # ----------------------------------------------------------------------------- PASSWD = { "daev0001": "qwerty", "haho0032": "qwerty", "roland": "dianakra", "babs": "howes", "upper": "crust"} def username_password_authn(environ, start_response, reference, key, redirect_uri): """ Display the login form """ logger.info("The login page") headers = [] resp = Response(mako_template="login.mako", template_lookup=LOOKUP, headers=headers) argv = { "action": "/verify", "login": "", "password": "", "key": key, "authn_reference": reference, "redirect_uri": redirect_uri } logger.info("do_authentication argv: %s" % argv) return resp(environ, start_response, **argv) def verify_username_and_password(dic): global PASSWD # verify username and password if PASSWD[dic["login"][0]] == dic["password"][0]: return True, dic["login"][0] else: return False, "" def do_verify(environ, start_response, _): query = parse_qs(get_post(environ)) logger.debug("do_verify: %s" % query) try: _ok, user = verify_username_and_password(query) except KeyError: _ok = False user = None if not _ok: resp = Unauthorized("Unknown user or wrong password") else: uid = rndstr(24) IDP.cache.uid2user[uid] = user IDP.cache.user2uid[user] = uid 
logger.debug("Register %s under '%s'" % (user, uid)) kaka = set_cookie("idpauthn", "/", uid, query["authn_reference"][0]) lox = "%s?id=%s&key=%s" % (query["redirect_uri"][0], uid, query["key"][0]) logger.debug("Redirect => %s" % lox) resp = Redirect(lox, headers=[kaka], content="text/html") return resp(environ, start_response) def not_found(environ, start_response): """Called if no URL matches.""" resp = NotFound() return resp(environ, start_response) # ----------------------------------------------------------------------------- # === Single log out === # ----------------------------------------------------------------------------- # def _subject_sp_info(req_info): # # look for the subject # subject = req_info.subject_id() # subject = subject.text.strip() # sp_entity_id = req_info.message.issuer.text.strip() # return subject, sp_entity_id class SLO(Service): def do(self, request, binding, relay_state="", encrypt_cert=None): logger.info("--- Single Log Out Service ---") try: _, body = request.split("\n") logger.debug("req: '%s'" % body) req_info = IDP.parse_logout_request(body, binding) except Exception as exc: logger.error("Bad request: %s" % exc) resp = BadRequest("%s" % exc) return resp(self.environ, self.start_response) msg = req_info.message if msg.name_id: lid = IDP.ident.find_local_id(msg.name_id) logger.info("local identifier: %s" % lid) if lid in IDP.cache.user2uid: uid = IDP.cache.user2uid[lid] if uid in IDP.cache.uid2user: del IDP.cache.uid2user[uid] del IDP.cache.user2uid[lid] # remove the authentication try: IDP.session_db.remove_authn_statements(msg.name_id) except KeyError as exc: logger.error("ServiceError: %s" % exc) resp = ServiceError("%s" % exc) return resp(self.environ, self.start_response) resp = IDP.create_logout_response(msg, [binding]) try: hinfo = IDP.apply_binding(binding, "%s" % resp, "", relay_state) except Exception as exc: logger.error("ServiceError: %s" % exc) resp = ServiceError("%s" % exc) return resp(self.environ, 
self.start_response) #_tlh = dict2list_of_tuples(hinfo["headers"]) delco = delete_cookie(self.environ, "idpauthn") if delco: hinfo["headers"].append(delco) logger.info("Header: %s" % (hinfo["headers"],)) resp = Response(hinfo["data"], headers=hinfo["headers"]) return resp(self.environ, self.start_response) # ---------------------------------------------------------------------------- # Manage Name ID service # ---------------------------------------------------------------------------- class NMI(Service): def do(self, query, binding, relay_state="", encrypt_cert=None): logger.info("--- Manage Name ID Service ---") req = IDP.parse_manage_name_id_request(query, binding) request = req.message # Do the necessary stuff name_id = IDP.ident.handle_manage_name_id_request( request.name_id, request.new_id, request.new_encrypted_id, request.terminate) logger.debug("New NameID: %s" % name_id) _resp = IDP.create_manage_name_id_response(request) # It's using SOAP binding hinfo = IDP.apply_binding(BINDING_SOAP, "%s" % _resp, "", relay_state, response=True) resp = Response(hinfo["data"], headers=hinfo["headers"]) return resp(self.environ, self.start_response) # ---------------------------------------------------------------------------- # === Assertion ID request === # ---------------------------------------------------------------------------- # Only URI binding class AIDR(Service): def do(self, aid, binding, relay_state="", encrypt_cert=None): logger.info("--- Assertion ID Service ---") try: assertion = IDP.create_assertion_id_request_response(aid) except Unknown: resp = NotFound(aid) return resp(self.environ, self.start_response) hinfo = IDP.apply_binding(BINDING_URI, "%s" % assertion, response=True) logger.debug("HINFO: %s" % hinfo) resp = Response(hinfo["data"], headers=hinfo["headers"]) return resp(self.environ, self.start_response) def operation(self, _dict, binding, **kwargs): logger.debug("_operation: %s" % _dict) if not _dict or "ID" not in _dict: resp = 
BadRequest('Error parsing request or no request') return resp(self.environ, self.start_response) return self.do(_dict["ID"], binding, **kwargs) # ---------------------------------------------------------------------------- # === Artifact resolve service === # ---------------------------------------------------------------------------- class ARS(Service): def do(self, request, binding, relay_state="", encrypt_cert=None): _req = IDP.parse_artifact_resolve(request, binding) msg = IDP.create_artifact_response(_req, _req.artifact.text) hinfo = IDP.apply_binding(BINDING_SOAP, "%s" % msg, "", "", response=True) resp = Response(hinfo["data"], headers=hinfo["headers"]) return resp(self.environ, self.start_response) # ---------------------------------------------------------------------------- # === Authn query service === # ---------------------------------------------------------------------------- # Only SOAP binding class AQS(Service): def do(self, request, binding, relay_state="", encrypt_cert=None): logger.info("--- Authn Query Service ---") _req = IDP.parse_authn_query(request, binding) _query = _req.message msg = IDP.create_authn_query_response(_query.subject, _query.requested_authn_context, _query.session_index) logger.debug("response: %s" % msg) hinfo = IDP.apply_binding(BINDING_SOAP, "%s" % msg, "", "", response=True) resp = Response(hinfo["data"], headers=hinfo["headers"]) return resp(self.environ, self.start_response) # ---------------------------------------------------------------------------- # === Attribute query service === # ---------------------------------------------------------------------------- # Only SOAP binding class ATTR(Service): def do(self, request, binding, relay_state="", encrypt_cert=None): logger.info("--- Attribute Query Service ---") _req = IDP.parse_attribute_query(request, binding) _query = _req.message name_id = _query.subject.name_id uid = name_id.text logger.debug("Local uid: %s" % uid) identity = EXTRA[self.user] # Comes in over 
SOAP so only need to construct the response args = IDP.response_args(_query, [BINDING_SOAP]) msg = IDP.create_attribute_response(identity, name_id=name_id, **args) logger.debug("response: %s" % msg) hinfo = IDP.apply_binding(BINDING_SOAP, "%s" % msg, "", "", response=True) resp = Response(hinfo["data"], headers=hinfo["headers"]) return resp(self.environ, self.start_response) # ---------------------------------------------------------------------------- # Name ID Mapping service # When an entity that shares an identifier for a principal with an identity # provider wishes to obtain a name identifier for the same principal in a # particular format or federation namespace, it can send a request to # the identity provider using this protocol. # ---------------------------------------------------------------------------- class NIM(Service): def do(self, query, binding, relay_state="", encrypt_cert=None): req = IDP.parse_name_id_mapping_request(query, binding) request = req.message # Do the necessary stuff try: name_id = IDP.ident.handle_name_id_mapping_request( request.name_id, request.name_id_policy) except Unknown: resp = BadRequest("Unknown entity") return resp(self.environ, self.start_response) except PolicyError: resp = BadRequest("Unknown entity") return resp(self.environ, self.start_response) info = IDP.response_args(request) _resp = IDP.create_name_id_mapping_response(name_id, **info) # Only SOAP hinfo = IDP.apply_binding(BINDING_SOAP, "%s" % _resp, "", "", response=True) resp = Response(hinfo["data"], headers=hinfo["headers"]) return resp(self.environ, self.start_response) # ---------------------------------------------------------------------------- # Cookie handling # ---------------------------------------------------------------------------- def info_from_cookie(kaka): logger.debug("KAKA: %s" % kaka) if kaka: cookie_obj = SimpleCookie(kaka) morsel = cookie_obj.get("idpauthn", None) if morsel: try: key, ref = base64.b64decode(morsel.value).split(":") return 
IDP.cache.uid2user[key], ref except (TypeError, KeyError): return None, None else: logger.debug("No idpauthn cookie") return None, None def delete_cookie(environ, name): kaka = environ.get("HTTP_COOKIE", '') logger.debug("delete KAKA: %s" % kaka) if kaka: cookie_obj = SimpleCookie(kaka) morsel = cookie_obj.get(name, None) cookie = SimpleCookie() cookie[name] = "" cookie[name]['path'] = "/" logger.debug("Expire: %s" % morsel) cookie[name]["expires"] = _expiration("dawn") return tuple(cookie.output().split(": ", 1)) return None def set_cookie(name, _, *args): cookie = SimpleCookie() cookie[name] = base64.b64encode(":".join(args)) cookie[name]['path'] = "/" cookie[name]["expires"] = _expiration(5) # 5 minutes from now logger.debug("Cookie expires: %s" % cookie[name]["expires"]) return tuple(cookie.output().split(": ", 1)) # ---------------------------------------------------------------------------- # map urls to functions AUTHN_URLS = [ # sso (r'sso/post$', (SSO, "post")), (r'sso/post/(.*)$', (SSO, "post")), (r'sso/redirect$', (SSO, "redirect")), (r'sso/redirect/(.*)$', (SSO, "redirect")), (r'sso/art$', (SSO, "artifact")), (r'sso/art/(.*)$', (SSO, "artifact")), # slo (r'slo/redirect$', (SLO, "redirect")), (r'slo/redirect/(.*)$', (SLO, "redirect")), (r'slo/post$', (SLO, "post")), (r'slo/post/(.*)$', (SLO, "post")), (r'slo/soap$', (SLO, "soap")), (r'slo/soap/(.*)$', (SLO, "soap")), # (r'airs$', (AIDR, "uri")), (r'ars$', (ARS, "soap")), # mni (r'mni/post$', (NMI, "post")), (r'mni/post/(.*)$', (NMI, "post")), (r'mni/redirect$', (NMI, "redirect")), (r'mni/redirect/(.*)$', (NMI, "redirect")), (r'mni/art$', (NMI, "artifact")), (r'mni/art/(.*)$', (NMI, "artifact")), (r'mni/soap$', (NMI, "soap")), (r'mni/soap/(.*)$', (NMI, "soap")), # nim (r'nim$', (NIM, "soap")), (r'nim/(.*)$', (NIM, "soap")), # (r'aqs$', (AQS, "soap")), (r'attr$', (ATTR, "soap")) ] NON_AUTHN_URLS = [ #(r'login?(.*)$', do_authentication), (r'verify?(.*)$', do_verify), (r'sso/ecp$', (SSO, "ecp")), ] # 
# ----------------------------------------------------------------------------


def metadata(environ, start_response):
    """Serve this IdP's SAML metadata as XML.

    Configuration comes from the module-level ``args`` object; on any
    failure the error is logged and a 404 is returned instead.
    """
    try:
        path = args.path
        if path is None or len(path) == 0:
            # Default to the directory containing this file.
            path = os.path.dirname(os.path.abspath(__file__))
        if path[-1] != "/":
            path += "/"
        metadata = create_metadata_string(path + args.config, IDP.config,
                                          args.valid, args.cert, args.keyfile,
                                          args.id, args.name, args.sign)
        start_response('200 OK', [('Content-Type', "text/xml")])
        return metadata
    except Exception as ex:
        logger.error("An error occured while creating metadata:" + ex.message)
        return not_found(environ, start_response)


def staticfile(environ, start_response):
    """Serve a file from the static root, refusing paths that escape it.

    Fixes the original traversal guard, which compared the resolved path
    against raw ``args.path``: that value is ``None`` under the default
    uwsgi setup (so every static request 404'd via the broad except) and
    was never normalized, so a symlinked or unnormalized base defeated the
    ``startswith`` check.
    """
    try:
        base = args.path
        if base is None or len(base) == 0:
            base = os.path.dirname(os.path.abspath(__file__))
        # Normalize the base once, then resolve the requested path against
        # it and require the result to remain inside the base directory.
        base = os.path.realpath(base)
        requested = environ.get('PATH_INFO', '').lstrip('/')
        path = os.path.realpath(os.path.join(base, requested))
        if not path.startswith(base + os.sep):
            resp = Unauthorized()
            return resp(environ, start_response)
        # NOTE(review): Content-Type text/xml is kept from the original even
        # though arbitrary static files may be served — confirm intent.
        start_response('200 OK', [('Content-Type', "text/xml")])
        return open(path, 'r').read()
    except Exception as ex:
        logger.error("An error occured while serving a static file:"
                     + ex.message)
        return not_found(environ, start_response)


def application(environ, start_response):
    """
    The main WSGI application. Dispatch the current request to
    the functions from above and store the regular expression
    captures in the WSGI environment as `myapp.url_args` so that
    the functions from above can access the url placeholders.

    If nothing matches, call the `not_found` function.

    :param environ: The HTTP application environment
    :param start_response: The application to run when the handling of the
        request is done
    :return: The response as a list of lines
    """
    path = environ.get('PATH_INFO', '').lstrip('/')

    if path == "metadata":
        return metadata(environ, start_response)

    kaka = environ.get("HTTP_COOKIE", None)
    logger.info("<application> PATH: %s" % path)

    if kaka:
        logger.info("= KAKA =")
        user, authn_ref = info_from_cookie(kaka)
        if authn_ref:
            environ["idp.authn"] = AUTHN_BROKER[authn_ref]
    else:
        # No cookie: fall back to the ?id=<uid> query parameter set by
        # do_verify's redirect.
        try:
            query = parse_qs(environ["QUERY_STRING"])
            logger.debug("QUERY: %s" % query)
            user = IDP.cache.uid2user[query["id"][0]]
        except KeyError:
            user = None

    url_patterns = AUTHN_URLS
    if not user:
        logger.info("-- No USER --")
        # insert NON_AUTHN_URLS first in case there is no user
        url_patterns = NON_AUTHN_URLS + url_patterns

    for regex, callback in url_patterns:
        match = re.search(regex, path)
        if match is not None:
            try:
                environ['myapp.url_args'] = match.groups()[0]
            except IndexError:
                environ['myapp.url_args'] = path

            logger.debug("Callback: %s" % (callback,))
            if isinstance(callback, tuple):
                # (ServiceClass, method-name) pair: instantiate and invoke.
                cls = callback[0](environ, start_response, user)
                func = getattr(cls, callback[1])
                return func()
            return callback(environ, start_response, user)

    if re.search(r'static/.*', path) is not None:
        return staticfile(environ, start_response)
    return not_found(environ, start_response)


# ----------------------------------------------------------------------------
# allow uwsgi or gunicorn mount
# by moving some initialization out of __name__ == '__main__' section.
# uwsgi -s 0.0.0.0:8088 --protocol http --callable application --module idp args = type('Config', (object,), {}) args.config = 'idp_conf' args.mako_root = './' args.path = None import socket from idp_user import USERS from idp_user import EXTRA from mako.lookup import TemplateLookup AUTHN_BROKER = AuthnBroker() AUTHN_BROKER.add(authn_context_class_ref(PASSWORD), username_password_authn, 10, "http://%s" % socket.gethostname()) AUTHN_BROKER.add(authn_context_class_ref(UNSPECIFIED), "", 0, "http://%s" % socket.gethostname()) CONFIG = importlib.import_module(args.config) IDP = server.Server(args.config, cache=Cache()) IDP.ticket = {} # ---------------------------------------------------------------------------- if __name__ == '__main__': from wsgiref.simple_server import make_server parser = argparse.ArgumentParser() parser.add_argument('-p', dest='path', help='Path to configuration file.') parser.add_argument('-v', dest='valid', help="How long, in days, the metadata is valid from " "the time of creation") parser.add_argument('-c', dest='cert', help='certificate') parser.add_argument('-i', dest='id', help="The ID of the entities descriptor") parser.add_argument('-k', dest='keyfile', help="A file with a key to sign the metadata with") parser.add_argument('-n', dest='name') parser.add_argument('-s', dest='sign', action='store_true', help="sign the metadata") parser.add_argument('-m', dest='mako_root', default="./") parser.add_argument(dest="config") args = parser.parse_args() _rot = args.mako_root LOOKUP = TemplateLookup(directories=[_rot + 'templates', _rot + 'htdocs'], module_directory=_rot + 'modules', input_encoding='utf-8', output_encoding='utf-8') HOST = CONFIG.HOST PORT = CONFIG.PORT SRV = make_server(HOST, PORT, application) print "IdP listening on %s:%s" % (HOST, PORT) SRV.serve_forever() else: _rot = args.mako_root LOOKUP = TemplateLookup(directories=[_rot + 'templates', _rot + 'htdocs'], module_directory=_rot + 'modules', input_encoding='utf-8', 
output_encoding='utf-8')
apache-2.0
BorisJeremic/Real-ESSI-Examples
analytic_solution/test_cases/4NodeANDES/cantilever_different_Poisson/NumberOfDivision1/PoissonRatio0.49/compare_HDF5_ALL.py
424
3382
#!/usr/bin/python import h5py import sys import numpy as np import os import re import random # find the path to my own python function: cur_dir=os.getcwd() sep='test_cases' test_DIR=cur_dir.split(sep,1)[0] scriptDIR=test_DIR+'compare_function' sys.path.append(scriptDIR) # import my own function for color and comparator from mycomparator import * from mycolor_fun import * # the real essi hdf5 results h5_result_new = sys.argv[1] h5_result_ori = sys.argv[2] disp_pass_or_fail=h5diff_disp(h5_result_ori,h5_result_new) Gauss_pass_or_fail = 1 try: Gauss_pass_or_fail=h5diff_Gauss_output(h5_result_ori,h5_result_new) except KeyError: pass Element_Output_pass_or_fail = 1 try: Element_Output_pass_or_fail=h5diff_Element_output(h5_result_ori,h5_result_new) except KeyError: pass if disp_pass_or_fail and Gauss_pass_or_fail and Element_Output_pass_or_fail: print headOK(), "All hdf5 results are the same." print headOKCASE(),"-----------Done this case!-----------------" else: if disp_pass_or_fail==0: print headFailed(),"-----------Displacement has mismatches!-----------------" if Gauss_pass_or_fail==0: print headFailed(),"-----------StressStrain has mismatches!-----------------" if Element_Output_pass_or_fail==0: print headFailed(),"-----------Element output has mismatches!-----------------" # # The allowable tolerance between the ori_vals and new_vals values. 
# tolerance=1e-5 # machine_epsilon=1e-16 # ori_vals=[] # new_vals=[] # ori_vals.append(find_max_disp(h5_result_ori,0)) # new_vals.append(find_max_disp(h5_result_new,0)) # # if multiple steps, compare the max_disp of random steps # Nstep = find_disp_Nstep(h5_result_ori) # if Nstep>5 : # for i in xrange(1,4): # test_step=random.randint(1,Nstep-1) # ori_vals.append(find_max_disp(h5_result_ori,test_step)) # new_vals.append(find_max_disp(h5_result_new,test_step)) # # calculate the errors # errors=[] # for index, x in enumerate(ori_vals): # if(abs(x))>machine_epsilon: # errors.append(abs((new_vals[index]-x)/x)) # else: # errors.append(machine_epsilon) # # compare and form the flags # flags=[] # for item in errors: # if abs(item)<tolerance: # flags.append('pass') # else: # flags.append('failed') # # print the results # case_flag=1 # print headrun() , "-----------Testing results-----------------" # print headstep() ,'{0} {1} {2} {3}'.format('back_value ','new_value ','error ','flag') # for index, x in enumerate(errors): # if(abs(x)<tolerance): # print headOK() ,'{0:e} {1:e} {2:0.2f} {3}'.format(ori_vals[index],new_vals[index], x, flags[index] ) # else: # case_flag=0 # print headFailed() ,'{0:e} {1:e} {2:0.2f} {3}'.format(ori_vals[index],new_vals[index], x, flags[index] ) # if(case_flag==1): # print headOKCASE(),"-----------Done this case!-----------------" # legacy backup # automatically find the script directory. # sys.path.append("/home/yuan/Dropbox/3essi_self_verification/test_suite/scripts" ) # script_dir=sys.argv[1] # print headstart() , "Running test cases..." # print headlocation(), os.path.dirname(os.path.abspath(__file__)) # file_in=open("ori_vals_values.txt","r") # Input the 1st line, which is the ori_vals value. # ori_vals= float(file_in.readline()) # Input the 2nd line, which is the HDF5 output filename. # new_vals=find_max_disp(file_in.readline()); # file_in.close()
cc0-1.0
jordiclariana/ansible
lib/ansible/modules/system/selinux_permissive.py
48
4345
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2015, Michael Scherer <misc@zarb.org> # inspired by code of github.com/dandiker/ # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'version': '1.0'} DOCUMENTATION = ''' --- module: selinux_permissive short_description: Change permissive domain in SELinux policy description: - Add and remove domain from the list of permissive domain. 
version_added: "2.0" options: domain: description: - "the domain that will be added or removed from the list of permissive domains" required: true permissive: description: - "indicate if the domain should or should not be set as permissive" required: true choices: [ 'True', 'False' ] no_reload: description: - "automatically reload the policy after a change" - "default is set to 'false' as that's what most people would want after changing one domain" - "Note that this doesn't work on older version of the library (example EL 6), the module will silently ignore it in this case" required: false default: False choices: [ 'True', 'False' ] store: description: - "name of the SELinux policy store to use" required: false default: null notes: - Requires a version of SELinux recent enough ( ie EL 6 or newer ) requirements: [ policycoreutils-python ] author: Michael Scherer <misc@zarb.org> ''' EXAMPLES = ''' - selinux_permissive: name: httpd_t permissive: true ''' HAVE_SEOBJECT = False try: import seobject HAVE_SEOBJECT = True except ImportError: pass from ansible.module_utils.basic import * from ansible.module_utils.pycompat24 import get_exception def main(): module = AnsibleModule( argument_spec=dict( domain=dict(aliases=['name'], required=True), store=dict(required=False, default=''), permissive=dict(type='bool', required=True), no_reload=dict(type='bool', required=False, default=False), ), supports_check_mode=True ) # global vars changed = False store = module.params['store'] permissive = module.params['permissive'] domain = module.params['domain'] no_reload = module.params['no_reload'] if not HAVE_SEOBJECT: module.fail_json(changed=False, msg="policycoreutils-python required for this module") try: permissive_domains = seobject.permissiveRecords(store) except ValueError: e = get_exception() module.fail_json(domain=domain, msg=str(e)) # not supported on EL 6 if 'set_reload' in dir(permissive_domains): permissive_domains.set_reload(not no_reload) try: all_domains = 
permissive_domains.get_all() except ValueError: e = get_exception() module.fail_json(domain=domain, msg=str(e)) if permissive: if domain not in all_domains: if not module.check_mode: try: permissive_domains.add(domain) except ValueError: e = get_exception() module.fail_json(domain=domain, msg=str(e)) changed = True else: if domain in all_domains: if not module.check_mode: try: permissive_domains.delete(domain) except ValueError: e = get_exception() module.fail_json(domain=domain, msg=str(e)) changed = True module.exit_json(changed=changed, store=store, permissive=permissive, domain=domain) if __name__ == '__main__': main()
gpl-3.0
hankcs/CS224n
assignment2/utils/parser_utils.py
7
15552
"""Utilities for training the dependency parser. You do not need to read/understand this code """ import time import os import logging from collections import Counter from general_utils import logged_loop, get_minibatches from q2_parser_transitions import PartialParse, minibatch_parse import numpy as np P_PREFIX = '<p>:' L_PREFIX = '<l>:' UNK = '<UNK>' NULL = '<NULL>' ROOT = '<ROOT>' class Config(object): language = 'english' with_punct = True unlabeled = True lowercase = True use_pos = True use_dep = True use_dep = use_dep and (not unlabeled) data_path = './data' train_file = 'train.conll' dev_file = 'dev.conll' test_file = 'test.conll' embedding_file = './data/en-cw.txt' class Parser(object): """Contains everything needed for transition-based dependency parsing except for the model""" def __init__(self, dataset): root_labels = list([l for ex in dataset for (h, l) in zip(ex['head'], ex['label']) if h == 0]) counter = Counter(root_labels) if len(counter) > 1: logging.info('Warning: more than one root label') logging.info(counter) self.root_label = counter.most_common()[0][0] deprel = [self.root_label] + list(set([w for ex in dataset for w in ex['label'] if w != self.root_label])) tok2id = {L_PREFIX + l: i for (i, l) in enumerate(deprel)} tok2id[L_PREFIX + NULL] = self.L_NULL = len(tok2id) config = Config() self.unlabeled = config.unlabeled self.with_punct = config.with_punct self.use_pos = config.use_pos self.use_dep = config.use_dep self.language = config.language if self.unlabeled: trans = ['L', 'R', 'S'] self.n_deprel = 1 else: trans = ['L-' + l for l in deprel] + ['R-' + l for l in deprel] + ['S'] self.n_deprel = len(deprel) self.n_trans = len(trans) self.tran2id = {t: i for (i, t) in enumerate(trans)} self.id2tran = {i: t for (i, t) in enumerate(trans)} # logging.info('Build dictionary for part-of-speech tags.') tok2id.update(build_dict([P_PREFIX + w for ex in dataset for w in ex['pos']], offset=len(tok2id))) tok2id[P_PREFIX + UNK] = self.P_UNK = len(tok2id) 
tok2id[P_PREFIX + NULL] = self.P_NULL = len(tok2id) tok2id[P_PREFIX + ROOT] = self.P_ROOT = len(tok2id) # logging.info('Build dictionary for words.') tok2id.update(build_dict([w for ex in dataset for w in ex['word']], offset=len(tok2id))) tok2id[UNK] = self.UNK = len(tok2id) tok2id[NULL] = self.NULL = len(tok2id) tok2id[ROOT] = self.ROOT = len(tok2id) self.tok2id = tok2id self.id2tok = {v: k for (k, v) in tok2id.items()} self.n_features = 18 + (18 if config.use_pos else 0) + (12 if config.use_dep else 0) self.n_tokens = len(tok2id) def vectorize(self, examples): vec_examples = [] for ex in examples: word = [self.ROOT] + [self.tok2id[w] if w in self.tok2id else self.UNK for w in ex['word']] pos = [self.P_ROOT] + [self.tok2id[P_PREFIX + w] if P_PREFIX + w in self.tok2id else self.P_UNK for w in ex['pos']] head = [-1] + ex['head'] label = [-1] + [self.tok2id[L_PREFIX + w] if L_PREFIX + w in self.tok2id else -1 for w in ex['label']] vec_examples.append({'word': word, 'pos': pos, 'head': head, 'label': label}) return vec_examples def extract_features(self, stack, buf, arcs, ex): if stack[0] == "ROOT": stack[0] = 0 def get_lc(k): return sorted([arc[1] for arc in arcs if arc[0] == k and arc[1] < k]) def get_rc(k): return sorted([arc[1] for arc in arcs if arc[0] == k and arc[1] > k], reverse=True) p_features = [] l_features = [] features = [self.NULL] * (3 - len(stack)) + [ex['word'][x] for x in stack[-3:]] features += [ex['word'][x] for x in buf[:3]] + [self.NULL] * (3 - len(buf)) if self.use_pos: p_features = [self.P_NULL] * (3 - len(stack)) + [ex['pos'][x] for x in stack[-3:]] p_features += [ex['pos'][x] for x in buf[:3]] + [self.P_NULL] * (3 - len(buf)) for i in xrange(2): if i < len(stack): k = stack[-i-1] lc = get_lc(k) rc = get_rc(k) llc = get_lc(lc[0]) if len(lc) > 0 else [] rrc = get_rc(rc[0]) if len(rc) > 0 else [] features.append(ex['word'][lc[0]] if len(lc) > 0 else self.NULL) features.append(ex['word'][rc[0]] if len(rc) > 0 else self.NULL) 
features.append(ex['word'][lc[1]] if len(lc) > 1 else self.NULL) features.append(ex['word'][rc[1]] if len(rc) > 1 else self.NULL) features.append(ex['word'][llc[0]] if len(llc) > 0 else self.NULL) features.append(ex['word'][rrc[0]] if len(rrc) > 0 else self.NULL) if self.use_pos: p_features.append(ex['pos'][lc[0]] if len(lc) > 0 else self.P_NULL) p_features.append(ex['pos'][rc[0]] if len(rc) > 0 else self.P_NULL) p_features.append(ex['pos'][lc[1]] if len(lc) > 1 else self.P_NULL) p_features.append(ex['pos'][rc[1]] if len(rc) > 1 else self.P_NULL) p_features.append(ex['pos'][llc[0]] if len(llc) > 0 else self.P_NULL) p_features.append(ex['pos'][rrc[0]] if len(rrc) > 0 else self.P_NULL) if self.use_dep: l_features.append(ex['label'][lc[0]] if len(lc) > 0 else self.L_NULL) l_features.append(ex['label'][rc[0]] if len(rc) > 0 else self.L_NULL) l_features.append(ex['label'][lc[1]] if len(lc) > 1 else self.L_NULL) l_features.append(ex['label'][rc[1]] if len(rc) > 1 else self.L_NULL) l_features.append(ex['label'][llc[0]] if len(llc) > 0 else self.L_NULL) l_features.append(ex['label'][rrc[0]] if len(rrc) > 0 else self.L_NULL) else: features += [self.NULL] * 6 if self.use_pos: p_features += [self.P_NULL] * 6 if self.use_dep: l_features += [self.L_NULL] * 6 features += p_features + l_features assert len(features) == self.n_features return features def get_oracle(self, stack, buf, ex): if len(stack) < 2: return self.n_trans - 1 i0 = stack[-1] i1 = stack[-2] h0 = ex['head'][i0] h1 = ex['head'][i1] l0 = ex['label'][i0] l1 = ex['label'][i1] if self.unlabeled: if (i1 > 0) and (h1 == i0): return 0 elif (i1 >= 0) and (h0 == i1) and \ (not any([x for x in buf if ex['head'][x] == i0])): return 1 else: return None if len(buf) == 0 else 2 else: if (i1 > 0) and (h1 == i0): return l1 if (l1 >= 0) and (l1 < self.n_deprel) else None elif (i1 >= 0) and (h0 == i1) and \ (not any([x for x in buf if ex['head'][x] == i0])): return l0 + self.n_deprel if (l0 >= 0) and (l0 < self.n_deprel) else None 
else: return None if len(buf) == 0 else self.n_trans - 1 def create_instances(self, examples): all_instances = [] succ = 0 for id, ex in enumerate(logged_loop(examples)): n_words = len(ex['word']) - 1 # arcs = {(h, t, label)} stack = [0] buf = [i + 1 for i in xrange(n_words)] arcs = [] instances = [] for i in xrange(n_words * 2): gold_t = self.get_oracle(stack, buf, ex) if gold_t is None: break legal_labels = self.legal_labels(stack, buf) assert legal_labels[gold_t] == 1 instances.append((self.extract_features(stack, buf, arcs, ex), legal_labels, gold_t)) if gold_t == self.n_trans - 1: stack.append(buf[0]) buf = buf[1:] elif gold_t < self.n_deprel: arcs.append((stack[-1], stack[-2], gold_t)) stack = stack[:-2] + [stack[-1]] else: arcs.append((stack[-2], stack[-1], gold_t - self.n_deprel)) stack = stack[:-1] else: succ += 1 all_instances += instances return all_instances def legal_labels(self, stack, buf): labels = ([1] if len(stack) > 2 else [0]) * self.n_deprel labels += ([1] if len(stack) >= 2 else [0]) * self.n_deprel labels += [1] if len(buf) > 0 else [0] return labels def parse(self, dataset, eval_batch_size=5000): sentences = [] sentence_id_to_idx = {} for i, example in enumerate(dataset): n_words = len(example['word']) - 1 sentence = [j + 1 for j in range(n_words)] sentences.append(sentence) sentence_id_to_idx[id(sentence)] = i model = ModelWrapper(self, dataset, sentence_id_to_idx) dependencies = minibatch_parse(sentences, model, eval_batch_size) UAS = all_tokens = 0.0 for i, ex in enumerate(dataset): head = [-1] * len(ex['word']) for h, t, in dependencies[i]: head[t] = h for pred_h, gold_h, gold_l, pos in \ zip(head[1:], ex['head'][1:], ex['label'][1:], ex['pos'][1:]): assert self.id2tok[pos].startswith(P_PREFIX) pos_str = self.id2tok[pos][len(P_PREFIX):] if (self.with_punct) or (not punct(self.language, pos_str)): UAS += 1 if pred_h == gold_h else 0 all_tokens += 1 UAS /= all_tokens return UAS, dependencies class ModelWrapper(object): def __init__(self, 
parser, dataset, sentence_id_to_idx): self.parser = parser self.dataset = dataset self.sentence_id_to_idx = sentence_id_to_idx def predict(self, partial_parses): mb_x = [self.parser.extract_features(p.stack, p.buffer, p.dependencies, self.dataset[self.sentence_id_to_idx[id(p.sentence)]]) for p in partial_parses] mb_x = np.array(mb_x).astype('int32') mb_l = [self.parser.legal_labels(p.stack, p.buffer) for p in partial_parses] pred = self.parser.model.predict_on_batch(self.parser.session, mb_x) pred = np.argmax(pred + 10000 * np.array(mb_l).astype('float32'), 1) pred = ["S" if p == 2 else ("LA" if p == 0 else "RA") for p in pred] return pred def read_conll(in_file, lowercase=False, max_example=None): examples = [] with open(in_file) as f: word, pos, head, label = [], [], [], [] for line in f.readlines(): sp = line.strip().split('\t') if len(sp) == 10: if '-' not in sp[0]: word.append(sp[1].lower() if lowercase else sp[1]) pos.append(sp[4]) head.append(int(sp[6])) label.append(sp[7]) elif len(word) > 0: examples.append({'word': word, 'pos': pos, 'head': head, 'label': label}) word, pos, head, label = [], [], [], [] if (max_example is not None) and (len(examples) == max_example): break if len(word) > 0: examples.append({'word': word, 'pos': pos, 'head': head, 'label': label}) return examples def build_dict(keys, n_max=None, offset=0): count = Counter() for key in keys: count[key] += 1 ls = count.most_common() if n_max is None \ else count.most_common(n_max) return {w[0]: index + offset for (index, w) in enumerate(ls)} def punct(language, pos): if language == 'english': return pos in ["''", ",", ".", ":", "``", "-LRB-", "-RRB-"] elif language == 'chinese': return pos == 'PU' elif language == 'french': return pos == 'PUNC' elif language == 'german': return pos in ["$.", "$,", "$["] elif language == 'spanish': # http://nlp.stanford.edu/software/spanish-faq.shtml return pos in ["f0", "faa", "fat", "fc", "fd", "fe", "fg", "fh", "fia", "fit", "fp", "fpa", "fpt", "fs", "ft", 
"fx", "fz"] elif language == 'universal': return pos == 'PUNCT' else: raise ValueError('language: %s is not supported.' % language) def minibatches(data, batch_size): x = np.array([d[0] for d in data]) y = np.array([d[2] for d in data]) one_hot = np.zeros((y.size, 3)) one_hot[np.arange(y.size), y] = 1 return get_minibatches([x, one_hot], batch_size) def load_and_preprocess_data(reduced=True): config = Config() print "Loading data...", start = time.time() train_set = read_conll(os.path.join(config.data_path, config.train_file), lowercase=config.lowercase) dev_set = read_conll(os.path.join(config.data_path, config.dev_file), lowercase=config.lowercase) test_set = read_conll(os.path.join(config.data_path, config.test_file), lowercase=config.lowercase) if reduced: train_set = train_set[:1000] dev_set = dev_set[:500] test_set = test_set[:500] print "took {:.2f} seconds".format(time.time() - start) print "Building parser...", start = time.time() parser = Parser(train_set) print "took {:.2f} seconds".format(time.time() - start) print "Loading pretrained embeddings...", start = time.time() word_vectors = {} for line in open(config.embedding_file).readlines(): sp = line.strip().split() word_vectors[sp[0]] = [float(x) for x in sp[1:]] embeddings_matrix = np.asarray(np.random.normal(0, 0.9, (parser.n_tokens, 50)), dtype='float32') for token in parser.tok2id: i = parser.tok2id[token] if token in word_vectors: embeddings_matrix[i] = word_vectors[token] elif token.lower() in word_vectors: embeddings_matrix[i] = word_vectors[token.lower()] print "took {:.2f} seconds".format(time.time() - start) print "Vectorizing data...", start = time.time() train_set = parser.vectorize(train_set) dev_set = parser.vectorize(dev_set) test_set = parser.vectorize(test_set) print "took {:.2f} seconds".format(time.time() - start) print "Preprocessing training data..." 
train_examples = parser.create_instances(train_set) return parser, embeddings_matrix, train_examples, dev_set, test_set, if __name__ == '__main__': pass
gpl-3.0
hazrpg/calibre
src/calibre/gui2/dialogs/confirm_delete.py
14
2426
#!/usr/bin/env python2 __license__ = 'GPL v3' __copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net' __docformat__ = 'restructuredtext en' from PyQt5.Qt import ( QDialog, Qt, QPixmap, QIcon, QSize, QVBoxLayout, QHBoxLayout, QLabel, QCheckBox, QDialogButtonBox) from calibre import confirm_config_name from calibre.gui2 import dynamic class Dialog(QDialog): def __init__(self, msg, name, parent, config_set=dynamic, icon='dialog_warning.png', title=None, confirm_msg=None, show_cancel_button=True): QDialog.__init__(self, parent) self.setWindowTitle(title or _("Are you sure?")) self.setWindowIcon(QIcon(I(icon))) self.l = l = QVBoxLayout(self) self.h = h = QHBoxLayout() l.addLayout(h) self.label = la = QLabel(self) la.setScaledContents(True), la.setMaximumSize(QSize(96, 96)), la.setMinimumSize(QSize(96, 96)) la.setPixmap(QPixmap(I(icon))) la.setObjectName("label") self.msg = m = QLabel(self) m.setMinimumWidth(300), m.setWordWrap(True), m.setObjectName("msg") m.setText(msg) h.addWidget(la), h.addSpacing(10), h.addWidget(m) self.again = a = QCheckBox((confirm_msg or _("&Show this warning again")), self) a.setChecked(True), a.setObjectName("again") a.stateChanged.connect(self.toggle) l.addWidget(a) buttons = QDialogButtonBox.Ok if show_cancel_button: buttons |= QDialogButtonBox.Cancel self.buttonBox = bb = QDialogButtonBox(buttons, self) bb.setObjectName("buttonBox") bb.setFocus(Qt.OtherFocusReason) bb.accepted.connect(self.accept), bb.rejected.connect(self.reject) l.addWidget(bb) self.name = name self.config_set = config_set self.resize(self.sizeHint()) def toggle(self, *args): self.config_set[confirm_config_name(self.name)] = self.again.isChecked() def confirm(msg, name, parent=None, pixmap='dialog_warning.png', title=None, show_cancel_button=True, confirm_msg=None, config_set=None): config_set = config_set or dynamic if not config_set.get(confirm_config_name(name), True): return True d = Dialog(msg, name, parent, config_set=config_set, icon=pixmap, title=title, 
confirm_msg=confirm_msg, show_cancel_button=show_cancel_button) return d.exec_() == d.Accepted
gpl-3.0
LeZhang2016/openthread
tests/scripts/thread-cert/Cert_6_1_03_RouterAttachConnectivity.py
3
4589
#!/usr/bin/env python # # Copyright (c) 2016, The OpenThread Authors. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. Neither the name of the copyright holder nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# import time import unittest import config import node LEADER = 1 ROUTER1 = 2 ROUTER2 = 3 ROUTER3 = 4 ED = 5 class Cert_6_1_3_RouterAttachConnectivity(unittest.TestCase): def setUp(self): self.simulator = config.create_default_simulator() self.nodes = {} for i in range(1,6): self.nodes[i] = node.Node(i, (i == ED), simulator=self.simulator) self.nodes[LEADER].set_panid(0xface) self.nodes[LEADER].set_mode('rsdn') self.nodes[LEADER].add_whitelist(self.nodes[ROUTER1].get_addr64()) self.nodes[LEADER].add_whitelist(self.nodes[ROUTER2].get_addr64()) self.nodes[LEADER].add_whitelist(self.nodes[ROUTER3].get_addr64()) self.nodes[LEADER].enable_whitelist() self.nodes[ROUTER1].set_panid(0xface) self.nodes[ROUTER1].set_mode('rsdn') self.nodes[ROUTER1].add_whitelist(self.nodes[LEADER].get_addr64()) self.nodes[ROUTER1].add_whitelist(self.nodes[ROUTER3].get_addr64()) self.nodes[ROUTER1].enable_whitelist() self.nodes[ROUTER1].set_router_selection_jitter(1) self.nodes[ROUTER2].set_panid(0xface) self.nodes[ROUTER2].set_mode('rsdn') self.nodes[ROUTER2].add_whitelist(self.nodes[LEADER].get_addr64()) self.nodes[ROUTER2].add_whitelist(self.nodes[ED].get_addr64()) self.nodes[ROUTER2].enable_whitelist() self.nodes[ROUTER2].set_router_selection_jitter(1) self.nodes[ROUTER3].set_panid(0xface) self.nodes[ROUTER3].set_mode('rsdn') self.nodes[ROUTER3].add_whitelist(self.nodes[LEADER].get_addr64()) self.nodes[ROUTER3].add_whitelist(self.nodes[ROUTER1].get_addr64()) self.nodes[ROUTER3].add_whitelist(self.nodes[ED].get_addr64()) self.nodes[ROUTER3].enable_whitelist() self.nodes[ROUTER3].set_router_selection_jitter(1) self.nodes[ED].set_panid(0xface) self.nodes[ED].set_mode('rsn') self.nodes[ED].add_whitelist(self.nodes[ROUTER2].get_addr64()) self.nodes[ED].add_whitelist(self.nodes[ROUTER3].get_addr64()) self.nodes[ED].enable_whitelist() def tearDown(self): for node in list(self.nodes.values()): node.stop() node.destroy() self.simulator.stop() def test(self): self.nodes[LEADER].start() 
self.simulator.go(5) self.assertEqual(self.nodes[LEADER].get_state(), 'leader') for i in range(2, 5): self.nodes[i].start() self.simulator.go(5) for i in range(2, 5): self.assertEqual(self.nodes[i].get_state(), 'router') self.simulator.go(config.MAX_ADVERTISEMENT_INTERVAL) self.nodes[ED].start() self.simulator.go(5) self.assertEqual(self.nodes[ED].get_state(), 'child') addrs = self.nodes[ED].get_addrs() for addr in addrs: self.assertTrue(self.nodes[ROUTER3].ping(addr)) if __name__ == '__main__': unittest.main()
bsd-3-clause
walty8/trac
trac/versioncontrol/web_ui/tests/changeset.py
1
1258
# -*- coding: utf-8 -*- # # Copyright (C) 2014 Edgewall Software # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. The terms # are also available at http://trac.edgewall.com/license.html. # # This software consists of voluntary contributions made by many # individuals. For the exact contribution history, see the revision # history and logs, available at http://trac.edgewall.org/. import unittest from trac.core import TracError from trac.test import EnvironmentStub, MockRequest from trac.versioncontrol.web_ui.changeset import ChangesetModule class ChangesetModuleTestCase(unittest.TestCase): def setUp(self): self.env = EnvironmentStub() self.cm = ChangesetModule(self.env) def test_default_repository_not_configured(self): """Test for regression of http://trac.edgewall.org/ticket/11599.""" req = MockRequest(self.env, args={'new_path': '/'}) self.assertRaises(TracError, self.cm.process_request, req) def suite(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(ChangesetModuleTestCase)) return suite if __name__ == '__main__': unittest.main(defaultTest='suite')
bsd-3-clause
roofmonkey/kubernetes
cluster/juju/return-node-ips.py
310
1024
#!/usr/bin/env python # Copyright 2015 The Kubernetes Authors All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json import sys # This script helps parse out the private IP addresses from the # `juju run` command's JSON object, see cluster/juju/util.sh if len(sys.argv) > 1: # It takes the JSON output as the first argument. nodes = json.loads(sys.argv[1]) # There can be multiple nodes to print the Stdout. for num in nodes: print num['Stdout'].rstrip() else: exit(1)
apache-2.0
Emergya/icm-openedx-educamadrid-platform-basic
common/djangoapps/course_modes/tests/test_signals.py
28
3279
""" Unit tests for the course_mode signals """ from datetime import datetime, timedelta from mock import patch import ddt from pytz import UTC from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory from course_modes.models import CourseMode from course_modes.signals import _listen_for_course_publish @ddt.ddt class CourseModeSignalTest(ModuleStoreTestCase): """ Tests for the course_mode course_published signal. """ def setUp(self): super(CourseModeSignalTest, self).setUp() self.end = datetime.now(tz=UTC).replace(microsecond=0) + timedelta(days=7) self.course = CourseFactory.create(end=self.end) CourseMode.objects.all().delete() def create_mode( self, mode_slug, mode_name, min_price=0, suggested_prices='', currency='usd', expiration_datetime=None, ): """ Create a new course mode """ return CourseMode.objects.get_or_create( course_id=self.course.id, mode_display_name=mode_name, mode_slug=mode_slug, min_price=min_price, suggested_prices=suggested_prices, currency=currency, _expiration_datetime=expiration_datetime, ) def test_no_verified_mode(self): """ Verify expiration not updated by signal for non-verified mode. """ course_mode, __ = self.create_mode('honor', 'honor') _listen_for_course_publish('store', self.course.id) course_mode.refresh_from_db() self.assertIsNone(course_mode.expiration_datetime) @ddt.data(1, 14, 30) def test_verified_mode(self, verification_window): """ Verify signal updates expiration to configured time period before course end for verified mode. 
""" course_mode, __ = self.create_mode('verified', 'verified') self.assertIsNone(course_mode.expiration_datetime) with patch('course_modes.models.CourseModeExpirationConfig.current') as config: instance = config.return_value instance.verification_window = timedelta(days=verification_window) _listen_for_course_publish('store', self.course.id) course_mode.refresh_from_db() self.assertEqual(course_mode.expiration_datetime, self.end - timedelta(days=verification_window)) @ddt.data(1, 14, 30) def test_verified_mode_explicitly_set(self, verification_window): """ Verify signal does not update expiration for verified mode with explicitly set expiration. """ course_mode, __ = self.create_mode('verified', 'verified') course_mode.expiration_datetime_is_explicit = True self.assertIsNone(course_mode.expiration_datetime) with patch('course_modes.models.CourseModeExpirationConfig.current') as config: instance = config.return_value instance.verification_window = timedelta(days=verification_window) _listen_for_course_publish('store', self.course.id) course_mode.refresh_from_db() self.assertEqual(course_mode.expiration_datetime, self.end - timedelta(days=verification_window))
agpl-3.0
wkennington/rethinkdb
external/gtest_1.6.0/test/gtest_filter_unittest.py
2826
21261
#!/usr/bin/env python
#
# Copyright 2005 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Unit test for Google Test test filters.

A user can specify which test(s) in a Google Test program to run via either
the GTEST_FILTER environment variable or the --gtest_filter flag.
This script tests such functionality by invoking
gtest_filter_unittest_ (a program written with Google Test) with different
environments and command line flags.

Note that test sharding may also influence which tests are filtered. Therefore,
we test that here also.
"""

__author__ = 'wan@google.com (Zhanyong Wan)'

import os
import re
# NOTE(review): the 'sets' module was removed in Python 3; this script (and
# its use of the Python 2 'print' statement in subprocess snippets below)
# targets Python 2 only.
import sets
import sys

import gtest_test_utils

# Constants.

# Checks if this platform can pass empty environment variables to child
# processes.  We set an env variable to an empty string and invoke a python
# script in a subprocess to print whether the variable is STILL in
# os.environ.  We then use 'eval' to parse the child's output so that an
# exception is thrown if the input is anything other than 'True' nor 'False'.
os.environ['EMPTY_VAR'] = ''
child = gtest_test_utils.Subprocess(
    [sys.executable, '-c', 'import os; print \'EMPTY_VAR\' in os.environ'])
CAN_PASS_EMPTY_ENV = eval(child.output)

# Check if this platform can unset environment variables in child processes.
# We set an env variable to a non-empty string, unset it, and invoke
# a python script in a subprocess to print whether the variable
# is NO LONGER in os.environ.
# We use 'eval' to parse the child's output so that an exception
# is thrown if the input is neither 'True' nor 'False'.
os.environ['UNSET_VAR'] = 'X'
del os.environ['UNSET_VAR']
child = gtest_test_utils.Subprocess(
    [sys.executable, '-c', 'import os; print \'UNSET_VAR\' not in os.environ'])
CAN_UNSET_ENV = eval(child.output)

# Checks if we should test with an empty filter. This doesn't
# make sense on platforms that cannot pass empty env variables (Win32)
# and on platforms that cannot unset variables (since we cannot tell
# the difference between "" and NULL -- Borland and Solaris < 5.10)
CAN_TEST_EMPTY_FILTER = (CAN_PASS_EMPTY_ENV and CAN_UNSET_ENV)

# The environment variable for specifying the test filters.
FILTER_ENV_VAR = 'GTEST_FILTER'

# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
SHARD_STATUS_FILE_ENV_VAR = 'GTEST_SHARD_STATUS_FILE'

# The command line flag for specifying the test filters.
FILTER_FLAG = 'gtest_filter'

# The command line flag for including disabled tests.
# NOTE(review): the constant name misspells 'DISABLED'; renaming it would be
# a code change, so it is only flagged here.
ALSO_RUN_DISABED_TESTS_FLAG = 'gtest_also_run_disabled_tests'

# Command to run the gtest_filter_unittest_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_filter_unittest_')

# Regex for determining whether parameterized tests are enabled in the binary.
PARAM_TEST_REGEX = re.compile(r'/ParamTest')

# Regex for parsing test case names from Google Test's output.
TEST_CASE_REGEX = re.compile(r'^\[\-+\] \d+ tests? from (\w+(/\w+)?)')

# Regex for parsing test names from Google Test's output.
TEST_REGEX = re.compile(r'^\[\s*RUN\s*\].*\.(\w+(/\w+)?)')

# The command line flag to tell Google Test to output the list of tests it
# will run.
LIST_TESTS_FLAG = '--gtest_list_tests'

# Indicates whether Google Test supports death tests.
SUPPORTS_DEATH_TESTS = 'HasDeathTest' in gtest_test_utils.Subprocess(
    [COMMAND, LIST_TESTS_FLAG]).output

# Full names of all tests in gtest_filter_unittests_.
PARAM_TESTS = [
    'SeqP/ParamTest.TestX/0',
    'SeqP/ParamTest.TestX/1',
    'SeqP/ParamTest.TestY/0',
    'SeqP/ParamTest.TestY/1',
    'SeqQ/ParamTest.TestX/0',
    'SeqQ/ParamTest.TestX/1',
    'SeqQ/ParamTest.TestY/0',
    'SeqQ/ParamTest.TestY/1',
    ]

DISABLED_TESTS = [
    'BarTest.DISABLED_TestFour',
    'BarTest.DISABLED_TestFive',
    'BazTest.DISABLED_TestC',
    'DISABLED_FoobarTest.Test1',
    'DISABLED_FoobarTest.DISABLED_Test2',
    'DISABLED_FoobarbazTest.TestA',
    ]

if SUPPORTS_DEATH_TESTS:
  DEATH_TESTS = [
    'HasDeathTest.Test1',
    'HasDeathTest.Test2',
    ]
else:
  DEATH_TESTS = []

# All the non-disabled tests.
ACTIVE_TESTS = [
    'FooTest.Abc',
    'FooTest.Xyz',

    'BarTest.TestOne',
    'BarTest.TestTwo',
    'BarTest.TestThree',

    'BazTest.TestOne',
    'BazTest.TestA',
    'BazTest.TestB',
    ] + DEATH_TESTS + PARAM_TESTS

# Lazily initialized in setUp(); None means "not probed yet".
param_tests_present = None

# Utilities.

# Snapshot of the parent environment; mutated by SetEnvVar and passed to
# every child process instead of os.environ itself.
environ = os.environ.copy()


def SetEnvVar(env_var, value):
  """Sets the env variable to 'value'; unsets it when 'value' is None."""

  if value is not None:
    environ[env_var] = value
  elif env_var in environ:
    del environ[env_var]


def RunAndReturnOutput(args = None):
  """Runs the test program and returns its output."""

  return gtest_test_utils.Subprocess([COMMAND] + (args or []),
                                     env=environ).output


def RunAndExtractTestList(args = None):
  """Runs the test program and returns its exit code and a list of tests run."""

  p = gtest_test_utils.Subprocess([COMMAND] + (args or []), env=environ)
  tests_run = []
  test_case = ''
  test = ''
  for line in p.output.split('\n'):
    match = TEST_CASE_REGEX.match(line)
    if match is not None:
      test_case = match.group(1)
    else:
      match = TEST_REGEX.match(line)
      if match is not None:
        test = match.group(1)
        tests_run.append(test_case + '.' + test)
  return (tests_run, p.exit_code)


def InvokeWithModifiedEnv(extra_env, function, *args, **kwargs):
  """Runs the given function and arguments in a modified environment."""
  try:
    original_env = environ.copy()
    environ.update(extra_env)
    return function(*args, **kwargs)
  finally:
    # Restore the module-level environ in place so other helpers keep
    # seeing the same dict object.
    environ.clear()
    environ.update(original_env)


def RunWithSharding(total_shards, shard_index, command):
  """Runs a test program shard and returns exit code and a list of tests run."""

  extra_env = {SHARD_INDEX_ENV_VAR: str(shard_index),
               TOTAL_SHARDS_ENV_VAR: str(total_shards)}
  return InvokeWithModifiedEnv(extra_env, RunAndExtractTestList, command)

# The unit test.


class GTestFilterUnitTest(gtest_test_utils.TestCase):
  """Tests the env variable or the command line flag to filter tests."""

  # Utilities.

  def AssertSetEqual(self, lhs, rhs):
    """Asserts that two sets are equal."""

    for elem in lhs:
      self.assert_(elem in rhs, '%s in %s' % (elem, rhs))

    for elem in rhs:
      self.assert_(elem in lhs, '%s in %s' % (elem, lhs))

  def AssertPartitionIsValid(self, set_var, list_of_sets):
    """Asserts that list_of_sets is a valid partition of set_var."""

    full_partition = []
    for slice_var in list_of_sets:
      full_partition.extend(slice_var)
    self.assertEqual(len(set_var), len(full_partition))
    self.assertEqual(sets.Set(set_var), sets.Set(full_partition))

  def AdjustForParameterizedTests(self, tests_to_run):
    """Adjust tests_to_run in case value parameterized tests are disabled."""

    global param_tests_present
    if not param_tests_present:
      return list(sets.Set(tests_to_run) - sets.Set(PARAM_TESTS))
    else:
      return tests_to_run

  def RunAndVerify(self, gtest_filter, tests_to_run):
    """Checks that the binary runs correct set of tests for a given filter."""

    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)

    # First, tests using the environment variable.

    # Windows removes empty variables from the environment when passing it
    # to a new process.  This means it is impossible to pass an empty filter
    # into a process using the environment variable.  However, we can still
    # test the case when the variable is not supplied (i.e., gtest_filter is
    # None).
    # pylint: disable-msg=C6403
    if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
      SetEnvVar(FILTER_ENV_VAR, gtest_filter)
      tests_run = RunAndExtractTestList()[0]
      SetEnvVar(FILTER_ENV_VAR, None)
      self.AssertSetEqual(tests_run, tests_to_run)
    # pylint: enable-msg=C6403

    # Next, tests using the command line flag.

    if gtest_filter is None:
      args = []
    else:
      args = ['--%s=%s' % (FILTER_FLAG, gtest_filter)]

    tests_run = RunAndExtractTestList(args)[0]
    self.AssertSetEqual(tests_run, tests_to_run)

  def RunAndVerifyWithSharding(self, gtest_filter, total_shards, tests_to_run,
                               args=None, check_exit_0=False):
    """Checks that binary runs correct tests for the given filter and shard.

    Runs all shards of gtest_filter_unittest_ with the given filter, and
    verifies that the right set of tests were run. The union of tests run
    on each shard should be identical to tests_to_run, without duplicates.

    Args:
      gtest_filter: A filter to apply to the tests.
      total_shards: A total number of shards to split test run into.
      tests_to_run: A set of tests expected to run.
      args :        Arguments to pass to the to the test binary.
      check_exit_0: When set to a true value, make sure that all shards
                    return 0.
    """

    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)

    # Windows removes empty variables from the environment when passing it
    # to a new process.  This means it is impossible to pass an empty filter
    # into a process using the environment variable.  However, we can still
    # test the case when the variable is not supplied (i.e., gtest_filter is
    # None).
    # pylint: disable-msg=C6403
    if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
      SetEnvVar(FILTER_ENV_VAR, gtest_filter)
      partition = []
      for i in range(0, total_shards):
        (tests_run, exit_code) = RunWithSharding(total_shards, i, args)
        if check_exit_0:
          self.assertEqual(0, exit_code)
        partition.append(tests_run)

      self.AssertPartitionIsValid(tests_to_run, partition)
      SetEnvVar(FILTER_ENV_VAR, None)
    # pylint: enable-msg=C6403

  def RunAndVerifyAllowingDisabled(self, gtest_filter, tests_to_run):
    """Checks that the binary runs correct set of tests for the given filter.

    Runs gtest_filter_unittest_ with the given filter, and enables
    disabled tests. Verifies that the right set of tests were run.

    Args:
      gtest_filter: A filter to apply to the tests.
      tests_to_run: A set of tests expected to run.
    """

    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)

    # Construct the command line.
    args = ['--%s' % ALSO_RUN_DISABED_TESTS_FLAG]
    if gtest_filter is not None:
      args.append('--%s=%s' % (FILTER_FLAG, gtest_filter))

    tests_run = RunAndExtractTestList(args)[0]
    self.AssertSetEqual(tests_run, tests_to_run)

  def setUp(self):
    """Sets up test case.

    Determines whether value-parameterized tests are enabled in the binary and
    sets the flags accordingly.
    """

    global param_tests_present
    if param_tests_present is None:
      param_tests_present = PARAM_TEST_REGEX.search(
          RunAndReturnOutput()) is not None

  def testDefaultBehavior(self):
    """Tests the behavior of not specifying the filter."""

    self.RunAndVerify(None, ACTIVE_TESTS)

  def testDefaultBehaviorWithShards(self):
    """Tests the behavior without the filter, with sharding enabled."""

    self.RunAndVerifyWithSharding(None, 1, ACTIVE_TESTS)
    self.RunAndVerifyWithSharding(None, 2, ACTIVE_TESTS)
    self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) - 1, ACTIVE_TESTS)
    self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS), ACTIVE_TESTS)
    self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) + 1, ACTIVE_TESTS)

  def testEmptyFilter(self):
    """Tests an empty filter."""

    self.RunAndVerify('', [])
    self.RunAndVerifyWithSharding('', 1, [])
    self.RunAndVerifyWithSharding('', 2, [])

  def testBadFilter(self):
    """Tests a filter that matches nothing."""

    self.RunAndVerify('BadFilter', [])
    self.RunAndVerifyAllowingDisabled('BadFilter', [])

  def testFullName(self):
    """Tests filtering by full name."""

    self.RunAndVerify('FooTest.Xyz', ['FooTest.Xyz'])
    self.RunAndVerifyAllowingDisabled('FooTest.Xyz', ['FooTest.Xyz'])
    self.RunAndVerifyWithSharding('FooTest.Xyz', 5, ['FooTest.Xyz'])

  def testUniversalFilters(self):
    """Tests filters that match everything."""

    self.RunAndVerify('*', ACTIVE_TESTS)
    self.RunAndVerify('*.*', ACTIVE_TESTS)
    self.RunAndVerifyWithSharding('*.*', len(ACTIVE_TESTS) - 3, ACTIVE_TESTS)
    self.RunAndVerifyAllowingDisabled('*', ACTIVE_TESTS + DISABLED_TESTS)
    self.RunAndVerifyAllowingDisabled('*.*', ACTIVE_TESTS + DISABLED_TESTS)

  def testFilterByTestCase(self):
    """Tests filtering by test case name."""

    self.RunAndVerify('FooTest.*', ['FooTest.Abc', 'FooTest.Xyz'])

    BAZ_TESTS = ['BazTest.TestOne', 'BazTest.TestA', 'BazTest.TestB']
    self.RunAndVerify('BazTest.*', BAZ_TESTS)
    self.RunAndVerifyAllowingDisabled('BazTest.*',
                                      BAZ_TESTS + ['BazTest.DISABLED_TestC'])

  def testFilterByTest(self):
    """Tests filtering by test name."""

    self.RunAndVerify('*.TestOne', ['BarTest.TestOne', 'BazTest.TestOne'])

  def testFilterDisabledTests(self):
    """Select only the disabled tests to run."""

    self.RunAndVerify('DISABLED_FoobarTest.Test1', [])
    self.RunAndVerifyAllowingDisabled('DISABLED_FoobarTest.Test1',
                                      ['DISABLED_FoobarTest.Test1'])

    self.RunAndVerify('*DISABLED_*', [])
    self.RunAndVerifyAllowingDisabled('*DISABLED_*', DISABLED_TESTS)

    self.RunAndVerify('*.DISABLED_*', [])
    self.RunAndVerifyAllowingDisabled('*.DISABLED_*', [
        'BarTest.DISABLED_TestFour',
        'BarTest.DISABLED_TestFive',
        'BazTest.DISABLED_TestC',
        'DISABLED_FoobarTest.DISABLED_Test2',
        ])

    self.RunAndVerify('DISABLED_*', [])
    self.RunAndVerifyAllowingDisabled('DISABLED_*', [
        'DISABLED_FoobarTest.Test1',
        'DISABLED_FoobarTest.DISABLED_Test2',
        'DISABLED_FoobarbazTest.TestA',
        ])

  def testWildcardInTestCaseName(self):
    """Tests using wildcard in the test case name."""

    self.RunAndVerify('*a*.*', [
        'BarTest.TestOne',
        'BarTest.TestTwo',
        'BarTest.TestThree',

        'BazTest.TestOne',
        'BazTest.TestA',
        'BazTest.TestB', ] + DEATH_TESTS + PARAM_TESTS)

  def testWildcardInTestName(self):
    """Tests using wildcard in the test name."""

    self.RunAndVerify('*.*A*', ['FooTest.Abc', 'BazTest.TestA'])

  def testFilterWithoutDot(self):
    """Tests a filter that has no '.' in it."""

    self.RunAndVerify('*z*', [
        'FooTest.Xyz',

        'BazTest.TestOne',
        'BazTest.TestA',
        'BazTest.TestB',
        ])

  def testTwoPatterns(self):
    """Tests filters that consist of two patterns."""

    self.RunAndVerify('Foo*.*:*A*', [
        'FooTest.Abc',
        'FooTest.Xyz',

        'BazTest.TestA',
        ])

    # An empty pattern + a non-empty one
    self.RunAndVerify(':*A*', ['FooTest.Abc', 'BazTest.TestA'])

  def testThreePatterns(self):
    """Tests filters that consist of three patterns."""

    self.RunAndVerify('*oo*:*A*:*One', [
        'FooTest.Abc',
        'FooTest.Xyz',

        'BarTest.TestOne',

        'BazTest.TestOne',
        'BazTest.TestA',
        ])

    # The 2nd pattern is empty.
    self.RunAndVerify('*oo*::*One', [
        'FooTest.Abc',
        'FooTest.Xyz',

        'BarTest.TestOne',

        'BazTest.TestOne',
        ])

    # The last 2 patterns are empty.
    self.RunAndVerify('*oo*::', [
        'FooTest.Abc',
        'FooTest.Xyz',
        ])

  def testNegativeFilters(self):
    """Tests filters that contain a '-' (negative) section."""

    self.RunAndVerify('*-BazTest.TestOne', [
        'FooTest.Abc',
        'FooTest.Xyz',

        'BarTest.TestOne',
        'BarTest.TestTwo',
        'BarTest.TestThree',

        'BazTest.TestA',
        'BazTest.TestB',
        ] + DEATH_TESTS + PARAM_TESTS)

    self.RunAndVerify('*-FooTest.Abc:BazTest.*', [
        'FooTest.Xyz',

        'BarTest.TestOne',
        'BarTest.TestTwo',
        'BarTest.TestThree',
        ] + DEATH_TESTS + PARAM_TESTS)

    self.RunAndVerify('BarTest.*-BarTest.TestOne', [
        'BarTest.TestTwo',
        'BarTest.TestThree',
        ])

    # Tests without leading '*'.
    self.RunAndVerify('-FooTest.Abc:FooTest.Xyz:BazTest.*', [
        'BarTest.TestOne',
        'BarTest.TestTwo',
        'BarTest.TestThree',
        ] + DEATH_TESTS + PARAM_TESTS)

    # Value parameterized tests.
    self.RunAndVerify('*/*', PARAM_TESTS)

    # Value parameterized tests filtering by the sequence name.
    self.RunAndVerify('SeqP/*', [
        'SeqP/ParamTest.TestX/0',
        'SeqP/ParamTest.TestX/1',
        'SeqP/ParamTest.TestY/0',
        'SeqP/ParamTest.TestY/1',
        ])

    # Value parameterized tests filtering by the test name.
    self.RunAndVerify('*/0', [
        'SeqP/ParamTest.TestX/0',
        'SeqP/ParamTest.TestY/0',
        'SeqQ/ParamTest.TestX/0',
        'SeqQ/ParamTest.TestY/0',
        ])

  def testFlagOverridesEnvVar(self):
    """Tests that the filter flag overrides the filtering env. variable."""

    SetEnvVar(FILTER_ENV_VAR, 'Foo*')
    args = ['--%s=%s' % (FILTER_FLAG, '*One')]
    tests_run = RunAndExtractTestList(args)[0]
    SetEnvVar(FILTER_ENV_VAR, None)

    self.AssertSetEqual(tests_run, ['BarTest.TestOne', 'BazTest.TestOne'])

  def testShardStatusFileIsCreated(self):
    """Tests that the shard file is created if specified in the environment."""

    shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
                                     'shard_status_file')
    self.assert_(not os.path.exists(shard_status_file))

    extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
    try:
      InvokeWithModifiedEnv(extra_env, RunAndReturnOutput)
    finally:
      self.assert_(os.path.exists(shard_status_file))
      os.remove(shard_status_file)

  def testShardStatusFileIsCreatedWithListTests(self):
    """Tests that the shard file is created with the "list_tests" flag."""

    shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
                                     'shard_status_file2')
    self.assert_(not os.path.exists(shard_status_file))

    extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
    try:
      output = InvokeWithModifiedEnv(extra_env,
                                     RunAndReturnOutput,
                                     [LIST_TESTS_FLAG])
    finally:
      # This assertion ensures that Google Test enumerated the tests as
      # opposed to running them.
      self.assert_('[==========]' not in output,
                   'Unexpected output during test enumeration.\n'
                   'Please ensure that LIST_TESTS_FLAG is assigned the\n'
                   'correct flag value for listing Google Test tests.')

      self.assert_(os.path.exists(shard_status_file))
      os.remove(shard_status_file)

  if SUPPORTS_DEATH_TESTS:
    def testShardingWorksWithDeathTests(self):
      """Tests integration with death tests and sharding."""

      gtest_filter = 'HasDeathTest.*:SeqP/*'
      expected_tests = [
          'HasDeathTest.Test1',
          'HasDeathTest.Test2',

          'SeqP/ParamTest.TestX/0',
          'SeqP/ParamTest.TestX/1',
          'SeqP/ParamTest.TestY/0',
          'SeqP/ParamTest.TestY/1',
          ]

      for flag in ['--gtest_death_test_style=threadsafe',
                   '--gtest_death_test_style=fast']:
        self.RunAndVerifyWithSharding(gtest_filter, 3, expected_tests,
                                      check_exit_0=True, args=[flag])
        self.RunAndVerifyWithSharding(gtest_filter, 5, expected_tests,
                                      check_exit_0=True, args=[flag])

if __name__ == '__main__':
  gtest_test_utils.Main()
agpl-3.0
spring-week-topos/horizon-week
openstack_dashboard/dashboards/admin/networks/forms.py
7
5239
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import logging

from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _

from horizon import exceptions
from horizon import forms
from horizon import messages

from openstack_dashboard import api

LOG = logging.getLogger(__name__)


class CreateNetwork(forms.SelfHandlingForm):
    """Admin form for creating a Neutron network on behalf of any project."""

    name = forms.CharField(max_length=255,
                           label=_("Name"),
                           required=False)
    tenant_id = forms.ChoiceField(label=_("Project"))
    # NOTE: this attribute is declared conditionally at class-definition
    # (import) time, so the field only exists when the Neutron backend
    # reports port-profile support at that moment.
    if api.neutron.is_port_profiles_supported():
        net_profile_id = forms.ChoiceField(label=_("Network Profile"))
    admin_state = forms.BooleanField(label=_("Admin State"),
                                     initial=True, required=False)
    shared = forms.BooleanField(label=_("Shared"),
                                initial=False, required=False)
    external = forms.BooleanField(label=_("External Network"),
                                  initial=False, required=False)

    @classmethod
    def _instantiate(cls, request, *args, **kwargs):
        # Hook point so subclasses/workflows can customize construction.
        return cls(request, *args, **kwargs)

    def __init__(self, request, *args, **kwargs):
        super(CreateNetwork, self).__init__(request, *args, **kwargs)
        # Populate the project dropdown with enabled tenants only.
        tenant_choices = [('', _("Select a project"))]
        tenants, has_more = api.keystone.tenant_list(request)
        for tenant in tenants:
            if tenant.enabled:
                tenant_choices.append((tenant.id, tenant.name))
        self.fields['tenant_id'].choices = tenant_choices

        if api.neutron.is_port_profiles_supported():
            self.fields['net_profile_id'].choices = (
                self.get_network_profile_choices(request))

    def get_network_profile_choices(self, request):
        """Return (id, name) choices for network profiles, with a blank
        placeholder entry first."""
        profile_choices = [('', _("Select a profile"))]
        for profile in self._get_profiles(request, 'network'):
            profile_choices.append((profile.id, profile.name))
        return profile_choices

    def _get_profiles(self, request, type_p):
        """Fetch profiles of the given type; show an error (and return an
        empty list) if the API call fails."""
        profiles = []
        try:
            profiles = api.neutron.profile_list(request, type_p)
        except Exception:
            msg = _('Network Profiles could not be retrieved.')
            exceptions.handle(request, msg)
        return profiles

    def handle(self, request, data):
        """Create the network via Neutron; on failure redirect to the
        index view with an error message."""
        try:
            params = {'name': data['name'],
                      'tenant_id': data['tenant_id'],
                      'admin_state_up': data['admin_state'],
                      'shared': data['shared'],
                      'router:external': data['external']}
            if api.neutron.is_port_profiles_supported():
                params['net_profile_id'] = data['net_profile_id']
            network = api.neutron.network_create(request, **params)
            msg = _('Network %s was successfully created.') % data['name']
            LOG.debug(msg)
            messages.success(request, msg)
            return network
        except Exception:
            redirect = reverse('horizon:admin:networks:index')
            msg = _('Failed to create network %s') % data['name']
            exceptions.handle(request, msg, redirect=redirect)


class UpdateNetwork(forms.SelfHandlingForm):
    """Admin form for updating mutable attributes of an existing network."""

    name = forms.CharField(label=_("Name"), required=False)
    tenant_id = forms.CharField(widget=forms.HiddenInput)
    # Network ID is displayed read-only; it identifies the record to update.
    network_id = forms.CharField(label=_("ID"),
                                 widget=forms.TextInput(
                                     attrs={'readonly': 'readonly'}))
    admin_state = forms.BooleanField(label=_("Admin State"), required=False)
    shared = forms.BooleanField(label=_("Shared"), required=False)
    external = forms.BooleanField(label=_("External Network"), required=False)
    failure_url = 'horizon:admin:networks:index'

    def handle(self, request, data):
        """Apply the edited attributes via Neutron; on failure redirect to
        failure_url with an error message."""
        try:
            params = {'name': data['name'],
                      'admin_state_up': data['admin_state'],
                      'shared': data['shared'],
                      'router:external': data['external']}
            network = api.neutron.network_update(request,
                                                 data['network_id'],
                                                 **params)
            msg = _('Network %s was successfully updated.') % data['name']
            LOG.debug(msg)
            messages.success(request, msg)
            return network
        except Exception:
            msg = _('Failed to update network %s') % data['name']
            LOG.info(msg)
            redirect = reverse(self.failure_url)
            exceptions.handle(request, msg, redirect=redirect)
apache-2.0
mathspace/django
tests/gis_tests/test_geoforms.py
26
15044
from unittest import skipUnless

from django.contrib.gis import forms
from django.contrib.gis.gdal import HAS_GDAL
from django.contrib.gis.geos import GEOSGeometry
from django.forms import ValidationError
from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature
from django.utils.html import escape


@skipUnless(HAS_GDAL, "GeometryFieldTest needs GDAL support")
@skipUnlessDBFeature("gis_enabled")
class GeometryFieldTest(SimpleTestCase):
    # Tests for the generic forms.GeometryField (clean/to_python/SRID
    # handling).

    def test_init(self):
        "Testing GeometryField initialization with defaults."
        fld = forms.GeometryField()
        for bad_default in ('blah', 3, 'FoO', None, 0):
            with self.assertRaises(ValidationError):
                fld.clean(bad_default)

    def test_srid(self):
        "Testing GeometryField with a SRID set."
        # Input that doesn't specify the SRID is assumed to be in the SRID
        # of the input field.
        fld = forms.GeometryField(srid=4326)
        geom = fld.clean('POINT(5 23)')
        self.assertEqual(4326, geom.srid)

        # Making the field in a different SRID from that of the geometry, and
        # asserting it transforms.
        fld = forms.GeometryField(srid=32140)
        tol = 0.0000001
        xform_geom = GEOSGeometry('POINT (951640.547328465 4219369.26171664)', srid=32140)
        # The cleaned geometry should be transformed to 32140.
        cleaned_geom = fld.clean('SRID=4326;POINT (-95.363151 29.763374)')
        self.assertTrue(xform_geom.equals_exact(cleaned_geom, tol))

    def test_null(self):
        "Testing GeometryField's handling of null (None) geometries."
        # Form fields, by default, are required (`required=True`)
        fld = forms.GeometryField()
        with self.assertRaisesMessage(forms.ValidationError, "No geometry value provided."):
            fld.clean(None)

        # This will clean None as a geometry (See #10660).
        fld = forms.GeometryField(required=False)
        self.assertIsNone(fld.clean(None))

    def test_geom_type(self):
        "Testing GeometryField's handling of different geometry types."
        # By default, all geometry types are allowed.
        fld = forms.GeometryField()
        for wkt in ('POINT(5 23)', 'MULTIPOLYGON(((0 0, 0 1, 1 1, 1 0, 0 0)))',
                    'LINESTRING(0 0, 1 1)'):
            self.assertEqual(GEOSGeometry(wkt), fld.clean(wkt))

        pnt_fld = forms.GeometryField(geom_type='POINT')
        self.assertEqual(GEOSGeometry('POINT(5 23)'), pnt_fld.clean('POINT(5 23)'))
        # a WKT for any other geom_type will be properly transformed by `to_python`
        self.assertEqual(GEOSGeometry('LINESTRING(0 0, 1 1)'),
                         pnt_fld.to_python('LINESTRING(0 0, 1 1)'))
        # but rejected by `clean`
        with self.assertRaises(forms.ValidationError):
            pnt_fld.clean('LINESTRING(0 0, 1 1)')

    def test_to_python(self):
        """
        Testing to_python returns a correct GEOSGeometry object or
        a ValidationError
        """
        fld = forms.GeometryField()
        # to_python returns the same GEOSGeometry for a WKT
        for wkt in ('POINT(5 23)', 'MULTIPOLYGON(((0 0, 0 1, 1 1, 1 0, 0 0)))',
                    'LINESTRING(0 0, 1 1)'):
            self.assertEqual(GEOSGeometry(wkt), fld.to_python(wkt))
        # but raises a ValidationError for any other string
        for wkt in ('POINT(5)', 'MULTI POLYGON(((0 0, 0 1, 1 1, 1 0, 0 0)))',
                    'BLAH(0 0, 1 1)'):
            with self.assertRaises(forms.ValidationError):
                fld.to_python(wkt)

    def test_field_with_text_widget(self):
        class PointForm(forms.Form):
            pt = forms.PointField(srid=4326, widget=forms.TextInput)

        form = PointForm()
        cleaned_pt = form.fields['pt'].clean('POINT(5 23)')
        self.assertEqual(cleaned_pt, GEOSGeometry('POINT(5 23)'))
        self.assertEqual(4326, cleaned_pt.srid)

        point = GEOSGeometry('SRID=4326;POINT(5 23)')
        form = PointForm(data={'pt': 'POINT(5 23)'}, initial={'pt': point})
        self.assertFalse(form.has_changed())


@skipUnless(HAS_GDAL, "SpecializedFieldTest needs GDAL support")
@skipUnlessDBFeature("gis_enabled")
class SpecializedFieldTest(SimpleTestCase):
    # Tests for each geometry-type-specific form field (PointField,
    # MultiPointField, ...), checking widget rendering and validation.

    def setUp(self):
        # One valid sample geometry per supported geometry type; each test
        # uses its own type as valid input and all the others as invalid.
        self.geometries = {
            'point': GEOSGeometry("SRID=4326;POINT(9.052734375 42.451171875)"),
            'multipoint': GEOSGeometry("SRID=4326;MULTIPOINT("
                                       "(13.18634033203125 14.504356384277344),"
                                       "(13.207969665527 14.490966796875),"
                                       "(13.177070617675 14.454917907714))"),
            'linestring': GEOSGeometry("SRID=4326;LINESTRING("
                                       "-8.26171875 -0.52734375,"
                                       "-7.734375 4.21875,"
                                       "6.85546875 3.779296875,"
                                       "5.44921875 -3.515625)"),
            'multilinestring': GEOSGeometry("SRID=4326;MULTILINESTRING("
                                            "(-16.435546875 -2.98828125,"
                                            "-17.2265625 2.98828125,"
                                            "-0.703125 3.515625,"
                                            "-1.494140625 -3.33984375),"
                                            "(-8.0859375 -5.9765625,"
                                            "8.525390625 -8.7890625,"
                                            "12.392578125 -0.87890625,"
                                            "10.01953125 7.646484375))"),
            'polygon': GEOSGeometry("SRID=4326;POLYGON("
                                    "(-1.669921875 6.240234375,"
                                    "-3.8671875 -0.615234375,"
                                    "5.9765625 -3.955078125,"
                                    "18.193359375 3.955078125,"
                                    "9.84375 9.4921875,"
                                    "-1.669921875 6.240234375))"),
            'multipolygon': GEOSGeometry("SRID=4326;MULTIPOLYGON("
                                         "((-17.578125 13.095703125,"
                                         "-17.2265625 10.8984375,"
                                         "-13.974609375 10.1953125,"
                                         "-13.359375 12.744140625,"
                                         "-15.732421875 13.7109375,"
                                         "-17.578125 13.095703125)),"
                                         "((-8.525390625 5.537109375,"
                                         "-8.876953125 2.548828125,"
                                         "-5.888671875 1.93359375,"
                                         "-5.09765625 4.21875,"
                                         "-6.064453125 6.240234375,"
                                         "-8.525390625 5.537109375)))"),
            'geometrycollection': GEOSGeometry("SRID=4326;GEOMETRYCOLLECTION("
                                               "POINT(5.625 -0.263671875),"
                                               "POINT(6.767578125 -3.603515625),"
                                               "POINT(8.525390625 0.087890625),"
                                               "POINT(8.0859375 -2.13134765625),"
                                               "LINESTRING("
                                               "6.273193359375 -1.175537109375,"
                                               "5.77880859375 -1.812744140625,"
                                               "7.27294921875 -2.230224609375,"
                                               "7.657470703125 -1.25244140625))"),
        }

    def assertMapWidget(self, form_instance):
        """
        Make sure the MapWidget js is passed in the form media and a MapWidget
        is actually created
        """
        self.assertTrue(form_instance.is_valid())
        rendered = form_instance.as_p()
        self.assertIn('new MapWidget(options);', rendered)
        self.assertIn('map_srid: 4326,', rendered)
        self.assertIn('gis/js/OLMapWidget.js', str(form_instance.media))

    def assertTextarea(self, geom, rendered):
        """Makes sure the wkt and a textarea are in the content"""

        self.assertIn('<textarea ', rendered)
        self.assertIn('required', rendered)
        self.assertIn(geom.wkt, rendered)

    # map_srid in openlayers.html template must not be localized.
    @override_settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True)
    def test_pointfield(self):
        class PointForm(forms.Form):
            p = forms.PointField()

        geom = self.geometries['point']
        form = PointForm(data={'p': geom})
        self.assertTextarea(geom, form.as_p())
        self.assertMapWidget(form)
        self.assertFalse(PointForm().is_valid())
        invalid = PointForm(data={'p': 'some invalid geom'})
        self.assertFalse(invalid.is_valid())
        self.assertIn('Invalid geometry value', str(invalid.errors))

        # Any non-point geometry must be rejected by a PointField.
        for invalid in [geo for key, geo in self.geometries.items() if key != 'point']:
            self.assertFalse(PointForm(data={'p': invalid.wkt}).is_valid())

    def test_multipointfield(self):
        class PointForm(forms.Form):
            p = forms.MultiPointField()

        geom = self.geometries['multipoint']
        form = PointForm(data={'p': geom})
        self.assertTextarea(geom, form.as_p())
        self.assertMapWidget(form)
        self.assertFalse(PointForm().is_valid())

        for invalid in [geo for key, geo in self.geometries.items() if key != 'multipoint']:
            self.assertFalse(PointForm(data={'p': invalid.wkt}).is_valid())

    def test_linestringfield(self):
        class LineStringForm(forms.Form):
            l = forms.LineStringField()

        geom = self.geometries['linestring']
        form = LineStringForm(data={'l': geom})
        self.assertTextarea(geom, form.as_p())
        self.assertMapWidget(form)
        self.assertFalse(LineStringForm().is_valid())

        for invalid in [geo for key, geo in self.geometries.items() if key != 'linestring']:
            self.assertFalse(LineStringForm(data={'p': invalid.wkt}).is_valid())

    def test_multilinestringfield(self):
        class LineStringForm(forms.Form):
            l = forms.MultiLineStringField()

        geom = self.geometries['multilinestring']
        form = LineStringForm(data={'l': geom})
        self.assertTextarea(geom, form.as_p())
        self.assertMapWidget(form)
        self.assertFalse(LineStringForm().is_valid())

        for invalid in [geo for key, geo in self.geometries.items() if key != 'multilinestring']:
            self.assertFalse(LineStringForm(data={'p': invalid.wkt}).is_valid())

    def test_polygonfield(self):
        class PolygonForm(forms.Form):
            p = forms.PolygonField()

        geom = self.geometries['polygon']
        form = PolygonForm(data={'p': geom})
        self.assertTextarea(geom, form.as_p())
        self.assertMapWidget(form)
        self.assertFalse(PolygonForm().is_valid())

        for invalid in [geo for key, geo in self.geometries.items() if key != 'polygon']:
            self.assertFalse(PolygonForm(data={'p': invalid.wkt}).is_valid())

    def test_multipolygonfield(self):
        class PolygonForm(forms.Form):
            p = forms.MultiPolygonField()

        geom = self.geometries['multipolygon']
        form = PolygonForm(data={'p': geom})
        self.assertTextarea(geom, form.as_p())
        self.assertMapWidget(form)
        self.assertFalse(PolygonForm().is_valid())

        for invalid in [geo for key, geo in self.geometries.items() if key != 'multipolygon']:
            self.assertFalse(PolygonForm(data={'p': invalid.wkt}).is_valid())

    def test_geometrycollectionfield(self):
        class GeometryForm(forms.Form):
            g = forms.GeometryCollectionField()

        geom = self.geometries['geometrycollection']
        form = GeometryForm(data={'g': geom})
        self.assertTextarea(geom, form.as_p())
        self.assertMapWidget(form)
        self.assertFalse(GeometryForm().is_valid())

        for invalid in [geo for key, geo in self.geometries.items() if key != 'geometrycollection']:
            self.assertFalse(GeometryForm(data={'g': invalid.wkt}).is_valid())


@skipUnless(HAS_GDAL, "OSMWidgetTest needs GDAL support")
@skipUnlessDBFeature("gis_enabled")
class OSMWidgetTest(SimpleTestCase):
    # Tests for the OpenStreetMap-based widget rendering options.

    def setUp(self):
        self.geometries = {
            'point': GEOSGeometry("SRID=4326;POINT(9.052734375 42.451171875)"),
        }

    def test_osm_widget(self):
        class PointForm(forms.Form):
            p = forms.PointField(widget=forms.OSMWidget)

        geom = self.geometries['point']
        form = PointForm(data={'p': geom})
        rendered = form.as_p()

        self.assertIn("OpenStreetMap (Mapnik)", rendered)
        self.assertIn("id: 'id_p',", rendered)

    def test_default_lat_lon(self):
        class PointForm(forms.Form):
            p = forms.PointField(
                widget=forms.OSMWidget(attrs={
                    'default_lon': 20, 'default_lat': 30}),
            )

        form = PointForm()
        rendered = form.as_p()
        self.assertIn("options['default_lon'] = 20;", rendered)
        self.assertIn("options['default_lat'] = 30;", rendered)
        # Guard the negative assertions in case the widget defaults ever
        # coincide with the values chosen above.
        if forms.OSMWidget.default_lon != 20:
            self.assertNotIn(
                "options['default_lon'] = %d;" % forms.OSMWidget.default_lon,
                rendered)
        if forms.OSMWidget.default_lat != 30:
            self.assertNotIn(
                "options['default_lat'] = %d;" % forms.OSMWidget.default_lat,
                rendered)


@skipUnless(HAS_GDAL, "CustomGeometryWidgetTest needs GDAL support")
@skipUnlessDBFeature("gis_enabled")
class CustomGeometryWidgetTest(SimpleTestCase):

    def test_custom_serialization_widget(self):
        class CustomGeometryWidget(forms.BaseGeometryWidget):
            template_name = 'gis/openlayers.html'
            # Counts calls to deserialize() so the test can verify it ran.
            deserialize_called = 0

            def serialize(self, value):
                return value.json if value else ''

            def deserialize(self, value):
                self.deserialize_called += 1
                return GEOSGeometry(value)

        class PointForm(forms.Form):
            p = forms.PointField(widget=CustomGeometryWidget)

        point = GEOSGeometry("SRID=4326;POINT(9.052734375 42.451171875)")
        form = PointForm(data={'p': point})
        self.assertIn(escape(point.json), form.as_p())

        # NOTE(review): 'called' is not an attribute the widget class defines
        # above ('deserialize_called' is) — this assignment looks like a
        # leftover; confirm against upstream intent.
        CustomGeometryWidget.called = 0
        widget = form.fields['p'].widget
        # Force deserialize use due to a string value
        self.assertIn(escape(point.json), widget.render('p', point.json))
        self.assertEqual(widget.deserialize_called, 1)

        form = PointForm(data={'p': point.json})
        self.assertTrue(form.is_valid())
        # Ensure that resulting geometry has srid set
        self.assertEqual(form.cleaned_data['p'].srid, 4326)
bsd-3-clause
mlaitinen/odoo
openerp/osv/fields.py
9
74532
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## """ Fields: - simple - relations (one2many, many2one, many2many) - function Fields Attributes: * _classic_read: is a classic sql fields * _type : field type * _auto_join: for one2many and many2one fields, tells whether select queries will join the relational table instead of replacing the field condition by an equivalent-one based on a search. 
* readonly * required * size """ import base64 import datetime as DT import functools import logging import pytz import re import xmlrpclib from operator import itemgetter from contextlib import contextmanager from psycopg2 import Binary import openerp import openerp.tools as tools from openerp.tools.translate import _ from openerp.tools import float_repr, float_round, frozendict, html_sanitize import simplejson from openerp import SUPERUSER_ID, registry @contextmanager def _get_cursor(): # yield a valid cursor from any environment or create a new one if none found with registry().cursor() as cr: yield cr EMPTY_DICT = frozendict() _logger = logging.getLogger(__name__) def _symbol_set(symb): if symb is None or symb == False: return None elif isinstance(symb, unicode): return symb.encode('utf-8') return str(symb) class _column(object): """ Base of all fields, a database column An instance of this object is a *description* of a database column. It will not hold any data, but only provide the methods to manipulate data of an ORM record or even prepare/update the database to hold such a field of data. 
""" _classic_read = True _classic_write = True _auto_join = False _properties = False _type = 'unknown' _obj = None _multi = False _symbol_c = '%s' _symbol_f = _symbol_set _symbol_set = (_symbol_c, _symbol_f) _symbol_get = None _deprecated = False __slots__ = [ 'copy', # whether value is copied by BaseModel.copy() 'string', 'help', 'required', 'readonly', '_domain', '_context', 'states', 'priority', 'change_default', 'size', 'ondelete', 'translate', 'select', 'manual', 'write', 'read', 'selectable', 'group_operator', 'groups', # CSV list of ext IDs of groups 'deprecated', # Optional deprecation warning '_args', '_prefetch', ] def __init__(self, string='unknown', required=False, readonly=False, domain=[], context={}, states=None, priority=0, change_default=False, size=None, ondelete=None, translate=False, select=False, manual=False, **args): """ The 'manual' keyword argument specifies if the field is a custom one. It corresponds to the 'state' column in ir_model_fields. """ # add parameters and default values args['copy'] = args.get('copy', True) args['string'] = string args['help'] = args.get('help', '') args['required'] = required args['readonly'] = readonly args['_domain'] = domain args['_context'] = context args['states'] = states args['priority'] = priority args['change_default'] = change_default args['size'] = size args['ondelete'] = ondelete.lower() if ondelete else None args['translate'] = translate args['select'] = select args['manual'] = manual args['write'] = args.get('write', False) args['read'] = args.get('read', False) args['selectable'] = args.get('selectable', True) args['group_operator'] = args.get('group_operator', None) args['groups'] = args.get('groups', None) args['deprecated'] = args.get('deprecated', None) args['_prefetch'] = args.get('_prefetch', True) self._args = EMPTY_DICT for key, val in args.iteritems(): setattr(self, key, val) # prefetch only if _classic_write, not deprecated and not manual if not self._classic_write or self.deprecated 
or self.manual: self._prefetch = False def __getattr__(self, name): """ Access a non-slot attribute. """ if name == '_args': raise AttributeError(name) try: return self._args[name] except KeyError: raise AttributeError(name) def __setattr__(self, name, value): """ Set a slot or non-slot attribute. """ try: object.__setattr__(self, name, value) except AttributeError: if self._args: self._args[name] = value else: self._args = {name: value} # replace EMPTY_DICT def __delattr__(self, name): """ Remove a non-slot attribute. """ try: del self._args[name] except KeyError: raise AttributeError(name) def new(self, _computed_field=False, **args): """ Return a column like `self` with the given parameters; the parameter `_computed_field` tells whether the corresponding field is computed. """ # memory optimization: reuse self whenever possible; you can reduce the # average memory usage per registry by 10 megabytes! column = type(self)(**args) return self if self.to_field_args() == column.to_field_args() else column def to_field(self): """ convert column `self` to a new-style field """ from openerp.fields import Field return Field.by_type[self._type](column=self, **self.to_field_args()) def to_field_args(self): """ return a dictionary with all the arguments to pass to the field """ base_items = [ ('copy', self.copy), ('index', self.select), ('manual', self.manual), ('string', self.string), ('help', self.help), ('readonly', self.readonly), ('required', self.required), ('states', self.states), ('groups', self.groups), ('change_default', self.change_default), ('deprecated', self.deprecated), ] truthy_items = filter(itemgetter(1), [ ('group_operator', self.group_operator), ('size', self.size), ('ondelete', self.ondelete), ('translate', self.translate), ('domain', self._domain), ('context', self._context), ]) return dict(base_items + truthy_items + self._args.items()) def restart(self): pass def set(self, cr, obj, id, name, value, user=None, context=None): cr.execute('update 
'+obj._table+' set '+name+'='+self._symbol_set[0]+' where id=%s', (self._symbol_set[1](value), id)) def get(self, cr, obj, ids, name, user=None, offset=0, context=None, values=None): raise Exception(_('undefined get method !')) def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, context=None): ids = obj.search(cr, uid, args+self._domain+[(name, 'ilike', value)], offset, limit, context=context) res = obj.read(cr, uid, ids, [name], context=context) return [x[name] for x in res] def as_display_name(self, cr, uid, obj, value, context=None): """Converts a field value to a suitable string representation for a record, e.g. when this field is used as ``rec_name``. :param obj: the ``BaseModel`` instance this column belongs to :param value: a proper value as returned by :py:meth:`~openerp.orm.osv.BaseModel.read` for this column """ # delegated to class method, so a column type A can delegate # to a column type B. return self._as_display_name(self, cr, uid, obj, value, context=None) @classmethod def _as_display_name(cls, field, cr, uid, obj, value, context=None): # This needs to be a class method, in case a column type A as to delegate # to a column type B. return tools.ustr(value) # --------------------------------------------------------- # Simple fields # --------------------------------------------------------- class boolean(_column): _type = 'boolean' _symbol_c = '%s' _symbol_f = bool _symbol_set = (_symbol_c, _symbol_f) __slots__ = [] def __init__(self, string='unknown', required=False, **args): super(boolean, self).__init__(string=string, required=required, **args) if required: _logger.debug( "required=True is deprecated: making a boolean field" " `required` has no effect, as NULL values are " "automatically turned into False. 
args: %r",args) class integer(_column): _type = 'integer' _symbol_c = '%s' _symbol_f = lambda x: int(x or 0) _symbol_set = (_symbol_c, _symbol_f) _symbol_get = lambda self,x: x or 0 __slots__ = [] def __init__(self, string='unknown', required=False, **args): super(integer, self).__init__(string=string, required=required, **args) class reference(_column): _type = 'reference' _classic_read = False # post-process to handle missing target __slots__ = ['selection'] def __init__(self, string, selection, size=None, **args): if callable(selection): from openerp import api selection = api.expected(api.cr_uid_context, selection) _column.__init__(self, string=string, size=size, selection=selection, **args) def to_field_args(self): args = super(reference, self).to_field_args() args['selection'] = self.selection return args def get(self, cr, obj, ids, name, uid=None, context=None, values=None): result = {} # copy initial values fetched previously. for value in values: result[value['id']] = value[name] if value[name]: model, res_id = value[name].split(',') if not obj.pool[model].exists(cr, uid, [int(res_id)], context=context): result[value['id']] = False return result @classmethod def _as_display_name(cls, field, cr, uid, obj, value, context=None): if value: # reference fields have a 'model,id'-like value, that we need to convert # to a real name model_name, res_id = value.split(',') if model_name in obj.pool and res_id: model = obj.pool[model_name] names = model.name_get(cr, uid, [int(res_id)], context=context) return names[0][1] if names else False return tools.ustr(value) # takes a string (encoded in utf8) and returns a string (encoded in utf8) def _symbol_set_char(self, symb): #TODO: # * we need to remove the "symb==False" from the next line BUT # for now too many things rely on this broken behavior # * the symb==None test should be common to all data types if symb is None or symb == False: return None # we need to convert the string to a unicode object to be able # to 
evaluate its length (and possibly truncate it) reliably u_symb = tools.ustr(symb) return u_symb[:self.size].encode('utf8') class char(_column): _type = 'char' __slots__ = ['_symbol_f', '_symbol_set', '_symbol_set_char'] def __init__(self, string="unknown", size=None, **args): _column.__init__(self, string=string, size=size or None, **args) # self._symbol_set_char defined to keep the backward compatibility self._symbol_f = self._symbol_set_char = lambda x: _symbol_set_char(self, x) self._symbol_set = (self._symbol_c, self._symbol_f) class text(_column): _type = 'text' __slots__ = [] class html(text): _type = 'html' _symbol_c = '%s' __slots__ = ['_sanitize', '_strip_style', '_symbol_f', '_symbol_set'] def _symbol_set_html(self, value): if value is None or value is False: return None if not self._sanitize: return value return html_sanitize(value, strip_style=self._strip_style) def __init__(self, string='unknown', sanitize=True, strip_style=False, **args): super(html, self).__init__(string=string, **args) self._sanitize = sanitize self._strip_style = strip_style # symbol_set redefinition because of sanitize specific behavior self._symbol_f = self._symbol_set_html self._symbol_set = (self._symbol_c, self._symbol_f) def to_field_args(self): args = super(html, self).to_field_args() args['sanitize'] = self._sanitize args['strip_style'] = self._strip_style return args import __builtin__ def _symbol_set_float(self, x): result = __builtin__.float(x or 0.0) digits = self.digits if digits: precision, scale = digits result = float_repr(float_round(result, precision_digits=scale), precision_digits=scale) return result class float(_column): _type = 'float' _symbol_c = '%s' _symbol_get = lambda self,x: x or 0.0 __slots__ = ['_digits', '_digits_compute', '_symbol_f', '_symbol_set'] @property def digits(self): if self._digits_compute: with _get_cursor() as cr: return self._digits_compute(cr) else: return self._digits def __init__(self, string='unknown', digits=None, 
digits_compute=None, required=False, **args): _column.__init__(self, string=string, required=required, **args) # synopsis: digits_compute(cr) -> (precision, scale) self._digits = digits self._digits_compute = digits_compute self._symbol_f = lambda x: _symbol_set_float(self, x) self._symbol_set = (self._symbol_c, self._symbol_f) def to_field_args(self): args = super(float, self).to_field_args() args['digits'] = self._digits_compute or self._digits return args def digits_change(self, cr): pass class date(_column): _type = 'date' __slots__ = [] MONTHS = [ ('01', 'January'), ('02', 'February'), ('03', 'March'), ('04', 'April'), ('05', 'May'), ('06', 'June'), ('07', 'July'), ('08', 'August'), ('09', 'September'), ('10', 'October'), ('11', 'November'), ('12', 'December') ] @staticmethod def today(*args): """ Returns the current date in a format fit for being a default value to a ``date`` field. This method should be provided as is to the _defaults dict, it should not be called. """ return DT.date.today().strftime( tools.DEFAULT_SERVER_DATE_FORMAT) @staticmethod def context_today(model, cr, uid, context=None, timestamp=None): """Returns the current date as seen in the client's timezone in a format fit for date fields. This method may be passed as value to initialize _defaults. :param Model model: model (osv) for which the date value is being computed - automatically passed when used in _defaults. :param datetime timestamp: optional datetime value to use instead of the current date and time (must be a datetime, regular dates can't be converted between timezones.) 
:param dict context: the 'tz' key in the context should give the name of the User/Client timezone (otherwise UTC is used) :rtype: str """ today = timestamp or DT.datetime.now() context_today = None if context and context.get('tz'): tz_name = context['tz'] else: user = model.pool['res.users'].browse(cr, SUPERUSER_ID, uid) tz_name = user.tz if tz_name: try: utc = pytz.timezone('UTC') context_tz = pytz.timezone(tz_name) utc_today = utc.localize(today, is_dst=False) # UTC = no DST context_today = utc_today.astimezone(context_tz) except Exception: _logger.debug("failed to compute context/client-specific today date, " "using the UTC value for `today`", exc_info=True) return (context_today or today).strftime(tools.DEFAULT_SERVER_DATE_FORMAT) @staticmethod def date_to_datetime(model, cr, uid, userdate, context=None): """ Convert date values expressed in user's timezone to server-side UTC timestamp, assuming a default arbitrary time of 12:00 AM - because a time is needed. :param str userdate: date string in in user time zone :return: UTC datetime string for server-side use """ user_date = DT.datetime.strptime(userdate, tools.DEFAULT_SERVER_DATE_FORMAT) if context and context.get('tz'): tz_name = context['tz'] else: tz_name = model.pool.get('res.users').read(cr, SUPERUSER_ID, uid, ['tz'])['tz'] if tz_name: utc = pytz.timezone('UTC') context_tz = pytz.timezone(tz_name) user_datetime = user_date + DT.timedelta(hours=12.0) local_timestamp = context_tz.localize(user_datetime, is_dst=False) user_datetime = local_timestamp.astimezone(utc) return user_datetime.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT) return user_date.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT) class datetime(_column): _type = 'datetime' __slots__ = [] MONTHS = [ ('01', 'January'), ('02', 'February'), ('03', 'March'), ('04', 'April'), ('05', 'May'), ('06', 'June'), ('07', 'July'), ('08', 'August'), ('09', 'September'), ('10', 'October'), ('11', 'November'), ('12', 'December') ] @staticmethod def now(*args): 
""" Returns the current datetime in a format fit for being a default value to a ``datetime`` field. This method should be provided as is to the _defaults dict, it should not be called. """ return DT.datetime.now().strftime( tools.DEFAULT_SERVER_DATETIME_FORMAT) @staticmethod def context_timestamp(cr, uid, timestamp, context=None): """Returns the given timestamp converted to the client's timezone. This method is *not* meant for use as a _defaults initializer, because datetime fields are automatically converted upon display on client side. For _defaults you :meth:`fields.datetime.now` should be used instead. :param datetime timestamp: naive datetime value (expressed in UTC) to be converted to the client timezone :param dict context: the 'tz' key in the context should give the name of the User/Client timezone (otherwise UTC is used) :rtype: datetime :return: timestamp converted to timezone-aware datetime in context timezone """ assert isinstance(timestamp, DT.datetime), 'Datetime instance expected' if context and context.get('tz'): tz_name = context['tz'] else: registry = openerp.modules.registry.RegistryManager.get(cr.dbname) user = registry['res.users'].browse(cr, SUPERUSER_ID, uid) tz_name = user.tz utc_timestamp = pytz.utc.localize(timestamp, is_dst=False) # UTC = no DST if tz_name: try: context_tz = pytz.timezone(tz_name) return utc_timestamp.astimezone(context_tz) except Exception: _logger.debug("failed to compute context/client-specific timestamp, " "using the UTC value", exc_info=True) return utc_timestamp @classmethod def _as_display_name(cls, field, cr, uid, obj, value, context=None): value = datetime.context_timestamp(cr, uid, DT.datetime.strptime(value, tools.DEFAULT_SERVER_DATETIME_FORMAT), context=context) return tools.ustr(value.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT)) class binary(_column): _type = 'binary' _classic_read = False # Binary values may be byte strings (python 2.6 byte array), but # the legacy OpenERP convention is to transfer and 
store binaries # as base64-encoded strings. The base64 string may be provided as a # unicode in some circumstances, hence the str() cast in symbol_f. # This str coercion will only work for pure ASCII unicode strings, # on purpose - non base64 data must be passed as a 8bit byte strings. _symbol_c = '%s' _symbol_f = lambda symb: symb and Binary(str(symb)) or None _symbol_set = (_symbol_c, _symbol_f) _symbol_get = lambda self, x: x and str(x) __slots__ = ['filters'] def __init__(self, string='unknown', filters=None, **args): args['_prefetch'] = args.get('_prefetch', False) _column.__init__(self, string=string, filters=filters, **args) def get(self, cr, obj, ids, name, user=None, context=None, values=None): if not context: context = {} if not values: values = [] res = {} for i in ids: val = None for v in values: if v['id'] == i: val = v[name] break # If client is requesting only the size of the field, we return it instead # of the content. Presumably a separate request will be done to read the actual # content if it's needed at some point. # TODO: after 6.0 we should consider returning a dict with size and content instead of # having an implicit convention for the value if val and context.get('bin_size_%s' % name, context.get('bin_size')): res[i] = tools.human_size(long(val)) else: res[i] = val return res class selection(_column): _type = 'selection' __slots__ = ['selection'] def __init__(self, selection, string='unknown', **args): if callable(selection): from openerp import api selection = api.expected(api.cr_uid_context, selection) _column.__init__(self, string=string, selection=selection, **args) def to_field_args(self): args = super(selection, self).to_field_args() args['selection'] = self.selection return args @classmethod def reify(cls, cr, uid, model, field, context=None): """ Munges the field's ``selection`` attribute as necessary to get something useable out of it: calls it if it's a function, applies translations to labels if it's not. 
A callable ``selection`` is considered translated on its own. :param orm.Model model: :param _column field: """ if callable(field.selection): return field.selection(model, cr, uid, context) if not (context and 'lang' in context): return field.selection # field_to_dict isn't given a field name, only a field object, we # need to get the name back in order to perform the translation lookup field_name = next( name for name, column in model._columns.iteritems() if column == field) translation_filter = "%s,%s" % (model._name, field_name) translate = functools.partial( model.pool['ir.translation']._get_source, cr, uid, translation_filter, 'selection', context['lang']) return [ (value, translate(label)) for value, label in field.selection ] # --------------------------------------------------------- # Relationals fields # --------------------------------------------------------- # # Values: (0, 0, { fields }) create # (1, ID, { fields }) update # (2, ID) remove (delete) # (3, ID) unlink one (target id or target of relation) # (4, ID) link # (5) unlink all (only valid for one2many) # class many2one(_column): _classic_read = False _classic_write = True _type = 'many2one' _symbol_c = '%s' _symbol_f = lambda x: x or None _symbol_set = (_symbol_c, _symbol_f) __slots__ = ['_obj', '_auto_join'] def __init__(self, obj, string='unknown', auto_join=False, **args): args['ondelete'] = args.get('ondelete', 'set null') _column.__init__(self, string=string, **args) self._obj = obj self._auto_join = auto_join def to_field_args(self): args = super(many2one, self).to_field_args() args['comodel_name'] = self._obj args['auto_join'] = self._auto_join return args def set(self, cr, obj_src, id, field, values, user=None, context=None): if not context: context = {} obj = obj_src.pool[self._obj] self._table = obj._table if type(values) == type([]): for act in values: if act[0] == 0: id_new = obj.create(cr, act[2]) cr.execute('update '+obj_src._table+' set '+field+'=%s where id=%s', (id_new, id)) 
elif act[0] == 1: obj.write(cr, [act[1]], act[2], context=context) elif act[0] == 2: cr.execute('delete from '+self._table+' where id=%s', (act[1],)) elif act[0] == 3 or act[0] == 5: cr.execute('update '+obj_src._table+' set '+field+'=null where id=%s', (id,)) elif act[0] == 4: cr.execute('update '+obj_src._table+' set '+field+'=%s where id=%s', (act[1], id)) else: if values: cr.execute('update '+obj_src._table+' set '+field+'=%s where id=%s', (values, id)) else: cr.execute('update '+obj_src._table+' set '+field+'=null where id=%s', (id,)) def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, context=None): return obj.pool[self._obj].search(cr, uid, args+self._domain+[('name', 'like', value)], offset, limit, context=context) @classmethod def _as_display_name(cls, field, cr, uid, obj, value, context=None): return value[1] if isinstance(value, tuple) else tools.ustr(value) class one2many(_column): _classic_read = False _classic_write = False _type = 'one2many' __slots__ = ['_obj', '_fields_id', '_limit', '_auto_join'] def __init__(self, obj, fields_id, string='unknown', limit=None, auto_join=False, **args): # one2many columns are not copied by default args['copy'] = args.get('copy', False) args['_prefetch'] = args.get('_prefetch', False) _column.__init__(self, string=string, **args) self._obj = obj self._fields_id = fields_id self._limit = limit self._auto_join = auto_join #one2many can't be used as condition for defaults assert(self.change_default != True) def to_field_args(self): args = super(one2many, self).to_field_args() args['comodel_name'] = self._obj args['inverse_name'] = self._fields_id args['auto_join'] = self._auto_join args['limit'] = self._limit return args def get(self, cr, obj, ids, name, user=None, offset=0, context=None, values=None): if self._context: context = dict(context or {}) context.update(self._context) # retrieve the records in the comodel comodel = obj.pool[self._obj].browse(cr, user, [], context) inverse = 
self._fields_id domain = self._domain(obj) if callable(self._domain) else self._domain domain = domain + [(inverse, 'in', ids)] records = comodel.search(domain, limit=self._limit) result = {id: [] for id in ids} # read the inverse of records without prefetching other fields on them for record in records.with_context(prefetch_fields=False): # record[inverse] may be a record or an integer result[int(record[inverse])].append(record.id) return result def set(self, cr, obj, id, field, values, user=None, context=None): result = [] context = dict(context or {}) context.update(self._context) if not values: return obj = obj.pool[self._obj] rec = obj.browse(cr, user, [], context=context) with rec.env.norecompute(): _table = obj._table for act in values: if act[0] == 0: act[2][self._fields_id] = id id_new = obj.create(cr, user, act[2], context=context) result += obj._store_get_values(cr, user, [id_new], act[2].keys(), context) elif act[0] == 1: obj.write(cr, user, [act[1]], act[2], context=context) elif act[0] == 2: obj.unlink(cr, user, [act[1]], context=context) elif act[0] == 3: inverse_field = obj._fields.get(self._fields_id) assert inverse_field, 'Trying to unlink the content of a o2m but the pointed model does not have a m2o' # if the model has on delete cascade, just delete the row if inverse_field.ondelete == "cascade": obj.unlink(cr, user, [act[1]], context=context) else: cr.execute('update '+_table+' set '+self._fields_id+'=null where id=%s', (act[1],)) elif act[0] == 4: # check whether the given record is already linked rec = obj.browse(cr, SUPERUSER_ID, act[1], {'prefetch_fields': False}) if int(rec[self._fields_id]) != id: # Must use write() to recompute parent_store structure if needed and check access rules obj.write(cr, user, [act[1]], {self._fields_id:id}, context=context or {}) elif act[0] == 5: inverse_field = obj._fields.get(self._fields_id) assert inverse_field, 'Trying to unlink the content of a o2m but the pointed model does not have a m2o' # if the o2m 
has a static domain we must respect it when unlinking domain = self._domain(obj) if callable(self._domain) else self._domain extra_domain = domain or [] ids_to_unlink = obj.search(cr, user, [(self._fields_id,'=',id)] + extra_domain, context=context) # If the model has cascade deletion, we delete the rows because it is the intended behavior, # otherwise we only nullify the reverse foreign key column. if inverse_field.ondelete == "cascade": obj.unlink(cr, user, ids_to_unlink, context=context) else: obj.write(cr, user, ids_to_unlink, {self._fields_id: False}, context=context) elif act[0] == 6: # Must use write() to recompute parent_store structure if needed obj.write(cr, user, act[2], {self._fields_id:id}, context=context or {}) ids2 = act[2] or [0] cr.execute('select id from '+_table+' where '+self._fields_id+'=%s and id <> ALL (%s)', (id,ids2)) ids3 = map(lambda x:x[0], cr.fetchall()) obj.write(cr, user, ids3, {self._fields_id:False}, context=context or {}) return result def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, operator='like', context=None): domain = self._domain(obj) if callable(self._domain) else self._domain return obj.pool[self._obj].name_search(cr, uid, value, domain, operator, context=context,limit=limit) @classmethod def _as_display_name(cls, field, cr, uid, obj, value, context=None): raise NotImplementedError('One2Many columns should not be used as record name (_rec_name)') # # Values: (0, 0, { fields }) create # (1, ID, { fields }) update (write fields to ID) # (2, ID) remove (calls unlink on ID, that will also delete the relationship because of the ondelete) # (3, ID) unlink (delete the relationship between the two objects but does not delete ID) # (4, ID) link (add a relationship) # (5, ID) unlink all # (6, ?, ids) set a list of links # class many2many(_column): """Encapsulates the logic of a many-to-many bidirectional relationship, handling the low-level details of the intermediary relationship table transparently. 
A many-to-many relationship is always symmetrical, and can be declared and accessed from either endpoint model. If ``rel`` (relationship table name), ``id1`` (source foreign key column name) or id2 (destination foreign key column name) are not specified, the system will provide default values. This will by default only allow one single symmetrical many-to-many relationship between the source and destination model. For multiple many-to-many relationship between the same models and for relationships where source and destination models are the same, ``rel``, ``id1`` and ``id2`` should be specified explicitly. :param str obj: destination model :param str rel: optional name of the intermediary relationship table. If not specified, a canonical name will be derived based on the alphabetically-ordered model names of the source and destination (in the form: ``amodel_bmodel_rel``). Automatic naming is not possible when the source and destination are the same, for obvious ambiguity reasons. :param str id1: optional name for the column holding the foreign key to the current model in the relationship table. If not specified, a canonical name will be derived based on the model name (in the form: `src_model_id`). :param str id2: optional name for the column holding the foreign key to the destination model in the relationship table. If not specified, a canonical name will be derived based on the model name (in the form: `dest_model_id`) :param str string: field label """ _classic_read = False _classic_write = False _type = 'many2many' __slots__ = ['_obj', '_rel', '_id1', '_id2', '_limit', '_auto_join'] def __init__(self, obj, rel=None, id1=None, id2=None, string='unknown', limit=None, **args): """ """ args['_prefetch'] = args.get('_prefetch', False) _column.__init__(self, string=string, **args) self._obj = obj if rel and '.' 
in rel: raise Exception(_('The second argument of the many2many field %s must be a SQL table !'\ 'You used %s, which is not a valid SQL table name.')% (string,rel)) self._rel = rel self._id1 = id1 self._id2 = id2 self._limit = limit self._auto_join = False def to_field_args(self): args = super(many2many, self).to_field_args() args['comodel_name'] = self._obj args['relation'] = self._rel args['column1'] = self._id1 args['column2'] = self._id2 args['limit'] = self._limit return args def _sql_names(self, source_model): """Return the SQL names defining the structure of the m2m relationship table :return: (m2m_table, local_col, dest_col) where m2m_table is the table name, local_col is the name of the column holding the current model's FK, and dest_col is the name of the column holding the destination model's FK, and """ tbl, col1, col2 = self._rel, self._id1, self._id2 if not all((tbl, col1, col2)): # the default table name is based on the stable alphabetical order of tables dest_model = source_model.pool[self._obj] tables = tuple(sorted([source_model._table, dest_model._table])) if not tbl: assert tables[0] != tables[1], 'Implicit/Canonical naming of m2m relationship table '\ 'is not possible when source and destination models are '\ 'the same' tbl = '%s_%s_rel' % tables if not col1: col1 = '%s_id' % source_model._table if not col2: col2 = '%s_id' % dest_model._table return tbl, col1, col2 def _get_query_and_where_params(self, cr, model, ids, values, where_params): """ Extracted from ``get`` to facilitate fine-tuning of the generated query. 
""" query = 'SELECT %(rel)s.%(id2)s, %(rel)s.%(id1)s \ FROM %(rel)s, %(from_c)s \ WHERE %(rel)s.%(id1)s IN %%s \ AND %(rel)s.%(id2)s = %(tbl)s.id \ %(where_c)s \ %(order_by)s \ %(limit)s \ OFFSET %(offset)d' \ % values return query, where_params def get(self, cr, model, ids, name, user=None, offset=0, context=None, values=None): if not context: context = {} if not values: values = {} res = {} if not ids: return res for id in ids: res[id] = [] if offset: _logger.warning( "Specifying offset at a many2many.get() is deprecated and may" " produce unpredictable results.") obj = model.pool[self._obj] rel, id1, id2 = self._sql_names(model) # static domains are lists, and are evaluated both here and on client-side, while string # domains supposed by dynamic and evaluated on client-side only (thus ignored here) # FIXME: make this distinction explicit in API! domain = isinstance(self._domain, list) and self._domain or [] wquery = obj._where_calc(cr, user, domain, context=context) obj._apply_ir_rules(cr, user, wquery, 'read', context=context) order_by = obj._generate_order_by(None, wquery) from_c, where_c, where_params = wquery.get_sql() if where_c: where_c = ' AND ' + where_c limit_str = '' if self._limit is not None: limit_str = ' LIMIT %d' % self._limit query, where_params = self._get_query_and_where_params(cr, model, ids, {'rel': rel, 'from_c': from_c, 'tbl': obj._table, 'id1': id1, 'id2': id2, 'where_c': where_c, 'limit': limit_str, 'order_by': order_by, 'offset': offset, }, where_params) cr.execute(query, [tuple(ids),] + where_params) for r in cr.fetchall(): res[r[1]].append(r[0]) return res def set(self, cr, model, id, name, values, user=None, context=None): if not context: context = {} if not values: return rel, id1, id2 = self._sql_names(model) obj = model.pool[self._obj] for act in values: if not (isinstance(act, list) or isinstance(act, tuple)) or not act: continue if act[0] == 0: idnew = obj.create(cr, user, act[2], context=context) cr.execute('insert into '+rel+' 
('+id1+','+id2+') values (%s,%s)', (id, idnew)) elif act[0] == 1: obj.write(cr, user, [act[1]], act[2], context=context) elif act[0] == 2: obj.unlink(cr, user, [act[1]], context=context) elif act[0] == 3: cr.execute('delete from '+rel+' where ' + id1 + '=%s and '+ id2 + '=%s', (id, act[1])) elif act[0] == 4: # following queries are in the same transaction - so should be relatively safe cr.execute('SELECT 1 FROM '+rel+' WHERE '+id1+' = %s and '+id2+' = %s', (id, act[1])) if not cr.fetchone(): cr.execute('insert into '+rel+' ('+id1+','+id2+') values (%s,%s)', (id, act[1])) elif act[0] == 5: cr.execute('delete from '+rel+' where ' + id1 + ' = %s', (id,)) elif act[0] == 6: d1, d2,tables = obj.pool.get('ir.rule').domain_get(cr, user, obj._name, context=context) if d1: d1 = ' and ' + ' and '.join(d1) else: d1 = '' cr.execute('delete from '+rel+' where '+id1+'=%s AND '+id2+' IN (SELECT '+rel+'.'+id2+' FROM '+rel+', '+','.join(tables)+' WHERE '+rel+'.'+id1+'=%s AND '+rel+'.'+id2+' = '+obj._table+'.id '+ d1 +')', [id, id]+d2) for act_nbr in act[2]: cr.execute('insert into '+rel+' ('+id1+','+id2+') values (%s, %s)', (id, act_nbr)) # # TODO: use a name_search # def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, operator='like', context=None): return obj.pool[self._obj].search(cr, uid, args+self._domain+[('name', operator, value)], offset, limit, context=context) @classmethod def _as_display_name(cls, field, cr, uid, obj, value, context=None): raise NotImplementedError('Many2Many columns should not be used as record name (_rec_name)') def get_nice_size(value): size = 0 if isinstance(value, (int,long)): size = value elif value: # this is supposed to be a string size = len(value) if size < 12: # suppose human size return value return tools.human_size(size) # See http://www.w3.org/TR/2000/REC-xml-20001006#NT-Char # and http://bugs.python.org/issue10066 invalid_xml_low_bytes = re.compile(r'[\x00-\x08\x0b-\x0c\x0e-\x1f]') def sanitize_binary_value(value): 
# binary fields should be 7-bit ASCII base64-encoded data, # but we do additional sanity checks to make sure the values # are not something else that won't pass via XML-RPC if isinstance(value, (xmlrpclib.Binary, tuple, list, dict)): # these builtin types are meant to pass untouched return value # Handle invalid bytes values that will cause problems # for XML-RPC. See for more info: # - http://bugs.python.org/issue10066 # - http://www.w3.org/TR/2000/REC-xml-20001006#NT-Char # Coercing to unicode would normally allow it to properly pass via # XML-RPC, transparently encoded as UTF-8 by xmlrpclib. # (this works for _any_ byte values, thanks to the fallback # to latin-1 passthrough encoding when decoding to unicode) value = tools.ustr(value) # Due to Python bug #10066 this could still yield invalid XML # bytes, specifically in the low byte range, that will crash # the decoding side: [\x00-\x08\x0b-\x0c\x0e-\x1f] # So check for low bytes values, and if any, perform # base64 encoding - not very smart or useful, but this is # our last resort to avoid crashing the request. if invalid_xml_low_bytes.search(value): # b64-encode after restoring the pure bytes with latin-1 # passthrough encoding value = base64.b64encode(value.encode('latin-1')) return value # --------------------------------------------------------- # Function fields # --------------------------------------------------------- class function(_column): """ A field whose value is computed by a function (rather than being read from the database). :param fnct: the callable that will compute the field value. :param arg: arbitrary value to be passed to ``fnct`` when computing the value. :param fnct_inv: the callable that will allow writing values in that field (if not provided, the field is read-only). :param fnct_inv_arg: arbitrary value to be passed to ``fnct_inv`` when writing a value. 
:param str type: type of the field simulated by the function field :param fnct_search: the callable that allows searching on the field (if not provided, search will not return any result). :param store: store computed value in database (see :ref:`The *store* parameter <field-function-store>`). :type store: True or dict specifying triggers for field computation :param multi: name of batch for batch computation of function fields. All fields with the same batch name will be computed by a single function call. This changes the signature of the ``fnct`` callable. .. _field-function-fnct: The ``fnct`` parameter .. rubric:: The ``fnct`` parameter The callable implementing the function field must have the following signature: .. function:: fnct(model, cr, uid, ids, field_name(s), arg, context) Implements the function field. :param orm model: model to which the field belongs (should be ``self`` for a model method) :param field_name(s): name of the field to compute, or if ``multi`` is provided, list of field names to compute. :type field_name(s): str | [str] :param arg: arbitrary value passed when declaring the function field :rtype: dict :return: mapping of ``ids`` to computed values, or if multi is provided, to a map of field_names to computed values The values in the returned dictionary must be of the type specified by the type argument in the field declaration. Here is an example with a simple function ``char`` function field:: # declarations def compute(self, cr, uid, ids, field_name, arg, context): result = {} # ... return result _columns['my_char'] = fields.function(compute, type='char', size=50) # when called with ``ids=[1,2,3]``, ``compute`` could return: { 1: 'foo', 2: 'bar', 3: False # null values should be returned explicitly too } If ``multi`` is set, then ``field_name`` is replaced by ``field_names``: a list of the field names that should be computed. Each value in the returned dictionary must then be a dictionary mapping field names to values. 
Here is an example where two function fields (``name`` and ``age``) are both computed by a single function field:: # declarations def compute(self, cr, uid, ids, field_names, arg, context): result = {} # ... return result _columns['name'] = fields.function(compute_person_data, type='char',\ size=50, multi='person_data') _columns[''age'] = fields.function(compute_person_data, type='integer',\ multi='person_data') # when called with ``ids=[1,2,3]``, ``compute_person_data`` could return: { 1: {'name': 'Bob', 'age': 23}, 2: {'name': 'Sally', 'age': 19}, 3: {'name': 'unknown', 'age': False} } .. _field-function-fnct-inv: .. rubric:: The ``fnct_inv`` parameter This callable implements the write operation for the function field and must have the following signature: .. function:: fnct_inv(model, cr, uid, id, field_name, field_value, fnct_inv_arg, context) Callable that implements the ``write`` operation for the function field. :param orm model: model to which the field belongs (should be ``self`` for a model method) :param int id: the identifier of the object to write on :param str field_name: name of the field to set :param fnct_inv_arg: arbitrary value passed when declaring the function field :return: True When writing values for a function field, the ``multi`` parameter is ignored. .. _field-function-fnct-search: .. rubric:: The ``fnct_search`` parameter This callable implements the search operation for the function field and must have the following signature: .. function:: fnct_search(model, cr, uid, model_again, field_name, criterion, context) Callable that implements the ``search`` operation for the function field by expanding a search criterion based on the function field into a new domain based only on columns that are stored in the database. :param orm model: model to which the field belongs (should be ``self`` for a model method) :param orm model_again: same value as ``model`` (seriously! 
this is for backwards compatibility) :param str field_name: name of the field to search on :param list criterion: domain component specifying the search criterion on the field. :rtype: list :return: domain to use instead of ``criterion`` when performing the search. This new domain must be based only on columns stored in the database, as it will be used directly without any translation. The returned value must be a domain, that is, a list of the form [(field_name, operator, operand)]. The most generic way to implement ``fnct_search`` is to directly search for the records that match the given ``criterion``, and return their ``ids`` wrapped in a domain, such as ``[('id','in',[1,3,5])]``. .. _field-function-store: .. rubric:: The ``store`` parameter The ``store`` parameter allows caching the result of the field computation in the database, and defining the triggers that will invalidate that cache and force a recomputation of the function field. When not provided, the field is computed every time its value is read. The value of ``store`` may be either ``True`` (to recompute the field value whenever any field in the same record is modified), or a dictionary specifying a more flexible set of recomputation triggers. A trigger specification is a dictionary that maps the names of the models that will trigger the computation, to a tuple describing the trigger rule, in the following form:: store = { 'trigger_model': (mapping_function, ['trigger_field1', 'trigger_field2'], priority), } A trigger rule is defined by a 3-item tuple where: * The ``mapping_function`` is defined as follows: .. function:: mapping_function(trigger_model, cr, uid, trigger_ids, context) Callable that maps record ids of a trigger model to ids of the corresponding records in the source model (whose field values need to be recomputed). 
:param orm model: trigger_model :param list trigger_ids: ids of the records of trigger_model that were modified :rtype: list :return: list of ids of the source model whose function field values need to be recomputed * The second item is a list of the fields who should act as triggers for the computation. If an empty list is given, all fields will act as triggers. * The last item is the priority, used to order the triggers when processing them after any write operation on a model that has function field triggers. The default priority is 10. In fact, setting store = True is the same as using the following trigger dict:: store = { 'model_itself': (lambda self, cr, uid, ids, context: ids, [], 10) } """ _properties = True __slots__ = [ '_type', '_classic_read', '_classic_write', '_symbol_c', '_symbol_f', '_symbol_set', '_symbol_get', '_fnct', '_arg', '_fnct_inv', '_fnct_inv_arg', '_fnct_search', '_multi', 'store', '_digits', '_digits_compute', 'selection', '_obj', ] @property def digits(self): if self._digits_compute: with _get_cursor() as cr: return self._digits_compute(cr) else: return self._digits # # multi: compute several fields in one call # def __init__(self, fnct, arg=None, fnct_inv=None, fnct_inv_arg=None, type='float', fnct_search=None, obj=None, store=False, multi=False, **args): self._classic_read = False self._classic_write = False self._prefetch = False self._symbol_c = '%s' self._symbol_f = _symbol_set self._symbol_set = (self._symbol_c, self._symbol_f) self._symbol_get = None # pop attributes that should not be assigned to self self._digits = args.pop('digits', (16,2)) self._digits_compute = args.pop('digits_compute', None) self._obj = args.pop('relation', obj) # function fields are not copied by default args['copy'] = args.get('copy', False) _column.__init__(self, **args) self._type = type self._fnct = fnct self._arg = arg self._fnct_inv = fnct_inv self._fnct_inv_arg = fnct_inv_arg self._fnct_search = fnct_search self.store = store self._multi = multi 
if not fnct_inv: self.readonly = 1 if not fnct_search and not store: self.selectable = False if callable(args.get('selection')): from openerp import api self.selection = api.expected(api.cr_uid_context, args['selection']) if store: if self._type != 'many2one': # m2o fields need to return tuples with name_get, not just foreign keys self._classic_read = True self._classic_write = True if type=='binary': self._symbol_get=lambda x:x and str(x) else: self._prefetch = True if type == 'char': self._symbol_c = char._symbol_c self._symbol_f = lambda x: _symbol_set_char(self, x) self._symbol_set = (self._symbol_c, self._symbol_f) elif type == 'float': self._symbol_c = float._symbol_c self._symbol_f = lambda x: _symbol_set_float(self, x) self._symbol_set = (self._symbol_c, self._symbol_f) else: type_class = globals().get(type) if type_class is not None: self._symbol_c = type_class._symbol_c self._symbol_f = type_class._symbol_f self._symbol_set = type_class._symbol_set def new(self, _computed_field=False, **args): if _computed_field: # field is computed, we need an instance of a non-function column type_class = globals()[self._type] return type_class(**args) else: # HACK: function fields are tricky to recreate, simply return a copy import copy return copy.copy(self) def to_field_args(self): args = super(function, self).to_field_args() args['store'] = bool(self.store) args['company_dependent'] = False if self._type in ('float',): args['digits'] = self._digits_compute or self._digits elif self._type in ('selection', 'reference'): args['selection'] = self.selection elif self._type in ('many2one', 'one2many', 'many2many'): args['comodel_name'] = self._obj return args def digits_change(self, cr): pass def search(self, cr, uid, obj, name, args, context=None): if not self._fnct_search: #CHECKME: should raise an exception return [] return self._fnct_search(obj, cr, uid, obj, name, args, context=context) def postprocess(self, cr, uid, obj, field, value=None, context=None): return 
self._postprocess_batch(cr, uid, obj, field, {0: value}, context=context)[0] def _postprocess_batch(self, cr, uid, obj, field, values, context=None): if not values: return values if context is None: context = {} field_type = obj._columns[field]._type new_values = dict(values) if field_type == 'binary': if context.get('bin_size'): # client requests only the size of binary fields for rid, value in values.iteritems(): if value: new_values[rid] = get_nice_size(value) elif not context.get('bin_raw'): for rid, value in values.iteritems(): if value: new_values[rid] = sanitize_binary_value(value) return new_values def get(self, cr, obj, ids, name, uid=False, context=None, values=None): multi = self._multi # if we already have a value, don't recompute it. # This happen if case of stored many2one fields if values and not multi and name in values[0]: result = dict((v['id'], v[name]) for v in values) elif values and multi and all(n in values[0] for n in name): result = dict((v['id'], dict((n, v[n]) for n in name)) for v in values) else: result = self._fnct(obj, cr, uid, ids, name, self._arg, context) if multi: swap = {} for rid, values in result.iteritems(): for f, v in values.iteritems(): if f not in name: continue swap.setdefault(f, {})[rid] = v for field, values in swap.iteritems(): new_values = self._postprocess_batch(cr, uid, obj, field, values, context) for rid, value in new_values.iteritems(): result[rid][field] = value else: result = self._postprocess_batch(cr, uid, obj, name, result, context) return result def set(self, cr, obj, id, name, value, user=None, context=None): if not context: context = {} if self._fnct_inv: self._fnct_inv(obj, cr, user, id, name, value, self._fnct_inv_arg, context) @classmethod def _as_display_name(cls, field, cr, uid, obj, value, context=None): # Function fields are supposed to emulate a basic field type, # so they can delegate to the basic type for record name rendering return globals()[field._type]._as_display_name(field, cr, uid, obj, 
value, context=context) # --------------------------------------------------------- # Related fields # --------------------------------------------------------- class related(function): """Field that points to some data inside another field of the current record. Example:: _columns = { 'foo_id': fields.many2one('my.foo', 'Foo'), 'bar': fields.related('foo_id', 'frol', type='char', string='Frol of Foo'), } """ __slots__ = ['arg', '_relations'] def _related_search(self, tobj, cr, uid, obj=None, name=None, domain=None, context=None): # assume self._arg = ('foo', 'bar', 'baz') # domain = [(name, op, val)] => search [('foo.bar.baz', op, val)] field = '.'.join(self._arg) return map(lambda x: (field, x[1], x[2]), domain) def _related_write(self, obj, cr, uid, ids, field_name, values, args, context=None): if isinstance(ids, (int, long)): ids = [ids] for instance in obj.browse(cr, uid, ids, context=context): # traverse all fields except the last one for field in self.arg[:-1]: instance = instance[field][:1] if instance: # write on the last field of the target record instance.write({self.arg[-1]: values}) def _related_read(self, obj, cr, uid, ids, field_name, args, context=None): res = {} for record in obj.browse(cr, SUPERUSER_ID, ids, context=context): value = record # traverse all fields except the last one for field in self.arg[:-1]: value = value[field][:1] # read the last field on the target record res[record.id] = value[self.arg[-1]] if self._type == 'many2one': # res[id] is a recordset; convert it to (id, name) or False. # Perform name_get as root, as seeing the name of a related object depends on # access right of source document, not target, so user may not have access. 
value_ids = list(set(value.id for value in res.itervalues() if value)) value_name = dict(obj.pool[self._obj].name_get(cr, SUPERUSER_ID, value_ids, context=context)) res = dict((id, bool(value) and (value.id, value_name[value.id])) for id, value in res.iteritems()) elif self._type in ('one2many', 'many2many'): # res[id] is a recordset; convert it to a list of ids res = dict((id, value.ids) for id, value in res.iteritems()) return res def __init__(self, *arg, **args): self.arg = arg self._relations = [] super(related, self).__init__(self._related_read, arg, self._related_write, fnct_inv_arg=arg, fnct_search=self._related_search, **args) if self.store is True: # TODO: improve here to change self.store = {...} according to related objects pass class sparse(function): __slots__ = ['serialization_field'] def convert_value(self, obj, cr, uid, record, value, read_value, context=None): """ + For a many2many field, a list of tuples is expected. Here is the list of tuple that are accepted, with the corresponding semantics :: (0, 0, { values }) link to a new record that needs to be created with the given values dictionary (1, ID, { values }) update the linked record with id = ID (write *values* on it) (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well) (3, ID) cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself) (4, ID) link to existing record with id = ID (adds a relationship) (5) unlink all (like using (3,ID) for all linked records) (6, 0, [IDs]) replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs) Example: [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4] + For a one2many field, a lits of tuples is expected. 
Here is the list of tuple that are accepted, with the corresponding semantics :: (0, 0, { values }) link to a new record that needs to be created with the given values dictionary (1, ID, { values }) update the linked record with id = ID (write *values* on it) (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well) Example: [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})] """ if self._type == 'many2many': if not value: return [] assert value[0][0] == 6, 'Unsupported m2m value for sparse field: %s' % value return value[0][2] elif self._type == 'one2many': if not read_value: read_value = [] relation_obj = obj.pool[self.relation] for vals in value: assert vals[0] in (0,1,2), 'Unsupported o2m value for sparse field: %s' % vals if vals[0] == 0: read_value.append(relation_obj.create(cr, uid, vals[2], context=context)) elif vals[0] == 1: relation_obj.write(cr, uid, vals[1], vals[2], context=context) elif vals[0] == 2: relation_obj.unlink(cr, uid, vals[1], context=context) read_value.remove(vals[1]) return read_value return value def _sparse_write(self,obj,cr, uid, ids, field_name, value, args, context=None): if not type(ids) == list: ids = [ids] records = obj.browse(cr, uid, ids, context=context) for record in records: # grab serialized value as object - already deserialized serialized = getattr(record, self.serialization_field) if value is None: # simply delete the key to unset it. 
serialized.pop(field_name, None) else: serialized[field_name] = self.convert_value(obj, cr, uid, record, value, serialized.get(field_name), context=context) obj.write(cr, uid, ids, {self.serialization_field: serialized}, context=context) return True def _sparse_read(self, obj, cr, uid, ids, field_names, args, context=None): results = {} records = obj.browse(cr, uid, ids, context=context) for record in records: # grab serialized value as object - already deserialized serialized = getattr(record, self.serialization_field) results[record.id] = {} for field_name in field_names: field_type = obj._columns[field_name]._type value = serialized.get(field_name, False) if field_type in ('one2many','many2many'): value = value or [] if value: # filter out deleted records as superuser relation_obj = obj.pool[obj._columns[field_name].relation] value = relation_obj.exists(cr, openerp.SUPERUSER_ID, value) if type(value) in (int,long) and field_type == 'many2one': relation_obj = obj.pool[obj._columns[field_name].relation] # check for deleted record as superuser if not relation_obj.exists(cr, openerp.SUPERUSER_ID, [value]): value = False results[record.id][field_name] = value return results def __init__(self, serialization_field, **kwargs): self.serialization_field = serialization_field super(sparse, self).__init__(self._sparse_read, fnct_inv=self._sparse_write, multi='__sparse_multi', **kwargs) # --------------------------------------------------------- # Dummy fields # --------------------------------------------------------- class dummy(function): __slots__ = ['arg', '_relations'] def _dummy_search(self, tobj, cr, uid, obj=None, name=None, domain=None, context=None): return [] def _dummy_write(self, obj, cr, uid, ids, field_name, values, args, context=None): return False def _dummy_read(self, obj, cr, uid, ids, field_name, args, context=None): return {} def __init__(self, *arg, **args): self.arg = arg self._relations = [] super(dummy, self).__init__(self._dummy_read, arg, 
self._dummy_write, fnct_inv_arg=arg, fnct_search=self._dummy_search, **args) # --------------------------------------------------------- # Serialized fields # --------------------------------------------------------- class serialized(_column): """ A field able to store an arbitrary python data structure. Note: only plain components allowed. """ _type = 'serialized' __slots__ = [] def _symbol_set_struct(val): return simplejson.dumps(val) def _symbol_get_struct(self, val): return simplejson.loads(val or '{}') _symbol_c = '%s' _symbol_f = _symbol_set_struct _symbol_set = (_symbol_c, _symbol_f) _symbol_get = _symbol_get_struct def __init__(self, *args, **kwargs): kwargs['_prefetch'] = kwargs.get('_prefetch', False) super(serialized, self).__init__(*args, **kwargs) # TODO: review completly this class for speed improvement class property(function): __slots__ = [] def to_field_args(self): args = super(property, self).to_field_args() args['company_dependent'] = True return args def _property_search(self, tobj, cr, uid, obj, name, domain, context=None): ir_property = obj.pool['ir.property'] result = [] for field, operator, value in domain: result += ir_property.search_multi(cr, uid, name, tobj._name, operator, value, context=context) return result def _property_write(self, obj, cr, uid, id, prop_name, value, obj_dest, context=None): ir_property = obj.pool['ir.property'] ir_property.set_multi(cr, uid, prop_name, obj._name, {id: value}, context=context) return True def _property_read(self, obj, cr, uid, ids, prop_names, obj_dest, context=None): ir_property = obj.pool['ir.property'] res = {id: {} for id in ids} for prop_name in prop_names: field = obj._fields[prop_name] values = ir_property.get_multi(cr, uid, prop_name, obj._name, ids, context=context) if field.type == 'many2one': # name_get the non-null values as SUPERUSER_ID vals = sum(set(filter(None, values.itervalues())), obj.pool[field.comodel_name].browse(cr, uid, [], context=context)) vals_name = 
dict(vals.sudo().name_get()) if vals else {} for id, value in values.iteritems(): ng = False if value and value.id in vals_name: ng = value.id, vals_name[value.id] res[id][prop_name] = ng else: for id, value in values.iteritems(): res[id][prop_name] = value return res def __init__(self, **args): if 'view_load' in args: _logger.warning("view_load attribute is deprecated on ir.fields. Args: %r", args) args = dict(args) args['obj'] = args.pop('relation', '') or args.get('obj', '') super(property, self).__init__( fnct=self._property_read, fnct_inv=self._property_write, fnct_search=self._property_search, multi='properties', **args ) class column_info(object): """ Struct containing details about an osv column, either one local to its model, or one inherited via _inherits. .. attribute:: name name of the column .. attribute:: column column instance, subclass of :class:`_column` .. attribute:: parent_model if the column is inherited, name of the model that contains it, ``None`` for local columns. .. attribute:: parent_column the name of the column containing the m2o relationship to the parent model that contains this column, ``None`` for local columns. .. attribute:: original_parent if the column is inherited, name of the original parent model that contains it i.e in case of multilevel inheritance, ``None`` for local columns. """ __slots__ = ['name', 'column', 'parent_model', 'parent_column', 'original_parent'] def __init__(self, name, column, parent_model=None, parent_column=None, original_parent=None): self.name = name self.column = column self.parent_model = parent_model self.parent_column = parent_column self.original_parent = original_parent def __str__(self): return '%s(%s, %s, %s, %s, %s)' % ( self.__class__.__name__, self.name, self.column, self.parent_model, self.parent_column, self.original_parent) # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
ysyrov/yaml-cpp
test/gmock-1.7.0/scripts/generator/cpp/keywords.py
1157
2004
#!/usr/bin/env python # # Copyright 2007 Neal Norwitz # Portions Copyright 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """C++ keywords and helper utilities for determining keywords.""" __author__ = 'nnorwitz@google.com (Neal Norwitz)' try: # Python 3.x import builtins except ImportError: # Python 2.x import __builtin__ as builtins if not hasattr(builtins, 'set'): # Nominal support for Python 2.3. from sets import Set as set TYPES = set('bool char int long short double float void wchar_t unsigned signed'.split()) TYPE_MODIFIERS = set('auto register const inline extern static virtual volatile mutable'.split()) ACCESS = set('public protected private friend'.split()) CASTS = set('static_cast const_cast dynamic_cast reinterpret_cast'.split()) OTHERS = set('true false asm class namespace using explicit this operator sizeof'.split()) OTHER_TYPES = set('new delete typedef struct union enum typeid typename template'.split()) CONTROL = set('case switch default if else return goto'.split()) EXCEPTION = set('try catch throw'.split()) LOOP = set('while do for break continue'.split()) ALL = TYPES | TYPE_MODIFIERS | ACCESS | CASTS | OTHERS | OTHER_TYPES | CONTROL | EXCEPTION | LOOP def IsKeyword(token): return token in ALL def IsBuiltinType(token): if token in ('virtual', 'inline'): # These only apply to methods, they can't be types by themselves. return False return token in TYPES or token in TYPE_MODIFIERS
mit
ravindrapanda/tensorflow
tensorflow/python/eager/graph_only_ops.py
69
2363
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Graph-only versions of a few op functions, for internal use only.""" # Must be separate from array_ops to avoid a cyclic dependency. from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.core.framework import attr_value_pb2 from tensorflow.python.framework import ops from tensorflow.python.framework import tensor_shape def graph_zeros_like(tensor): """Graph-only version of tf.zeros_like(), for internal use only.""" g = ops._get_graph_from_inputs([tensor]) # pylint: disable=protected-access with g.as_default(), ops.name_scope(None, "zeros_like", [tensor]) as name: tensor = ops.convert_to_tensor(tensor, name="tensor") dtype = tensor.dtype.base_dtype dtype_value = attr_value_pb2.AttrValue(type=dtype.as_datatype_enum) op = g.create_op("ZerosLike", [tensor], [dtype], input_types=[dtype], attrs={"T": dtype_value}, name=name) result, = op.outputs return result def graph_placeholder(dtype, shape, name=None): """Graph-only version of tf.placeholder(), for internal use only.""" dtype = dtype.base_dtype dtype_value = attr_value_pb2.AttrValue(type=dtype.as_datatype_enum) if isinstance(shape, (list, tuple)): shape = tensor_shape.TensorShape(shape) assert isinstance(shape, tensor_shape.TensorShape) shape = 
attr_value_pb2.AttrValue(shape=shape.as_proto()) g = ops.get_default_graph() with ops.name_scope(name, "placeholder", []) as name: op = g.create_op("Placeholder", [], [dtype], input_types=[], attrs={"dtype": dtype_value, "shape": shape}, name=name) result, = op.outputs return result
apache-2.0
andyaguiar/tornado
demos/appengine/blog.py
114
5385
#!/usr/bin/env python # # Copyright 2009 Facebook # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import functools import os.path import re import tornado.escape import tornado.web import tornado.wsgi import unicodedata from google.appengine.api import users from google.appengine.ext import db class Entry(db.Model): """A single blog entry.""" author = db.UserProperty() title = db.StringProperty(required=True) slug = db.StringProperty(required=True) body_source = db.TextProperty(required=True) html = db.TextProperty(required=True) published = db.DateTimeProperty(auto_now_add=True) updated = db.DateTimeProperty(auto_now=True) def administrator(method): """Decorate with this method to restrict to site admins.""" @functools.wraps(method) def wrapper(self, *args, **kwargs): if not self.current_user: if self.request.method == "GET": self.redirect(self.get_login_url()) return raise tornado.web.HTTPError(403) elif not self.current_user.administrator: if self.request.method == "GET": self.redirect("/") return raise tornado.web.HTTPError(403) else: return method(self, *args, **kwargs) return wrapper class BaseHandler(tornado.web.RequestHandler): """Implements Google Accounts authentication methods.""" def get_current_user(self): user = users.get_current_user() if user: user.administrator = users.is_current_user_admin() return user def get_login_url(self): return users.create_login_url(self.request.uri) def get_template_namespace(self): # Let the templates access the users module to generate 
login URLs ns = super(BaseHandler, self).get_template_namespace() ns['users'] = users return ns class HomeHandler(BaseHandler): def get(self): entries = db.Query(Entry).order('-published').fetch(limit=5) if not entries: if not self.current_user or self.current_user.administrator: self.redirect("/compose") return self.render("home.html", entries=entries) class EntryHandler(BaseHandler): def get(self, slug): entry = db.Query(Entry).filter("slug =", slug).get() if not entry: raise tornado.web.HTTPError(404) self.render("entry.html", entry=entry) class ArchiveHandler(BaseHandler): def get(self): entries = db.Query(Entry).order('-published') self.render("archive.html", entries=entries) class FeedHandler(BaseHandler): def get(self): entries = db.Query(Entry).order('-published').fetch(limit=10) self.set_header("Content-Type", "application/atom+xml") self.render("feed.xml", entries=entries) class ComposeHandler(BaseHandler): @administrator def get(self): key = self.get_argument("key", None) entry = Entry.get(key) if key else None self.render("compose.html", entry=entry) @administrator def post(self): key = self.get_argument("key", None) if key: entry = Entry.get(key) entry.title = self.get_argument("title") entry.body_source = self.get_argument("body_source") entry.html = tornado.escape.linkify( self.get_argument("body_source")) else: title = self.get_argument("title") slug = unicodedata.normalize("NFKD", title).encode( "ascii", "ignore") slug = re.sub(r"[^\w]+", " ", slug) slug = "-".join(slug.lower().strip().split()) if not slug: slug = "entry" while True: existing = db.Query(Entry).filter("slug =", slug).get() if not existing or str(existing.key()) == key: break slug += "-2" entry = Entry( author=self.current_user, title=title, slug=slug, body_source=self.get_argument("body_source"), html=tornado.escape.linkify(self.get_argument("body_source")), ) entry.put() self.redirect("/entry/" + entry.slug) class EntryModule(tornado.web.UIModule): def render(self, entry): return 
self.render_string("modules/entry.html", entry=entry) settings = { "blog_title": u"Tornado Blog", "template_path": os.path.join(os.path.dirname(__file__), "templates"), "ui_modules": {"Entry": EntryModule}, "xsrf_cookies": True, } application = tornado.web.Application([ (r"/", HomeHandler), (r"/archive", ArchiveHandler), (r"/feed", FeedHandler), (r"/entry/([^/]+)", EntryHandler), (r"/compose", ComposeHandler), ], **settings) application = tornado.wsgi.WSGIAdapter(application)
apache-2.0
peercoin/peercoin
test/functional/test_framework/test_shell.py
49
2402
#!/usr/bin/env python3 # Copyright (c) 2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. from test_framework.test_framework import BitcoinTestFramework class TestShell: """Wrapper Class for BitcoinTestFramework. The TestShell class extends the BitcoinTestFramework rpc & daemon process management functionality to external python environments. It is a singleton class, which ensures that users only start a single TestShell at a time.""" class __TestShell(BitcoinTestFramework): def set_test_params(self): pass def run_test(self): pass def setup(self, **kwargs): if self.running: print("TestShell is already running!") return # Num_nodes parameter must be set # by BitcoinTestFramework child class. self.num_nodes = 1 # User parameters override default values. for key, value in kwargs.items(): if hasattr(self, key): setattr(self, key, value) elif hasattr(self.options, key): setattr(self.options, key, value) else: raise KeyError(key + " not a valid parameter key!") super().setup() self.running = True return self def shutdown(self): if not self.running: print("TestShell is not running!") else: super().shutdown() self.running = False def reset(self): if self.running: print("Shutdown TestShell before resetting!") else: self.num_nodes = None super().__init__() instance = None def __new__(cls): # This implementation enforces singleton pattern, and will return the # previously initialized instance if available if not TestShell.instance: TestShell.instance = TestShell.__TestShell() TestShell.instance.running = False return TestShell.instance def __getattr__(self, name): return getattr(self.instance, name) def __setattr__(self, name, value): return setattr(self.instance, name, value)
mit
iromli/flosculus
setup.py
1
1639
import codecs import os.path import re from setuptools import setup from setuptools import find_packages def find_version(*file_paths): here = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(here, *file_paths), 'r') as f: version_file = f.read() version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError("Unable to find version string.") with open("README.rst") as f: long_desc = f.read() requirements = [ "logbook", "docopt", "fluent-logger", "six", ] setup( name="flosculus", version=find_version("flosculus", "__init__.py"), description="Tail your log, extract the data, and send it to Fluentd", long_description=long_desc, author="Isman Firmansyah", author_email="isman.firmansyah@gmail.com", url="https://github.com/iromli/flosculus", packages=find_packages(), license="MIT", classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", ], entry_points={ "console_scripts": ["flosculusd=flosculus.cli:main"], }, zip_safe=False, install_requires=requirements, )
mit
vegetableman/phantomjs
src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/steps/suggestreviewers.py
125
2506
# Copyright (C) 2010 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
from webkitpy.tool.steps.abstractstep import AbstractStep from webkitpy.tool.steps.options import Options class SuggestReviewers(AbstractStep): @classmethod def options(cls): return AbstractStep.options() + [ Options.git_commit, Options.suggest_reviewers, ] def run(self, state): if not self._options.suggest_reviewers: return reviewers = self._tool.checkout().suggested_reviewers(self._options.git_commit, self._changed_files(state))[:5] print "The following reviewers have recently modified files in your patch:" print ", ".join([reviewer.full_name for reviewer in reviewers]) if not state.get('bug_id'): return if not self._tool.user.confirm("Would you like to CC them?"): return reviewer_emails = [reviewer.bugzilla_email() for reviewer in reviewers] self._tool.bugs.add_cc_to_bug(state['bug_id'], reviewer_emails)
bsd-3-clause
Jhonbeltran/information-layer8
informationLayer8/settings.example.py
1
4057
""" Django settings for informationLayer8 project. Generated by 'django-admin startproject' using Django 1.10.6. For more information on this file, see https://docs.djangoproject.com/en/1.10/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.10/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'xxxxx' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'members', 'events', 'flat_responsive', 'materialize_forms', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'social_django', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'social_django.middleware.SocialAuthExceptionMiddleware', ] ROOT_URLCONF = 'informationLayer8.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, 'templates'),], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', 'social_django.context_processors.backends', 
'social_django.context_processors.login_redirect', ], }, }, ] WSGI_APPLICATION = 'informationLayer8.wsgi.application' # Database # https://docs.djangoproject.com/en/1.10/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'layer8', 'USER': 'xxxxx', 'PASSWORD': 'xxxxx', 'HOST': '127.0.0.1', 'PORT': '5432', } } # Password validation # https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators AUTHENTICATION_BACKENDS = [ 'social_core.backends.open_id.OpenIdAuth', 'social_core.backends.google.GoogleOpenId', 'social_core.backends.google.GoogleOAuth2', 'social_core.backends.google.GooglePlusAuth', 'django.contrib.auth.backends.ModelBackend', ] AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.10/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.10/howto/static-files/ STATICFILES_DIRS = [ os.path.join(BASE_DIR, 'static') ] STATIC_URL = '/static/' SOCIAL_AUTH_ADMIN_USER_SEARCH_FIELDS = ['username', 'email'] SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = 'xxxxx' SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = 'xxxxx' SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE = ['email'] SOCIAL_AUTH_LOGIN_REDIRECT_URL = "/"
mit
jacksemancik/PokemonPerlaBot
views.py
1
3646
import pyautogui, time from color import color_from_mouse def keypress(key): pyautogui.keyDown(key) time.sleep(.5) pyautogui.keyUp(key) def save(): keypress('enter') keypress('up') keypress('up') keypress('up') keypress('x') keypress('x') keypress('x') keypress('x')#spams x to assure that the saving process screens have ended keypress('x') keypress('x') keypress('x') keypress('x') keypress('x') keypress('x') keypress('x') keypress('x') keypress('x') keypress('x') keypress('x') keypress('x') keypress('enter')#resets menu cursor keypress('down')#resets menu cursor keypress('down')#resets menu cursor keypress('down')#resets menu cursor keypress('enter') print('Game saved!') return def perla(maxmin, wait, swi, ver, gen): pyautogui.FAILSAFE = True if wait: wait_time = wait * 60 time.sleep(wait_time) if (swi == True): switchpos() save() red = 1 endtime = time.time() + 60 * maxmin pyautogui.PAUSE = .5 standard_color = game_color(ver,gen) #Perla male standard color: (206, 170, 99) move_changes = 0 while time.time() < endtime: while color_from_mouse() == standard_color: keypress('right') keypress('left') print('Performed task %s times!' % red) red += 1 limit = time.time() + 180 while not color_from_mouse() == standard_color: if time.time() < limit: keypress('x') keypress('x') print('Performed task %s times!' % red) red += 1 else: move_changes = exit_battle(move_changes) print('Performed task %s times!' 
% red) red += 1 limit = time.time() + 180 move_changes = 5 while not color_from_mouse() == standard_color: keypress('x') if time.time() > limit: move_changes = exit_battle(move_changes) save() return def switchpos(): #pyautogui.PAUSE = 1.5 keypress('enter') keypress('down') keypress('x') time.sleep(2) keypress('x') time.sleep(1) keypress('down') time.sleep(2) keypress('x') keypress('right') keypress('right') keypress('x') time.sleep(2) keypress('z') time.sleep(2) keypress('up') keypress('enter') print('Positions switched!') return def exit_battle(move_changes): keypress('z') keypress('z') keypress('z') keypress('z') keypress('down') keypress('right') keypress('x') keypress('x') keypress('x') keypress('x') keypress('x') return switch_move(move_changes) def switch_move(move_changes): move_changes += 1 diff = 3 - move_changes if move_changes > 3: pass else: keypress('enter') keypress('down') keypress('x') time.sleep(2) keypress('x') time.sleep(1) keypress('x') time.sleep(2.5) keypress('right') time.sleep(1) keypress('right') time.sleep(2) keypress('x') time.sleep(1) keypress('x') keypress('down') time.sleep(1) keypress('x') time.sleep(1) keypress('x') for x in range(0,diff): keypress('down') time.sleep(1) keypress('x') keypress('z') keypress('z') time.sleep(1) keypress('z') keypress('z') time.sleep(2) keypress('up') keypress('enter') return move_changes def game_color(v,g): if v == 'Chaos' or v == 'Fire' or v == 'Leaf': if g == 'Female': return (239, 235, 255) else: return (255, 105, 74) elif v == 'Emerald': if g == 'Female':#Must place mouse cursor over hair not over hat!! return (165, 105, 82) else:#Must place cursor over hair not headband!! return (252, 248, 253) elif v == 'Sapphire' or v == 'Ruby': if g == 'Female': return (165, 105, 82)#Placeholder, may not be accurate else: return (252, 248, 253)#Placeholder, may not be accurate elif v == 'Perla': return (206, 170, 99)#Same color for both characters, oddly enough
mit
yaegashi/ansible-modules-extras
cloud/cloudstack/cs_loadbalancer_rule_member.py
44
10054
#!/usr/bin/python # -*- coding: utf-8 -*- # # (c) 2015, Darren Worrall <darren@iweb.co.uk> # (c) 2015, René Moser <mail@renemoser.net> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. DOCUMENTATION = ''' --- module: cs_loadbalancer_rule_member short_description: Manages load balancer rule members on Apache CloudStack based clouds. description: - Add and remove load balancer rule members. version_added: '2.0' author: - "Darren Worrall (@dazworrall)" - "René Moser (@resmo)" options: name: description: - The name of the load balancer rule. required: true ip_address: description: - Public IP address from where the network traffic will be load balanced from. - Only needed to find the rule if C(name) is not unique. required: false default: null aliases: [ 'public_ip' ] vms: description: - List of VMs to assign to or remove from the rule. required: true type: list aliases: [ 'vm' ] state: description: - Should the VMs be present or absent from the rule. required: false default: 'present' choices: [ 'present', 'absent' ] project: description: - Name of the project the firewall rule is related to. required: false default: null domain: description: - Domain the rule is related to. required: false default: null account: description: - Account the rule is related to. required: false default: null zone: description: - Name of the zone in which the rule should be located. 
- If not set, default zone is used. required: false default: null extends_documentation_fragment: cloudstack ''' EXAMPLES = ''' # Add VMs to an exising load balancer - local_action: module: cs_loadbalancer_rule_member name: balance_http vms: - web01 - web02 # Remove a VM from an existing load balancer - local_action: module: cs_loadbalancer_rule_member name: balance_http vms: - web01 - web02 state: absent # Rolling upgrade of hosts - hosts: webservers serial: 1 pre_tasks: - name: Remove from load balancer local_action: module: cs_loadbalancer_rule_member name: balance_http vm: "{{ ansible_hostname }}" state: absent tasks: # Perform update post_tasks: - name: Add to load balancer local_action: module: cs_loadbalancer_rule_member name: balance_http vm: "{{ ansible_hostname }}" state: present ''' RETURN = ''' --- id: description: UUID of the rule. returned: success type: string sample: a6f7a5fc-43f8-11e5-a151-feff819cdc9f zone: description: Name of zone the rule is related to. returned: success type: string sample: ch-gva-2 project: description: Name of project the rule is related to. returned: success type: string sample: Production account: description: Account the rule is related to. returned: success type: string sample: example account domain: description: Domain the rule is related to. returned: success type: string sample: example domain algorithm: description: Load balancer algorithm used. returned: success type: string sample: "source" cidr: description: CIDR to forward traffic from. returned: success type: string sample: "" name: description: Name of the rule. returned: success type: string sample: "http-lb" description: description: Description of the rule. returned: success type: string sample: "http load balancer rule" protocol: description: Protocol of the rule. returned: success type: string sample: "tcp" public_port: description: Public port. returned: success type: string sample: 80 private_port: description: Private IP address. 
returned: success type: string sample: 80 public_ip: description: Public IP address. returned: success type: string sample: "1.2.3.4" vms: description: Rule members. returned: success type: list sample: '[ "web01", "web02" ]' tags: description: List of resource tags associated with the rule. returned: success type: dict sample: '[ { "key": "foo", "value": "bar" } ]' state: description: State of the rule. returned: success type: string sample: "Add" ''' # import cloudstack common from ansible.module_utils.cloudstack import * class AnsibleCloudStackLBRuleMember(AnsibleCloudStack): def __init__(self, module): super(AnsibleCloudStackLBRuleMember, self).__init__(module) self.returns = { 'publicip': 'public_ip', 'algorithm': 'algorithm', 'cidrlist': 'cidr', 'protocol': 'protocol', } # these values will be casted to int self.returns_to_int = { 'publicport': 'public_port', 'privateport': 'private_port', } def get_rule(self): args = self._get_common_args() args['name'] = self.module.params.get('name') args['zoneid'] = self.get_zone(key='id') if self.module.params.get('ip_address'): args['publicipid'] = self.get_ip_address(key='id') rules = self.cs.listLoadBalancerRules(**args) if rules: if len(rules['loadbalancerrule']) > 1: self.module.fail_json(msg="More than one rule having name %s. Please pass 'ip_address' as well." 
% args['name']) return rules['loadbalancerrule'][0] return None def _get_common_args(self): return { 'account': self.get_account(key='name'), 'domainid': self.get_domain(key='id'), 'projectid': self.get_project(key='id'), } def _get_members_of_rule(self, rule): res = self.cs.listLoadBalancerRuleInstances(id=rule['id']) if 'errortext' in res: self.module.fail_json(msg="Failed: '%s'" % res['errortext']) return res.get('loadbalancerruleinstance', []) def _ensure_members(self, operation): if operation not in ['add', 'remove']: self.module.fail_json(msg="Bad operation: %s" % operation) rule = self.get_rule() if not rule: self.module.fail_json(msg="Unknown rule: %s" % self.module.params.get('name')) existing = {} for vm in self._get_members_of_rule(rule=rule): existing[vm['name']] = vm['id'] wanted_names = self.module.params.get('vms') if operation =='add': cs_func = self.cs.assignToLoadBalancerRule to_change = set(wanted_names) - set(existing.keys()) else: cs_func = self.cs.removeFromLoadBalancerRule to_change = set(wanted_names) & set(existing.keys()) if not to_change: return rule args = self._get_common_args() vms = self.cs.listVirtualMachines(**args) to_change_ids = [] for name in to_change: for vm in vms.get('virtualmachine', []): if vm['name'] == name: to_change_ids.append(vm['id']) break else: self.module.fail_json(msg="Unknown VM: %s" % name) if to_change_ids: self.result['changed'] = True if to_change_ids and not self.module.check_mode: res = cs_func( id = rule['id'], virtualmachineids = to_change_ids, ) if 'errortext' in res: self.module.fail_json(msg="Failed: '%s'" % res['errortext']) poll_async = self.module.params.get('poll_async') if poll_async: self.poll_job(res) rule = self.get_rule() return rule def add_members(self): return self._ensure_members('add') def remove_members(self): return self._ensure_members('remove') def get_result(self, rule): super(AnsibleCloudStackLBRuleMember, self).get_result(rule) if rule: self.result['vms'] = [] for vm in 
self._get_members_of_rule(rule=rule): self.result['vms'].append(vm['name']) return self.result def main(): argument_spec = cs_argument_spec() argument_spec.update(dict( name = dict(required=True), ip_address = dict(default=None, aliases=['public_ip']), vms = dict(required=True, aliases=['vm'], type='list'), state = dict(choices=['present', 'absent'], default='present'), zone = dict(default=None), domain = dict(default=None), project = dict(default=None), account = dict(default=None), poll_async = dict(type='bool', default=True), )) module = AnsibleModule( argument_spec=argument_spec, required_together=cs_required_together(), supports_check_mode=True ) try: acs_lb_rule_member = AnsibleCloudStackLBRuleMember(module) state = module.params.get('state') if state in ['absent']: rule = acs_lb_rule_member.remove_members() else: rule = acs_lb_rule_member.add_members() result = acs_lb_rule_member.get_result(rule) except CloudStackException as e: module.fail_json(msg='CloudStackException: %s' % str(e)) module.exit_json(**result) # import module snippets from ansible.module_utils.basic import * if __name__ == '__main__': main()
gpl-3.0
ahmedsalman/threadedcomments
threadedcomments/management/commands/migratecomments.py
17
2145
from django.core.management.base import BaseCommand from django.contrib.comments.models import Comment, FreeComment from threadedcomments.models import ThreadedComment, FreeThreadedComment class Command(BaseCommand): help = "Migrates Django's built-in django.contrib.comments data to threadedcomments data" output_transaction = True def handle(self, *args, **options): """ Converts all legacy ``Comment`` and ``FreeComment`` objects into ``ThreadedComment`` and ``FreeThreadedComment`` objects, respectively. """ self.handle_free_comments() self.handle_comments() def handle_free_comments(self): """ Converts all legacy ``FreeComment`` objects into ``FreeThreadedComment`` objects. """ comments = FreeComment.objects.all() for c in comments: new = FreeThreadedComment( content_type = c.content_type, object_id = c.object_id, comment = c.comment, name = c.person_name, website = '', email = '', date_submitted = c.submit_date, date_modified = c.submit_date, date_approved = c.submit_date, is_public = c.is_public, ip_address = c.ip_address, is_approved = c.approved ) new.save() def handle_comments(self): """ Converts all legacy ``Comment`` objects into ``ThreadedComment`` objects. """ comments = Comment.objects.all() for c in comments: new = ThreadedComment( content_type = c.content_type, object_id = c.object_id, comment = c.comment, user = c.user, date_submitted = c.submit_date, date_modified = c.submit_date, date_approved = c.submit_date, is_public = c.is_public, ip_address = c.ip_address, is_approved = not c.is_removed ) new.save()
bsd-3-clause
mchristopher/PokemonGo-DesktopMap
app/pylibs/win32/Cryptodome/SelfTest/Hash/test_SHAKE.py
2
4960
# =================================================================== # # Copyright (c) 2015, Legrandin <helderijs@gmail.com> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# =================================================================== """Self-test suite for Cryptodome.Hash.SHAKE128 and SHAKE256""" import unittest from binascii import hexlify, unhexlify from Cryptodome.SelfTest.loader import load_tests from Cryptodome.SelfTest.st_common import list_test_cases from StringIO import StringIO from Cryptodome.Hash import SHAKE128, SHAKE256 from Cryptodome.Util.py3compat import b, bchr, bord, tobytes class SHAKETest(unittest.TestCase): def test_new_positive(self): xof1 = self.shake.new() xof2 = self.shake.new(data=b("90")) xof3 = self.shake.new().update(b("90")) self.assertNotEqual(xof1.read(10), xof2.read(10)) xof3.read(10) self.assertEqual(xof2.read(10), xof3.read(10)) def test_update(self): pieces = [bchr(10) * 200, bchr(20) * 300] h = self.shake.new() h.update(pieces[0]).update(pieces[1]) digest = h.read(10) h = self.shake.new() h.update(pieces[0] + pieces[1]) self.assertEqual(h.read(10), digest) def test_update_negative(self): h = self.shake.new() self.assertRaises(TypeError, h.update, u"string") def test_digest(self): h = self.shake.new() digest = h.read(90) # read returns a byte string of the right length self.failUnless(isinstance(digest, type(b("digest")))) self.assertEqual(len(digest), 90) def test_update_after_read(self): mac = self.shake.new() mac.update(b("rrrr")) mac.read(90) self.assertRaises(TypeError, mac.update, b("ttt")) class SHAKE128Test(SHAKETest): shake = SHAKE128 class SHAKE256Test(SHAKETest): shake = SHAKE256 class SHAKEVectors(unittest.TestCase): pass test_vectors_128 = load_tests(("Cryptodome", "SelfTest", "Hash", "test_vectors", "SHA3"), "ShortMsgKAT_SHAKE128.txt", "Short Messages KAT SHAKE128", { "len" : lambda x: int(x) } ) for idx, tv in enumerate(test_vectors_128): if tv.len == 0: data = b("") else: data = tobytes(tv.msg) def new_test(self, data=data, result=tv.md): hobj = SHAKE128.new(data=data) digest = hobj.read(len(result)) self.assertEqual(digest, result) setattr(SHAKEVectors, "test_128_%d" % idx, 
new_test) test_vectors_256 = load_tests(("Cryptodome", "SelfTest", "Hash", "test_vectors", "SHA3"), "ShortMsgKAT_SHAKE256.txt", "Short Messages KAT SHAKE256", { "len" : lambda x: int(x) } ) for idx, tv in enumerate(test_vectors_256): if tv.len == 0: data = b("") else: data = tobytes(tv.msg) def new_test(self, data=data, result=tv.md): hobj = SHAKE256.new(data=data) digest = hobj.read(len(result)) self.assertEqual(digest, result) setattr(SHAKEVectors, "test_256_%d" % idx, new_test) def get_tests(config={}): tests = [] tests += list_test_cases(SHAKE128Test) tests += list_test_cases(SHAKE256Test) tests += list_test_cases(SHAKEVectors) return tests if __name__ == '__main__': import unittest suite = lambda: unittest.TestSuite(get_tests()) unittest.main(defaultTest='suite')
mit
aclifton/cpeg853-gem5
src/arch/x86/isa/insts/general_purpose/data_transfer/move.py
40
9122
# Copyright (c) 2007-2008 The Hewlett-Packard Development Company # All rights reserved. # # The license below extends only to copyright in the software and shall # not be construed as granting a license to any other intellectual # property including but not limited to intellectual property relating # to a hardware implementation of the functionality of the software # licensed hereunder. You may use the software subject to the license # terms below provided that you ensure that this notice is replicated # unmodified and in its entirety in all distributions of the software, # modified or unmodified, in source code or in binary form. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Gabe Black microcode = ''' # # Regular moves # def macroop MOV_R_MI { limm t1, imm, dataSize=asz ld reg, seg, [1, t0, t1] }; def macroop MOV_MI_R { limm t1, imm, dataSize=asz st reg, seg, [1, t0, t1] }; def macroop MOV_R_R { mov reg, reg, regm }; def macroop MOV_M_R { st reg, seg, sib, disp }; def macroop MOV_P_R { rdip t7 st reg, seg, riprel, disp }; def macroop MOV_R_M { ld reg, seg, sib, disp }; def macroop MOV_R_P { rdip t7 ld reg, seg, riprel, disp }; def macroop MOV_R_I { limm reg, imm }; def macroop MOV_M_I { limm t1, imm st t1, seg, sib, disp }; def macroop MOV_P_I { rdip t7 limm t1, imm st t1, seg, riprel, disp }; # # Sign extending moves # def macroop MOVSXD_R_R { sexti reg, regm, 31 }; def macroop MOVSXD_R_M { ld t1, seg, sib, disp, dataSize=4 sexti reg, t1, 31 }; def macroop MOVSXD_R_P { rdip t7 ld t1, seg, riprel, disp, dataSize=4 sexti reg, t1, 31 }; def macroop MOVSX_B_R_R { mov t1, t1, regm, dataSize=1 sexti reg, t1, 7 }; def macroop MOVSX_B_R_M { ld t1, seg, sib, disp, dataSize=1 sexti reg, t1, 7 }; def macroop MOVSX_B_R_P { rdip t7 ld t1, seg, riprel, disp, dataSize=1 sexti reg, t1, 7 }; def macroop MOVSX_W_R_R { sexti reg, regm, 15 }; def macroop MOVSX_W_R_M { ld reg, seg, sib, disp, dataSize=2 sexti reg, reg, 15 }; def macroop MOVSX_W_R_P { rdip t7 ld reg, seg, riprel, disp, dataSize=2 sexti reg, reg, 15 }; # # Zero extending moves # def macroop MOVZX_B_R_R { mov t1, t1, regm, dataSize=1 zexti reg, t1, 7 }; def 
macroop MOVZX_B_R_M { ld t1, seg, sib, disp, dataSize=1 zexti reg, t1, 7 }; def macroop MOVZX_B_R_P { rdip t7 ld t1, seg, riprel, disp, dataSize=1 zexti reg, t1, 7 }; def macroop MOVZX_W_R_R { zexti reg, regm, 15 }; def macroop MOVZX_W_R_M { ld t1, seg, sib, disp, dataSize=2 zexti reg, t1, 15 }; def macroop MOVZX_W_R_P { rdip t7 ld t1, seg, riprel, disp, dataSize=2 zexti reg, t1, 15 }; def macroop MOV_C_R { .serializing .adjust_env maxOsz wrcr reg, regm }; def macroop MOV_R_C { .serializing .adjust_env maxOsz rdcr reg, regm }; def macroop MOV_D_R { .serializing .adjust_env maxOsz wrdr reg, regm }; def macroop MOV_R_D { .adjust_env maxOsz rddr reg, regm }; def macroop MOV_R_S { rdsel reg, regm }; def macroop MOV_M_S { rdsel t1, reg st t1, seg, sib, disp, dataSize=2 }; def macroop MOV_P_S { rdip t7 rdsel t1, reg st t1, seg, riprel, disp, dataSize=2 }; def macroop MOV_REAL_S_R { zexti t2, regm, 15, dataSize=8 slli t3, t2, 4, dataSize=8 wrsel reg, regm wrbase reg, t3, dataSize=8 }; def macroop MOV_REAL_S_M { ld t1, seg, sib, disp, dataSize=2 zexti t2, t1, 15, dataSize=8 slli t3, t2, 4, dataSize=8 wrsel reg, t1 wrbase reg, t3, dataSize=8 }; def macroop MOV_REAL_S_P { panic "RIP relative addressing shouldn't happen in real mode" }; def macroop MOV_S_R { andi t0, regm, 0xFC, flags=(EZF,), dataSize=2 br label("processDescriptor"), flags=(CEZF,) andi t2, regm, 0xF8, dataSize=8 andi t0, regm, 0x4, flags=(EZF,), dataSize=2 br label("globalDescriptor"), flags=(CEZF,) ld t3, tsl, [1, t0, t2], dataSize=8, addressSize=8 br label("processDescriptor") globalDescriptor: ld t3, tsg, [1, t0, t2], dataSize=8, addressSize=8 processDescriptor: chks regm, t3, dataSize=8 wrdl reg, t3, regm wrsel reg, regm }; def macroop MOV_S_M { ld t1, seg, sib, disp, dataSize=2 andi t0, t1, 0xFC, flags=(EZF,), dataSize=2 br label("processDescriptor"), flags=(CEZF,) andi t2, t1, 0xF8, dataSize=8 andi t0, t1, 0x4, flags=(EZF,), dataSize=2 br label("globalDescriptor"), flags=(CEZF,) ld t3, tsl, [1, t0, t2], 
dataSize=8, addressSize=8 br label("processDescriptor") globalDescriptor: ld t3, tsg, [1, t0, t2], dataSize=8, addressSize=8 processDescriptor: chks t1, t3, dataSize=8 wrdl reg, t3, t1 wrsel reg, t1 }; def macroop MOV_S_P { rdip t7 ld t1, seg, riprel, disp, dataSize=2 andi t0, t1, 0xFC, flags=(EZF,), dataSize=2 br label("processDescriptor"), flags=(CEZF,) andi t2, t1, 0xF8, dataSize=8 andi t0, t1, 0x4, flags=(EZF,), dataSize=2 br label("globalDescriptor"), flags=(CEZF,) ld t3, tsl, [1, t0, t2], dataSize=8, addressSize=8 br label("processDescriptor") globalDescriptor: ld t3, tsg, [1, t0, t2], dataSize=8, addressSize=8 processDescriptor: chks t1, t3, dataSize=8 wrdl reg, t3, t1 wrsel reg, t1 }; def macroop MOVSS_S_R { andi t0, regm, 0xFC, flags=(EZF,), dataSize=2 br label("processDescriptor"), flags=(CEZF,) andi t2, regm, 0xF8, dataSize=8 andi t0, regm, 0x4, flags=(EZF,), dataSize=2 br label("globalDescriptor"), flags=(CEZF,) ld t3, tsl, [1, t0, t2], dataSize=8, addressSize=8 br label("processDescriptor") globalDescriptor: ld t3, tsg, [1, t0, t2], dataSize=8, addressSize=8 processDescriptor: chks regm, t3, SSCheck, dataSize=8 wrdl reg, t3, regm wrsel reg, regm }; def macroop MOVSS_S_M { ld t1, seg, sib, disp, dataSize=2 andi t0, t1, 0xFC, flags=(EZF,), dataSize=2 br label("processDescriptor"), flags=(CEZF,) andi t2, t1, 0xF8, dataSize=8 andi t0, t1, 0x4, flags=(EZF,), dataSize=2 br label("globalDescriptor"), flags=(CEZF,) ld t3, tsl, [1, t0, t2], dataSize=8, addressSize=8 br label("processDescriptor") globalDescriptor: ld t3, tsg, [1, t0, t2], dataSize=8, addressSize=8 processDescriptor: chks t1, t3, SSCheck, dataSize=8 wrdl reg, t3, t1 wrsel reg, t1 }; def macroop MOVSS_S_P { rdip t7 ld t1, seg, riprel, disp, dataSize=2 andi t0, t1, 0xFC, flags=(EZF,), dataSize=2 br label("processDescriptor"), flags=(CEZF,) andi t2, t1, 0xF8, dataSize=8 andi t0, t1, 0x4, flags=(EZF,), dataSize=2 br label("globalDescriptor"), flags=(CEZF,) ld t3, tsl, [1, t0, t2], dataSize=8, 
addressSize=8 br label("processDescriptor") globalDescriptor: ld t3, tsg, [1, t0, t2], dataSize=8, addressSize=8 processDescriptor: chks t1, t3, SSCheck, dataSize=8 wrdl reg, t3, t1 wrsel reg, t1 }; def macroop MOVNTI_M_R { st reg, seg, sib, disp }; def macroop MOVNTI_P_R { rdip t7 st reg, seg, riprel, disp }; def macroop MOVD_XMM_R { mov2fp xmml, regm, srcSize=dsz, destSize=8 lfpimm xmmh, 0 }; def macroop MOVD_XMM_M { ldfp xmml, seg, sib, disp, dataSize=dsz lfpimm xmmh, 0 }; def macroop MOVD_XMM_P { rdip t7 ldfp xmml, seg, riprel, disp, dataSize=dsz lfpimm xmmh, 0 }; def macroop MOVD_R_XMM { mov2int reg, xmmlm, size=dsz }; def macroop MOVD_M_XMM { stfp xmml, seg, sib, disp, dataSize=dsz }; def macroop MOVD_P_XMM { rdip t7 stfp xmml, seg, riprel, disp, dataSize=dsz }; ''' #let {{ # class MOVD(Inst): # "GenFault ${new UnimpInstFault}" #}};
bsd-3-clause
linked67/p2pool-phicoin
p2pool/test/test_data.py
276
1605
import random import unittest from p2pool import data from p2pool.bitcoin import data as bitcoin_data from p2pool.test.util import test_forest from p2pool.util import forest def random_bytes(length): return ''.join(chr(random.randrange(2**8)) for i in xrange(length)) class Test(unittest.TestCase): def test_hashlink1(self): for i in xrange(100): d = random_bytes(random.randrange(2048)) x = data.prefix_to_hash_link(d) assert data.check_hash_link(x, '') == bitcoin_data.hash256(d) def test_hashlink2(self): for i in xrange(100): d = random_bytes(random.randrange(2048)) d2 = random_bytes(random.randrange(2048)) x = data.prefix_to_hash_link(d) assert data.check_hash_link(x, d2) == bitcoin_data.hash256(d + d2) def test_hashlink3(self): for i in xrange(100): d = random_bytes(random.randrange(2048)) d2 = random_bytes(random.randrange(200)) d3 = random_bytes(random.randrange(2048)) x = data.prefix_to_hash_link(d + d2, d2) assert data.check_hash_link(x, d3, d2) == bitcoin_data.hash256(d + d2 + d3) def test_skiplist(self): t = forest.Tracker() d = data.WeightsSkipList(t) for i in xrange(200): t.add(test_forest.FakeShare(hash=i, previous_hash=i - 1 if i > 0 else None, new_script=i, share_data=dict(donation=1234), target=2**249)) for i in xrange(200): a = random.randrange(200) d(a, random.randrange(a + 1), 1000000*65535)[1]
gpl-3.0
mengxn/tensorflow
tensorflow/contrib/learn/python/learn/tests/dataframe/sparsify_densify_test.py
62
3824
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for learn.dataframe.transforms.sparsify and densify.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.contrib.learn.python.learn.dataframe.transforms import densify from tensorflow.contrib.learn.python.learn.dataframe.transforms import in_memory_source from tensorflow.contrib.learn.python.learn.dataframe.transforms import sparsify from tensorflow.python.platform import test from tensorflow.python.training import coordinator from tensorflow.python.training import queue_runner_impl def _test_sparsify_densify(self, x, default_value): """Test roundtrip via Sparsify and Densify.""" numpy_source = in_memory_source.NumpySource(x, batch_size=len(x))() (sparse_series,) = sparsify.Sparsify(default_value)(numpy_source[1]) (dense_series,) = densify.Densify(default_value)(sparse_series) cache = {} sparse_tensor = sparse_series.build(cache) dense_tensor = dense_series.build(cache) with self.test_session() as sess: coord = coordinator.Coordinator() threads = queue_runner_impl.start_queue_runners(sess=sess, coord=coord) sparse_val, dense_val = sess.run([sparse_tensor, dense_tensor]) coord.request_stop() coord.join(threads) if x.dtype.kind not in ["S", "U"] and np.isnan(default_value): x_values = 
x[~np.isnan(x)] x_indexes = np.arange(len(x))[~np.isnan(x)].T.reshape(-1, 1) else: x_values = x[x != default_value] x_indexes = np.arange(len(x))[x != default_value].T.reshape(-1, 1) if x.dtype.kind in ["S", "U"]: # Python 2/3 compatibility # TensorFlow always returns bytes, so we just convert the unicode # expectations to bytes also before comparing. expected_x = [item.encode("utf-8") for item in x] expected_x_values = [item.encode("utf-8") for item in x_values] else: expected_x = x expected_x_values = x_values np.testing.assert_array_equal(len(x), sparse_val.dense_shape[0]) np.testing.assert_array_equal(expected_x_values, sparse_val.values) np.testing.assert_array_equal(x_indexes, sparse_val.indices) np.testing.assert_array_equal(expected_x, dense_val) class SparsifyDensifyTestCase(test.TestCase): """Test class for Sparsify and Densify transforms.""" def testSparsifyDensifyIntNan(self): x = np.array([0, np.nan, 2, 4, np.nan]) default_value = np.nan _test_sparsify_densify(self, x, default_value) def testSparsifyDensifyIntZero(self): x = np.array([0, 0, 2, 4, 0]) default_value = 0 _test_sparsify_densify(self, x, default_value) def testSparsifyDensifyFloatNan(self): x = np.array([0.0, np.nan, 2.1, 4.1, np.nan]) default_value = np.nan _test_sparsify_densify(self, x, default_value) def testSparsifyDensifyFloatZero(self): x = np.array([0.0, 0.0, 2, 4, 0.0]) default_value = 0.0 _test_sparsify_densify(self, x, default_value) def testSparsifyDensifyStringEmpty(self): x = np.array(["zero", "", "two", "four", ""]) default_value = "" _test_sparsify_densify(self, x, default_value) if __name__ == "__main__": test.main()
apache-2.0
jasonwzhy/django
tests/model_inheritance/models.py
227
4810
""" XX. Model inheritance Model inheritance exists in two varieties: - abstract base classes which are a way of specifying common information inherited by the subclasses. They don't exist as a separate model. - non-abstract base classes (the default), which are models in their own right with their own database tables and everything. Their subclasses have references back to them, created automatically. Both styles are demonstrated here. """ from __future__ import unicode_literals from django.db import models from django.utils.encoding import python_2_unicode_compatible # # Abstract base classes # @python_2_unicode_compatible class CommonInfo(models.Model): name = models.CharField(max_length=50) age = models.PositiveIntegerField() class Meta: abstract = True ordering = ['name'] def __str__(self): return '%s %s' % (self.__class__.__name__, self.name) class Worker(CommonInfo): job = models.CharField(max_length=50) class Student(CommonInfo): school_class = models.CharField(max_length=10) class Meta: pass # # Abstract base classes with related models # class Post(models.Model): title = models.CharField(max_length=50) @python_2_unicode_compatible class Attachment(models.Model): post = models.ForeignKey(Post, models.CASCADE, related_name='attached_%(class)s_set') content = models.TextField() class Meta: abstract = True def __str__(self): return self.content class Comment(Attachment): is_spam = models.BooleanField(default=False) class Link(Attachment): url = models.URLField() # # Multi-table inheritance # @python_2_unicode_compatible class Chef(models.Model): name = models.CharField(max_length=50) def __str__(self): return "%s the chef" % self.name @python_2_unicode_compatible class Place(models.Model): name = models.CharField(max_length=50) address = models.CharField(max_length=80) def __str__(self): return "%s the place" % self.name class Rating(models.Model): rating = models.IntegerField(null=True, blank=True) class Meta: abstract = True ordering = ['-rating'] 
@python_2_unicode_compatible class Restaurant(Place, Rating): serves_hot_dogs = models.BooleanField(default=False) serves_pizza = models.BooleanField(default=False) chef = models.ForeignKey(Chef, models.SET_NULL, null=True, blank=True) class Meta(Rating.Meta): db_table = 'my_restaurant' def __str__(self): return "%s the restaurant" % self.name @python_2_unicode_compatible class ItalianRestaurant(Restaurant): serves_gnocchi = models.BooleanField(default=False) def __str__(self): return "%s the italian restaurant" % self.name @python_2_unicode_compatible class Supplier(Place): customers = models.ManyToManyField(Restaurant, related_name='provider') def __str__(self): return "%s the supplier" % self.name @python_2_unicode_compatible class ParkingLot(Place): # An explicit link to the parent (we can control the attribute name). parent = models.OneToOneField(Place, models.CASCADE, primary_key=True, parent_link=True) main_site = models.ForeignKey(Place, models.CASCADE, related_name='lot') def __str__(self): return "%s the parking lot" % self.name # # Abstract base classes with related models where the sub-class has the # same name in a different app and inherits from the same abstract base # class. 
# NOTE: The actual API tests for the following classes are in # model_inheritance_same_model_name/models.py - They are defined # here in order to have the name conflict between apps # class Title(models.Model): title = models.CharField(max_length=50) class NamedURL(models.Model): title = models.ForeignKey(Title, models.CASCADE, related_name='attached_%(app_label)s_%(class)s_set') url = models.URLField() class Meta: abstract = True @python_2_unicode_compatible class Copy(NamedURL): content = models.TextField() def __str__(self): return self.content class Mixin(object): def __init__(self): self.other_attr = 1 super(Mixin, self).__init__() class MixinModel(models.Model, Mixin): pass class Base(models.Model): titles = models.ManyToManyField(Title) class SubBase(Base): sub_id = models.IntegerField(primary_key=True) class GrandParent(models.Model): first_name = models.CharField(max_length=80) last_name = models.CharField(max_length=80) email = models.EmailField(unique=True) class Meta: unique_together = ('first_name', 'last_name') class Parent(GrandParent): pass class Child(Parent): pass class GrandChild(Child): pass
bsd-3-clause
ESSS/numpy
numpy/distutils/unixccompiler.py
155
4656
""" unixccompiler - can handle very long argument lists for ar. """ from __future__ import division, absolute_import, print_function import os from distutils.errors import DistutilsExecError, CompileError from distutils.unixccompiler import * from numpy.distutils.ccompiler import replace_method from numpy.distutils.compat import get_exception if sys.version_info[0] < 3: from . import log else: from numpy.distutils import log # Note that UnixCCompiler._compile appeared in Python 2.3 def UnixCCompiler__compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): """Compile a single source files with a Unix-style compiler.""" # HP ad-hoc fix, see ticket 1383 ccomp = self.compiler_so if ccomp[0] == 'aCC': # remove flags that will trigger ANSI-C mode for aCC if '-Ae' in ccomp: ccomp.remove('-Ae') if '-Aa' in ccomp: ccomp.remove('-Aa') # add flags for (almost) sane C++ handling ccomp += ['-AA'] self.compiler_so = ccomp # ensure OPT environment variable is read if 'OPT' in os.environ: from distutils.sysconfig import get_config_vars opt = " ".join(os.environ['OPT'].split()) gcv_opt = " ".join(get_config_vars('OPT')[0].split()) ccomp_s = " ".join(self.compiler_so) if opt not in ccomp_s: ccomp_s = ccomp_s.replace(gcv_opt, opt) self.compiler_so = ccomp_s.split() llink_s = " ".join(self.linker_so) if opt not in llink_s: self.linker_so = llink_s.split() + opt.split() display = '%s: %s' % (os.path.basename(self.compiler_so[0]), src) try: self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + extra_postargs, display = display) except DistutilsExecError: msg = str(get_exception()) raise CompileError(msg) replace_method(UnixCCompiler, '_compile', UnixCCompiler__compile) def UnixCCompiler_create_static_lib(self, objects, output_libname, output_dir=None, debug=0, target_lang=None): """ Build a static library in a separate sub-process. Parameters ---------- objects : list or tuple of str List of paths to object files used to build the static library. 
output_libname : str The library name as an absolute or relative (if `output_dir` is used) path. output_dir : str, optional The path to the output directory. Default is None, in which case the ``output_dir`` attribute of the UnixCCompiler instance. debug : bool, optional This parameter is not used. target_lang : str, optional This parameter is not used. Returns ------- None """ objects, output_dir = self._fix_object_args(objects, output_dir) output_filename = \ self.library_filename(output_libname, output_dir=output_dir) if self._need_link(objects, output_filename): try: # previous .a may be screwed up; best to remove it first # and recreate. # Also, ar on OS X doesn't handle updating universal archives os.unlink(output_filename) except (IOError, OSError): pass self.mkpath(os.path.dirname(output_filename)) tmp_objects = objects + self.objects while tmp_objects: objects = tmp_objects[:50] tmp_objects = tmp_objects[50:] display = '%s: adding %d object files to %s' % ( os.path.basename(self.archiver[0]), len(objects), output_filename) self.spawn(self.archiver + [output_filename] + objects, display = display) # Not many Unices required ranlib anymore -- SunOS 4.x is, I # think the only major Unix that does. Maybe we need some # platform intelligence here to skip ranlib if it's not # needed -- or maybe Python's configure script took care of # it for us, hence the check for leading colon. if self.ranlib: display = '%s:@ %s' % (os.path.basename(self.ranlib[0]), output_filename) try: self.spawn(self.ranlib + [output_filename], display = display) except DistutilsExecError: msg = str(get_exception()) raise LibError(msg) else: log.debug("skipping %s (up-to-date)", output_filename) return replace_method(UnixCCompiler, 'create_static_lib', UnixCCompiler_create_static_lib)
bsd-3-clause
mavit/ansible
lib/ansible/modules/files/fetch.py
22
3596
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2017, Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # This is a virtual module that is entirely implemented as an action plugin and runs on the controller from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['stableinterface'], 'supported_by': 'core'} DOCUMENTATION = r''' --- module: fetch short_description: Fetch files from remote nodes description: - This module works like M(copy), but in reverse. - It is used for fetching files from remote machines and storing them locally in a file tree, organized by hostname. - This module is also supported for Windows targets. version_added: '0.2' options: src: description: - The file on the remote system to fetch. - This I(must) be a file, not a directory. - Recursive fetching may be supported in a later release. required: yes dest: description: - A directory to save the file into. - For example, if the I(dest) directory is C(/backup) a I(src) file named C(/etc/profile) on host C(host.example.com), would be saved into C(/backup/host.example.com/etc/profile). required: yes fail_on_missing: version_added: '1.1' description: - When set to C(yes), the task will fail if the remote file cannot be read for any reason. - Prior to Ansible 2.5, setting this would only fail if the source file was missing. - The default was changed to C(yes) in Ansible 2.5. type: bool default: yes validate_checksum: version_added: '1.4' description: - Verify that the source and destination checksums match after the files are fetched. type: bool default: yes flat: version_added: '1.2' description: - Allows you to override the default behavior of appending hostname/path/to/file to the destination. - If C(dest) ends with '/', it will use the basename of the source file, similar to the copy module. 
- Obviously this is only handy if the filenames are unique. type: bool default: no author: - Ansible Core Team - Michael DeHaan notes: - When running fetch with C(become), the M(slurp) module will also be used to fetch the contents of the file for determining the remote checksum. This effectively doubles the transfer size, and depending on the file size can consume all available memory on the remote or local hosts causing a C(MemoryError). Due to this it is advisable to run this module without C(become) whenever possible. - Prior to Ansible 2.5 this module would not fail if reading the remote file was impossible unless C(fail_on_missing) was set. - In Ansible 2.5 or later, playbook authors are encouraged to use C(fail_when) or C(ignore_errors) to get this ability. They may also explicitly set C(fail_on_missing) to C(no) to get the non-failing behaviour. - This module is also supported for Windows targets. ''' EXAMPLES = r''' - name: Store file into /tmp/fetched/host.example.com/tmp/somefile fetch: src: /tmp/somefile dest: /tmp/fetched - name: Specifying a path directly fetch: src: /tmp/somefile dest: /tmp/prefix-{{ inventory_hostname }} flat: yes - name: Specifying a destination path fetch: src: /tmp/uniquefile dest: /tmp/special/ flat: yes - name: Storing in a path relative to the playbook fetch: src: /tmp/uniquefile dest: special/prefix-{{ inventory_hostname }} flat: yes '''
gpl-3.0
hoosteeno/mozillians
vendor-local/lib/python/tablib/packages/odf/text.py
91
17212
# -*- coding: utf-8 -*- # Copyright (C) 2006-2007 Søren Roug, European Environment Agency # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # # Contributor(s): # from namespaces import TEXTNS from element import Element from style import StyleElement # Autogenerated def A(**args): return Element(qname = (TEXTNS,'a'), **args) def AlphabeticalIndex(**args): return Element(qname = (TEXTNS,'alphabetical-index'), **args) def AlphabeticalIndexAutoMarkFile(**args): return Element(qname = (TEXTNS,'alphabetical-index-auto-mark-file'), **args) def AlphabeticalIndexEntryTemplate(**args): return Element(qname = (TEXTNS,'alphabetical-index-entry-template'), **args) def AlphabeticalIndexMark(**args): return Element(qname = (TEXTNS,'alphabetical-index-mark'), **args) def AlphabeticalIndexMarkEnd(**args): return Element(qname = (TEXTNS,'alphabetical-index-mark-end'), **args) def AlphabeticalIndexMarkStart(**args): return Element(qname = (TEXTNS,'alphabetical-index-mark-start'), **args) def AlphabeticalIndexSource(**args): return Element(qname = (TEXTNS,'alphabetical-index-source'), **args) def AuthorInitials(**args): return Element(qname = (TEXTNS,'author-initials'), **args) def AuthorName(**args): return Element(qname = (TEXTNS,'author-name'), **args) def Bibliography(**args): return Element(qname = 
(TEXTNS,'bibliography'), **args) def BibliographyConfiguration(**args): return Element(qname = (TEXTNS,'bibliography-configuration'), **args) def BibliographyEntryTemplate(**args): return Element(qname = (TEXTNS,'bibliography-entry-template'), **args) def BibliographyMark(**args): return Element(qname = (TEXTNS,'bibliography-mark'), **args) def BibliographySource(**args): return Element(qname = (TEXTNS,'bibliography-source'), **args) def Bookmark(**args): return Element(qname = (TEXTNS,'bookmark'), **args) def BookmarkEnd(**args): return Element(qname = (TEXTNS,'bookmark-end'), **args) def BookmarkRef(**args): return Element(qname = (TEXTNS,'bookmark-ref'), **args) def BookmarkStart(**args): return Element(qname = (TEXTNS,'bookmark-start'), **args) def Change(**args): return Element(qname = (TEXTNS,'change'), **args) def ChangeEnd(**args): return Element(qname = (TEXTNS,'change-end'), **args) def ChangeStart(**args): return Element(qname = (TEXTNS,'change-start'), **args) def ChangedRegion(**args): return Element(qname = (TEXTNS,'changed-region'), **args) def Chapter(**args): return Element(qname = (TEXTNS,'chapter'), **args) def CharacterCount(**args): return Element(qname = (TEXTNS,'character-count'), **args) def ConditionalText(**args): return Element(qname = (TEXTNS,'conditional-text'), **args) def CreationDate(**args): return Element(qname = (TEXTNS,'creation-date'), **args) def CreationTime(**args): return Element(qname = (TEXTNS,'creation-time'), **args) def Creator(**args): return Element(qname = (TEXTNS,'creator'), **args) def DatabaseDisplay(**args): return Element(qname = (TEXTNS,'database-display'), **args) def DatabaseName(**args): return Element(qname = (TEXTNS,'database-name'), **args) def DatabaseNext(**args): return Element(qname = (TEXTNS,'database-next'), **args) def DatabaseRowNumber(**args): return Element(qname = (TEXTNS,'database-row-number'), **args) def DatabaseRowSelect(**args): return Element(qname = (TEXTNS,'database-row-select'), 
**args) def Date(**args): return Element(qname = (TEXTNS,'date'), **args) def DdeConnection(**args): return Element(qname = (TEXTNS,'dde-connection'), **args) def DdeConnectionDecl(**args): return Element(qname = (TEXTNS,'dde-connection-decl'), **args) def DdeConnectionDecls(**args): return Element(qname = (TEXTNS,'dde-connection-decls'), **args) def Deletion(**args): return Element(qname = (TEXTNS,'deletion'), **args) def Description(**args): return Element(qname = (TEXTNS,'description'), **args) def EditingCycles(**args): return Element(qname = (TEXTNS,'editing-cycles'), **args) def EditingDuration(**args): return Element(qname = (TEXTNS,'editing-duration'), **args) def ExecuteMacro(**args): return Element(qname = (TEXTNS,'execute-macro'), **args) def Expression(**args): return Element(qname = (TEXTNS,'expression'), **args) def FileName(**args): return Element(qname = (TEXTNS,'file-name'), **args) def FormatChange(**args): return Element(qname = (TEXTNS,'format-change'), **args) def H(**args): return Element(qname = (TEXTNS, 'h'), **args) def HiddenParagraph(**args): return Element(qname = (TEXTNS,'hidden-paragraph'), **args) def HiddenText(**args): return Element(qname = (TEXTNS,'hidden-text'), **args) def IllustrationIndex(**args): return Element(qname = (TEXTNS,'illustration-index'), **args) def IllustrationIndexEntryTemplate(**args): return Element(qname = (TEXTNS,'illustration-index-entry-template'), **args) def IllustrationIndexSource(**args): return Element(qname = (TEXTNS,'illustration-index-source'), **args) def ImageCount(**args): return Element(qname = (TEXTNS,'image-count'), **args) def IndexBody(**args): return Element(qname = (TEXTNS,'index-body'), **args) def IndexEntryBibliography(**args): return Element(qname = (TEXTNS,'index-entry-bibliography'), **args) def IndexEntryChapter(**args): return Element(qname = (TEXTNS,'index-entry-chapter'), **args) def IndexEntryLinkEnd(**args): return Element(qname = (TEXTNS,'index-entry-link-end'), **args) def 
IndexEntryLinkStart(**args): return Element(qname = (TEXTNS,'index-entry-link-start'), **args) def IndexEntryPageNumber(**args): return Element(qname = (TEXTNS,'index-entry-page-number'), **args) def IndexEntrySpan(**args): return Element(qname = (TEXTNS,'index-entry-span'), **args) def IndexEntryTabStop(**args): return Element(qname = (TEXTNS,'index-entry-tab-stop'), **args) def IndexEntryText(**args): return Element(qname = (TEXTNS,'index-entry-text'), **args) def IndexSourceStyle(**args): return Element(qname = (TEXTNS,'index-source-style'), **args) def IndexSourceStyles(**args): return Element(qname = (TEXTNS,'index-source-styles'), **args) def IndexTitle(**args): return Element(qname = (TEXTNS,'index-title'), **args) def IndexTitleTemplate(**args): return Element(qname = (TEXTNS,'index-title-template'), **args) def InitialCreator(**args): return Element(qname = (TEXTNS,'initial-creator'), **args) def Insertion(**args): return Element(qname = (TEXTNS,'insertion'), **args) def Keywords(**args): return Element(qname = (TEXTNS,'keywords'), **args) def LineBreak(**args): return Element(qname = (TEXTNS,'line-break'), **args) def LinenumberingConfiguration(**args): return Element(qname = (TEXTNS,'linenumbering-configuration'), **args) def LinenumberingSeparator(**args): return Element(qname = (TEXTNS,'linenumbering-separator'), **args) def List(**args): return Element(qname = (TEXTNS,'list'), **args) def ListHeader(**args): return Element(qname = (TEXTNS,'list-header'), **args) def ListItem(**args): return Element(qname = (TEXTNS,'list-item'), **args) def ListLevelStyleBullet(**args): return Element(qname = (TEXTNS,'list-level-style-bullet'), **args) def ListLevelStyleImage(**args): return Element(qname = (TEXTNS,'list-level-style-image'), **args) def ListLevelStyleNumber(**args): return Element(qname = (TEXTNS,'list-level-style-number'), **args) def ListStyle(**args): return StyleElement(qname = (TEXTNS,'list-style'), **args) def Measure(**args): return 
Element(qname = (TEXTNS,'measure'), **args) def ModificationDate(**args): return Element(qname = (TEXTNS,'modification-date'), **args) def ModificationTime(**args): return Element(qname = (TEXTNS,'modification-time'), **args) def Note(**args): return Element(qname = (TEXTNS,'note'), **args) def NoteBody(**args): return Element(qname = (TEXTNS,'note-body'), **args) def NoteCitation(**args): return Element(qname = (TEXTNS,'note-citation'), **args) def NoteContinuationNoticeBackward(**args): return Element(qname = (TEXTNS,'note-continuation-notice-backward'), **args) def NoteContinuationNoticeForward(**args): return Element(qname = (TEXTNS,'note-continuation-notice-forward'), **args) def NoteRef(**args): return Element(qname = (TEXTNS,'note-ref'), **args) def NotesConfiguration(**args): return Element(qname = (TEXTNS,'notes-configuration'), **args) def Number(**args): return Element(qname = (TEXTNS,'number'), **args) def NumberedParagraph(**args): return Element(qname = (TEXTNS,'numbered-paragraph'), **args) def ObjectCount(**args): return Element(qname = (TEXTNS,'object-count'), **args) def ObjectIndex(**args): return Element(qname = (TEXTNS,'object-index'), **args) def ObjectIndexEntryTemplate(**args): return Element(qname = (TEXTNS,'object-index-entry-template'), **args) def ObjectIndexSource(**args): return Element(qname = (TEXTNS,'object-index-source'), **args) def OutlineLevelStyle(**args): return Element(qname = (TEXTNS,'outline-level-style'), **args) def OutlineStyle(**args): return Element(qname = (TEXTNS,'outline-style'), **args) def P(**args): return Element(qname = (TEXTNS, 'p'), **args) def Page(**args): return Element(qname = (TEXTNS,'page'), **args) def PageContinuation(**args): return Element(qname = (TEXTNS,'page-continuation'), **args) def PageCount(**args): return Element(qname = (TEXTNS,'page-count'), **args) def PageNumber(**args): return Element(qname = (TEXTNS,'page-number'), **args) def PageSequence(**args): return Element(qname = 
(TEXTNS,'page-sequence'), **args) def PageVariableGet(**args): return Element(qname = (TEXTNS,'page-variable-get'), **args) def PageVariableSet(**args): return Element(qname = (TEXTNS,'page-variable-set'), **args) def ParagraphCount(**args): return Element(qname = (TEXTNS,'paragraph-count'), **args) def Placeholder(**args): return Element(qname = (TEXTNS,'placeholder'), **args) def PrintDate(**args): return Element(qname = (TEXTNS,'print-date'), **args) def PrintTime(**args): return Element(qname = (TEXTNS,'print-time'), **args) def PrintedBy(**args): return Element(qname = (TEXTNS,'printed-by'), **args) def ReferenceMark(**args): return Element(qname = (TEXTNS,'reference-mark'), **args) def ReferenceMarkEnd(**args): return Element(qname = (TEXTNS,'reference-mark-end'), **args) def ReferenceMarkStart(**args): return Element(qname = (TEXTNS,'reference-mark-start'), **args) def ReferenceRef(**args): return Element(qname = (TEXTNS,'reference-ref'), **args) def Ruby(**args): return Element(qname = (TEXTNS,'ruby'), **args) def RubyBase(**args): return Element(qname = (TEXTNS,'ruby-base'), **args) def RubyText(**args): return Element(qname = (TEXTNS,'ruby-text'), **args) def S(**args): return Element(qname = (TEXTNS,'s'), **args) def Script(**args): return Element(qname = (TEXTNS,'script'), **args) def Section(**args): return Element(qname = (TEXTNS,'section'), **args) def SectionSource(**args): return Element(qname = (TEXTNS,'section-source'), **args) def SenderCity(**args): return Element(qname = (TEXTNS,'sender-city'), **args) def SenderCompany(**args): return Element(qname = (TEXTNS,'sender-company'), **args) def SenderCountry(**args): return Element(qname = (TEXTNS,'sender-country'), **args) def SenderEmail(**args): return Element(qname = (TEXTNS,'sender-email'), **args) def SenderFax(**args): return Element(qname = (TEXTNS,'sender-fax'), **args) def SenderFirstname(**args): return Element(qname = (TEXTNS,'sender-firstname'), **args) def SenderInitials(**args): 
return Element(qname = (TEXTNS,'sender-initials'), **args) def SenderLastname(**args): return Element(qname = (TEXTNS,'sender-lastname'), **args) def SenderPhonePrivate(**args): return Element(qname = (TEXTNS,'sender-phone-private'), **args) def SenderPhoneWork(**args): return Element(qname = (TEXTNS,'sender-phone-work'), **args) def SenderPosition(**args): return Element(qname = (TEXTNS,'sender-position'), **args) def SenderPostalCode(**args): return Element(qname = (TEXTNS,'sender-postal-code'), **args) def SenderStateOrProvince(**args): return Element(qname = (TEXTNS,'sender-state-or-province'), **args) def SenderStreet(**args): return Element(qname = (TEXTNS,'sender-street'), **args) def SenderTitle(**args): return Element(qname = (TEXTNS,'sender-title'), **args) def Sequence(**args): return Element(qname = (TEXTNS,'sequence'), **args) def SequenceDecl(**args): return Element(qname = (TEXTNS,'sequence-decl'), **args) def SequenceDecls(**args): return Element(qname = (TEXTNS,'sequence-decls'), **args) def SequenceRef(**args): return Element(qname = (TEXTNS,'sequence-ref'), **args) def SheetName(**args): return Element(qname = (TEXTNS,'sheet-name'), **args) def SoftPageBreak(**args): return Element(qname = (TEXTNS,'soft-page-break'), **args) def SortKey(**args): return Element(qname = (TEXTNS,'sort-key'), **args) def Span(**args): return Element(qname = (TEXTNS,'span'), **args) def Subject(**args): return Element(qname = (TEXTNS,'subject'), **args) def Tab(**args): return Element(qname = (TEXTNS,'tab'), **args) def TableCount(**args): return Element(qname = (TEXTNS,'table-count'), **args) def TableFormula(**args): return Element(qname = (TEXTNS,'table-formula'), **args) def TableIndex(**args): return Element(qname = (TEXTNS,'table-index'), **args) def TableIndexEntryTemplate(**args): return Element(qname = (TEXTNS,'table-index-entry-template'), **args) def TableIndexSource(**args): return Element(qname = (TEXTNS,'table-index-source'), **args) def 
TableOfContent(**args): return Element(qname = (TEXTNS,'table-of-content'), **args) def TableOfContentEntryTemplate(**args): return Element(qname = (TEXTNS,'table-of-content-entry-template'), **args) def TableOfContentSource(**args): return Element(qname = (TEXTNS,'table-of-content-source'), **args) def TemplateName(**args): return Element(qname = (TEXTNS,'template-name'), **args) def TextInput(**args): return Element(qname = (TEXTNS,'text-input'), **args) def Time(**args): return Element(qname = (TEXTNS,'time'), **args) def Title(**args): return Element(qname = (TEXTNS,'title'), **args) def TocMark(**args): return Element(qname = (TEXTNS,'toc-mark'), **args) def TocMarkEnd(**args): return Element(qname = (TEXTNS,'toc-mark-end'), **args) def TocMarkStart(**args): return Element(qname = (TEXTNS,'toc-mark-start'), **args) def TrackedChanges(**args): return Element(qname = (TEXTNS,'tracked-changes'), **args) def UserDefined(**args): return Element(qname = (TEXTNS,'user-defined'), **args) def UserFieldDecl(**args): return Element(qname = (TEXTNS,'user-field-decl'), **args) def UserFieldDecls(**args): return Element(qname = (TEXTNS,'user-field-decls'), **args) def UserFieldGet(**args): return Element(qname = (TEXTNS,'user-field-get'), **args) def UserFieldInput(**args): return Element(qname = (TEXTNS,'user-field-input'), **args) def UserIndex(**args): return Element(qname = (TEXTNS,'user-index'), **args) def UserIndexEntryTemplate(**args): return Element(qname = (TEXTNS,'user-index-entry-template'), **args) def UserIndexMark(**args): return Element(qname = (TEXTNS,'user-index-mark'), **args) def UserIndexMarkEnd(**args): return Element(qname = (TEXTNS,'user-index-mark-end'), **args) def UserIndexMarkStart(**args): return Element(qname = (TEXTNS,'user-index-mark-start'), **args) def UserIndexSource(**args): return Element(qname = (TEXTNS,'user-index-source'), **args) def VariableDecl(**args): return Element(qname = (TEXTNS,'variable-decl'), **args) def 
VariableDecls(**args): return Element(qname = (TEXTNS,'variable-decls'), **args) def VariableGet(**args): return Element(qname = (TEXTNS,'variable-get'), **args) def VariableInput(**args): return Element(qname = (TEXTNS,'variable-input'), **args) def VariableSet(**args): return Element(qname = (TEXTNS,'variable-set'), **args) def WordCount(**args): return Element(qname = (TEXTNS,'word-count'), **args)
bsd-3-clause
atvcaptain/enigma2
lib/python/Components/Converter/ConfigEntryTest.py
1
1749
from __future__ import print_function
from __future__ import absolute_import

from Components.Converter.Converter import Converter
from Components.Element import cached
from Components.config import configfile


class ConfigEntryTest(Converter, object):
    """Converter that compares a config entry's value against a literal.

    Argument string layout (comma separated):
        arg[0] - the config key; must contain "config."
        arg[1] - the string the resolved config value is compared to
        arg[2], arg[3] - optional flags, each one of 'Invert',
            'CheckSourceBoolean' or 'CheckInvertSourceBoolean'
    """

    def __init__(self, argstr):
        Converter.__init__(self, argstr)
        tokens = argstr.split(',')
        self.argerror = False
        self.checkSourceBoolean = False
        self.checkInvertSourceBoolean = False
        self.invert = False
        self.configKey = None
        self.configValue = None
        if len(tokens) < 2 or "config." not in tokens[0]:
            # need at least a config key plus a compare value
            self.argerror = True
        else:
            self.configKey = tokens[0]
            self.configValue = tokens[1]
            # at most two optional flags (positions 2 and 3)
            for flag in tokens[2:4]:
                if flag == 'Invert':
                    self.invert = True
                elif flag == 'CheckSourceBoolean':
                    self.checkSourceBoolean = True
                elif flag == 'CheckInvertSourceBoolean':
                    self.checkInvertSourceBoolean = True
                else:
                    self.argerror = True
        if self.argerror:
            print("ConfigEntryTest Converter got incorrect arguments", tokens, "!!!\narg[0] must start with 'config.',\narg[1] is the compare string,\narg[2],arg[3] are optional arguments and must be 'Invert' or 'CheckSourceBoolean'")

    @cached
    def getBoolean(self):
        """Return the comparison result (possibly inverted).

        Forces True when the converter was misconfigured so the skin
        element stays visible rather than silently disappearing.
        """
        if self.argerror:
            print("ConfigEntryTest got invalid arguments", self.converter_arguments, "force True!!")
            return True
        # optionally gate the result on the source's own boolean state
        if self.checkSourceBoolean and not self.source.boolean:
            return False
        if self.checkInvertSourceBoolean and self.source.boolean:
            return False
        current = configfile.getResolvedKey(self.configKey)
        return (current == self.configValue) ^ self.invert

    boolean = property(getBoolean)
gpl-2.0
AlanZatarain/raft
ui/DiffWindow.py
11
3420
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'DiffWindow.ui'
#
# Created: Sun Oct 3 00:57:43 2010
#      by: PyQt4 UI code generator 4.7.4
#
# WARNING! All changes made in this file will be lost!

from PyQt4 import QtCore, QtGui
from PyQt4 import QtWebKit


class Ui_DiffWindow(object):
    """Generated UI: two side-by-side request trees above a web-based
    diff view, with a right-aligned Close button at the bottom."""

    def setupUi(self, DiffWindow):
        DiffWindow.setObjectName("DiffWindow")
        DiffWindow.resize(800, 600)

        # central widget holding everything in a vertical layout
        self.centralwidget = QtGui.QWidget(DiffWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.verticalLayout = QtGui.QVBoxLayout(self.centralwidget)
        self.verticalLayout.setObjectName("verticalLayout")

        # outer splitter: trees on top, diff view below
        self.splitter_2 = QtGui.QSplitter(self.centralwidget)
        self.splitter_2.setOrientation(QtCore.Qt.Vertical)
        self.splitter_2.setObjectName("splitter_2")

        # inner splitter: left/right trees side by side
        self.splitter = QtGui.QSplitter(self.splitter_2)
        self.splitter.setOrientation(QtCore.Qt.Horizontal)
        self.splitter.setObjectName("splitter")
        self.leftTree = QtGui.QTreeWidget(self.splitter)
        self.leftTree.setObjectName("leftTree")
        self.rightTree = QtGui.QTreeWidget(self.splitter)
        self.rightTree.setObjectName("rightTree")

        self.diffView = QtWebKit.QWebView(self.splitter_2)
        self.diffView.setUrl(QtCore.QUrl("about:blank"))
        self.diffView.setObjectName("diffView")
        self.verticalLayout.addWidget(self.splitter_2)

        # close-button row, pushed right by an expanding spacer
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem)
        self.diffCloseButton = QtGui.QPushButton(self.centralwidget)
        self.diffCloseButton.setObjectName("diffCloseButton")
        self.horizontalLayout.addWidget(self.diffCloseButton)
        self.verticalLayout.addLayout(self.horizontalLayout)

        DiffWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtGui.QMenuBar(DiffWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 22))
        self.menubar.setObjectName("menubar")
        DiffWindow.setMenuBar(self.menubar)
        self.statusbar = QtGui.QStatusBar(DiffWindow)
        self.statusbar.setObjectName("statusbar")
        DiffWindow.setStatusBar(self.statusbar)

        self.retranslateUi(DiffWindow)
        # old-style signal connection, as emitted by pyuic4
        QtCore.QObject.connect(self.diffCloseButton, QtCore.SIGNAL("clicked()"), DiffWindow.close)
        QtCore.QMetaObject.connectSlotsByName(DiffWindow)

    def retranslateUi(self, DiffWindow):
        # bind the translate helper once instead of repeating the lookup
        translate = QtGui.QApplication.translate
        encoding = QtGui.QApplication.UnicodeUTF8
        DiffWindow.setWindowTitle(translate("DiffWindow", "Diff Window", None, encoding))
        self.leftTree.headerItem().setText(0, translate("DiffWindow", "Id", None, encoding))
        self.leftTree.headerItem().setText(1, translate("DiffWindow", "URL", None, encoding))
        self.rightTree.headerItem().setText(0, translate("DiffWindow", "Id", None, encoding))
        self.rightTree.headerItem().setText(1, translate("DiffWindow", "URL", None, encoding))
        self.diffCloseButton.setText(translate("DiffWindow", "Close", None, encoding))
gpl-3.0
zefciu/django-extensions
tests/test_clean_pyc.py
12
2392
import fnmatch
import os
import shutil

import six
from django.core.management import call_command
from django.test import TestCase

from django_extensions.management.utils import get_project_root


class CleanPycTests(TestCase):
    """Tests for the ``clean_pyc`` management command."""

    def setUp(self):
        # remember the caller's settings module and force our own for the test
        self._settings = os.environ.get('DJANGO_SETTINGS_MODULE')
        os.environ['DJANGO_SETTINGS_MODULE'] = 'django_extensions.settings'

    def tearDown(self):
        if self._settings:
            os.environ['DJANGO_SETTINGS_MODULE'] = self._settings

    def _find_pyc(self, path):
        """Return the paths of every *.pyc file below *path*."""
        found = []
        for dirpath, _dirnames, filenames in os.walk(path):
            found.extend(
                os.path.join(dirpath, name)
                for name in fnmatch.filter(filenames, '*.pyc'))
        return found

    def test_removes_pyc_files(self):
        # compile first so there is something to clean up
        with self.settings(BASE_DIR=get_project_root()):
            call_command('compile_pyc')
        pyc_glob = self._find_pyc(get_project_root())
        self.assertTrue(len(pyc_glob) > 0)
        with self.settings(BASE_DIR=get_project_root()):
            call_command('clean_pyc')
        pyc_glob = self._find_pyc(get_project_root())
        self.assertEqual(len(pyc_glob), 0)

    def test_takes_path(self):
        out = six.StringIO()
        project_root = os.path.join('tests', 'testapp')
        call_command('compile_pyc', path=project_root)
        pyc_glob = self._find_pyc(project_root)
        self.assertTrue(len(pyc_glob) > 0)
        # verbosity=2 makes the command list every file it removes
        call_command('clean_pyc', verbosity=2, path=project_root, stdout=out)
        output = out.getvalue().splitlines()
        self.assertEqual(sorted(pyc_glob), sorted(output))

    def test_removes_pyo_files(self):
        out = six.StringIO()
        project_root = os.path.join('tests', 'testapp')
        call_command('compile_pyc', path=project_root)
        pyc_glob = self._find_pyc(project_root)
        self.assertTrue(len(pyc_glob) > 0)
        # Create some fake .pyo files since we can't force them to be created.
        pyo_glob = []
        for fn in pyc_glob:
            pyo = '%s.pyo' % os.path.splitext(fn)[0]
            shutil.copyfile(fn, pyo)
            pyo_glob.append(pyo)
        call_command('clean_pyc', verbosity=2, path=project_root, optimize=True, stdout=out)
        output = out.getvalue().splitlines()
        self.assertEqual(sorted(pyc_glob + pyo_glob), sorted(output))
mit
luismasuelli/python-server-cantrips
cantrips/task/timed.py
1
4468
from six import integer_types

from cantrips.types.exception import factory
from .features import TornadoTimerFeature, TwistedTimerFeature, ThreadedTimerFeature


class Timeout(object):
    """
    Lets a timeout be triggered and cancelled. When the timeout is
    reached (or force-stopped), the ``on_reach`` callback is invoked.

    Internal state (``__reached``):
      * ``None``  -- idle: never started, or reset after finishing.
      * ``False`` -- running: started, not yet reached or stopped.
      * ``True``  -- reached: fired naturally or via force_stop().

    Notes:
      Processors should be able to launch their own timeouts by using
      these implementations. Subclasses supply ``_set``/``_unset`` for a
      concrete framework (Tornado, Twisted, plain threads).
    """

    Error = factory(['ALREADY_RUNNING', 'STILL_RUNNING', 'NOT_RUNNING', 'COULDNT_RUN'])

    def __init__(self, seconds, on_reach):
        """
        Initializes the timeout.

        :param seconds: length in seconds; falls back to 15 when not a number.
        :param on_reach: callable invoked as ``on_reach(timeout, forced)``
            when the timeout terminates; a no-op is used when not callable.
        """
        self.__time = seconds if isinstance(seconds, integer_types + (float,)) else 15
        self.__reached = None
        # BUGFIX: the fallback used to be ``lambda o: None`` (one argument),
        # but _reach() invokes the callback with two (self, forced), so the
        # fallback would raise TypeError the first time it fired.
        self.__on_reach = on_reach if callable(on_reach) else lambda o, forced: None

    def _set(self, seconds, callback):
        """
        Creates the timeout. Must be overridden since it is framework
        dependent.
        """
        raise NotImplementedError

    def _unset(self):
        """
        Unsets the timeout. Must be overridden since it is framework
        dependent.
        """
        raise NotImplementedError

    def _reach(self, forced=False):
        """
        Terminates a running timeout: marks it reached, cancels the
        underlying timer, and fires the callback.

        :param forced: True when triggered by force_stop(), False when
            the timer fired on its own.
        """
        # BUGFIX: the guard used to be ``is not False``, which skipped the
        # body precisely while the timeout was running (start() sets the
        # state to False), so the callback could never fire. Only a
        # *running* timeout can be reached.
        if self.__reached is False:
            self.__reached = True
            self._unset()
            self.__on_reach(self, forced)

    def start(self):
        """
        Starts the timeout.

        :raises Timeout.Error: ALREADY_RUNNING when already started.
        """
        if self.__reached is False:
            # It is an error since the timeout is already running.
            raise self.Error("Timeout already running", self.Error.ALREADY_RUNNING)
        self.__reached = False
        self._set(self.__time, lambda: self._reach(False))

    def force_stop(self):
        """
        Forces a running timeout to terminate immediately.

        :raises Timeout.Error: NOT_RUNNING when not currently running.
        """
        if self.__reached is not False:
            # It is an error since the timeout is not running.
            raise self.Error("Timeout not running", self.Error.NOT_RUNNING)
        self._reach(True)

    def reset(self):
        """
        Resets the reached state to None so the timeout may be started again.

        :raises Timeout.Error: STILL_RUNNING while the timeout is running.
        """
        if self.__reached is False:
            # It is an error since the timeout is still running.
            raise self.Error("Timeout still running", self.Error.STILL_RUNNING)
        self.__reached = None


class TornadoTimeout(Timeout):
    """
    Timeouts implemented on top of a Tornado IOLoop.
    """

    def __init__(self, ioloop, seconds, on_reach):
        self.__create_timeout, self.__cancel_timeout = TornadoTimerFeature.import_it()
        self.__ioloop = ioloop
        self.__timer = None
        super(TornadoTimeout, self).__init__(seconds, on_reach)

    def _unset(self):
        # best-effort cancel: the handle may already have fired or be invalid
        try:
            self.__cancel_timeout(self.__ioloop, self.__timer)
        except Exception:
            pass

    def _set(self, seconds, callback):
        try:
            self.__timer = self.__create_timeout(self.__ioloop, seconds, callback)
        except Exception as e:
            raise self.Error("Couldn't run timer", self.Error.COULDNT_RUN, e)


class TwistedTimeout(Timeout):
    """
    Timeouts implemented on top of a Twisted reactor.
    """

    def __init__(self, reactor, seconds, on_reach):
        self.__create_timeout, self.__cancel_timeout = TwistedTimerFeature.import_it()
        self.__reactor = reactor
        self.__timer = None
        super(TwistedTimeout, self).__init__(seconds, on_reach)

    def _unset(self):
        # best-effort cancel: the delayed call may already have fired
        try:
            self.__cancel_timeout(self.__timer)
        except Exception:
            pass

    def _set(self, seconds, callback):
        try:
            self.__timer = self.__create_timeout(self.__reactor, seconds, callback)
        except Exception as e:
            raise self.Error("Couldn't run timer", self.Error.COULDNT_RUN, e)


class ThreadedTimeout(Timeout):
    """
    Timeouts implemented with plain threads.
    """

    def __init__(self, seconds, on_reach):
        self.__create_timeout, self.__cancel_timeout = ThreadedTimerFeature.import_it()
        self.__timer = None
        super(ThreadedTimeout, self).__init__(seconds, on_reach)

    def _unset(self):
        # best-effort cancel: the timer thread may already have fired
        try:
            self.__cancel_timeout(self.__timer)
        except Exception:
            pass

    def _set(self, seconds, callback):
        try:
            self.__timer = self.__create_timeout(seconds, callback)
        except Exception as e:
            raise self.Error("Couldn't run timer", self.Error.COULDNT_RUN, e)
lgpl-3.0
marcelovilaca/DIRAC
DataManagementSystem/Client/test/FTSSiteTests.py
15
2616
########################################################################
# File: FTSSiteTests.py
# Author: Krzysztof.Ciba@NOSPAMgmail.com
# Date: 2013/04/16 08:52:36
########################################################################
"""
:mod: FTSSiteTests
==================

.. module: FTSSiteTests
  :synopsis: unittest for FTSSite class

unittest for FTSSite class
"""

import unittest

# SUT
from DIRAC.DataManagementSystem.Client.FTSSite import FTSSite


########################################################################
class FTSSiteTests(unittest.TestCase):
    """
    .. class:: FTSSiteTests

    Checks that an FTSSite built from keyword arguments exposes the
    expected attributes and values.
    """

    def setUp(self):
        """ test set up """
        self.fromDict = {"FTSServer": "https://something.somewhere.org/FTSService",
                         "Name": "something.somewhere.org",
                         "MaxActiveJobs": 100}

    def tearDown(self):
        """ test tear down """
        del self.fromDict

    def test(self):
        """ test case """
        ftsSite = FTSSite(name="something.somewhere.org",
                          ftsServer="https://something.somewhere.org/FTSService",
                          maxActiveJobs=100)
        self.assertEqual(type(ftsSite), FTSSite, "wrong type")
        # every expected attribute must be present with the right value
        for attrName, expected in self.fromDict.items():
            self.assertEqual(hasattr(ftsSite, attrName), True, "%s attr is missing" % attrName)
            self.assertEqual(getattr(ftsSite, attrName), expected, "wrong value for attr %s" % attrName)

    # NOTE: serialization round-trips (toJSON/toSQL) are not tested here;
    # those methods are not present on FTSSite.


# test execution
if __name__ == "__main__":
    testLoader = unittest.TestLoader()
    suite = testLoader.loadTestsFromTestCase(FTSSiteTests)
    suite = unittest.TestSuite([suite])
    unittest.TextTestRunner(verbosity=3).run(suite)
gpl-3.0
ROB-Seismology/oq-hazardlib
openquake/hazardlib/calc/__init__.py
1
1266
# The Hazard Library # Copyright (C) 2012 GEM Foundation # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """ Package :mod:`openquake.hazardlib.calc` contains hazard calculator modules and utilities for them, such as :mod:`~openquake.hazardlib.calc.filters`. """ from openquake.hazardlib.calc.hazard_curve import hazard_curves_poissonian from openquake.hazardlib.calc.gmf import ground_motion_fields from openquake.hazardlib.calc.stochastic import stochastic_event_set_poissonian # from disagg we want to import main calc function # as well as all the pmf extractors from openquake.hazardlib.calc.disagg import * from openquake.hazardlib.calc import filters
agpl-3.0
ngugi/geonode
geonode/catalogue/backends/pycsw_local.py
3
6015
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import os

from lxml import etree
from django.conf import settings
from ConfigParser import SafeConfigParser
from owslib.iso import MD_Metadata
from pycsw import server
from geonode.catalogue.backends.generic import CatalogueBackend as GenericCatalogueBackend
from geonode.catalogue.backends.generic import METADATA_FORMATS
from shapely.geometry.base import ReadingError

# pycsw settings that the user shouldn't have to worry about
CONFIGURATION = {
    'server': {
        'home': '.',
        'url': settings.CATALOGUE['default']['URL'],
        'encoding': 'UTF-8',
        'language': settings.LANGUAGE_CODE,
        'maxrecords': '10',
        # 'loglevel': 'DEBUG',
        # 'logfile': '/tmp/pycsw.log',
        # 'federatedcatalogues': 'http://geo.data.gov/geoportal/csw/discovery',
        # 'pretty_print': 'true',
        # 'domainquerytype': 'range',
        # 'domaincounts': 'true',
        'profiles': 'apiso,ebrim',
    },
    'repository': {
        'source': 'geonode',
        'mappings': os.path.join(os.path.dirname(__file__), 'pycsw_local_mappings.py')
    }
}


class CatalogueBackend(GenericCatalogueBackend):
    """HTTP-less catalogue backend that dispatches CSW requests to an
    embedded (in-process) pycsw server instance."""

    def __init__(self, *args, **kwargs):
        super(CatalogueBackend, self).__init__(*args, **kwargs)
        self.catalogue.formats = ['Atom', 'DIF', 'Dublin Core', 'ebRIM', 'FGDC', 'ISO']
        self.catalogue.local = True

    def remove_record(self, uuid):
        # records live in the GeoNode database itself; nothing to delete here
        pass

    def create_record(self, item):
        # records live in the GeoNode database itself; nothing to insert here
        pass

    def get_record(self, uuid):
        """Return the MD_Metadata record for *uuid*, or None when absent."""
        results = self._csw_local_dispatch(identifier=uuid)
        if len(results) < 1:
            return None
        result = etree.fromstring(results).find('{http://www.isotc211.org/2005/gmd}MD_Metadata')
        if result is None:
            return None
        record = MD_Metadata(result)
        # flatten the per-group keyword lists into a single list
        record.keywords = []
        if hasattr(record, 'identification') and hasattr(record.identification, 'keywords'):
            for kw in record.identification.keywords:
                record.keywords.extend(kw['keywords'])
        record.links = {}
        record.links['metadata'] = self.catalogue.urls_for_uuid(uuid)
        record.links['download'] = self.catalogue.extract_links(record)
        return record

    def search_records(self, keywords, start, limit, bbox):
        """Run a keyword search and shape the result for the API."""
        with self.catalogue:
            # BUGFIX: 'keywords' used to be passed twice, shifting every
            # following positional argument ('start' received the keywords,
            # and 'bbox' landed in 'identifier', silently turning the call
            # into a GetRecordById when a bbox was given).
            lresults = self._csw_local_dispatch(keywords, start + 1, limit, bbox)
            # serialize XML
            e = etree.fromstring(lresults)
            # use a relative './/' descendant search: a leading '//' in
            # findall() triggers a FutureWarning and its behaviour is
            # slated to change
            self.catalogue.records = \
                [MD_Metadata(x) for x in
                 e.findall('.//{http://www.isotc211.org/2005/gmd}MD_Metadata')]

            # build results into JSON for API
            # BUGFIX: records is a list (built just above), not a dict --
            # iterating it with .iteritems() raised AttributeError
            results = [self.catalogue.metadatarecord2dict(doc)
                       for doc in self.catalogue.records]

            search_results = e.find('{http://www.opengis.net/cat/csw/2.0.2}SearchResults')
            result = {'rows': results,
                      'total': search_results.attrib.get('numberOfRecordsMatched'),
                      'next_page': search_results.attrib.get('nextRecord')
                      }

            return result

    def _csw_local_dispatch(self, keywords=None, start=0, limit=10, bbox=None, identifier=None):
        """
        HTTP-less CSW: fake the request environment and call pycsw directly.

        Returns the serialized XML response, or [] for a GetRecordById
        that fails on an empty geometry.
        """
        # serialize pycsw settings into SafeConfigParser
        # object for interaction with pycsw
        mdict = dict(settings.PYCSW['CONFIGURATION'], **CONFIGURATION)
        config = SafeConfigParser()
        for section, options in mdict.iteritems():
            config.add_section(section)
            for option, value in options.iteritems():
                config.set(section, option, value)

        # fake HTTP environment variable
        os.environ['QUERY_STRING'] = ''

        # init pycsw
        csw = server.Csw(config)

        # fake HTTP method
        csw.requesttype = 'POST'

        # fake HTTP request parameters
        if identifier is None:
            # it's a GetRecords request
            formats = [METADATA_FORMATS[f][0] for f in self.catalogue.formats]

            csw.kvp = {
                'elementsetname': 'full',
                'typenames': formats,
                'resulttype': 'results',
                'constraintlanguage': 'CQL_TEXT',
                # BUGFIX: this dict used to contain 'constraint' twice; the
                # later None entry silently discarded the keyword filter, so
                # searches ignored their keywords entirely.
                # NOTE(review): assumes 'keywords' arrives as a plain string
                # suitable for CQL interpolation -- confirm against callers.
                'constraint': 'csw:AnyText like "%%%s%%"' % keywords if keywords else None,
                'outputschema': 'http://www.isotc211.org/2005/gmd',
                'startposition': start,
                'maxrecords': limit
            }
            response = csw.getrecords()
        else:
            # it's a GetRecordById request
            csw.kvp = {
                'id': [identifier],
                'outputschema': 'http://www.isotc211.org/2005/gmd',
            }
            # FIXME(Ariel): Remove this try/except block when pycsw deals with
            # empty geometry fields better.
            # https://gist.github.com/ingenieroariel/717bb720a201030e9b3a
            try:
                response = csw.getrecordbyid()
            except ReadingError:
                return []

        return etree.tostring(response)
gpl-3.0
nhejazi/scikit-learn
sklearn/linear_model/least_angle.py
6
58438
""" Least Angle Regression algorithm. See the documentation on the Generalized Linear Model for a complete discussion. """ from __future__ import print_function # Author: Fabian Pedregosa <fabian.pedregosa@inria.fr> # Alexandre Gramfort <alexandre.gramfort@inria.fr> # Gael Varoquaux # # License: BSD 3 clause from math import log import sys import warnings import numpy as np from scipy import linalg, interpolate from scipy.linalg.lapack import get_lapack_funcs from .base import LinearModel from ..base import RegressorMixin from ..utils import arrayfuncs, as_float_array, check_X_y, deprecated from ..model_selection import check_cv from ..exceptions import ConvergenceWarning from ..externals.joblib import Parallel, delayed from ..externals.six.moves import xrange from ..externals.six import string_types solve_triangular_args = {'check_finite': False} def lars_path(X, y, Xy=None, Gram=None, max_iter=500, alpha_min=0, method='lar', copy_X=True, eps=np.finfo(np.float).eps, copy_Gram=True, verbose=0, return_path=True, return_n_iter=False, positive=False): """Compute Least Angle Regression or Lasso path using LARS algorithm [1] The optimization objective for the case method='lasso' is:: (1 / (2 * n_samples)) * ||y - Xw||^2_2 + alpha * ||w||_1 in the case of method='lars', the objective function is only known in the form of an implicit equation (see discussion in [1]) Read more in the :ref:`User Guide <least_angle_regression>`. Parameters ----------- X : array, shape: (n_samples, n_features) Input data. y : array, shape: (n_samples) Input targets. Xy : array-like, shape (n_samples,) or (n_samples, n_targets), \ optional Xy = np.dot(X.T, y) that can be precomputed. It is useful only when the Gram matrix is precomputed. Gram : None, 'auto', array, shape: (n_features, n_features), optional Precomputed Gram matrix (X' * X), if ``'auto'``, the Gram matrix is precomputed from the given X, if there are more samples than features. 
max_iter : integer, optional (default=500) Maximum number of iterations to perform, set to infinity for no limit. alpha_min : float, optional (default=0) Minimum correlation along the path. It corresponds to the regularization parameter alpha parameter in the Lasso. method : {'lar', 'lasso'}, optional (default='lar') Specifies the returned model. Select ``'lar'`` for Least Angle Regression, ``'lasso'`` for the Lasso. copy_X : bool, optional (default=True) If ``False``, ``X`` is overwritten. eps : float, optional (default=``np.finfo(np.float).eps``) The machine-precision regularization in the computation of the Cholesky diagonal factors. Increase this for very ill-conditioned systems. copy_Gram : bool, optional (default=True) If ``False``, ``Gram`` is overwritten. verbose : int (default=0) Controls output verbosity. return_path : bool, optional (default=True) If ``return_path==True`` returns the entire path, else returns only the last point of the path. return_n_iter : bool, optional (default=False) Whether to return the number of iterations. positive : boolean (default=False) Restrict coefficients to be >= 0. When using this option together with method 'lasso' the model coefficients will not converge to the ordinary-least-squares solution for small values of alpha (neither will they when using method 'lar' ..). Only coefficients up to the smallest alpha value (``alphas_[alphas_ > 0.].min()`` when fit_path=True) reached by the stepwise Lars-Lasso algorithm are typically in congruence with the solution of the coordinate descent lasso_path function. Returns -------- alphas : array, shape: [n_alphas + 1] Maximum of covariances (in absolute value) at each iteration. ``n_alphas`` is either ``max_iter``, ``n_features`` or the number of nodes in the path with ``alpha >= alpha_min``, whichever is smaller. active : array, shape [n_alphas] Indices of active variables at the end of the path. 
coefs : array, shape (n_features, n_alphas + 1) Coefficients along the path n_iter : int Number of iterations run. Returned only if return_n_iter is set to True. See also -------- lasso_path LassoLars Lars LassoLarsCV LarsCV sklearn.decomposition.sparse_encode References ---------- .. [1] "Least Angle Regression", Effron et al. http://statweb.stanford.edu/~tibs/ftp/lars.pdf .. [2] `Wikipedia entry on the Least-angle regression <https://en.wikipedia.org/wiki/Least-angle_regression>`_ .. [3] `Wikipedia entry on the Lasso <https://en.wikipedia.org/wiki/Lasso_(statistics)>`_ """ n_features = X.shape[1] n_samples = y.size max_features = min(max_iter, n_features) if return_path: coefs = np.zeros((max_features + 1, n_features)) alphas = np.zeros(max_features + 1) else: coef, prev_coef = np.zeros(n_features), np.zeros(n_features) alpha, prev_alpha = np.array([0.]), np.array([0.]) # better ideas? n_iter, n_active = 0, 0 active, indices = list(), np.arange(n_features) # holds the sign of covariance sign_active = np.empty(max_features, dtype=np.int8) drop = False # will hold the cholesky factorization. Only lower part is # referenced. # We are initializing this to "zeros" and not empty, because # it is passed to scipy linalg functions and thus if it has NaNs, # even if they are in the upper part that it not used, we # get errors raised. # Once we support only scipy > 0.12 we can use check_finite=False and # go back to "empty" L = np.zeros((max_features, max_features), dtype=X.dtype) swap, nrm2 = linalg.get_blas_funcs(('swap', 'nrm2'), (X,)) solve_cholesky, = get_lapack_funcs(('potrs',), (X,)) if Gram is None or Gram is False: Gram = None if copy_X: # force copy. 
setting the array to be fortran-ordered # speeds up the calculation of the (partial) Gram matrix # and allows to easily swap columns X = X.copy('F') elif isinstance(Gram, string_types) and Gram == 'auto' or Gram is True: if Gram is True or X.shape[0] > X.shape[1]: Gram = np.dot(X.T, X) else: Gram = None elif copy_Gram: Gram = Gram.copy() if Xy is None: Cov = np.dot(X.T, y) else: Cov = Xy.copy() if verbose: if verbose > 1: print("Step\t\tAdded\t\tDropped\t\tActive set size\t\tC") else: sys.stdout.write('.') sys.stdout.flush() tiny32 = np.finfo(np.float32).tiny # to avoid division by 0 warning equality_tolerance = np.finfo(np.float32).eps while True: if Cov.size: if positive: C_idx = np.argmax(Cov) else: C_idx = np.argmax(np.abs(Cov)) C_ = Cov[C_idx] if positive: C = C_ else: C = np.fabs(C_) else: C = 0. if return_path: alpha = alphas[n_iter, np.newaxis] coef = coefs[n_iter] prev_alpha = alphas[n_iter - 1, np.newaxis] prev_coef = coefs[n_iter - 1] alpha[0] = C / n_samples if alpha[0] <= alpha_min + equality_tolerance: # early stopping if abs(alpha[0] - alpha_min) > equality_tolerance: # interpolation factor 0 <= ss < 1 if n_iter > 0: # In the first iteration, all alphas are zero, the formula # below would make ss a NaN ss = ((prev_alpha[0] - alpha_min) / (prev_alpha[0] - alpha[0])) coef[:] = prev_coef + ss * (coef - prev_coef) alpha[0] = alpha_min if return_path: coefs[n_iter] = coef break if n_iter >= max_iter or n_active >= n_features: break if not drop: ########################################################## # Append x_j to the Cholesky factorization of (Xa * Xa') # # # # ( L 0 ) # # L -> ( ) , where L * w = Xa' x_j # # ( w z ) and z = ||x_j|| # # # ########################################################## if positive: sign_active[n_active] = np.ones_like(C_) else: sign_active[n_active] = np.sign(C_) m, n = n_active, C_idx + n_active Cov[C_idx], Cov[0] = swap(Cov[C_idx], Cov[0]) indices[n], indices[m] = indices[m], indices[n] Cov_not_shortened = Cov Cov = 
Cov[1:] # remove Cov[0] if Gram is None: X.T[n], X.T[m] = swap(X.T[n], X.T[m]) c = nrm2(X.T[n_active]) ** 2 L[n_active, :n_active] = \ np.dot(X.T[n_active], X.T[:n_active].T) else: # swap does only work inplace if matrix is fortran # contiguous ... Gram[m], Gram[n] = swap(Gram[m], Gram[n]) Gram[:, m], Gram[:, n] = swap(Gram[:, m], Gram[:, n]) c = Gram[n_active, n_active] L[n_active, :n_active] = Gram[n_active, :n_active] # Update the cholesky decomposition for the Gram matrix if n_active: linalg.solve_triangular(L[:n_active, :n_active], L[n_active, :n_active], trans=0, lower=1, overwrite_b=True, **solve_triangular_args) v = np.dot(L[n_active, :n_active], L[n_active, :n_active]) diag = max(np.sqrt(np.abs(c - v)), eps) L[n_active, n_active] = diag if diag < 1e-7: # The system is becoming too ill-conditioned. # We have degenerate vectors in our active set. # We'll 'drop for good' the last regressor added. # Note: this case is very rare. It is no longer triggered by # the test suite. The `equality_tolerance` margin added in 0.16 # to get early stopping to work consistently on all versions of # Python including 32 bit Python under Windows seems to make it # very difficult to trigger the 'drop for good' strategy. warnings.warn('Regressors in active set degenerate. ' 'Dropping a regressor, after %i iterations, ' 'i.e. alpha=%.3e, ' 'with an active set of %i regressors, and ' 'the smallest cholesky pivot element being %.3e.' ' Reduce max_iter or increase eps parameters.' % (n_iter, alpha, n_active, diag), ConvergenceWarning) # XXX: need to figure a 'drop for good' way Cov = Cov_not_shortened Cov[0] = 0 Cov[C_idx], Cov[0] = swap(Cov[C_idx], Cov[0]) continue active.append(indices[n_active]) n_active += 1 if verbose > 1: print("%s\t\t%s\t\t%s\t\t%s\t\t%s" % (n_iter, active[-1], '', n_active, C)) if method == 'lasso' and n_iter > 0 and prev_alpha[0] < alpha[0]: # alpha is increasing. 
This is because the updates of Cov are # bringing in too much numerical error that is greater than # than the remaining correlation with the # regressors. Time to bail out warnings.warn('Early stopping the lars path, as the residues ' 'are small and the current value of alpha is no ' 'longer well controlled. %i iterations, alpha=%.3e, ' 'previous alpha=%.3e, with an active set of %i ' 'regressors.' % (n_iter, alpha, prev_alpha, n_active), ConvergenceWarning) break # least squares solution least_squares, info = solve_cholesky(L[:n_active, :n_active], sign_active[:n_active], lower=True) if least_squares.size == 1 and least_squares == 0: # This happens because sign_active[:n_active] = 0 least_squares[...] = 1 AA = 1. else: # is this really needed ? AA = 1. / np.sqrt(np.sum(least_squares * sign_active[:n_active])) if not np.isfinite(AA): # L is too ill-conditioned i = 0 L_ = L[:n_active, :n_active].copy() while not np.isfinite(AA): L_.flat[::n_active + 1] += (2 ** i) * eps least_squares, info = solve_cholesky( L_, sign_active[:n_active], lower=True) tmp = max(np.sum(least_squares * sign_active[:n_active]), eps) AA = 1. 
/ np.sqrt(tmp) i += 1 least_squares *= AA if Gram is None: # equiangular direction of variables in the active set eq_dir = np.dot(X.T[:n_active].T, least_squares) # correlation between each unactive variables and # eqiangular vector corr_eq_dir = np.dot(X.T[n_active:], eq_dir) else: # if huge number of features, this takes 50% of time, I # think could be avoided if we just update it using an # orthogonal (QR) decomposition of X corr_eq_dir = np.dot(Gram[:n_active, n_active:].T, least_squares) g1 = arrayfuncs.min_pos((C - Cov) / (AA - corr_eq_dir + tiny32)) if positive: gamma_ = min(g1, C / AA) else: g2 = arrayfuncs.min_pos((C + Cov) / (AA + corr_eq_dir + tiny32)) gamma_ = min(g1, g2, C / AA) # TODO: better names for these variables: z drop = False z = -coef[active] / (least_squares + tiny32) z_pos = arrayfuncs.min_pos(z) if z_pos < gamma_: # some coefficients have changed sign idx = np.where(z == z_pos)[0][::-1] # update the sign, important for LAR sign_active[idx] = -sign_active[idx] if method == 'lasso': gamma_ = z_pos drop = True n_iter += 1 if return_path: if n_iter >= coefs.shape[0]: del coef, alpha, prev_alpha, prev_coef # resize the coefs and alphas array add_features = 2 * max(1, (max_features - n_active)) coefs = np.resize(coefs, (n_iter + add_features, n_features)) coefs[-add_features:] = 0 alphas = np.resize(alphas, n_iter + add_features) alphas[-add_features:] = 0 coef = coefs[n_iter] prev_coef = coefs[n_iter - 1] alpha = alphas[n_iter, np.newaxis] prev_alpha = alphas[n_iter - 1, np.newaxis] else: # mimic the effect of incrementing n_iter on the array references prev_coef = coef prev_alpha[0] = alpha[0] coef = np.zeros_like(coef) coef[active] = prev_coef[active] + gamma_ * least_squares # update correlations Cov -= gamma_ * corr_eq_dir # See if any coefficient has changed sign if drop and method == 'lasso': # handle the case when idx is not length of 1 [arrayfuncs.cholesky_delete(L[:n_active, :n_active], ii) for ii in idx] n_active -= 1 m, n = idx, 
n_active # handle the case when idx is not length of 1 drop_idx = [active.pop(ii) for ii in idx] if Gram is None: # propagate dropped variable for ii in idx: for i in range(ii, n_active): X.T[i], X.T[i + 1] = swap(X.T[i], X.T[i + 1]) # yeah this is stupid indices[i], indices[i + 1] = indices[i + 1], indices[i] # TODO: this could be updated residual = y - np.dot(X[:, :n_active], coef[active]) temp = np.dot(X.T[n_active], residual) Cov = np.r_[temp, Cov] else: for ii in idx: for i in range(ii, n_active): indices[i], indices[i + 1] = indices[i + 1], indices[i] Gram[i], Gram[i + 1] = swap(Gram[i], Gram[i + 1]) Gram[:, i], Gram[:, i + 1] = swap(Gram[:, i], Gram[:, i + 1]) # Cov_n = Cov_j + x_j * X + increment(betas) TODO: # will this still work with multiple drops ? # recompute covariance. Probably could be done better # wrong as Xy is not swapped with the rest of variables # TODO: this could be updated residual = y - np.dot(X, coef) temp = np.dot(X.T[drop_idx], residual) Cov = np.r_[temp, Cov] sign_active = np.delete(sign_active, idx) sign_active = np.append(sign_active, 0.) # just to maintain size if verbose > 1: print("%s\t\t%s\t\t%s\t\t%s\t\t%s" % (n_iter, '', drop_idx, n_active, abs(temp))) if return_path: # resize coefs in case of early stop alphas = alphas[:n_iter + 1] coefs = coefs[:n_iter + 1] if return_n_iter: return alphas, active, coefs.T, n_iter else: return alphas, active, coefs.T else: if return_n_iter: return alpha, active, coef, n_iter else: return alpha, active, coef ############################################################################### # Estimator classes class Lars(LinearModel, RegressorMixin): """Least Angle Regression model a.k.a. LAR Read more in the :ref:`User Guide <least_angle_regression>`. Parameters ---------- fit_intercept : boolean Whether to calculate the intercept for this model. If set to false, no intercept will be used in calculations (e.g. data is expected to be already centered). 
verbose : boolean or integer, optional Sets the verbosity amount normalize : boolean, optional, default True This parameter is ignored when ``fit_intercept`` is set to False. If True, the regressors X will be normalized before regression by subtracting the mean and dividing by the l2-norm. If you wish to standardize, please use :class:`sklearn.preprocessing.StandardScaler` before calling ``fit`` on an estimator with ``normalize=False``. precompute : True | False | 'auto' | array-like Whether to use a precomputed Gram matrix to speed up calculations. If set to ``'auto'`` let us decide. The Gram matrix can also be passed as argument. n_nonzero_coefs : int, optional Target number of non-zero coefficients. Use ``np.inf`` for no limit. eps : float, optional The machine-precision regularization in the computation of the Cholesky diagonal factors. Increase this for very ill-conditioned systems. Unlike the ``tol`` parameter in some iterative optimization-based algorithms, this parameter does not control the tolerance of the optimization. copy_X : boolean, optional, default True If ``True``, X will be copied; else, it may be overwritten. fit_path : boolean If True the full path is stored in the ``coef_path_`` attribute. If you compute the solution for a large problem or many targets, setting ``fit_path`` to ``False`` will lead to a speedup, especially with a small alpha. positive : boolean (default=False) Restrict coefficients to be >= 0. Be aware that you might want to remove fit_intercept which is set True by default. Attributes ---------- alphas_ : array, shape (n_alphas + 1,) | list of n_targets such arrays Maximum of covariances (in absolute value) at each iteration. \ ``n_alphas`` is either ``n_nonzero_coefs`` or ``n_features``, \ whichever is smaller. active_ : list, length = n_alphas | list of n_targets such lists Indices of active variables at the end of the path. 
coef_path_ : array, shape (n_features, n_alphas + 1) \ | list of n_targets such arrays The varying values of the coefficients along the path. It is not present if the ``fit_path`` parameter is ``False``. coef_ : array, shape (n_features,) or (n_targets, n_features) Parameter vector (w in the formulation formula). intercept_ : float | array, shape (n_targets,) Independent term in decision function. n_iter_ : array-like or int The number of iterations taken by lars_path to find the grid of alphas for each target. Examples -------- >>> from sklearn import linear_model >>> reg = linear_model.Lars(n_nonzero_coefs=1) >>> reg.fit([[-1, 1], [0, 0], [1, 1]], [-1.1111, 0, -1.1111]) ... # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE Lars(copy_X=True, eps=..., fit_intercept=True, fit_path=True, n_nonzero_coefs=1, normalize=True, positive=False, precompute='auto', verbose=False) >>> print(reg.coef_) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE [ 0. -1.11...] See also -------- lars_path, LarsCV sklearn.decomposition.sparse_encode """ method = 'lar' def __init__(self, fit_intercept=True, verbose=False, normalize=True, precompute='auto', n_nonzero_coefs=500, eps=np.finfo(np.float).eps, copy_X=True, fit_path=True, positive=False): self.fit_intercept = fit_intercept self.verbose = verbose self.normalize = normalize self.precompute = precompute self.n_nonzero_coefs = n_nonzero_coefs self.positive = positive self.eps = eps self.copy_X = copy_X self.fit_path = fit_path def _get_gram(self, precompute, X, y): if (not hasattr(precompute, '__array__')) and ( (precompute is True) or (precompute == 'auto' and X.shape[0] > X.shape[1]) or (precompute == 'auto' and y.shape[1] > 1)): precompute = np.dot(X.T, X) return precompute def _fit(self, X, y, max_iter, alpha, fit_path, Xy=None): """Auxiliary method to fit the model using X, y as training data""" n_features = X.shape[1] X, y, X_offset, y_offset, X_scale = self._preprocess_data(X, y, self.fit_intercept, self.normalize, self.copy_X) if y.ndim == 
1: y = y[:, np.newaxis] n_targets = y.shape[1] Gram = self._get_gram(self.precompute, X, y) self.alphas_ = [] self.n_iter_ = [] self.coef_ = np.empty((n_targets, n_features)) if fit_path: self.active_ = [] self.coef_path_ = [] for k in xrange(n_targets): this_Xy = None if Xy is None else Xy[:, k] alphas, active, coef_path, n_iter_ = lars_path( X, y[:, k], Gram=Gram, Xy=this_Xy, copy_X=self.copy_X, copy_Gram=True, alpha_min=alpha, method=self.method, verbose=max(0, self.verbose - 1), max_iter=max_iter, eps=self.eps, return_path=True, return_n_iter=True, positive=self.positive) self.alphas_.append(alphas) self.active_.append(active) self.n_iter_.append(n_iter_) self.coef_path_.append(coef_path) self.coef_[k] = coef_path[:, -1] if n_targets == 1: self.alphas_, self.active_, self.coef_path_, self.coef_ = [ a[0] for a in (self.alphas_, self.active_, self.coef_path_, self.coef_)] self.n_iter_ = self.n_iter_[0] else: for k in xrange(n_targets): this_Xy = None if Xy is None else Xy[:, k] alphas, _, self.coef_[k], n_iter_ = lars_path( X, y[:, k], Gram=Gram, Xy=this_Xy, copy_X=self.copy_X, copy_Gram=True, alpha_min=alpha, method=self.method, verbose=max(0, self.verbose - 1), max_iter=max_iter, eps=self.eps, return_path=False, return_n_iter=True, positive=self.positive) self.alphas_.append(alphas) self.n_iter_.append(n_iter_) if n_targets == 1: self.alphas_ = self.alphas_[0] self.n_iter_ = self.n_iter_[0] self._set_intercept(X_offset, y_offset, X_scale) return self def fit(self, X, y, Xy=None): """Fit the model using X, y as training data. Parameters ---------- X : array-like, shape (n_samples, n_features) Training data. y : array-like, shape (n_samples,) or (n_samples, n_targets) Target values. Xy : array-like, shape (n_samples,) or (n_samples, n_targets), \ optional Xy = np.dot(X.T, y) that can be precomputed. It is useful only when the Gram matrix is precomputed. Returns ------- self : object returns an instance of self. 
""" X, y = check_X_y(X, y, y_numeric=True, multi_output=True) alpha = getattr(self, 'alpha', 0.) if hasattr(self, 'n_nonzero_coefs'): alpha = 0. # n_nonzero_coefs parametrization takes priority max_iter = self.n_nonzero_coefs else: max_iter = self.max_iter self._fit(X, y, max_iter=max_iter, alpha=alpha, fit_path=self.fit_path, Xy=Xy) return self class LassoLars(Lars): """Lasso model fit with Least Angle Regression a.k.a. Lars It is a Linear Model trained with an L1 prior as regularizer. The optimization objective for Lasso is:: (1 / (2 * n_samples)) * ||y - Xw||^2_2 + alpha * ||w||_1 Read more in the :ref:`User Guide <least_angle_regression>`. Parameters ---------- alpha : float Constant that multiplies the penalty term. Defaults to 1.0. ``alpha = 0`` is equivalent to an ordinary least square, solved by :class:`LinearRegression`. For numerical reasons, using ``alpha = 0`` with the LassoLars object is not advised and you should prefer the LinearRegression object. fit_intercept : boolean whether to calculate the intercept for this model. If set to false, no intercept will be used in calculations (e.g. data is expected to be already centered). verbose : boolean or integer, optional Sets the verbosity amount normalize : boolean, optional, default True This parameter is ignored when ``fit_intercept`` is set to False. If True, the regressors X will be normalized before regression by subtracting the mean and dividing by the l2-norm. If you wish to standardize, please use :class:`sklearn.preprocessing.StandardScaler` before calling ``fit`` on an estimator with ``normalize=False``. precompute : True | False | 'auto' | array-like Whether to use a precomputed Gram matrix to speed up calculations. If set to ``'auto'`` let us decide. The Gram matrix can also be passed as argument. max_iter : integer, optional Maximum number of iterations to perform. eps : float, optional The machine-precision regularization in the computation of the Cholesky diagonal factors. 
Increase this for very ill-conditioned systems. Unlike the ``tol`` parameter in some iterative optimization-based algorithms, this parameter does not control the tolerance of the optimization. copy_X : boolean, optional, default True If True, X will be copied; else, it may be overwritten. fit_path : boolean If ``True`` the full path is stored in the ``coef_path_`` attribute. If you compute the solution for a large problem or many targets, setting ``fit_path`` to ``False`` will lead to a speedup, especially with a small alpha. positive : boolean (default=False) Restrict coefficients to be >= 0. Be aware that you might want to remove fit_intercept which is set True by default. Under the positive restriction the model coefficients will not converge to the ordinary-least-squares solution for small values of alpha. Only coefficients up to the smallest alpha value (``alphas_[alphas_ > 0.].min()`` when fit_path=True) reached by the stepwise Lars-Lasso algorithm are typically in congruence with the solution of the coordinate descent Lasso estimator. Attributes ---------- alphas_ : array, shape (n_alphas + 1,) | list of n_targets such arrays Maximum of covariances (in absolute value) at each iteration. \ ``n_alphas`` is either ``max_iter``, ``n_features``, or the number of \ nodes in the path with correlation greater than ``alpha``, whichever \ is smaller. active_ : list, length = n_alphas | list of n_targets such lists Indices of active variables at the end of the path. coef_path_ : array, shape (n_features, n_alphas + 1) or list If a list is passed it's expected to be one of n_targets such arrays. The varying values of the coefficients along the path. It is not present if the ``fit_path`` parameter is ``False``. coef_ : array, shape (n_features,) or (n_targets, n_features) Parameter vector (w in the formulation formula). intercept_ : float | array, shape (n_targets,) Independent term in decision function. n_iter_ : array-like or int. 
The number of iterations taken by lars_path to find the grid of alphas for each target. Examples -------- >>> from sklearn import linear_model >>> reg = linear_model.LassoLars(alpha=0.01) >>> reg.fit([[-1, 1], [0, 0], [1, 1]], [-1, 0, -1]) ... # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE LassoLars(alpha=0.01, copy_X=True, eps=..., fit_intercept=True, fit_path=True, max_iter=500, normalize=True, positive=False, precompute='auto', verbose=False) >>> print(reg.coef_) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE [ 0. -0.963257...] See also -------- lars_path lasso_path Lasso LassoCV LassoLarsCV sklearn.decomposition.sparse_encode """ method = 'lasso' def __init__(self, alpha=1.0, fit_intercept=True, verbose=False, normalize=True, precompute='auto', max_iter=500, eps=np.finfo(np.float).eps, copy_X=True, fit_path=True, positive=False): self.alpha = alpha self.fit_intercept = fit_intercept self.max_iter = max_iter self.verbose = verbose self.normalize = normalize self.positive = positive self.precompute = precompute self.copy_X = copy_X self.eps = eps self.fit_path = fit_path ############################################################################### # Cross-validated estimator classes def _check_copy_and_writeable(array, copy=False): if copy or not array.flags.writeable: return array.copy() return array def _lars_path_residues(X_train, y_train, X_test, y_test, Gram=None, copy=True, method='lars', verbose=False, fit_intercept=True, normalize=True, max_iter=500, eps=np.finfo(np.float).eps, positive=False): """Compute the residues on left-out data for a full LARS path Parameters ----------- X_train : array, shape (n_samples, n_features) The data to fit the LARS on y_train : array, shape (n_samples) The target variable to fit LARS on X_test : array, shape (n_samples, n_features) The data to compute the residues on y_test : array, shape (n_samples) The target variable to compute the residues on Gram : None, 'auto', array, shape: (n_features, n_features), optional 
Precomputed Gram matrix (X' * X), if ``'auto'``, the Gram matrix is precomputed from the given X, if there are more samples than features copy : boolean, optional Whether X_train, X_test, y_train and y_test should be copied; if False, they may be overwritten. method : 'lar' | 'lasso' Specifies the returned model. Select ``'lar'`` for Least Angle Regression, ``'lasso'`` for the Lasso. verbose : integer, optional Sets the amount of verbosity fit_intercept : boolean whether to calculate the intercept for this model. If set to false, no intercept will be used in calculations (e.g. data is expected to be already centered). positive : boolean (default=False) Restrict coefficients to be >= 0. Be aware that you might want to remove fit_intercept which is set True by default. See reservations for using this option in combination with method 'lasso' for expected small values of alpha in the doc of LassoLarsCV and LassoLarsIC. normalize : boolean, optional, default True This parameter is ignored when ``fit_intercept`` is set to False. If True, the regressors X will be normalized before regression by subtracting the mean and dividing by the l2-norm. If you wish to standardize, please use :class:`sklearn.preprocessing.StandardScaler` before calling ``fit`` on an estimator with ``normalize=False``. max_iter : integer, optional Maximum number of iterations to perform. eps : float, optional The machine-precision regularization in the computation of the Cholesky diagonal factors. Increase this for very ill-conditioned systems. Unlike the ``tol`` parameter in some iterative optimization-based algorithms, this parameter does not control the tolerance of the optimization. Returns -------- alphas : array, shape (n_alphas,) Maximum of covariances (in absolute value) at each iteration. ``n_alphas`` is either ``max_iter`` or ``n_features``, whichever is smaller. active : list Indices of active variables at the end of the path. 
coefs : array, shape (n_features, n_alphas) Coefficients along the path residues : array, shape (n_alphas, n_samples) Residues of the prediction on the test data """ X_train = _check_copy_and_writeable(X_train, copy) y_train = _check_copy_and_writeable(y_train, copy) X_test = _check_copy_and_writeable(X_test, copy) y_test = _check_copy_and_writeable(y_test, copy) if fit_intercept: X_mean = X_train.mean(axis=0) X_train -= X_mean X_test -= X_mean y_mean = y_train.mean(axis=0) y_train = as_float_array(y_train, copy=False) y_train -= y_mean y_test = as_float_array(y_test, copy=False) y_test -= y_mean if normalize: norms = np.sqrt(np.sum(X_train ** 2, axis=0)) nonzeros = np.flatnonzero(norms) X_train[:, nonzeros] /= norms[nonzeros] alphas, active, coefs = lars_path( X_train, y_train, Gram=Gram, copy_X=False, copy_Gram=False, method=method, verbose=max(0, verbose - 1), max_iter=max_iter, eps=eps, positive=positive) if normalize: coefs[nonzeros] /= norms[nonzeros][:, np.newaxis] residues = np.dot(X_test, coefs) - y_test[:, np.newaxis] return alphas, active, coefs, residues.T class LarsCV(Lars): """Cross-validated Least Angle Regression model Read more in the :ref:`User Guide <least_angle_regression>`. Parameters ---------- fit_intercept : boolean whether to calculate the intercept for this model. If set to false, no intercept will be used in calculations (e.g. data is expected to be already centered). verbose : boolean or integer, optional Sets the verbosity amount max_iter : integer, optional Maximum number of iterations to perform. normalize : boolean, optional, default True This parameter is ignored when ``fit_intercept`` is set to False. If True, the regressors X will be normalized before regression by subtracting the mean and dividing by the l2-norm. If you wish to standardize, please use :class:`sklearn.preprocessing.StandardScaler` before calling ``fit`` on an estimator with ``normalize=False``. 
precompute : True | False | 'auto' | array-like Whether to use a precomputed Gram matrix to speed up calculations. If set to ``'auto'`` let us decide. The Gram matrix cannot be passed as argument since we will use only subsets of X. cv : int, cross-validation generator or an iterable, optional Determines the cross-validation splitting strategy. Possible inputs for cv are: - None, to use the default 3-fold cross-validation, - integer, to specify the number of folds. - An object to be used as a cross-validation generator. - An iterable yielding train/test splits. For integer/None inputs, :class:`KFold` is used. Refer :ref:`User Guide <cross_validation>` for the various cross-validation strategies that can be used here. max_n_alphas : integer, optional The maximum number of points on the path used to compute the residuals in the cross-validation n_jobs : integer, optional Number of CPUs to use during the cross validation. If ``-1``, use all the CPUs eps : float, optional The machine-precision regularization in the computation of the Cholesky diagonal factors. Increase this for very ill-conditioned systems. copy_X : boolean, optional, default True If ``True``, X will be copied; else, it may be overwritten. positive : boolean (default=False) Restrict coefficients to be >= 0. Be aware that you might want to remove fit_intercept which is set True by default. 
Attributes ---------- coef_ : array, shape (n_features,) parameter vector (w in the formulation formula) intercept_ : float independent term in decision function coef_path_ : array, shape (n_features, n_alphas) the varying values of the coefficients along the path alpha_ : float the estimated regularization parameter alpha alphas_ : array, shape (n_alphas,) the different values of alpha along the path cv_alphas_ : array, shape (n_cv_alphas,) all the values of alpha along the path for the different folds mse_path_ : array, shape (n_folds, n_cv_alphas) the mean square error on left-out for each fold along the path (alpha values given by ``cv_alphas``) n_iter_ : array-like or int the number of iterations run by Lars with the optimal alpha. See also -------- lars_path, LassoLars, LassoLarsCV """ method = 'lar' def __init__(self, fit_intercept=True, verbose=False, max_iter=500, normalize=True, precompute='auto', cv=None, max_n_alphas=1000, n_jobs=1, eps=np.finfo(np.float).eps, copy_X=True, positive=False): self.max_iter = max_iter self.cv = cv self.max_n_alphas = max_n_alphas self.n_jobs = n_jobs super(LarsCV, self).__init__(fit_intercept=fit_intercept, verbose=verbose, normalize=normalize, precompute=precompute, n_nonzero_coefs=500, eps=eps, copy_X=copy_X, fit_path=True, positive=positive) def fit(self, X, y): """Fit the model using X, y as training data. Parameters ---------- X : array-like, shape (n_samples, n_features) Training data. y : array-like, shape (n_samples,) Target values. Returns ------- self : object returns an instance of self. """ X, y = check_X_y(X, y, y_numeric=True) X = as_float_array(X, copy=self.copy_X) y = as_float_array(y, copy=self.copy_X) # init cross-validation generator cv = check_cv(self.cv, classifier=False) # As we use cross-validation, the Gram matrix is not precomputed here Gram = self.precompute if hasattr(Gram, '__array__'): warnings.warn("Parameter 'precompute' cannot be an array in " "%s. Automatically switch to 'auto' instead." 
                          % self.__class__.__name__)
            Gram = 'auto'

        # Fit one LARS path per CV fold (in parallel); each entry of
        # cv_paths is a tuple (alphas, active, coefs, residues).
        cv_paths = Parallel(n_jobs=self.n_jobs, verbose=self.verbose)(
            delayed(_lars_path_residues)(
                X[train], y[train], X[test], y[test], Gram=Gram, copy=False,
                method=self.method, verbose=max(0, self.verbose - 1),
                normalize=self.normalize, fit_intercept=self.fit_intercept,
                max_iter=self.max_iter, eps=self.eps,
                positive=self.positive)
            for train, test in cv.split(X, y))
        # Merge the per-fold alpha grids into one common grid.
        all_alphas = np.concatenate(list(zip(*cv_paths))[0])
        # Unique also sorts
        all_alphas = np.unique(all_alphas)
        # Take at most max_n_alphas values
        stride = int(max(1, int(len(all_alphas) / float(self.max_n_alphas))))
        all_alphas = all_alphas[::stride]

        # Interpolate each fold's residues onto the common alpha grid and
        # accumulate the squared error per (alpha, fold).
        mse_path = np.empty((len(all_alphas), len(cv_paths)))
        for index, (alphas, active, coefs, residues) in enumerate(cv_paths):
            # Paths are returned with decreasing alpha; reverse so that
            # interp1d gets increasing abscissae.
            alphas = alphas[::-1]
            residues = residues[::-1]
            # Pad both ends so the fold's path covers the full common grid.
            if alphas[0] != 0:
                alphas = np.r_[0, alphas]
                residues = np.r_[residues[0, np.newaxis], residues]
            if alphas[-1] != all_alphas[-1]:
                alphas = np.r_[alphas, all_alphas[-1]]
                residues = np.r_[residues, residues[-1, np.newaxis]]
            this_residues = interpolate.interp1d(alphas,
                                                 residues,
                                                 axis=0)(all_alphas)
            this_residues **= 2
            mse_path[:, index] = np.mean(this_residues, axis=-1)

        # Drop alphas where any fold produced a non-finite error.
        mask = np.all(np.isfinite(mse_path), axis=-1)
        all_alphas = all_alphas[mask]
        mse_path = mse_path[mask]

        # Select the alpha that minimizes left-out error
        i_best_alpha = np.argmin(mse_path.mean(axis=-1))
        best_alpha = all_alphas[i_best_alpha]

        # Store our parameters
        self.alpha_ = best_alpha
        self.cv_alphas_ = all_alphas
        self.mse_path_ = mse_path

        # Now compute the full model on all the data with the selected
        # alpha; this fits a lasso internally when self is LassoLarsCV,
        # as self.method == 'lasso'
        self._fit(X, y, max_iter=self.max_iter, alpha=best_alpha,
                  Xy=None, fit_path=True)
        return self

    @property
    @deprecated("Attribute alpha is deprecated in 0.19 and "
                "will be removed in 0.21. 
See ``alpha_`` instead") def alpha(self): # impedance matching for the above Lars.fit (should not be documented) return self.alpha_ @property @deprecated("Attribute ``cv_mse_path_`` is deprecated in 0.18 and " "will be removed in 0.20. Use ``mse_path_`` instead") def cv_mse_path_(self): return self.mse_path_ class LassoLarsCV(LarsCV): """Cross-validated Lasso, using the LARS algorithm The optimization objective for Lasso is:: (1 / (2 * n_samples)) * ||y - Xw||^2_2 + alpha * ||w||_1 Read more in the :ref:`User Guide <least_angle_regression>`. Parameters ---------- fit_intercept : boolean whether to calculate the intercept for this model. If set to false, no intercept will be used in calculations (e.g. data is expected to be already centered). verbose : boolean or integer, optional Sets the verbosity amount max_iter : integer, optional Maximum number of iterations to perform. normalize : boolean, optional, default True This parameter is ignored when ``fit_intercept`` is set to False. If True, the regressors X will be normalized before regression by subtracting the mean and dividing by the l2-norm. If you wish to standardize, please use :class:`sklearn.preprocessing.StandardScaler` before calling ``fit`` on an estimator with ``normalize=False``. precompute : True | False | 'auto' Whether to use a precomputed Gram matrix to speed up calculations. If set to ``'auto'`` let us decide. The Gram matrix cannot be passed as argument since we will use only subsets of X. cv : int, cross-validation generator or an iterable, optional Determines the cross-validation splitting strategy. Possible inputs for cv are: - None, to use the default 3-fold cross-validation, - integer, to specify the number of folds. - An object to be used as a cross-validation generator. - An iterable yielding train/test splits. For integer/None inputs, :class:`KFold` is used. Refer :ref:`User Guide <cross_validation>` for the various cross-validation strategies that can be used here. 
max_n_alphas : integer, optional The maximum number of points on the path used to compute the residuals in the cross-validation n_jobs : integer, optional Number of CPUs to use during the cross validation. If ``-1``, use all the CPUs eps : float, optional The machine-precision regularization in the computation of the Cholesky diagonal factors. Increase this for very ill-conditioned systems. copy_X : boolean, optional, default True If True, X will be copied; else, it may be overwritten. positive : boolean (default=False) Restrict coefficients to be >= 0. Be aware that you might want to remove fit_intercept which is set True by default. Under the positive restriction the model coefficients do not converge to the ordinary-least-squares solution for small values of alpha. Only coefficients up to the smallest alpha value (``alphas_[alphas_ > 0.].min()`` when fit_path=True) reached by the stepwise Lars-Lasso algorithm are typically in congruence with the solution of the coordinate descent Lasso estimator. As a consequence using LassoLarsCV only makes sense for problems where a sparse solution is expected and/or reached. Attributes ---------- coef_ : array, shape (n_features,) parameter vector (w in the formulation formula) intercept_ : float independent term in decision function. coef_path_ : array, shape (n_features, n_alphas) the varying values of the coefficients along the path alpha_ : float the estimated regularization parameter alpha alphas_ : array, shape (n_alphas,) the different values of alpha along the path cv_alphas_ : array, shape (n_cv_alphas,) all the values of alpha along the path for the different folds mse_path_ : array, shape (n_folds, n_cv_alphas) the mean square error on left-out for each fold along the path (alpha values given by ``cv_alphas``) n_iter_ : array-like or int the number of iterations run by Lars with the optimal alpha. Notes ----- The object solves the same problem as the LassoCV object. 
However, unlike the LassoCV, it find the relevant alphas values by itself. In general, because of this property, it will be more stable. However, it is more fragile to heavily multicollinear datasets. It is more efficient than the LassoCV if only a small number of features are selected compared to the total number, for instance if there are very few samples compared to the number of features. See also -------- lars_path, LassoLars, LarsCV, LassoCV """ method = 'lasso' def __init__(self, fit_intercept=True, verbose=False, max_iter=500, normalize=True, precompute='auto', cv=None, max_n_alphas=1000, n_jobs=1, eps=np.finfo(np.float).eps, copy_X=True, positive=False): self.fit_intercept = fit_intercept self.verbose = verbose self.max_iter = max_iter self.normalize = normalize self.precompute = precompute self.cv = cv self.max_n_alphas = max_n_alphas self.n_jobs = n_jobs self.eps = eps self.copy_X = copy_X self.positive = positive # XXX : we don't use super(LarsCV, self).__init__ # to avoid setting n_nonzero_coefs class LassoLarsIC(LassoLars): """Lasso model fit with Lars using BIC or AIC for model selection The optimization objective for Lasso is:: (1 / (2 * n_samples)) * ||y - Xw||^2_2 + alpha * ||w||_1 AIC is the Akaike information criterion and BIC is the Bayes Information criterion. Such criteria are useful to select the value of the regularization parameter by making a trade-off between the goodness of fit and the complexity of the model. A good model should explain well the data while being simple. Read more in the :ref:`User Guide <least_angle_regression>`. Parameters ---------- criterion : 'bic' | 'aic' The type of criterion to use. fit_intercept : boolean whether to calculate the intercept for this model. If set to false, no intercept will be used in calculations (e.g. data is expected to be already centered). 
verbose : boolean or integer, optional Sets the verbosity amount normalize : boolean, optional, default True This parameter is ignored when ``fit_intercept`` is set to False. If True, the regressors X will be normalized before regression by subtracting the mean and dividing by the l2-norm. If you wish to standardize, please use :class:`sklearn.preprocessing.StandardScaler` before calling ``fit`` on an estimator with ``normalize=False``. precompute : True | False | 'auto' | array-like Whether to use a precomputed Gram matrix to speed up calculations. If set to ``'auto'`` let us decide. The Gram matrix can also be passed as argument. max_iter : integer, optional Maximum number of iterations to perform. Can be used for early stopping. eps : float, optional The machine-precision regularization in the computation of the Cholesky diagonal factors. Increase this for very ill-conditioned systems. Unlike the ``tol`` parameter in some iterative optimization-based algorithms, this parameter does not control the tolerance of the optimization. copy_X : boolean, optional, default True If True, X will be copied; else, it may be overwritten. positive : boolean (default=False) Restrict coefficients to be >= 0. Be aware that you might want to remove fit_intercept which is set True by default. Under the positive restriction the model coefficients do not converge to the ordinary-least-squares solution for small values of alpha. Only coefficients up to the smallest alpha value (``alphas_[alphas_ > 0.].min()`` when fit_path=True) reached by the stepwise Lars-Lasso algorithm are typically in congruence with the solution of the coordinate descent Lasso estimator. As a consequence using LassoLarsIC only makes sense for problems where a sparse solution is expected and/or reached. Attributes ---------- coef_ : array, shape (n_features,) parameter vector (w in the formulation formula) intercept_ : float independent term in decision function. 
alpha_ : float the alpha parameter chosen by the information criterion n_iter_ : int number of iterations run by lars_path to find the grid of alphas. criterion_ : array, shape (n_alphas,) The value of the information criteria ('aic', 'bic') across all alphas. The alpha which has the smallest information criterion is chosen. This value is larger by a factor of ``n_samples`` compared to Eqns. 2.15 and 2.16 in (Zou et al, 2007). Examples -------- >>> from sklearn import linear_model >>> reg = linear_model.LassoLarsIC(criterion='bic') >>> reg.fit([[-1, 1], [0, 0], [1, 1]], [-1.1111, 0, -1.1111]) ... # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE LassoLarsIC(copy_X=True, criterion='bic', eps=..., fit_intercept=True, max_iter=500, normalize=True, positive=False, precompute='auto', verbose=False) >>> print(reg.coef_) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE [ 0. -1.11...] Notes ----- The estimation of the number of degrees of freedom is given by: "On the degrees of freedom of the lasso" Hui Zou, Trevor Hastie, and Robert Tibshirani Ann. Statist. Volume 35, Number 5 (2007), 2173-2192. https://en.wikipedia.org/wiki/Akaike_information_criterion https://en.wikipedia.org/wiki/Bayesian_information_criterion See also -------- lars_path, LassoLars, LassoLarsCV """ def __init__(self, criterion='aic', fit_intercept=True, verbose=False, normalize=True, precompute='auto', max_iter=500, eps=np.finfo(np.float).eps, copy_X=True, positive=False): self.criterion = criterion self.fit_intercept = fit_intercept self.positive = positive self.max_iter = max_iter self.verbose = verbose self.normalize = normalize self.copy_X = copy_X self.precompute = precompute self.eps = eps self.fit_path = True def fit(self, X, y, copy_X=True): """Fit the model using X, y as training data. Parameters ---------- X : array-like, shape (n_samples, n_features) training data. y : array-like, shape (n_samples,) target values. 
Will be cast to X's dtype if necessary copy_X : boolean, optional, default True If ``True``, X will be copied; else, it may be overwritten. Returns ------- self : object returns an instance of self. """ X, y = check_X_y(X, y, y_numeric=True) X, y, Xmean, ymean, Xstd = LinearModel._preprocess_data( X, y, self.fit_intercept, self.normalize, self.copy_X) max_iter = self.max_iter Gram = self.precompute alphas_, active_, coef_path_, self.n_iter_ = lars_path( X, y, Gram=Gram, copy_X=copy_X, copy_Gram=True, alpha_min=0.0, method='lasso', verbose=self.verbose, max_iter=max_iter, eps=self.eps, return_n_iter=True, positive=self.positive) n_samples = X.shape[0] if self.criterion == 'aic': K = 2 # AIC elif self.criterion == 'bic': K = log(n_samples) # BIC else: raise ValueError('criterion should be either bic or aic') R = y[:, np.newaxis] - np.dot(X, coef_path_) # residuals mean_squared_error = np.mean(R ** 2, axis=0) sigma2 = np.var(y) df = np.zeros(coef_path_.shape[1], dtype=np.int) # Degrees of freedom for k, coef in enumerate(coef_path_.T): mask = np.abs(coef) > np.finfo(coef.dtype).eps if not np.any(mask): continue # get the number of degrees of freedom equal to: # Xc = X[:, mask] # Trace(Xc * inv(Xc.T, Xc) * Xc.T) ie the number of non-zero coefs df[k] = np.sum(mask) self.alphas_ = alphas_ eps64 = np.finfo('float64').eps self.criterion_ = (n_samples * mean_squared_error / (sigma2 + eps64) + K * df) # Eqns. 2.15--16 in (Zou et al, 2007) n_best = np.argmin(self.criterion_) self.alpha_ = alphas_[n_best] self.coef_ = coef_path_[:, n_best] self._set_intercept(Xmean, ymean, Xstd) return self
bsd-3-clause
etianen/cms
src/cms/apps/news/admin.py
2
1829
"""Admin settings for the CMS news app."""
from django.contrib import admin

from cms.admin import PageBaseAdmin
from cms.apps.news.models import Category, Article


class CategoryAdmin(PageBaseAdmin):

    """Admin settings for the Category model."""

    # A single primary-content field plus the standard page-base groups.
    fieldsets = (
        PageBaseAdmin.TITLE_FIELDS,
        ("Content", {"fields": ("content_primary",)}),
        PageBaseAdmin.PUBLICATION_FIELDS,
        PageBaseAdmin.NAVIGATION_FIELDS,
        PageBaseAdmin.SEO_FIELDS,
    )


admin.site.register(Category, CategoryAdmin)


class ArticleAdmin(PageBaseAdmin):

    """Admin settings for the Article model."""

    date_hierarchy = "date"

    search_fields = PageBaseAdmin.search_fields + ("content", "summary")

    list_display = ("title", "date", "is_online", "get_date_modified")

    list_filter = ("is_online", "categories")

    raw_id_fields = ("image",)

    filter_horizontal = ("categories", "authors")

    fieldsets = (
        (None, {
            "fields": ("title", "url_title", "news_feed", "date"),
        }),
        ("Content", {
            "fields": ("image", "content", "summary"),
        }),
        ("Publication", {
            "fields": ("categories", "authors", "is_online"),
            "classes": ("collapse",),
        }),
        PageBaseAdmin.NAVIGATION_FIELDS,
        PageBaseAdmin.SEO_FIELDS,
    )

    def save_related(self, request, form, formsets, change):
        """Save related objects, attributing brand-new articles to their creator.

        When an article is first created with no explicit authors, the
        current request user is added as its author.
        """
        super(ArticleAdmin, self).save_related(request, form, formsets, change)
        creating = not change
        if creating and not form.cleaned_data["authors"]:
            form.instance.authors.add(request.user)


admin.site.register(Article, ArticleAdmin)
bsd-3-clause
IndonesiaX/edx-platform
common/test/acceptance/pages/studio/textbooks.py
65
2540
"""
Course Textbooks page.
"""
import requests
from path import Path as path
from ..common.utils import click_css
from .course_page import CoursePage


class TextbooksPage(CoursePage):
    """
    Page object for the Studio course textbooks page.
    """
    url_path = "textbooks"

    def is_browser_on_page(self):
        """True when the textbooks view body class is present."""
        return self.q(css='body.view-textbooks').present

    def open_add_textbook_form(self):
        """
        Reveal the new-textbook form by clicking the new-textbook button.
        """
        self.q(css='.nav-item .new-button').click()

    def get_element_text(self, selector):
        """
        Return the text of the first element matching *selector*.
        """
        matches = self.q(css=selector)
        return matches[0].text

    def set_input_field_value(self, selector, value):
        """
        Type *value* into the first input matching *selector*.
        """
        field = self.q(css=selector)[0]
        field.send_keys(value)

    def upload_pdf_file(self, file_name):
        """
        Upload the named pdf fixture file through the upload modal.
        """
        fixture_dir = path(__file__).abspath().dirname().dirname().dirname()
        pdf_path = fixture_dir + '/data/uploads/' + file_name
        # Toggle the pdf upload section on, then drive the modal dialog.
        click_css(self, ".edit-textbook .action-upload", require_notification=False)
        self.wait_for_element_visibility(".upload-dialog input", "Upload modal opened")
        self.q(css=".upload-dialog input").results[0].send_keys(pdf_path)
        click_css(self, ".wrapper-modal-window-assetupload .action-upload",
                  require_notification=False)
        self.wait_for_element_absence(".modal-window-overlay", "Upload modal closed")

    def click_textbook_submit_button(self):
        """
        Submit the add/edit textbook form and wait for it to close.
        """
        submit_css = '#edit_textbook_form button[type="submit"]'
        self.wait_for_element_visibility(submit_css, 'Save button visibility')
        self.q(css=submit_css).first.click()
        self.wait_for_element_absence(".wrapper-form", "Add/Edit form closed")

    def is_view_live_link_worked(self):
        """
        True when the textbook's "view live" link responds with HTTP 200.
        """
        try:
            self.wait_for(
                lambda: len(self.q(css='.textbook a.view').attrs('href')) > 0,
                "href value present")
            live_url = self.q(css='.textbook a.view').attrs('href')[0]
            response = requests.get(live_url)
        except requests.exceptions.ConnectionError:
            return False
        return response.status_code == 200
agpl-3.0
povellesto/blobygames
Blob Rage App/random/blobrage.py
1
5331
import random
from Tkinter import *

root = Tk()

print("")
print("Welcome to Blob Rage Beta 2")
print("Instructions:")
print("To kill blobs you click them until they are dead. You do 2.5 damage to a blob on contact.")
print("")
print("Blobs:")
print("Green Blob: 10 to 50 health, and speed equals a range of -2 to 2.")
print("Red Blob: 100 heath, and a speed of 3.")
print("Blue Blob: Health of 300, and a speed of 5.")
print ("BOSS BLOB: COLOR ORANGE, HEALTH OF 500, AND SPEED OF 9")
print("")

# Read the level as text and convert explicitly.  The original used
# input(), which in Python 2 eval()s the typed text: it crashes on an
# empty line and executes arbitrary expressions.
level = int(raw_input("Please enter a level: "))


class Blob:
    """Green blob: random 10-50 health, speed equal to the chosen level.

    Also the shared base class for all blob variants (the original file
    copy-pasted the movement/damage/draw code four times).
    """

    fill = "green"  # canvas colour used when drawing this variant

    def __init__(self):
        self.health = random.randrange(10, 50)
        self.damage = 2.5
        self.x = random.randrange(0, 100)
        self.y = random.randrange(0, 150)
        self.speedx = level
        self.speedy = level

    def forward(self):
        """Advance one animation step, bouncing off the canvas edges."""
        self.x -= self.speedx
        self.y += self.speedy
        if self.x > 200 or self.x < 0:
            self.speedx = -self.speedx
        if self.y > 250 or self.y < 0:
            self.speedy = -self.speedy

    def healthloss(self, x, y):
        """Apply click damage when point (x, y) falls inside this blob."""
        if self.x < x < self.x + 10 and self.y < y < self.y + 10:
            self.health -= 10

    def destroyblob(self):
        """Redraw the blob while it is still alive (dead blobs vanish)."""
        if self.health > 0:
            canvas.create_oval(self.x, self.y, self.x + 10, self.y + 10,
                               fill=self.fill)


class redBlob(Blob):
    """Red blob: 100 health, one faster than the level speed."""

    fill = "red"

    def __init__(self):
        Blob.__init__(self)
        self.health = 100
        self.speedx = level + 1
        self.speedy = level + 1


class blueBlob(Blob):
    """Blue blob: 300 health, double the level speed."""

    fill = "blue"

    def __init__(self):
        Blob.__init__(self)
        self.health = 300
        self.speedx = level * 2
        self.speedy = level * 2


class bossBlob(Blob):
    """Boss blob: 500 health, much faster than the level speed."""

    fill = "darkorange"

    def __init__(self):
        Blob.__init__(self)
        self.health = 500
        # Same speed rule for both axes (the original duplicated the check).
        speed = level + 6 if level < 1 else level ** 2
        self.speedx = speed
        self.speedy = speed


def stopgame():
    """Close the window once every blob is dead.

    NOTE(review): this was dead code (a never-called method buried inside
    bossBlob that only inspected the green list).  It is kept as an unused
    module-level helper covering all blobs, preserving the original
    behaviour of never being invoked automatically.
    """
    for blob in all_blobs:
        if blob.health > 0:
            break
    else:
        root.destroy()


def callback(event):
    """Mouse-click handler: damage every blob hit by the click.

    Bug fix: the original only checked the green `blobs` list, so red,
    blue and boss blobs could never be damaged.
    """
    for blob in all_blobs:
        blob.healthloss(event.x, event.y)


def timerFired(canvas):
    """Animation tick: clear the canvas, then move and redraw live blobs."""
    canvas.delete(ALL)
    delay = 1  # milliseconds between frames
    for blob in all_blobs:
        blob.forward()
        blob.destroyblob()
    canvas.after(delay, timerFired, canvas)


blobs = [Blob() for _ in range(5)]
redblobs = [redBlob() for _ in range(5)]
blueblobs = [blueBlob() for _ in range(9)]
bossblobs = [bossBlob() for _ in range(1)]
# Single flat list used by the click handler and the animation loop.
all_blobs = blobs + redblobs + blueblobs + bossblobs

canvas = Canvas(root, width=200, height=250)
canvas.bind("<Button-1>", callback)
canvas.pack()
root.resizable(width=0, height=0)
timerFired(canvas)
root.wm_title("Blob Rage")

# The original read input() here (eval in Python 2, SyntaxError on an
# empty line) and compared the result against the quit *builtin*.
# Press Enter to start playing, or type "quit" to abort.
command = raw_input("")
if command == "quit":
    root.destroy()
else:
    root.mainloop()
unlicense
cpyou/odoo
openerp/addons/base/ir/ir_cron.py
276
15096
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-TODAY OpenERP S.A. <http://www.openerp.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import logging import threading import time import psycopg2 from datetime import datetime from dateutil.relativedelta import relativedelta import pytz import openerp from openerp import SUPERUSER_ID, netsvc, api from openerp.osv import fields, osv from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT from openerp.tools.safe_eval import safe_eval as eval from openerp.tools.translate import _ from openerp.modules import load_information_from_description_file _logger = logging.getLogger(__name__) BASE_VERSION = load_information_from_description_file('base')['version'] def str2tuple(s): return eval('tuple(%s)' % (s or '')) _intervalTypes = { 'work_days': lambda interval: relativedelta(days=interval), 'days': lambda interval: relativedelta(days=interval), 'hours': lambda interval: relativedelta(hours=interval), 'weeks': lambda interval: relativedelta(days=7*interval), 'months': lambda interval: relativedelta(months=interval), 'minutes': lambda interval: relativedelta(minutes=interval), } class ir_cron(osv.osv): """ Model describing cron 
jobs (also called actions or tasks). """ # TODO: perhaps in the future we could consider a flag on ir.cron jobs # that would cause database wake-up even if the database has not been # loaded yet or was already unloaded (e.g. 'force_db_wakeup' or something) # See also openerp.cron _name = "ir.cron" _order = 'name' _columns = { 'name': fields.char('Name', required=True), 'user_id': fields.many2one('res.users', 'User', required=True), 'active': fields.boolean('Active'), 'interval_number': fields.integer('Interval Number',help="Repeat every x."), 'interval_type': fields.selection( [('minutes', 'Minutes'), ('hours', 'Hours'), ('work_days','Work Days'), ('days', 'Days'),('weeks', 'Weeks'), ('months', 'Months')], 'Interval Unit'), 'numbercall': fields.integer('Number of Calls', help='How many times the method is called,\na negative number indicates no limit.'), 'doall' : fields.boolean('Repeat Missed', help="Specify if missed occurrences should be executed when the server restarts."), 'nextcall' : fields.datetime('Next Execution Date', required=True, help="Next planned execution date for this job."), 'model': fields.char('Object', help="Model name on which the method to be called is located, e.g. 'res.partner'."), 'function': fields.char('Method', help="Name of the method to be called when this job is processed."), 'args': fields.text('Arguments', help="Arguments to be passed to the method, e.g. 
(uid,)."), 'priority': fields.integer('Priority', help='The priority of the job, as an integer: 0 means higher priority, 10 means lower priority.') } _defaults = { 'nextcall' : lambda *a: time.strftime(DEFAULT_SERVER_DATETIME_FORMAT), 'priority' : 5, 'user_id' : lambda obj,cr,uid,context: uid, 'interval_number' : 1, 'interval_type' : 'months', 'numbercall' : 1, 'active' : 1, } def _check_args(self, cr, uid, ids, context=None): try: for this in self.browse(cr, uid, ids, context): str2tuple(this.args) except Exception: return False return True _constraints = [ (_check_args, 'Invalid arguments', ['args']), ] def _handle_callback_exception(self, cr, uid, model_name, method_name, args, job_id, job_exception): """ Method called when an exception is raised by a job. Simply logs the exception and rollback the transaction. :param model_name: model name on which the job method is located. :param method_name: name of the method to call when this job is processed. :param args: arguments of the method (without the usual self, cr, uid). :param job_id: job id. :param job_exception: exception raised by the job. """ cr.rollback() _logger.exception("Call of self.pool.get('%s').%s(cr, uid, *%r) failed in Job %s" % (model_name, method_name, args, job_id)) def _callback(self, cr, uid, model_name, method_name, args, job_id): """ Run the method associated to a given job It takes care of logging and exception handling. :param model_name: model name on which the job method is located. :param method_name: name of the method to call when this job is processed. :param args: arguments of the method (without the usual self, cr, uid). :param job_id: job id. 
""" try: args = str2tuple(args) openerp.modules.registry.RegistryManager.check_registry_signaling(cr.dbname) registry = openerp.registry(cr.dbname) if model_name in registry: model = registry[model_name] if hasattr(model, method_name): log_depth = (None if _logger.isEnabledFor(logging.DEBUG) else 1) netsvc.log(_logger, logging.DEBUG, 'cron.object.execute', (cr.dbname,uid,'*',model_name,method_name)+tuple(args), depth=log_depth) if _logger.isEnabledFor(logging.DEBUG): start_time = time.time() getattr(model, method_name)(cr, uid, *args) if _logger.isEnabledFor(logging.DEBUG): end_time = time.time() _logger.debug('%.3fs (%s, %s)' % (end_time - start_time, model_name, method_name)) openerp.modules.registry.RegistryManager.signal_caches_change(cr.dbname) else: msg = "Method `%s.%s` does not exist." % (model_name, method_name) _logger.warning(msg) else: msg = "Model `%s` does not exist." % model_name _logger.warning(msg) except Exception, e: self._handle_callback_exception(cr, uid, model_name, method_name, args, job_id, e) def _process_job(self, job_cr, job, cron_cr): """ Run a given job taking care of the repetition. :param job_cr: cursor to use to execute the job, safe to commit/rollback :param job: job to be run (as a dictionary). :param cron_cr: cursor holding lock on the cron job row, to use to update the next exec date, must not be committed/rolled back! 
""" try: with api.Environment.manage(): now = fields.datetime.context_timestamp(job_cr, job['user_id'], datetime.now()) nextcall = fields.datetime.context_timestamp(job_cr, job['user_id'], datetime.strptime(job['nextcall'], DEFAULT_SERVER_DATETIME_FORMAT)) numbercall = job['numbercall'] ok = False while nextcall < now and numbercall: if numbercall > 0: numbercall -= 1 if not ok or job['doall']: self._callback(job_cr, job['user_id'], job['model'], job['function'], job['args'], job['id']) if numbercall: nextcall += _intervalTypes[job['interval_type']](job['interval_number']) ok = True addsql = '' if not numbercall: addsql = ', active=False' cron_cr.execute("UPDATE ir_cron SET nextcall=%s, numbercall=%s"+addsql+" WHERE id=%s", (nextcall.astimezone(pytz.UTC).strftime(DEFAULT_SERVER_DATETIME_FORMAT), numbercall, job['id'])) self.invalidate_cache(job_cr, SUPERUSER_ID) finally: job_cr.commit() cron_cr.commit() @classmethod def _acquire_job(cls, db_name): # TODO remove 'check' argument from addons/base_action_rule/base_action_rule.py """ Try to process one cron job. This selects in database all the jobs that should be processed. It then tries to lock each of them and, if it succeeds, run the cron job (if it doesn't succeed, it means the job was already locked to be taken care of by another thread) and return. If a job was processed, returns True, otherwise returns False. """ db = openerp.sql_db.db_connect(db_name) threading.current_thread().dbname = db_name cr = db.cursor() jobs = [] try: # Make sure the database we poll has the same version as the code of base cr.execute("SELECT 1 FROM ir_module_module WHERE name=%s AND latest_version=%s", ('base', BASE_VERSION)) if cr.fetchone(): # Careful to compare timestamps with 'UTC' - everything is UTC as of v6.1. 
cr.execute("""SELECT * FROM ir_cron WHERE numbercall != 0 AND active AND nextcall <= (now() at time zone 'UTC') ORDER BY priority""") jobs = cr.dictfetchall() else: _logger.warning('Skipping database %s as its base version is not %s.', db_name, BASE_VERSION) except psycopg2.ProgrammingError, e: if e.pgcode == '42P01': # Class 42 — Syntax Error or Access Rule Violation; 42P01: undefined_table # The table ir_cron does not exist; this is probably not an OpenERP database. _logger.warning('Tried to poll an undefined table on database %s.', db_name) else: raise except Exception: _logger.warning('Exception in cron:', exc_info=True) finally: cr.close() for job in jobs: lock_cr = db.cursor() try: # Try to grab an exclusive lock on the job row from within the task transaction # Restrict to the same conditions as for the search since the job may have already # been run by an other thread when cron is running in multi thread lock_cr.execute("""SELECT * FROM ir_cron WHERE numbercall != 0 AND active AND nextcall <= (now() at time zone 'UTC') AND id=%s FOR UPDATE NOWAIT""", (job['id'],), log_exceptions=False) locked_job = lock_cr.fetchone() if not locked_job: _logger.debug("Job `%s` already executed by another process/thread. 
skipping it", job['name']) continue # Got the lock on the job row, run its code _logger.debug('Starting job `%s`.', job['name']) job_cr = db.cursor() try: registry = openerp.registry(db_name) registry[cls._name]._process_job(job_cr, job, lock_cr) except Exception: _logger.exception('Unexpected exception while processing cron job %r', job) finally: job_cr.close() except psycopg2.OperationalError, e: if e.pgcode == '55P03': # Class 55: Object not in prerequisite state; 55P03: lock_not_available _logger.debug('Another process/thread is already busy executing job `%s`, skipping it.', job['name']) continue else: # Unexpected OperationalError raise finally: # we're exiting due to an exception while acquiring the lock lock_cr.close() if hasattr(threading.current_thread(), 'dbname'): # cron job could have removed it as side-effect del threading.current_thread().dbname def _try_lock(self, cr, uid, ids, context=None): """Try to grab a dummy exclusive write-lock to the rows with the given ids, to make sure a following write() or unlink() will not block due to a process currently executing those cron tasks""" try: cr.execute("""SELECT id FROM "%s" WHERE id IN %%s FOR UPDATE NOWAIT""" % self._table, (tuple(ids),), log_exceptions=False) except psycopg2.OperationalError: cr.rollback() # early rollback to allow translations to work for the user feedback raise osv.except_osv(_("Record cannot be modified right now"), _("This cron task is currently being executed and may not be modified, " "please try again in a few minutes")) def create(self, cr, uid, vals, context=None): res = super(ir_cron, self).create(cr, uid, vals, context=context) return res def write(self, cr, uid, ids, vals, context=None): self._try_lock(cr, uid, ids, context) res = super(ir_cron, self).write(cr, uid, ids, vals, context=context) return res def unlink(self, cr, uid, ids, context=None): self._try_lock(cr, uid, ids, context) res = super(ir_cron, self).unlink(cr, uid, ids, context=context) return res def 
try_write(self, cr, uid, ids, values, context=None): try: with cr.savepoint(): cr.execute("""SELECT id FROM "%s" WHERE id IN %%s FOR UPDATE NOWAIT""" % self._table, (tuple(ids),), log_exceptions=False) except psycopg2.OperationalError: pass else: return super(ir_cron, self).write(cr, uid, ids, values, context=context) return False def toggle(self, cr, uid, ids, model, domain, context=None): active = bool(self.pool[model].search_count(cr, uid, domain, context=context)) return self.try_write(cr, uid, ids, {'active': active}, context=context) # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
rubyinhell/brython
www/src/Lib/test/test_dict.py
21
29053
import unittest from test import support import collections, random, string import collections.abc import gc, weakref import pickle class DictTest(unittest.TestCase): def test_invalid_keyword_arguments(self): class Custom(dict): pass for invalid in {1 : 2}, Custom({1 : 2}): with self.assertRaises(TypeError): dict(**invalid) with self.assertRaises(TypeError): {}.update(**invalid) def test_constructor(self): # calling built-in types without argument must return empty self.assertEqual(dict(), {}) self.assertIsNot(dict(), {}) def test_literal_constructor(self): # check literal constructor for different sized dicts # (to exercise the BUILD_MAP oparg). for n in (0, 1, 6, 256, 400): items = [(''.join(random.sample(string.ascii_letters, 8)), i) for i in range(n)] random.shuffle(items) formatted_items = ('{!r}: {:d}'.format(k, v) for k, v in items) dictliteral = '{' + ', '.join(formatted_items) + '}' self.assertEqual(eval(dictliteral), dict(items)) def test_bool(self): self.assertIs(not {}, True) self.assertTrue({1: 2}) self.assertIs(bool({}), False) self.assertIs(bool({1: 2}), True) def test_keys(self): d = {} self.assertEqual(set(d.keys()), set()) d = {'a': 1, 'b': 2} k = d.keys() self.assertEqual(set(k), {'a', 'b'}) self.assertIn('a', k) self.assertIn('b', k) self.assertIn('a', d) self.assertIn('b', d) self.assertRaises(TypeError, d.keys, None) self.assertEqual(repr(dict(a=1).keys()), "dict_keys(['a'])") def test_values(self): d = {} self.assertEqual(set(d.values()), set()) d = {1:2} self.assertEqual(set(d.values()), {2}) self.assertRaises(TypeError, d.values, None) self.assertEqual(repr(dict(a=1).values()), "dict_values([1])") def test_items(self): d = {} self.assertEqual(set(d.items()), set()) d = {1:2} self.assertEqual(set(d.items()), {(1, 2)}) self.assertRaises(TypeError, d.items, None) self.assertEqual(repr(dict(a=1).items()), "dict_items([('a', 1)])") def test_contains(self): d = {} self.assertNotIn('a', d) self.assertFalse('a' in d) self.assertTrue('a' not in d) d 
= {'a': 1, 'b': 2} self.assertIn('a', d) self.assertIn('b', d) self.assertNotIn('c', d) self.assertRaises(TypeError, d.__contains__) def test_len(self): d = {} self.assertEqual(len(d), 0) d = {'a': 1, 'b': 2} self.assertEqual(len(d), 2) def test_getitem(self): d = {'a': 1, 'b': 2} self.assertEqual(d['a'], 1) self.assertEqual(d['b'], 2) d['c'] = 3 d['a'] = 4 self.assertEqual(d['c'], 3) self.assertEqual(d['a'], 4) del d['b'] self.assertEqual(d, {'a': 4, 'c': 3}) self.assertRaises(TypeError, d.__getitem__) class BadEq(object): def __eq__(self, other): raise Exc() def __hash__(self): return 24 d = {} d[BadEq()] = 42 self.assertRaises(KeyError, d.__getitem__, 23) class Exc(Exception): pass class BadHash(object): fail = False def __hash__(self): if self.fail: raise Exc() else: return 42 x = BadHash() d[x] = 42 x.fail = True self.assertRaises(Exc, d.__getitem__, x) def test_clear(self): d = {1:1, 2:2, 3:3} d.clear() self.assertEqual(d, {}) self.assertRaises(TypeError, d.clear, None) def test_update(self): d = {} d.update({1:100}) d.update({2:20}) d.update({1:1, 2:2, 3:3}) self.assertEqual(d, {1:1, 2:2, 3:3}) d.update() self.assertEqual(d, {1:1, 2:2, 3:3}) self.assertRaises((TypeError, AttributeError), d.update, None) class SimpleUserDict: def __init__(self): self.d = {1:1, 2:2, 3:3} def keys(self): return self.d.keys() def __getitem__(self, i): return self.d[i] d.clear() d.update(SimpleUserDict()) self.assertEqual(d, {1:1, 2:2, 3:3}) class Exc(Exception): pass d.clear() class FailingUserDict: def keys(self): raise Exc self.assertRaises(Exc, d.update, FailingUserDict()) class FailingUserDict: def keys(self): class BogonIter: def __init__(self): self.i = 1 def __iter__(self): return self def __next__(self): if self.i: self.i = 0 return 'a' raise Exc return BogonIter() def __getitem__(self, key): return key self.assertRaises(Exc, d.update, FailingUserDict()) class FailingUserDict: def keys(self): class BogonIter: def __init__(self): self.i = ord('a') def __iter__(self): 
return self def __next__(self): if self.i <= ord('z'): rtn = chr(self.i) self.i += 1 return rtn raise StopIteration return BogonIter() def __getitem__(self, key): raise Exc self.assertRaises(Exc, d.update, FailingUserDict()) class badseq(object): def __iter__(self): return self def __next__(self): raise Exc() self.assertRaises(Exc, {}.update, badseq()) self.assertRaises(ValueError, {}.update, [(1, 2, 3)]) def test_fromkeys(self): self.assertEqual(dict.fromkeys('abc'), {'a':None, 'b':None, 'c':None}) d = {} self.assertIsNot(d.fromkeys('abc'), d) self.assertEqual(d.fromkeys('abc'), {'a':None, 'b':None, 'c':None}) self.assertEqual(d.fromkeys((4,5),0), {4:0, 5:0}) self.assertEqual(d.fromkeys([]), {}) def g(): yield 1 self.assertEqual(d.fromkeys(g()), {1:None}) self.assertRaises(TypeError, {}.fromkeys, 3) class dictlike(dict): pass self.assertEqual(dictlike.fromkeys('a'), {'a':None}) self.assertEqual(dictlike().fromkeys('a'), {'a':None}) self.assertIsInstance(dictlike.fromkeys('a'), dictlike) self.assertIsInstance(dictlike().fromkeys('a'), dictlike) class mydict(dict): def __new__(cls): return collections.UserDict() ud = mydict.fromkeys('ab') self.assertEqual(ud, {'a':None, 'b':None}) self.assertIsInstance(ud, collections.UserDict) self.assertRaises(TypeError, dict.fromkeys) class Exc(Exception): pass class baddict1(dict): def __init__(self): raise Exc() self.assertRaises(Exc, baddict1.fromkeys, [1]) class BadSeq(object): def __iter__(self): return self def __next__(self): raise Exc() self.assertRaises(Exc, dict.fromkeys, BadSeq()) class baddict2(dict): def __setitem__(self, key, value): raise Exc() self.assertRaises(Exc, baddict2.fromkeys, [1]) # test fast path for dictionary inputs d = dict(zip(range(6), range(6))) self.assertEqual(dict.fromkeys(d, 0), dict(zip(range(6), [0]*6))) class baddict3(dict): def __new__(cls): return d d = {i : i for i in range(10)} res = d.copy() res.update(a=None, b=None, c=None) self.assertEqual(baddict3.fromkeys({"a", "b", "c"}), res) def 
test_copy(self): d = {1:1, 2:2, 3:3} self.assertEqual(d.copy(), {1:1, 2:2, 3:3}) self.assertEqual({}.copy(), {}) self.assertRaises(TypeError, d.copy, None) def test_get(self): d = {} self.assertIs(d.get('c'), None) self.assertEqual(d.get('c', 3), 3) d = {'a': 1, 'b': 2} self.assertIs(d.get('c'), None) self.assertEqual(d.get('c', 3), 3) self.assertEqual(d.get('a'), 1) self.assertEqual(d.get('a', 3), 1) self.assertRaises(TypeError, d.get) self.assertRaises(TypeError, d.get, None, None, None) def test_setdefault(self): # dict.setdefault() d = {} self.assertIs(d.setdefault('key0'), None) d.setdefault('key0', []) self.assertIs(d.setdefault('key0'), None) d.setdefault('key', []).append(3) self.assertEqual(d['key'][0], 3) d.setdefault('key', []).append(4) self.assertEqual(len(d['key']), 2) self.assertRaises(TypeError, d.setdefault) class Exc(Exception): pass class BadHash(object): fail = False def __hash__(self): if self.fail: raise Exc() else: return 42 x = BadHash() d[x] = 42 x.fail = True self.assertRaises(Exc, d.setdefault, x, []) def test_setdefault_atomic(self): # Issue #13521: setdefault() calls __hash__ and __eq__ only once. 
class Hashed(object): def __init__(self): self.hash_count = 0 self.eq_count = 0 def __hash__(self): self.hash_count += 1 return 42 def __eq__(self, other): self.eq_count += 1 return id(self) == id(other) hashed1 = Hashed() y = {hashed1: 5} hashed2 = Hashed() y.setdefault(hashed2, []) self.assertEqual(hashed1.hash_count, 1) self.assertEqual(hashed2.hash_count, 1) self.assertEqual(hashed1.eq_count + hashed2.eq_count, 1) def test_setitem_atomic_at_resize(self): class Hashed(object): def __init__(self): self.hash_count = 0 self.eq_count = 0 def __hash__(self): self.hash_count += 1 return 42 def __eq__(self, other): self.eq_count += 1 return id(self) == id(other) hashed1 = Hashed() # 5 items y = {hashed1: 5, 0: 0, 1: 1, 2: 2, 3: 3} hashed2 = Hashed() # 6th item forces a resize y[hashed2] = [] self.assertEqual(hashed1.hash_count, 1) self.assertEqual(hashed2.hash_count, 1) self.assertEqual(hashed1.eq_count + hashed2.eq_count, 1) def test_popitem(self): # dict.popitem() for copymode in -1, +1: # -1: b has same structure as a # +1: b is a.copy() for log2size in range(12): size = 2**log2size a = {} b = {} for i in range(size): a[repr(i)] = i if copymode < 0: b[repr(i)] = i if copymode > 0: b = a.copy() for i in range(size): ka, va = ta = a.popitem() self.assertEqual(va, int(ka)) kb, vb = tb = b.popitem() self.assertEqual(vb, int(kb)) self.assertFalse(copymode < 0 and ta != tb) self.assertFalse(a) self.assertFalse(b) d = {} self.assertRaises(KeyError, d.popitem) def test_pop(self): # Tests for pop with specified key d = {} k, v = 'abc', 'def' d[k] = v self.assertRaises(KeyError, d.pop, 'ghi') self.assertEqual(d.pop(k), v) self.assertEqual(len(d), 0) self.assertRaises(KeyError, d.pop, k) self.assertEqual(d.pop(k, v), v) d[k] = v self.assertEqual(d.pop(k, 1), v) self.assertRaises(TypeError, d.pop) class Exc(Exception): pass class BadHash(object): fail = False def __hash__(self): if self.fail: raise Exc() else: return 42 x = BadHash() d[x] = 42 x.fail = True 
self.assertRaises(Exc, d.pop, x) def test_mutating_iteration(self): # changing dict size during iteration d = {} d[1] = 1 with self.assertRaises(RuntimeError): for i in d: d[i+1] = 1 def test_mutating_lookup(self): # changing dict during a lookup (issue #14417) class NastyKey: mutate_dict = None def __init__(self, value): self.value = value def __hash__(self): # hash collision! return 1 def __eq__(self, other): if NastyKey.mutate_dict: mydict, key = NastyKey.mutate_dict NastyKey.mutate_dict = None del mydict[key] return self.value == other.value key1 = NastyKey(1) key2 = NastyKey(2) d = {key1: 1} NastyKey.mutate_dict = (d, key1) d[key2] = 2 self.assertEqual(d, {key2: 2}) def test_repr(self): d = {} self.assertEqual(repr(d), '{}') d[1] = 2 self.assertEqual(repr(d), '{1: 2}') d = {} d[1] = d self.assertEqual(repr(d), '{1: {...}}') class Exc(Exception): pass class BadRepr(object): def __repr__(self): raise Exc() d = {1: BadRepr()} self.assertRaises(Exc, repr, d) def test_eq(self): self.assertEqual({}, {}) self.assertEqual({1: 2}, {1: 2}) class Exc(Exception): pass class BadCmp(object): def __eq__(self, other): raise Exc() def __hash__(self): return 1 d1 = {BadCmp(): 1} d2 = {1: 1} with self.assertRaises(Exc): d1 == d2 def test_keys_contained(self): self.helper_keys_contained(lambda x: x.keys()) self.helper_keys_contained(lambda x: x.items()) def helper_keys_contained(self, fn): # Test rich comparisons against dict key views, which should behave the # same as sets. 
empty = fn(dict()) empty2 = fn(dict()) smaller = fn({1:1, 2:2}) larger = fn({1:1, 2:2, 3:3}) larger2 = fn({1:1, 2:2, 3:3}) larger3 = fn({4:1, 2:2, 3:3}) self.assertTrue(smaller < larger) self.assertTrue(smaller <= larger) self.assertTrue(larger > smaller) self.assertTrue(larger >= smaller) self.assertFalse(smaller >= larger) self.assertFalse(smaller > larger) self.assertFalse(larger <= smaller) self.assertFalse(larger < smaller) self.assertFalse(smaller < larger3) self.assertFalse(smaller <= larger3) self.assertFalse(larger3 > smaller) self.assertFalse(larger3 >= smaller) # Inequality strictness self.assertTrue(larger2 >= larger) self.assertTrue(larger2 <= larger) self.assertFalse(larger2 > larger) self.assertFalse(larger2 < larger) self.assertTrue(larger == larger2) self.assertTrue(smaller != larger) # There is an optimization on the zero-element case. self.assertTrue(empty == empty2) self.assertFalse(empty != empty2) self.assertFalse(empty == smaller) self.assertTrue(empty != smaller) # With the same size, an elementwise compare happens self.assertTrue(larger != larger3) self.assertFalse(larger == larger3) def test_errors_in_view_containment_check(self): class C: def __eq__(self, other): raise RuntimeError d1 = {1: C()} d2 = {1: C()} with self.assertRaises(RuntimeError): d1.items() == d2.items() with self.assertRaises(RuntimeError): d1.items() != d2.items() with self.assertRaises(RuntimeError): d1.items() <= d2.items() with self.assertRaises(RuntimeError): d1.items() >= d2.items() d3 = {1: C(), 2: C()} with self.assertRaises(RuntimeError): d2.items() < d3.items() with self.assertRaises(RuntimeError): d3.items() > d2.items() def test_dictview_set_operations_on_keys(self): k1 = {1:1, 2:2}.keys() k2 = {1:1, 2:2, 3:3}.keys() k3 = {4:4}.keys() self.assertEqual(k1 - k2, set()) self.assertEqual(k1 - k3, {1,2}) self.assertEqual(k2 - k1, {3}) self.assertEqual(k3 - k1, {4}) self.assertEqual(k1 & k2, {1,2}) self.assertEqual(k1 & k3, set()) self.assertEqual(k1 | k2, {1,2,3}) 
self.assertEqual(k1 ^ k2, {3}) self.assertEqual(k1 ^ k3, {1,2,4}) def test_dictview_set_operations_on_items(self): k1 = {1:1, 2:2}.items() k2 = {1:1, 2:2, 3:3}.items() k3 = {4:4}.items() self.assertEqual(k1 - k2, set()) self.assertEqual(k1 - k3, {(1,1), (2,2)}) self.assertEqual(k2 - k1, {(3,3)}) self.assertEqual(k3 - k1, {(4,4)}) self.assertEqual(k1 & k2, {(1,1), (2,2)}) self.assertEqual(k1 & k3, set()) self.assertEqual(k1 | k2, {(1,1), (2,2), (3,3)}) self.assertEqual(k1 ^ k2, {(3,3)}) self.assertEqual(k1 ^ k3, {(1,1), (2,2), (4,4)}) def test_dictview_mixed_set_operations(self): # Just a few for .keys() self.assertTrue({1:1}.keys() == {1}) self.assertTrue({1} == {1:1}.keys()) self.assertEqual({1:1}.keys() | {2}, {1, 2}) self.assertEqual({2} | {1:1}.keys(), {1, 2}) # And a few for .items() self.assertTrue({1:1}.items() == {(1,1)}) self.assertTrue({(1,1)} == {1:1}.items()) self.assertEqual({1:1}.items() | {2}, {(1,1), 2}) self.assertEqual({2} | {1:1}.items(), {(1,1), 2}) def test_missing(self): # Make sure dict doesn't have a __missing__ method self.assertFalse(hasattr(dict, "__missing__")) self.assertFalse(hasattr({}, "__missing__")) # Test several cases: # (D) subclass defines __missing__ method returning a value # (E) subclass defines __missing__ method raising RuntimeError # (F) subclass sets __missing__ instance variable (no effect) # (G) subclass doesn't define __missing__ at a all class D(dict): def __missing__(self, key): return 42 d = D({1: 2, 3: 4}) self.assertEqual(d[1], 2) self.assertEqual(d[3], 4) self.assertNotIn(2, d) self.assertNotIn(2, d.keys()) self.assertEqual(d[2], 42) class E(dict): def __missing__(self, key): raise RuntimeError(key) e = E() with self.assertRaises(RuntimeError) as c: e[42] self.assertEqual(c.exception.args, (42,)) class F(dict): def __init__(self): # An instance variable __missing__ should have no effect self.__missing__ = lambda key: None f = F() with self.assertRaises(KeyError) as c: f[42] self.assertEqual(c.exception.args, 
(42,)) class G(dict): pass g = G() with self.assertRaises(KeyError) as c: g[42] self.assertEqual(c.exception.args, (42,)) def test_tuple_keyerror(self): # SF #1576657 d = {} with self.assertRaises(KeyError) as c: d[(1,)] self.assertEqual(c.exception.args, ((1,),)) def test_bad_key(self): # Dictionary lookups should fail if __eq__() raises an exception. class CustomException(Exception): pass class BadDictKey: def __hash__(self): return hash(self.__class__) def __eq__(self, other): if isinstance(other, self.__class__): raise CustomException return other d = {} x1 = BadDictKey() x2 = BadDictKey() d[x1] = 1 for stmt in ['d[x2] = 2', 'z = d[x2]', 'x2 in d', 'd.get(x2)', 'd.setdefault(x2, 42)', 'd.pop(x2)', 'd.update({x2: 2})']: with self.assertRaises(CustomException): exec(stmt, locals()) def test_resize1(self): # Dict resizing bug, found by Jack Jansen in 2.2 CVS development. # This version got an assert failure in debug build, infinite loop in # release build. Unfortunately, provoking this kind of stuff requires # a mix of inserts and deletes hitting exactly the right hash codes in # exactly the right order, and I can't think of a randomized approach # that would be *likely* to hit a failing case in reasonable time. d = {} for i in range(5): d[i] = i for i in range(5): del d[i] for i in range(5, 9): # i==8 was the problem d[i] = i def test_resize2(self): # Another dict resizing bug (SF bug #1456209). # This caused Segmentation faults or Illegal instructions. 
class X(object): def __hash__(self): return 5 def __eq__(self, other): if resizing: d.clear() return False d = {} resizing = False d[X()] = 1 d[X()] = 2 d[X()] = 3 d[X()] = 4 d[X()] = 5 # now trigger a resize resizing = True d[9] = 6 def test_empty_presized_dict_in_freelist(self): # Bug #3537: if an empty but presized dict with a size larger # than 7 was in the freelist, it triggered an assertion failure with self.assertRaises(ZeroDivisionError): d = {'a': 1 // 0, 'b': None, 'c': None, 'd': None, 'e': None, 'f': None, 'g': None, 'h': None} d = {} def test_container_iterator(self): # Bug #3680: tp_traverse was not implemented for dictiter and # dictview objects. class C(object): pass views = (dict.items, dict.values, dict.keys) for v in views: obj = C() ref = weakref.ref(obj) container = {obj: 1} obj.v = v(container) obj.x = iter(obj.v) del obj, container gc.collect() self.assertIs(ref(), None, "Cycle was not collected") def _not_tracked(self, t): # Nested containers can take several collections to untrack gc.collect() gc.collect() self.assertFalse(gc.is_tracked(t), t) def _tracked(self, t): self.assertTrue(gc.is_tracked(t), t) gc.collect() gc.collect() self.assertTrue(gc.is_tracked(t), t) @support.cpython_only def test_track_literals(self): # Test GC-optimization of dict literals x, y, z, w = 1.5, "a", (1, None), [] self._not_tracked({}) self._not_tracked({x:(), y:x, z:1}) self._not_tracked({1: "a", "b": 2}) self._not_tracked({1: 2, (None, True, False, ()): int}) self._not_tracked({1: object()}) # Dicts with mutable elements are always tracked, even if those # elements are not tracked right now. 
self._tracked({1: []}) self._tracked({1: ([],)}) self._tracked({1: {}}) self._tracked({1: set()}) @support.cpython_only def test_track_dynamic(self): # Test GC-optimization of dynamically-created dicts class MyObject(object): pass x, y, z, w, o = 1.5, "a", (1, object()), [], MyObject() d = dict() self._not_tracked(d) d[1] = "a" self._not_tracked(d) d[y] = 2 self._not_tracked(d) d[z] = 3 self._not_tracked(d) self._not_tracked(d.copy()) d[4] = w self._tracked(d) self._tracked(d.copy()) d[4] = None self._not_tracked(d) self._not_tracked(d.copy()) # dd isn't tracked right now, but it may mutate and therefore d # which contains it must be tracked. d = dict() dd = dict() d[1] = dd self._not_tracked(dd) self._tracked(d) dd[1] = d self._tracked(dd) d = dict.fromkeys([x, y, z]) self._not_tracked(d) dd = dict() dd.update(d) self._not_tracked(dd) d = dict.fromkeys([x, y, z, o]) self._tracked(d) dd = dict() dd.update(d) self._tracked(dd) d = dict(x=x, y=y, z=z) self._not_tracked(d) d = dict(x=x, y=y, z=z, w=w) self._tracked(d) d = dict() d.update(x=x, y=y, z=z) self._not_tracked(d) d.update(w=w) self._tracked(d) d = dict([(x, y), (z, 1)]) self._not_tracked(d) d = dict([(x, y), (z, w)]) self._tracked(d) d = dict() d.update([(x, y), (z, 1)]) self._not_tracked(d) d.update([(x, y), (z, w)]) self._tracked(d) @support.cpython_only def test_track_subtypes(self): # Dict subtypes are always tracked class MyDict(dict): pass self._tracked(MyDict()) def test_iterator_pickling(self): data = {1:"a", 2:"b", 3:"c"} it = iter(data) d = pickle.dumps(it) it = pickle.loads(d) self.assertEqual(sorted(it), sorted(data)) it = pickle.loads(d) try: drop = next(it) except StopIteration: return d = pickle.dumps(it) it = pickle.loads(d) del data[drop] self.assertEqual(sorted(it), sorted(data)) def test_itemiterator_pickling(self): data = {1:"a", 2:"b", 3:"c"} # dictviews aren't picklable, only their iterators itorg = iter(data.items()) d = pickle.dumps(itorg) it = pickle.loads(d) # note that the type of 
type of the unpickled iterator # is not necessarily the same as the original. It is # merely an object supporting the iterator protocol, yielding # the same objects as the original one. # self.assertEqual(type(itorg), type(it)) #self.assertTrue(isinstance(it, collections.abc.Iterator)) self.assertEqual(dict(it), data) it = pickle.loads(d) drop = next(it) d = pickle.dumps(it) it = pickle.loads(d) del data[drop[0]] self.assertEqual(dict(it), data) def test_valuesiterator_pickling(self): data = {1:"a", 2:"b", 3:"c"} # data.values() isn't picklable, only its iterator it = iter(data.values()) d = pickle.dumps(it) it = pickle.loads(d) self.assertEqual(sorted(list(it)), sorted(list(data.values()))) it = pickle.loads(d) drop = next(it) d = pickle.dumps(it) it = pickle.loads(d) values = list(it) + [drop] self.assertEqual(sorted(values), sorted(list(data.values()))) def test_instance_dict_getattr_str_subclass(self): class Foo: def __init__(self, msg): self.msg = msg f = Foo('123') class _str(str): pass self.assertEqual(f.msg, getattr(f, _str('msg'))) self.assertEqual(f.msg, f.__dict__[_str('msg')]) def test_object_set_item_single_instance_non_str_key(self): class Foo: pass f = Foo() f.__dict__[1] = 1 f.a = 'a' self.assertEqual(f.__dict__, {1:1, 'a':'a'}) from test import mapping_tests class GeneralMappingTests(mapping_tests.BasicTestMappingProtocol): type2test = dict class Dict(dict): pass class SubclassMappingTests(mapping_tests.BasicTestMappingProtocol): type2test = Dict def test_main(): support.run_unittest( DictTest, GeneralMappingTests, SubclassMappingTests, ) if __name__ == "__main__": test_main()
bsd-3-clause
mattjmorrison/logilab-common-clone
optparser.py
25
3344
# -*- coding: utf-8 -*- # copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of logilab-common. # # logilab-common is free software: you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the Free # Software Foundation, either version 2.1 of the License, or (at your option) any # later version. # # logilab-common is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License along # with logilab-common. If not, see <http://www.gnu.org/licenses/>. """Extend OptionParser with commands. Example: >>> parser = OptionParser() >>> parser.usage = '%prog COMMAND [options] <arg> ...' >>> parser.add_command('build', 'mymod.build') >>> parser.add_command('clean', run_clean, add_opt_clean) >>> run, options, args = parser.parse_command(sys.argv[1:]) >>> return run(options, args[1:]) With mymod.build that defines two functions run and add_options """ __docformat__ = "restructuredtext en" from warnings import warn warn('lgc.optparser module is deprecated, use lgc.clcommands instead', DeprecationWarning, stacklevel=2) import sys import optparse class OptionParser(optparse.OptionParser): def __init__(self, *args, **kwargs): optparse.OptionParser.__init__(self, *args, **kwargs) self._commands = {} self.min_args, self.max_args = 0, 1 def add_command(self, name, mod_or_funcs, help=''): """name of the command, name of module or tuple of functions (run, add_options) """ assert isinstance(mod_or_funcs, str) or isinstance(mod_or_funcs, tuple), \ "mod_or_funcs has to be a module name or a tuple of functions" self._commands[name] = (mod_or_funcs, help) def print_main_help(self): 
optparse.OptionParser.print_help(self) print '\ncommands:' for cmdname, (_, help) in self._commands.items(): print '% 10s - %s' % (cmdname, help) def parse_command(self, args): if len(args) == 0: self.print_main_help() sys.exit(1) cmd = args[0] args = args[1:] if cmd not in self._commands: if cmd in ('-h', '--help'): self.print_main_help() sys.exit(0) elif self.version is not None and cmd == "--version": self.print_version() sys.exit(0) self.error('unknown command') self.prog = '%s %s' % (self.prog, cmd) mod_or_f, help = self._commands[cmd] # optparse inserts self.description between usage and options help self.description = help if isinstance(mod_or_f, str): exec 'from %s import run, add_options' % mod_or_f else: run, add_options = mod_or_f add_options(self) (options, args) = self.parse_args(args) if not (self.min_args <= len(args) <= self.max_args): self.error('incorrect number of arguments') return run, options, args
gpl-2.0
gaddman/ansible
test/units/modules/network/onyx/test_onyx_l3_interface.py
68
4375
# # Copyright: Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type from units.compat.mock import patch from ansible.modules.network.onyx import onyx_l3_interface from units.modules.utils import set_module_args from .onyx_module import TestOnyxModule, load_fixture class TestOnyxL3InterfaceModule(TestOnyxModule): module = onyx_l3_interface def setUp(self): super(TestOnyxL3InterfaceModule, self).setUp() self.mock_get_config = patch.object( onyx_l3_interface.OnyxL3InterfaceModule, "_get_interfaces_config") self.get_config = self.mock_get_config.start() self.mock_load_config = patch( 'ansible.module_utils.network.onyx.onyx.load_config') self.load_config = self.mock_load_config.start() self.mock_get_version = patch.object( onyx_l3_interface.OnyxL3InterfaceModule, "_get_os_version") self.get_version = self.mock_get_version.start() def tearDown(self): super(TestOnyxL3InterfaceModule, self).tearDown() self.mock_get_config.stop() self.mock_load_config.stop() def _execute_module(self, failed=False, changed=False, commands=None, sort=True): if failed: result = self.failed() self.assertTrue(result['failed'], result) else: result = self.changed(changed) self.assertEqual(result['changed'], changed, result) if commands is not None: commands_res = result.get('commands') if sort: self.assertEqual(sorted(commands), sorted(commands_res), commands_res) else: self.assertEqual(commands, commands_res, commands_res) return result def load_fixture(self, config_file): self.get_config.return_value = load_fixture(config_file) self.load_config.return_value = None self.get_version.return_value = "3.6.5000" def load_eth_ifc_fixture(self): config_file = 'onyx_l3_interface_show.cfg' self.load_fixture(config_file) def load_vlan_ifc_fixture(self): config_file = 'onyx_l3_vlan_interface_show.cfg' self.load_fixture(config_file) 
def test_vlan_ifc_no_change(self): set_module_args(dict(name='Vlan 1002', state='present', ipv4='172.3.12.4/24')) self.load_vlan_ifc_fixture() self._execute_module(changed=False) def test_vlan_ifc_remove(self): set_module_args(dict(name='Vlan 1002', state='absent')) commands = ['interface vlan 1002 no ip address'] self.load_vlan_ifc_fixture() self._execute_module(changed=True, commands=commands) def test_vlan_ifc_update(self): set_module_args(dict(name='Vlan 1002', state='present', ipv4='172.3.13.4/24')) commands = ['interface vlan 1002 ip address 172.3.13.4/24'] self.load_vlan_ifc_fixture() self._execute_module(changed=True, commands=commands) def test_eth_ifc_no_change(self): set_module_args(dict(name='Eth1/5', state='present', ipv4='172.3.12.4/24')) self.load_eth_ifc_fixture() self._execute_module(changed=False) def test_eth_ifc_remove(self): set_module_args(dict(name='Eth1/5', state='absent')) commands = ['interface ethernet 1/5 no ip address'] self.load_eth_ifc_fixture() self._execute_module(changed=True, commands=commands) def test_eth_ifc_update(self): set_module_args(dict(name='Eth1/5', state='present', ipv4='172.3.13.4/24')) commands = ['interface ethernet 1/5 ip address 172.3.13.4/24'] self.load_eth_ifc_fixture() self._execute_module(changed=True, commands=commands) def test_eth_ifc_add_ip(self): set_module_args(dict(name='Eth1/6', state='present', ipv4='172.3.14.4/24')) commands = ['interface ethernet 1/6 no switchport force', 'interface ethernet 1/6 ip address 172.3.14.4/24'] self.load_eth_ifc_fixture() self._execute_module(changed=True, commands=commands)
gpl-3.0
navodissa/python-flask
flask/lib/python2.7/site-packages/setuptools/tests/test_resources.py
345
23973
#!/usr/bin/python # -*- coding: utf-8 -*- # NOTE: the shebang and encoding lines are for ScriptHeaderTests do not remove import os import sys import tempfile import shutil from unittest import TestCase import pkg_resources from pkg_resources import (parse_requirements, VersionConflict, parse_version, Distribution, EntryPoint, Requirement, safe_version, safe_name, WorkingSet) from setuptools.command.easy_install import (get_script_header, is_sh, nt_quote_arg) from setuptools.compat import StringIO, iteritems try: frozenset except NameError: from sets import ImmutableSet as frozenset def safe_repr(obj, short=False): """ copied from Python2.7""" try: result = repr(obj) except Exception: result = object.__repr__(obj) if not short or len(result) < pkg_resources._MAX_LENGTH: return result return result[:pkg_resources._MAX_LENGTH] + ' [truncated]...' class Metadata(pkg_resources.EmptyProvider): """Mock object to return metadata as if from an on-disk distribution""" def __init__(self,*pairs): self.metadata = dict(pairs) def has_metadata(self,name): return name in self.metadata def get_metadata(self,name): return self.metadata[name] def get_metadata_lines(self,name): return pkg_resources.yield_lines(self.get_metadata(name)) dist_from_fn = pkg_resources.Distribution.from_filename class DistroTests(TestCase): def testCollection(self): # empty path should produce no distributions ad = pkg_resources.Environment([], platform=None, python=None) self.assertEqual(list(ad), []) self.assertEqual(ad['FooPkg'],[]) ad.add(dist_from_fn("FooPkg-1.3_1.egg")) ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg")) ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg")) # Name is in there now self.assertTrue(ad['FooPkg']) # But only 1 package self.assertEqual(list(ad), ['foopkg']) # Distributions sort by version self.assertEqual( [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2'] ) # Removing a distribution leaves sequence alone ad.remove(ad['FooPkg'][1]) self.assertEqual( [dist.version for dist 
in ad['FooPkg']], ['1.4','1.2'] ) # And inserting adds them in order ad.add(dist_from_fn("FooPkg-1.9.egg")) self.assertEqual( [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2'] ) ws = WorkingSet([]) foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg") foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg") req, = parse_requirements("FooPkg>=1.3") # Nominal case: no distros on path, should yield all applicable self.assertEqual(ad.best_match(req,ws).version, '1.9') # If a matching distro is already installed, should return only that ws.add(foo14) self.assertEqual(ad.best_match(req,ws).version, '1.4') # If the first matching distro is unsuitable, it's a version conflict ws = WorkingSet([]) ws.add(foo12) ws.add(foo14) self.assertRaises(VersionConflict, ad.best_match, req, ws) # If more than one match on the path, the first one takes precedence ws = WorkingSet([]) ws.add(foo14) ws.add(foo12) ws.add(foo14) self.assertEqual(ad.best_match(req,ws).version, '1.4') def checkFooPkg(self,d): self.assertEqual(d.project_name, "FooPkg") self.assertEqual(d.key, "foopkg") self.assertEqual(d.version, "1.3-1") self.assertEqual(d.py_version, "2.4") self.assertEqual(d.platform, "win32") self.assertEqual(d.parsed_version, parse_version("1.3-1")) def testDistroBasics(self): d = Distribution( "/some/path", project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32" ) self.checkFooPkg(d) d = Distribution("/some/path") self.assertEqual(d.py_version, sys.version[:3]) self.assertEqual(d.platform, None) def testDistroParse(self): d = dist_from_fn("FooPkg-1.3_1-py2.4-win32.egg") self.checkFooPkg(d) d = dist_from_fn("FooPkg-1.3_1-py2.4-win32.egg-info") self.checkFooPkg(d) def testDistroMetadata(self): d = Distribution( "/some/path", project_name="FooPkg", py_version="2.4", platform="win32", metadata = Metadata( ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n") ) ) self.checkFooPkg(d) def distRequires(self, txt): return Distribution("/foo", metadata=Metadata(('depends.txt', txt))) 
def checkRequires(self, dist, txt, extras=()): self.assertEqual( list(dist.requires(extras)), list(parse_requirements(txt)) ) def testDistroDependsSimple(self): for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0": self.checkRequires(self.distRequires(v), v) def testResolve(self): ad = pkg_resources.Environment([]) ws = WorkingSet([]) # Resolving no requirements -> nothing to install self.assertEqual(list(ws.resolve([],ad)), []) # Request something not in the collection -> DistributionNotFound self.assertRaises( pkg_resources.DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad ) Foo = Distribution.from_filename( "/foo_dir/Foo-1.2.egg", metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0")) ) ad.add(Foo) ad.add(Distribution.from_filename("Foo-0.9.egg")) # Request thing(s) that are available -> list to activate for i in range(3): targets = list(ws.resolve(parse_requirements("Foo"), ad)) self.assertEqual(targets, [Foo]) list(map(ws.add,targets)) self.assertRaises(VersionConflict, ws.resolve, parse_requirements("Foo==0.9"), ad) ws = WorkingSet([]) # reset # Request an extra that causes an unresolved dependency for "Baz" self.assertRaises( pkg_resources.DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad ) Baz = Distribution.from_filename( "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo")) ) ad.add(Baz) # Activation list now includes resolved dependency self.assertEqual( list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz] ) # Requests for conflicting versions produce VersionConflict self.assertRaises(VersionConflict, ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad) def testDistroDependsOptions(self): d = self.distRequires(""" Twisted>=1.5 [docgen] ZConfig>=2.0 docutils>=0.3 [fastcgi] fcgiapp>=0.1""") self.checkRequires(d,"Twisted>=1.5") self.checkRequires( d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"] ) self.checkRequires( d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"] ) 
self.checkRequires( d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(), ["docgen","fastcgi"] ) self.checkRequires( d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(), ["fastcgi", "docgen"] ) self.assertRaises(pkg_resources.UnknownExtra, d.requires, ["foo"]) class EntryPointTests(TestCase): def assertfields(self, ep): self.assertEqual(ep.name,"foo") self.assertEqual(ep.module_name,"setuptools.tests.test_resources") self.assertEqual(ep.attrs, ("EntryPointTests",)) self.assertEqual(ep.extras, ("x",)) self.assertTrue(ep.load() is EntryPointTests) self.assertEqual( str(ep), "foo = setuptools.tests.test_resources:EntryPointTests [x]" ) def setUp(self): self.dist = Distribution.from_filename( "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]'))) def testBasics(self): ep = EntryPoint( "foo", "setuptools.tests.test_resources", ["EntryPointTests"], ["x"], self.dist ) self.assertfields(ep) def testParse(self): s = "foo = setuptools.tests.test_resources:EntryPointTests [x]" ep = EntryPoint.parse(s, self.dist) self.assertfields(ep) ep = EntryPoint.parse("bar baz= spammity[PING]") self.assertEqual(ep.name,"bar baz") self.assertEqual(ep.module_name,"spammity") self.assertEqual(ep.attrs, ()) self.assertEqual(ep.extras, ("ping",)) ep = EntryPoint.parse(" fizzly = wocka:foo") self.assertEqual(ep.name,"fizzly") self.assertEqual(ep.module_name,"wocka") self.assertEqual(ep.attrs, ("foo",)) self.assertEqual(ep.extras, ()) def testRejects(self): for ep in [ "foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2", ]: try: EntryPoint.parse(ep) except ValueError: pass else: raise AssertionError("Should've been bad", ep) def checkSubMap(self, m): self.assertEqual(len(m), len(self.submap_expect)) for key, ep in iteritems(self.submap_expect): self.assertEqual(repr(m.get(key)), repr(ep)) submap_expect = dict( feature1=EntryPoint('feature1', 'somemodule', ['somefunction']), feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], 
['extra1','extra2']), feature3=EntryPoint('feature3', 'this.module', extras=['something']) ) submap_str = """ # define features for blah blah feature1 = somemodule:somefunction feature2 = another.module:SomeClass [extra1,extra2] feature3 = this.module [something] """ def testParseList(self): self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str)) self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar") self.assertRaises(ValueError, EntryPoint.parse_group, "x", ["foo=baz", "foo=bar"]) def testParseMap(self): m = EntryPoint.parse_map({'xyz':self.submap_str}) self.checkSubMap(m['xyz']) self.assertEqual(list(m.keys()),['xyz']) m = EntryPoint.parse_map("[xyz]\n"+self.submap_str) self.checkSubMap(m['xyz']) self.assertEqual(list(m.keys()),['xyz']) self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"]) self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str) class RequirementsTests(TestCase): def testBasics(self): r = Requirement.parse("Twisted>=1.2") self.assertEqual(str(r),"Twisted>=1.2") self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')") self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ())) self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ())) self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ())) self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ())) self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ())) self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2")) def testOrdering(self): r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ()) r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ()) self.assertEqual(r1,r2) self.assertEqual(str(r1),str(r2)) self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2") def testBasicContains(self): r = Requirement("Twisted", [('>=','1.2')], ()) foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg") twist11 = Distribution.from_filename("Twisted-1.1.egg") twist12 = 
Distribution.from_filename("Twisted-1.2.egg") self.assertTrue(parse_version('1.2') in r) self.assertTrue(parse_version('1.1') not in r) self.assertTrue('1.2' in r) self.assertTrue('1.1' not in r) self.assertTrue(foo_dist not in r) self.assertTrue(twist11 not in r) self.assertTrue(twist12 in r) def testAdvancedContains(self): r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5") for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'): self.assertTrue(v in r, (v,r)) for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'): self.assertTrue(v not in r, (v,r)) def testOptionsAndHashing(self): r1 = Requirement.parse("Twisted[foo,bar]>=1.2") r2 = Requirement.parse("Twisted[bar,FOO]>=1.2") r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0") self.assertEqual(r1,r2) self.assertEqual(r1,r3) self.assertEqual(r1.extras, ("foo","bar")) self.assertEqual(r2.extras, ("bar","foo")) # extras are normalized self.assertEqual(hash(r1), hash(r2)) self.assertEqual( hash(r1), hash(("twisted", ((">=",parse_version("1.2")),), frozenset(["foo","bar"]))) ) def testVersionEquality(self): r1 = Requirement.parse("foo==0.3a2") r2 = Requirement.parse("foo!=0.3a4") d = Distribution.from_filename self.assertTrue(d("foo-0.3a4.egg") not in r1) self.assertTrue(d("foo-0.3a1.egg") not in r1) self.assertTrue(d("foo-0.3a4.egg") not in r2) self.assertTrue(d("foo-0.3a2.egg") in r1) self.assertTrue(d("foo-0.3a2.egg") in r2) self.assertTrue(d("foo-0.3a3.egg") in r2) self.assertTrue(d("foo-0.3a5.egg") in r2) def testSetuptoolsProjectName(self): """ The setuptools project should implement the setuptools package. """ self.assertEqual( Requirement.parse('setuptools').project_name, 'setuptools') # setuptools 0.7 and higher means setuptools. 
self.assertEqual( Requirement.parse('setuptools == 0.7').project_name, 'setuptools') self.assertEqual( Requirement.parse('setuptools == 0.7a1').project_name, 'setuptools') self.assertEqual( Requirement.parse('setuptools >= 0.7').project_name, 'setuptools') class ParseTests(TestCase): def testEmptyParse(self): self.assertEqual(list(parse_requirements('')), []) def testYielding(self): for inp,out in [ ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']), (['x\n\n','y'], ['x','y']), ]: self.assertEqual(list(pkg_resources.yield_lines(inp)),out) def testSplitting(self): sample = """ x [Y] z a [b ] # foo c [ d] [q] v """ self.assertEqual(list(pkg_resources.split_sections(sample)), [(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])] ) self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo")) def testSafeName(self): self.assertEqual(safe_name("adns-python"), "adns-python") self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker") self.assertNotEqual(safe_name("peak.web"), "peak-web") def testSafeVersion(self): self.assertEqual(safe_version("1.2-1"), "1.2-1") self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha") self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521") self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker") self.assertEqual(safe_version("peak.web"), "peak.web") def testSimpleRequirements(self): self.assertEqual( list(parse_requirements('Twis-Ted>=1.2-1')), [Requirement('Twis-Ted',[('>=','1.2-1')], ())] ) self.assertEqual( list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')), [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())] ) self.assertEqual( Requirement.parse("FooBar==1.99a3"), Requirement("FooBar", [('==','1.99a3')], ()) ) self.assertRaises(ValueError,Requirement.parse,">=2.3") self.assertRaises(ValueError,Requirement.parse,"x\\") 
self.assertRaises(ValueError,Requirement.parse,"x==2 q") self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2") self.assertRaises(ValueError,Requirement.parse,"#") def testVersionEquality(self): def c(s1,s2): p1, p2 = parse_version(s1),parse_version(s2) self.assertEqual(p1,p2, (s1,s2,p1,p2)) c('1.2-rc1', '1.2rc1') c('0.4', '0.4.0') c('0.4.0.0', '0.4.0') c('0.4.0-0', '0.4-0') c('0pl1', '0.0pl1') c('0pre1', '0.0c1') c('0.0.0preview1', '0c1') c('0.0c1', '0-rc1') c('1.2a1', '1.2.a.1') c('1.2...a', '1.2a') def testVersionOrdering(self): def c(s1,s2): p1, p2 = parse_version(s1),parse_version(s2) self.assertTrue(p1<p2, (s1,s2,p1,p2)) c('2.1','2.1.1') c('2a1','2b0') c('2a1','2.1') c('2.3a1', '2.3') c('2.1-1', '2.1-2') c('2.1-1', '2.1.1') c('2.1', '2.1pl4') c('2.1a0-20040501', '2.1') c('1.1', '02.1') c('A56','B27') c('3.2', '3.2.pl0') c('3.2-1', '3.2pl1') c('3.2pl1', '3.2pl1-1') c('0.4', '4.0') c('0.0.4', '0.4.0') c('0pl1', '0.4pl1') c('2.1.0-rc1','2.1.0') c('2.1dev','2.1a0') torture =""" 0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1 0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2 0.77.2-1 0.77.1-1 0.77.0-1 """.split() for p,v1 in enumerate(torture): for v2 in torture[p+1:]: c(v2,v1) class ScriptHeaderTests(TestCase): non_ascii_exe = '/Users/José/bin/python' exe_with_spaces = r'C:\Program Files\Python33\python.exe' def test_get_script_header(self): if not sys.platform.startswith('java') or not is_sh(sys.executable): # This test is for non-Jython platforms expected = '#!%s\n' % nt_quote_arg(os.path.normpath(sys.executable)) self.assertEqual(get_script_header('#!/usr/local/bin/python'), expected) expected = '#!%s -x\n' % nt_quote_arg(os.path.normpath(sys.executable)) self.assertEqual(get_script_header('#!/usr/bin/python -x'), expected) self.assertEqual(get_script_header('#!/usr/bin/python', executable=self.non_ascii_exe), '#!%s -x\n' % self.non_ascii_exe) candidate = get_script_header('#!/usr/bin/python', executable=self.exe_with_spaces) self.assertEqual(candidate, 
'#!"%s"\n' % self.exe_with_spaces) def test_get_script_header_jython_workaround(self): # This test doesn't work with Python 3 in some locales if (sys.version_info >= (3,) and os.environ.get("LC_CTYPE") in (None, "C", "POSIX")): return class java: class lang: class System: @staticmethod def getProperty(property): return "" sys.modules["java"] = java platform = sys.platform sys.platform = 'java1.5.0_13' stdout, stderr = sys.stdout, sys.stderr try: # A mock sys.executable that uses a shebang line (this file) exe = os.path.normpath(os.path.splitext(__file__)[0] + '.py') self.assertEqual( get_script_header('#!/usr/local/bin/python', executable=exe), '#!/usr/bin/env %s\n' % exe) # Ensure we generate what is basically a broken shebang line # when there's options, with a warning emitted sys.stdout = sys.stderr = StringIO() self.assertEqual(get_script_header('#!/usr/bin/python -x', executable=exe), '#!%s -x\n' % exe) self.assertTrue('Unable to adapt shebang line' in sys.stdout.getvalue()) sys.stdout = sys.stderr = StringIO() self.assertEqual(get_script_header('#!/usr/bin/python', executable=self.non_ascii_exe), '#!%s -x\n' % self.non_ascii_exe) self.assertTrue('Unable to adapt shebang line' in sys.stdout.getvalue()) finally: del sys.modules["java"] sys.platform = platform sys.stdout, sys.stderr = stdout, stderr class NamespaceTests(TestCase): def setUp(self): self._ns_pkgs = pkg_resources._namespace_packages.copy() self._tmpdir = tempfile.mkdtemp(prefix="tests-setuptools-") os.makedirs(os.path.join(self._tmpdir, "site-pkgs")) self._prev_sys_path = sys.path[:] sys.path.append(os.path.join(self._tmpdir, "site-pkgs")) def tearDown(self): shutil.rmtree(self._tmpdir) pkg_resources._namespace_packages = self._ns_pkgs.copy() sys.path = self._prev_sys_path[:] def _assertIn(self, member, container): """ assertIn and assertTrue does not exist in Python2.3""" if member not in container: standardMsg = '%s not found in %s' % (safe_repr(member), safe_repr(container)) 
self.fail(self._formatMessage(msg, standardMsg)) def test_two_levels_deep(self): """ Test nested namespace packages Create namespace packages in the following tree : site-packages-1/pkg1/pkg2 site-packages-2/pkg1/pkg2 Check both are in the _namespace_packages dict and that their __path__ is correct """ sys.path.append(os.path.join(self._tmpdir, "site-pkgs2")) os.makedirs(os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2")) os.makedirs(os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2")) ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n" for site in ["site-pkgs", "site-pkgs2"]: pkg1_init = open(os.path.join(self._tmpdir, site, "pkg1", "__init__.py"), "w") pkg1_init.write(ns_str) pkg1_init.close() pkg2_init = open(os.path.join(self._tmpdir, site, "pkg1", "pkg2", "__init__.py"), "w") pkg2_init.write(ns_str) pkg2_init.close() import pkg1 self._assertIn("pkg1", pkg_resources._namespace_packages.keys()) try: import pkg1.pkg2 except ImportError: self.fail("Setuptools tried to import the parent namespace package") # check the _namespace_packages dict self._assertIn("pkg1.pkg2", pkg_resources._namespace_packages.keys()) self.assertEqual(pkg_resources._namespace_packages["pkg1"], ["pkg1.pkg2"]) # check the __path__ attribute contains both paths self.assertEqual(pkg1.pkg2.__path__, [ os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"), os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2")])
bsd-3-clause
csachs/openmicroscopy
components/tools/OmeroPy/test/unit/gatewaytest/test_argument_errors.py
13
2006
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2015 University of Dundee & Open Microscopy Environment. # All rights reserved. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. """ gateway tests - argument errors in gateway methods """ from omero.gateway import _BlitzGateway import pytest class TestArgumentErrors(object): @classmethod @pytest.fixture(autouse=True) def setup_class(cls, tmpdir, monkeypatch): ice_config = tmpdir / "ice.config" ice_config.write("omero.host=localhost\nomero.port=4064") monkeypatch.setenv("ICE_CONFIG", ice_config) cls.g = _BlitzGateway() def test_graphspec_with_plus(self): """ The graph_spec Name+Qualifier is no longer supported. """ with pytest.raises(AttributeError): self.g.deleteObjects("Image+Only", ["1"]) with pytest.raises(AttributeError): self.g.chgrpObjects("Image+Only", ["1"], 1L) @pytest.mark.parametrize("object_ids", ["1", [], None]) def test_bad_object_ids(self, object_ids): """ object_ids must be a non-zero length list """ with pytest.raises(AttributeError): self.g.deleteObjects("Image", object_ids) with pytest.raises(AttributeError): self.g.chgrpObjects("Image", object_ids, 1L)
gpl-2.0
zxsted/scipy
scipy/linalg/_expm_frechet.py
117
12182
"""Frechet derivative of the matrix exponential.""" from __future__ import division, print_function, absolute_import import numpy as np import scipy.linalg __all__ = ['expm_frechet', 'expm_cond'] def expm_frechet(A, E, method=None, compute_expm=True, check_finite=True): """ Frechet derivative of the matrix exponential of A in the direction E. Parameters ---------- A : (N, N) array_like Matrix of which to take the matrix exponential. E : (N, N) array_like Matrix direction in which to take the Frechet derivative. method : str, optional Choice of algorithm. Should be one of - `SPS` (default) - `blockEnlarge` compute_expm : bool, optional Whether to compute also `expm_A` in addition to `expm_frechet_AE`. Default is True. check_finite : bool, optional Whether to check that the input matrix contains only finite numbers. Disabling may give a performance gain, but may result in problems (crashes, non-termination) if the inputs do contain infinities or NaNs. Returns ------- expm_A : ndarray Matrix exponential of A. expm_frechet_AE : ndarray Frechet derivative of the matrix exponential of A in the direction E. For ``compute_expm = False``, only `expm_frechet_AE` is returned. See also -------- expm : Compute the exponential of a matrix. Notes ----- This section describes the available implementations that can be selected by the `method` parameter. The default method is *SPS*. Method *blockEnlarge* is a naive algorithm. Method *SPS* is Scaling-Pade-Squaring [1]_. It is a sophisticated implementation which should take only about 3/8 as much time as the naive implementation. The asymptotics are the same. .. versionadded:: 0.13.0 References ---------- .. [1] Awad H. Al-Mohy and Nicholas J. Higham (2009) Computing the Frechet Derivative of the Matrix Exponential, with an application to Condition Number Estimation. SIAM Journal On Matrix Analysis and Applications., 30 (4). pp. 1639-1657. 
ISSN 1095-7162 Examples -------- >>> import scipy.linalg >>> A = np.random.randn(3, 3) >>> E = np.random.randn(3, 3) >>> expm_A, expm_frechet_AE = scipy.linalg.expm_frechet(A, E) >>> expm_A.shape, expm_frechet_AE.shape ((3, 3), (3, 3)) >>> import scipy.linalg >>> A = np.random.randn(3, 3) >>> E = np.random.randn(3, 3) >>> expm_A, expm_frechet_AE = scipy.linalg.expm_frechet(A, E) >>> M = np.zeros((6, 6)) >>> M[:3, :3] = A; M[:3, 3:] = E; M[3:, 3:] = A >>> expm_M = scipy.linalg.expm(M) >>> np.allclose(expm_A, expm_M[:3, :3]) True >>> np.allclose(expm_frechet_AE, expm_M[:3, 3:]) True """ if check_finite: A = np.asarray_chkfinite(A) E = np.asarray_chkfinite(E) else: A = np.asarray(A) E = np.asarray(E) if A.ndim != 2 or A.shape[0] != A.shape[1]: raise ValueError('expected A to be a square matrix') if E.ndim != 2 or E.shape[0] != E.shape[1]: raise ValueError('expected E to be a square matrix') if A.shape != E.shape: raise ValueError('expected A and E to be the same shape') if method is None: method = 'SPS' if method == 'SPS': expm_A, expm_frechet_AE = expm_frechet_algo_64(A, E) elif method == 'blockEnlarge': expm_A, expm_frechet_AE = expm_frechet_block_enlarge(A, E) else: raise ValueError('Unknown implementation %s' % method) if compute_expm: return expm_A, expm_frechet_AE else: return expm_frechet_AE def expm_frechet_block_enlarge(A, E): """ This is a helper function, mostly for testing and profiling. Return expm(A), frechet(A, E) """ n = A.shape[0] M = np.vstack([ np.hstack([A, E]), np.hstack([np.zeros_like(A), A])]) expm_M = scipy.linalg.expm(M) return expm_M[:n, :n], expm_M[:n, n:] """ Maximal values ell_m of ||2**-s A|| such that the backward error bound does not exceed 2**-53. """ ell_table_61 = ( None, # 1 2.11e-8, 3.56e-4, 1.08e-2, 6.49e-2, 2.00e-1, 4.37e-1, 7.83e-1, 1.23e0, 1.78e0, 2.42e0, # 11 3.13e0, 3.90e0, 4.74e0, 5.63e0, 6.56e0, 7.52e0, 8.53e0, 9.56e0, 1.06e1, 1.17e1, ) # The b vectors and U and V are copypasted # from scipy.sparse.linalg.matfuncs.py. 
# M, Lu, Lv follow (6.11), (6.12), (6.13), (3.3) def _diff_pade3(A, E, ident): b = (120., 60., 12., 1.) A2 = A.dot(A) M2 = np.dot(A, E) + np.dot(E, A) U = A.dot(b[3]*A2 + b[1]*ident) V = b[2]*A2 + b[0]*ident Lu = A.dot(b[3]*M2) + E.dot(b[3]*A2 + b[1]*ident) Lv = b[2]*M2 return U, V, Lu, Lv def _diff_pade5(A, E, ident): b = (30240., 15120., 3360., 420., 30., 1.) A2 = A.dot(A) M2 = np.dot(A, E) + np.dot(E, A) A4 = np.dot(A2, A2) M4 = np.dot(A2, M2) + np.dot(M2, A2) U = A.dot(b[5]*A4 + b[3]*A2 + b[1]*ident) V = b[4]*A4 + b[2]*A2 + b[0]*ident Lu = (A.dot(b[5]*M4 + b[3]*M2) + E.dot(b[5]*A4 + b[3]*A2 + b[1]*ident)) Lv = b[4]*M4 + b[2]*M2 return U, V, Lu, Lv def _diff_pade7(A, E, ident): b = (17297280., 8648640., 1995840., 277200., 25200., 1512., 56., 1.) A2 = A.dot(A) M2 = np.dot(A, E) + np.dot(E, A) A4 = np.dot(A2, A2) M4 = np.dot(A2, M2) + np.dot(M2, A2) A6 = np.dot(A2, A4) M6 = np.dot(A4, M2) + np.dot(M4, A2) U = A.dot(b[7]*A6 + b[5]*A4 + b[3]*A2 + b[1]*ident) V = b[6]*A6 + b[4]*A4 + b[2]*A2 + b[0]*ident Lu = (A.dot(b[7]*M6 + b[5]*M4 + b[3]*M2) + E.dot(b[7]*A6 + b[5]*A4 + b[3]*A2 + b[1]*ident)) Lv = b[6]*M6 + b[4]*M4 + b[2]*M2 return U, V, Lu, Lv def _diff_pade9(A, E, ident): b = (17643225600., 8821612800., 2075673600., 302702400., 30270240., 2162160., 110880., 3960., 90., 1.) 
A2 = A.dot(A) M2 = np.dot(A, E) + np.dot(E, A) A4 = np.dot(A2, A2) M4 = np.dot(A2, M2) + np.dot(M2, A2) A6 = np.dot(A2, A4) M6 = np.dot(A4, M2) + np.dot(M4, A2) A8 = np.dot(A4, A4) M8 = np.dot(A4, M4) + np.dot(M4, A4) U = A.dot(b[9]*A8 + b[7]*A6 + b[5]*A4 + b[3]*A2 + b[1]*ident) V = b[8]*A8 + b[6]*A6 + b[4]*A4 + b[2]*A2 + b[0]*ident Lu = (A.dot(b[9]*M8 + b[7]*M6 + b[5]*M4 + b[3]*M2) + E.dot(b[9]*A8 + b[7]*A6 + b[5]*A4 + b[3]*A2 + b[1]*ident)) Lv = b[8]*M8 + b[6]*M6 + b[4]*M4 + b[2]*M2 return U, V, Lu, Lv def expm_frechet_algo_64(A, E): n = A.shape[0] s = None ident = np.identity(n) A_norm_1 = scipy.linalg.norm(A, 1) m_pade_pairs = ( (3, _diff_pade3), (5, _diff_pade5), (7, _diff_pade7), (9, _diff_pade9)) for m, pade in m_pade_pairs: if A_norm_1 <= ell_table_61[m]: U, V, Lu, Lv = pade(A, E, ident) s = 0 break if s is None: # scaling s = max(0, int(np.ceil(np.log2(A_norm_1 / ell_table_61[13])))) A = A * 2.0**-s E = E * 2.0**-s # pade order 13 A2 = np.dot(A, A) M2 = np.dot(A, E) + np.dot(E, A) A4 = np.dot(A2, A2) M4 = np.dot(A2, M2) + np.dot(M2, A2) A6 = np.dot(A2, A4) M6 = np.dot(A4, M2) + np.dot(M4, A2) b = (64764752532480000., 32382376266240000., 7771770303897600., 1187353796428800., 129060195264000., 10559470521600., 670442572800., 33522128640., 1323241920., 40840800., 960960., 16380., 182., 1.) 
W1 = b[13]*A6 + b[11]*A4 + b[9]*A2 W2 = b[7]*A6 + b[5]*A4 + b[3]*A2 + b[1]*ident Z1 = b[12]*A6 + b[10]*A4 + b[8]*A2 Z2 = b[6]*A6 + b[4]*A4 + b[2]*A2 + b[0]*ident W = np.dot(A6, W1) + W2 U = np.dot(A, W) V = np.dot(A6, Z1) + Z2 Lw1 = b[13]*M6 + b[11]*M4 + b[9]*M2 Lw2 = b[7]*M6 + b[5]*M4 + b[3]*M2 Lz1 = b[12]*M6 + b[10]*M4 + b[8]*M2 Lz2 = b[6]*M6 + b[4]*M4 + b[2]*M2 Lw = np.dot(A6, Lw1) + np.dot(M6, W1) + Lw2 Lu = np.dot(A, Lw) + np.dot(E, W) Lv = np.dot(A6, Lz1) + np.dot(M6, Z1) + Lz2 # factor once and solve twice lu_piv = scipy.linalg.lu_factor(-U + V) R = scipy.linalg.lu_solve(lu_piv, U + V) L = scipy.linalg.lu_solve(lu_piv, Lu + Lv + np.dot((Lu - Lv), R)) # squaring for k in range(s): L = np.dot(R, L) + np.dot(L, R) R = np.dot(R, R) return R, L def vec(M): """ Stack columns of M to construct a single vector. This is somewhat standard notation in linear algebra. Parameters ---------- M : 2d array_like Input matrix Returns ------- v : 1d ndarray Output vector """ return M.T.ravel() def expm_frechet_kronform(A, method=None, check_finite=True): """ Construct the Kronecker form of the Frechet derivative of expm. Parameters ---------- A : array_like with shape (N, N) Matrix to be expm'd. method : str, optional Extra keyword to be passed to expm_frechet. check_finite : bool, optional Whether to check that the input matrix contains only finite numbers. Disabling may give a performance gain, but may result in problems (crashes, non-termination) if the inputs do contain infinities or NaNs. Returns ------- K : 2d ndarray with shape (N*N, N*N) Kronecker form of the Frechet derivative of the matrix exponential. Notes ----- This function is used to help compute the condition number of the matrix exponential. See also -------- expm : Compute a matrix exponential. expm_frechet : Compute the Frechet derivative of the matrix exponential. expm_cond : Compute the relative condition number of the matrix exponential in the Frobenius norm. 
""" if check_finite: A = np.asarray_chkfinite(A) else: A = np.asarray(A) if len(A.shape) != 2 or A.shape[0] != A.shape[1]: raise ValueError('expected a square matrix') n = A.shape[0] ident = np.identity(n) cols = [] for i in range(n): for j in range(n): E = np.outer(ident[i], ident[j]) F = expm_frechet(A, E, method=method, compute_expm=False, check_finite=False) cols.append(vec(F)) return np.vstack(cols).T def expm_cond(A, check_finite=True): """ Relative condition number of the matrix exponential in the Frobenius norm. Parameters ---------- A : 2d array_like Square input matrix with shape (N, N). check_finite : bool, optional Whether to check that the input matrix contains only finite numbers. Disabling may give a performance gain, but may result in problems (crashes, non-termination) if the inputs do contain infinities or NaNs. Returns ------- kappa : float The relative condition number of the matrix exponential in the Frobenius norm Notes ----- A faster estimate for the condition number in the 1-norm has been published but is not yet implemented in scipy. .. versionadded:: 0.14.0 See also -------- expm : Compute the exponential of a matrix. expm_frechet : Compute the Frechet derivative of the matrix exponential. """ if check_finite: A = np.asarray_chkfinite(A) else: A = np.asarray(A) if len(A.shape) != 2 or A.shape[0] != A.shape[1]: raise ValueError('expected a square matrix') X = scipy.linalg.expm(A) K = expm_frechet_kronform(A, check_finite=False) # The following norm choices are deliberate. # The norms of A and X are Frobenius norms, # and the norm of K is the induced 2-norm. A_norm = scipy.linalg.norm(A, 'fro') X_norm = scipy.linalg.norm(X, 'fro') K_norm = scipy.linalg.norm(K, 2) kappa = (K_norm * A_norm) / X_norm return kappa
bsd-3-clause
carolineLe/miasm
example/asm/shellcode.py
3
3732
#! /usr/bin/env python2 from __future__ import print_function from argparse import ArgumentParser from pdb import pm from future.utils import viewitems from miasm.loader import pe_init from miasm.loader.strpatchwork import StrPatchwork from miasm.core import parse_asm, asmblock from miasm.analysis.machine import Machine from miasm.core.interval import interval from miasm.core.locationdb import LocationDB from miasm.core.utils import iterbytes, int_to_byte parser = ArgumentParser("Multi-arch (32 bits) assembler") parser.add_argument('architecture', help="architecture: " + ",".join(Machine.available_machine())) parser.add_argument("source", help="Source file to assemble") parser.add_argument("output", help="Output file") parser.add_argument("--PE", help="Create a PE with a few imports", action="store_true") parser.add_argument("-e", "--encrypt", help="Encrypt the code between <label_start> <label_stop>", nargs=2) args = parser.parse_args() # Get architecture-dependent parameters machine = Machine(args.architecture) try: attrib = machine.dis_engine.attrib size = int(attrib) except AttributeError: attrib = None size = 32 except ValueError: size = 32 reg_and_id = dict(machine.mn.regs.all_regs_ids_byname) base_expr = machine.base_expr dst_interval = None # Output format if args.PE: pe = pe_init.PE(wsize=size) s_text = pe.SHList.add_section(name="text", addr=0x1000, rawsize=0x1000) s_iat = pe.SHList.add_section(name="iat", rawsize=0x100) new_dll = [ ( { "name": "USER32.dll", "firstthunk": s_iat.addr }, [ "MessageBoxA" ] ) ] pe.DirImport.add_dlldesc(new_dll) s_myimp = pe.SHList.add_section(name="myimp", rawsize=len(pe.DirImport)) pe.DirImport.set_rva(s_myimp.addr) pe.Opthdr.AddressOfEntryPoint = s_text.addr addr_main = pe.rva2virt(s_text.addr) virt = pe.virt output = pe dst_interval = interval( [ (pe.rva2virt(s_text.addr), pe.rva2virt(s_text.addr + s_text.size)) ] ) else: st = StrPatchwork() addr_main = 0 virt = st output = st # Get and parse the source code with 
open(args.source) as fstream: source = fstream.read() loc_db = LocationDB() asmcfg, loc_db = parse_asm.parse_txt(machine.mn, attrib, source, loc_db) # Fix shellcode addrs loc_db.set_location_offset(loc_db.get_name_location("main"), addr_main) if args.PE: loc_db.set_location_offset( loc_db.get_or_create_name_location("MessageBoxA"), pe.DirImport.get_funcvirt( 'USER32.dll', 'MessageBoxA' ) ) # Print and graph firsts blocks before patching it for block in asmcfg.blocks: print(block) open("graph.dot", "w").write(asmcfg.dot()) # Apply patches patches = asmblock.asm_resolve_final( machine.mn, asmcfg, loc_db, dst_interval ) if args.encrypt: # Encrypt code loc_start = loc_db.get_or_create_name_location(args.encrypt[0]) loc_stop = loc_db.get_or_create_name_location(args.encrypt[1]) ad_start = loc_db.get_location_offset(loc_start) ad_stop = loc_db.get_location_offset(loc_stop) for ad, val in list(viewitems(patches)): if ad_start <= ad < ad_stop: patches[ad] = b"".join(int_to_byte(ord(x) ^ 0x42) for x in iterbytes(val)) print(patches) if isinstance(virt, StrPatchwork): for offset, raw in viewitems(patches): virt[offset] = raw else: for offset, raw in viewitems(patches): virt.set(offset, raw) # Produce output open(args.output, 'wb').write(bytes(output))
gpl-2.0
bsipocz/ginga
examples/gtk/example2_gtk.py
1
7364
#! /usr/bin/env python # # example2_gtk.py -- Simple, configurable FITS viewer. # # Eric Jeschke (eric@naoj.org) # # Copyright (c) Eric R. Jeschke. All rights reserved. # This is open-source software licensed under a BSD license. # Please see the file LICENSE.txt for details. # from __future__ import print_function import sys, os import logging, logging.handlers from ginga import AstroImage from ginga.gtkw import FileSelection, GtkHelp from ginga.gtkw.ImageViewCanvasGtk import ImageViewCanvas from ginga import colors import gtk STD_FORMAT = '%(asctime)s | %(levelname)1.1s | %(filename)s:%(lineno)d (%(funcName)s) | %(message)s' class FitsViewer(object): def __init__(self, logger): self.logger = logger self.drawcolors = colors.get_colors() self.select = FileSelection.FileSelection() root = gtk.Window(gtk.WINDOW_TOPLEVEL) root.set_title("ImageViewCanvas Example") root.set_border_width(2) root.connect("delete_event", lambda w, e: quit(w)) self.root = root vbox = gtk.VBox(spacing=2) fi = ImageViewCanvas(logger) fi.enable_autocuts('on') fi.set_autocut_params('zscale') fi.enable_autozoom('on') fi.enable_draw(True) fi.set_drawtype('ruler') fi.set_drawcolor('blue') fi.set_callback('drag-drop', self.drop_file) fi.set_callback('none-move', self.motion) fi.set_bg(0.2, 0.2, 0.2) fi.ui_setActive(True) self.fitsimage = fi bd = fi.get_bindings() bd.enable_pan(True) bd.enable_zoom(True) bd.enable_cuts(True) bd.enable_flip(True) bd.enable_rotate(True) bd.enable_cmap(True) w = fi.get_widget() w.set_size_request(512, 512) vbox.pack_start(w, fill=True, expand=True) self.readout = gtk.Label("") vbox.pack_start(self.readout, fill=True, expand=False) hbox = gtk.HBox(spacing=5) wdrawtype = GtkHelp.combo_box_new_text() self.drawtypes = fi.get_drawtypes() index = 0 for name in self.drawtypes: wdrawtype.insert_text(index, name) index += 1 index = self.drawtypes.index('ruler') wdrawtype.set_active(index) wdrawtype.connect('changed', self.set_drawparams) self.wdrawtype = wdrawtype wdrawcolor = 
GtkHelp.combo_box_new_text() index = 0 for name in self.drawcolors: wdrawcolor.insert_text(index, name) index += 1 index = self.drawcolors.index('blue') wdrawcolor.set_active(index) wdrawcolor.connect('changed', self.set_drawparams) self.wdrawcolor = wdrawcolor wclear = gtk.Button("Clear Canvas") wclear.connect('clicked', self.clear_canvas) wopen = gtk.Button("Open File") wopen.connect('clicked', self.open_file) wquit = gtk.Button("Quit") wquit.connect('clicked', quit) for w in (wquit, wclear, wdrawcolor, wdrawtype, wopen): hbox.pack_end(w, fill=False, expand=False) vbox.pack_start(hbox, fill=False, expand=False) root.add(vbox) def get_widget(self): return self.root def set_drawparams(self, w): index = self.wdrawtype.get_active() kind = self.drawtypes[index] index = self.wdrawcolor.get_active() params = { 'color': self.drawcolors[index], } self.fitsimage.set_drawtype(kind, **params) def clear_canvas(self, w): self.fitsimage.deleteAllObjects() def load_file(self, filepath): image = AstroImage.AstroImage(logger=self.logger) image.load_file(filepath) self.fitsimage.set_image(image) self.root.set_title(filepath) def open_file(self, w): self.select.popup("Open FITS file", self.load_file) def drop_file(self, fitsimage, paths): fileName = paths[0] self.load_file(fileName) def motion(self, fitsimage, button, data_x, data_y): # Get the value under the data coordinates try: #value = fitsimage.get_data(data_x, data_y) # We report the value across the pixel, even though the coords # change halfway across the pixel value = fitsimage.get_data(int(data_x+0.5), int(data_y+0.5)) except Exception: value = None fits_x, fits_y = data_x + 1, data_y + 1 # Calculate WCS RA try: # NOTE: image function operates on DATA space coords image = fitsimage.get_image() if image == None: # No image loaded return ra_txt, dec_txt = image.pixtoradec(fits_x, fits_y, format='str', coords='fits') except Exception as e: self.logger.warn("Bad coordinate conversion: %s" % ( str(e))) ra_txt = 'BAD WCS' 
dec_txt = 'BAD WCS' text = "RA: %s DEC: %s X: %.2f Y: %.2f Value: %s" % ( ra_txt, dec_txt, fits_x, fits_y, value) self.readout.set_text(text) def quit(self, w): gtk.main_quit() return True def main(options, args): logger = logging.getLogger("example2") logger.setLevel(options.loglevel) fmt = logging.Formatter(STD_FORMAT) if options.logfile: fileHdlr = logging.handlers.RotatingFileHandler(options.logfile) fileHdlr.setLevel(options.loglevel) fileHdlr.setFormatter(fmt) logger.addHandler(fileHdlr) if options.logstderr: stderrHdlr = logging.StreamHandler() stderrHdlr.setLevel(options.loglevel) stderrHdlr.setFormatter(fmt) logger.addHandler(stderrHdlr) fv = FitsViewer(logger) root = fv.get_widget() root.show_all() if len(args) > 0: fv.load_file(args[0]) gtk.main() if __name__ == "__main__": # Parse command line options with nifty optparse module from optparse import OptionParser usage = "usage: %prog [options] cmd [args]" optprs = OptionParser(usage=usage, version=('%%prog')) optprs.add_option("--debug", dest="debug", default=False, action="store_true", help="Enter the pdb debugger on main()") optprs.add_option("--log", dest="logfile", metavar="FILE", help="Write logging output to FILE") optprs.add_option("--loglevel", dest="loglevel", metavar="LEVEL", type='int', default=logging.INFO, help="Set logging level to LEVEL") optprs.add_option("--stderr", dest="logstderr", default=False, action="store_true", help="Copy logging also to stderr") optprs.add_option("--profile", dest="profile", action="store_true", default=False, help="Run the profiler on main()") (options, args) = optprs.parse_args(sys.argv[1:]) # Are we debugging this? if options.debug: import pdb pdb.run('main(options, args)') # Are we profiling this? elif options.profile: import profile print(("%s profile:" % sys.argv[0])) profile.run('main(options, args)') else: main(options, args) # END
bsd-3-clause
aioue/ansible
lib/ansible/modules/monitoring/rollbar_deployment.py
41
4131
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright 2014, Max Riveiro, <kavu13@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: rollbar_deployment
version_added: 1.6
author: "Max Riveiro (@kavu)"
short_description: Notify Rollbar about app deployments
description:
  - Notify Rollbar about app deployments
    (see https://rollbar.com/docs/deploys_other/)
options:
  token:
    description:
      - Your project access token.
    required: true
  environment:
    description:
      - Name of the environment being deployed, e.g. 'production'.
    required: true
  revision:
    description:
      - Revision number/sha being deployed.
    required: true
  user:
    description:
      - User who deployed.
    required: false
  rollbar_user:
    description:
      - Rollbar username of the user who deployed.
    required: false
  comment:
    description:
      - Deploy comment (e.g. what is being deployed).
    required: false
  url:
    description:
      - Optional URL to submit the notification to.
    required: false
    default: 'https://api.rollbar.com/api/1/deploy/'
  validate_certs:
    description:
      - If C(no), SSL certificates for the target url will not be validated.
        This should only be used on personally controlled sites using
        self-signed certificates.
    required: false
    default: 'yes'
    choices: ['yes', 'no']
'''

EXAMPLES = '''
- rollbar_deployment:
    token: AAAAAA
    environment: staging
    user: ansible
    revision: '4.2'
    rollbar_user: admin
    comment: Test Deploy
'''

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.six.moves.urllib.parse import urlencode
from ansible.module_utils.urls import fetch_url


def main():
    """Post a deploy notification to the Rollbar deploy API.

    Required module params (token/environment/revision) always go into the
    request; the optional ones are added only when the user supplied them.
    In check mode the module reports ``changed`` without contacting Rollbar.
    """
    module = AnsibleModule(
        argument_spec=dict(
            token=dict(required=True),
            environment=dict(required=True),
            revision=dict(required=True),
            user=dict(required=False),
            rollbar_user=dict(required=False),
            comment=dict(required=False),
            url=dict(
                required=False,
                default='https://api.rollbar.com/api/1/deploy/'
            ),
            validate_certs=dict(default='yes', type='bool'),
        ),
        supports_check_mode=True
    )

    # Nothing to actually do in check mode -- just claim a change.
    if module.check_mode:
        module.exit_json(changed=True)

    # Mandatory fields of the Rollbar deploy payload.
    payload = {
        'access_token': module.params['token'],
        'environment': module.params['environment'],
        'revision': module.params['revision'],
    }

    # Optional fields: (module param name, Rollbar API field name).
    # Insertion order matches the original hand-written sequence.
    for opt_name, api_name in (('user', 'local_username'),
                               ('rollbar_user', 'rollbar_username'),
                               ('comment', 'comment')):
        if module.params[opt_name]:
            payload[api_name] = module.params[opt_name]

    url = module.params.get('url')

    try:
        encoded = urlencode(payload)
        response, info = fetch_url(module, url, data=encoded)
    except Exception:
        e = get_exception()
        module.fail_json(msg='Unable to notify Rollbar: %s' % e)
    else:
        if info['status'] == 200:
            module.exit_json(changed=True)
        else:
            module.fail_json(msg='HTTP result code: %d connecting to %s' % (info['status'], url))

if __name__ == '__main__':
    main()
gpl-3.0
SiccarPoint/numpy
numpy/lib/__init__.py
71
1189
# numpy/lib/__init__.py -- package initializer.
# Assembles the flat public ``numpy.lib`` namespace by star-importing the
# public names of each submodule and concatenating their ``__all__`` lists.
from __future__ import division, absolute_import, print_function

import math

# Package docstring is maintained in info.py; version comes from the build.
from .info import __doc__
from numpy.version import version as __version__

# Re-export every submodule's public API into this namespace.
from .type_check import *
from .index_tricks import *
from .function_base import *
from .nanfunctions import *
from .shape_base import *
from .stride_tricks import *
from .twodim_base import *
from .ufunclike import *

from . import scimath as emath
from .polynomial import *
#import convertcode
from .utils import *
from .arraysetops import *
from .npyio import *
from .financial import *
from .arrayterator import Arrayterator
from .arraypad import *
from ._version import *

# Build the aggregate __all__ from the submodules' own __all__ lists so the
# flat namespace stays in sync with what each submodule exports.
__all__ = ['emath', 'math']
__all__ += type_check.__all__
__all__ += index_tricks.__all__
__all__ += function_base.__all__
__all__ += shape_base.__all__
__all__ += stride_tricks.__all__
__all__ += twodim_base.__all__
__all__ += ufunclike.__all__
__all__ += arraypad.__all__
__all__ += polynomial.__all__
__all__ += utils.__all__
__all__ += arraysetops.__all__
__all__ += npyio.__all__
__all__ += financial.__all__
__all__ += nanfunctions.__all__

# Expose nose-based test/bench entry points (numpy.lib.test(), .bench()).
from numpy.testing.nosetester import _numpy_tester
test = _numpy_tester().test
bench = _numpy_tester().bench
bsd-3-clause
dwillis/django-calaccess-raw-data
calaccess_raw/admin/__init__.py
29
4140
from calaccess_raw.admin.base import BaseAdmin from calaccess_raw.admin.campaign import ( CvrSoCdAdmin, Cvr2SoCdAdmin, CvrCampaignDisclosureCdAdmin, Cvr2CampaignDisclosureCdAdmin, RcptCdAdmin, Cvr3VerificationInfoCdAdmin, LoanCdAdmin, S401CdAdmin, ExpnCdAdmin, F495P2CdAdmin, DebtCdAdmin, S496CdAdmin, SpltCdAdmin, S497CdAdmin, F501502CdAdmin, S498CdAdmin, ) from calaccess_raw.admin.lobbying import ( CvrRegistrationCdAdmin, Cvr2RegistrationCdAdmin, CvrLobbyDisclosureCdAdmin, Cvr2LobbyDisclosureCdAdmin, LobbyAmendmentsCdAdmin, F690P2CdAdmin, LattCdAdmin, LexpCdAdmin, LccmCdAdmin, LothCdAdmin, LempCdAdmin, LpayCdAdmin, ) from calaccess_raw.admin.common import ( FilernameCdAdmin, FilerFilingsCdAdmin, FilingsCdAdmin, SmryCdAdmin, CvrE530CdAdmin, TextMemoCdAdmin, ) from calaccess_raw.admin.other import ( AcronymsCdAdmin, AddressCdAdmin, BallotMeasuresCdAdmin, EfsFilingLogCdAdmin, FilersCdAdmin, FilerAcronymsCdAdmin, FilerAddressCdAdmin, FilerEthicsClassCdAdmin, FilerInterestsCdAdmin, FilerLinksCdAdmin, FilerStatusTypesCdAdmin, FilerToFilerTypeCdAdmin, FilerTypesCdAdmin, FilerXrefCdAdmin, FilingPeriodCdAdmin, GroupTypesCdAdmin, HeaderCdAdmin, HdrCdAdmin, ImageLinksCdAdmin, LegislativeSessionsCdAdmin, LobbyingChgLogCdAdmin, LobbyistContributions1CdAdmin, LobbyistContributions2CdAdmin, LobbyistContributions3CdAdmin, LobbyistEmployer1CdAdmin, LobbyistEmployer2CdAdmin, LobbyistEmployer3CdAdmin, LobbyistEmployerFirms1CdAdmin, LobbyistEmployerFirms2CdAdmin, LobbyistEmpLobbyist1CdAdmin, LobbyistEmpLobbyist2CdAdmin, LobbyistFirm1CdAdmin, LobbyistFirm2CdAdmin, LobbyistFirm3CdAdmin, LobbyistFirmEmployer1CdAdmin, LobbyistFirmEmployer2CdAdmin, LobbyistFirmLobbyist1CdAdmin, LobbyistFirmLobbyist2CdAdmin, LookupCodeAdmin, NamesCdAdmin, ReceivedFilingsCdAdmin, ReportsCdAdmin, ) __all__ = [ 'BaseAdmin', 'CvrSoCdAdmin', 'Cvr2SoCdAdmin', 'CvrCampaignDisclosureCdAdmin', 'Cvr2CampaignDisclosureCdAdmin', 'RcptCdAdmin', 'Cvr3VerificationInfoCdAdmin', 'LoanCdAdmin', 'S401CdAdmin', 'ExpnCdAdmin', 
'F495P2CdAdmin', 'DebtCdAdmin', 'S496CdAdmin', 'SpltCdAdmin', 'S497CdAdmin', 'F501502CdAdmin', 'S498CdAdmin', 'CvrRegistrationCdAdmin', 'Cvr2RegistrationCdAdmin', 'CvrLobbyDisclosureCdAdmin', 'Cvr2LobbyDisclosureCdAdmin', 'LobbyAmendmentsCdAdmin', 'F690P2CdAdmin', 'LattCdAdmin', 'LexpCdAdmin', 'LccmCdAdmin', 'LothCdAdmin', 'LempCdAdmin', 'LpayCdAdmin', 'FilerFilingsCdAdmin', 'FilingsCdAdmin', 'SmryCdAdmin', 'CvrE530CdAdmin', 'TextMemoCdAdmin', 'AcronymsCdAdmin', 'AddressCdAdmin', 'BallotMeasuresCdAdmin', 'EfsFilingLogCdAdmin', 'FilernameCdAdmin', 'FilersCdAdmin', 'FilerAcronymsCdAdmin', 'FilerAddressCdAdmin', 'FilerEthicsClassCdAdmin', 'FilerInterestsCdAdmin', 'FilerLinksCdAdmin', 'FilerStatusTypesCdAdmin', 'FilerToFilerTypeCdAdmin', 'FilerTypesCdAdmin', 'FilerXrefCdAdmin', 'FilingPeriodCdAdmin', 'GroupTypesCdAdmin', 'HeaderCdAdmin', 'HdrCdAdmin', 'ImageLinksCdAdmin', 'LegislativeSessionsCdAdmin', 'LobbyingChgLogCdAdmin', 'LobbyistContributions1CdAdmin', 'LobbyistContributions2CdAdmin', 'LobbyistContributions3CdAdmin', 'LobbyistEmployer1CdAdmin', 'LobbyistEmployer2CdAdmin', 'LobbyistEmployer3CdAdmin', 'LobbyistEmployerFirms1CdAdmin', 'LobbyistEmployerFirms2CdAdmin', 'LobbyistEmpLobbyist1CdAdmin', 'LobbyistEmpLobbyist2CdAdmin', 'LobbyistFirm1CdAdmin', 'LobbyistFirm2CdAdmin', 'LobbyistFirm3CdAdmin', 'LobbyistFirmEmployer1CdAdmin', 'LobbyistFirmEmployer2CdAdmin', 'LobbyistFirmLobbyist1CdAdmin', 'LobbyistFirmLobbyist2CdAdmin', 'LookupCodeAdmin', 'NamesCdAdmin', 'ReceivedFilingsCdAdmin', 'ReportsCdAdmin', ]
mit
12AngryMen/votca-scripts
cluster/collectjobs.py
2
3419
#!bin/python import os import re import shutil import subprocess username="wehnerj" sortstring="8PNPO12" #sortstring=None apename="APE_ISO" workground="WORKGROUND" class cd: """Context manager for changing the current working directory""" def __init__(self, newPath): self.newPath = os.path.expanduser(newPath) def __enter__(self): self.savedPath = os.getcwd() os.chdir(self.newPath) def __exit__(self, etype, value, traceback): os.chdir(self.savedPath) def readsubmitscript(filename,sortstring): path=None with open(filename,"r") as f: lines=f.readlines() lines=" ".join(lines) result = re.search("-f .+system.sql", lines) temp=result.group(0) if (sortstring!=None and sortstring in temp) or sortstring==None: path=temp.split()[1] path=os.path.dirname(path) else: print temp,sortstring return path def checkjobfile(filename): number=run_command("cat {} | grep COMPLETE | wc -l".format(filename)) for num in number: #print num number=num return number def run_command(command): p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,shell=True) return iter(p.stdout.readline, b'') jobs=0 for i in range(300): thinc="thinc{:0>3d}".format(i) path="/scratch/{}/{}".format(thinc,username) if os.path.isdir(path): for root, dirs, files in os.walk(path): print "Entering {}".format(root) print files for filename in files: #files=" ".join(files) result = re.search("ctp_\d\d.log", filename) if result!=None: with cd(root): print result.group(0) number=int(re.search('\d+',result.group(0)).group(0)) print "Log found with number {}".format(number) submitscript="ctp_batch_{:0>2d}.sh".format(number) jobpath=readsubmitscript(submitscript,sortstring) if jobpath==None: print "Jobpath does not match searchstring" continue jobfile="jobs.{}.xml".format(number) noofjobs=checkjobfile(jobfile) print "jobfile contains {} jobs".format(noofjobs) copypath=os.path.join(jobpath,workground) if os.path.isfile(os.path.join(copypath,jobfile)): 
noofjobsatdest=checkjobfile(os.path.join(copypath,jobfile)) if noofjobs<=noofjobsatdest: print "File at source contains {} jobs whereas the clusterfile contains {}. File {} is not copied".format(noofjobsatdest,noofjobs,jobfile) continue if os.path.isfile("jobs.{}.tab".format(number)): print "Copying {} from {} to {}".format(jobfile,root,copypath) rootpath=os.path.join(root,jobfile) shutil.copy(rootpath,copypath) jobs+=1 print "Copied {} files in total".format(jobs)
apache-2.0
Endika/django
django/contrib/sites/managers.py
4
2070
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.conf import settings
from django.core import checks
from django.core.exceptions import FieldDoesNotExist
from django.db import models


class CurrentSiteManager(models.Manager):
    """Manager that restricts querysets to objects tied to the current site.

    The site relation is found via ``field_name`` if given, otherwise by
    probing the model for a field called 'site', then 'sites'. Filtering is
    applied against ``settings.SITE_ID``.
    """
    use_in_migrations = True

    def __init__(self, field_name=None):
        super(CurrentSiteManager, self).__init__()
        # Lazily resolved in _get_field_name(); name-mangled on purpose.
        self.__field_name = field_name

    def check(self, **kwargs):
        # Combine the base manager checks with our own field-name check.
        base_errors = super(CurrentSiteManager, self).check(**kwargs)
        return base_errors + self._check_field_name()

    def _check_field_name(self):
        """Validate that the configured field exists and is a relation."""
        field_name = self._get_field_name()
        opts = self.model._meta
        try:
            field = opts.get_field(field_name)
        except FieldDoesNotExist:
            return [
                checks.Error(
                    "CurrentSiteManager could not find a field named '%s'." % field_name,
                    obj=self,
                    id='sites.E001',
                )
            ]
        if isinstance(field, (models.ForeignKey, models.ManyToManyField)):
            return []
        return [
            checks.Error(
                "CurrentSiteManager cannot use '%s.%s' as it is not a ForeignKey or ManyToManyField." % (
                    opts.object_name, field_name
                ),
                obj=self,
                id='sites.E002',
            )
        ]

    def _get_field_name(self):
        """
        Return self.__field_name or 'site' or 'sites'.
        """
        if not self.__field_name:
            # Prefer a 'site' field when the model has one; otherwise
            # fall back to the conventional M2M name 'sites'.
            name = 'site'
            try:
                self.model._meta.get_field(name)
            except FieldDoesNotExist:
                name = 'sites'
            self.__field_name = name
        return self.__field_name

    def get_queryset(self):
        base_qs = super(CurrentSiteManager, self).get_queryset()
        site_filter = {self._get_field_name() + '__id': settings.SITE_ID}
        return base_qs.filter(**site_filter)
bsd-3-clause
hobarrera/django
tests/test_utils/tests.py
18
41065
# -*- coding: utf-8 -*- from __future__ import unicode_literals import unittest import warnings from django.conf.urls import url from django.contrib.staticfiles.finders import get_finder, get_finders from django.contrib.staticfiles.storage import staticfiles_storage from django.core.files.storage import default_storage from django.db import connection, models, router from django.forms import EmailField, IntegerField from django.http import HttpResponse from django.template.loader import render_to_string from django.test import ( SimpleTestCase, TestCase, ignore_warnings, skipIfDBFeature, skipUnlessDBFeature, ) from django.test.html import HTMLParseError, parse_html from django.test.utils import ( CaptureQueriesContext, isolate_apps, override_settings, ) from django.urls import NoReverseMatch, reverse from django.utils import six from django.utils._os import abspathu from django.utils.deprecation import RemovedInDjango20Warning from .models import Car, Person, PossessedCar from .views import empty_response class SkippingTestCase(SimpleTestCase): def _assert_skipping(self, func, expected_exc): # We cannot simply use assertRaises because a SkipTest exception will go unnoticed try: func() except expected_exc: pass except Exception as e: self.fail("No %s exception should have been raised for %s." % ( e.__class__.__name__, func.__name__)) def test_skip_unless_db_feature(self): """ Testing the django.test.skipUnlessDBFeature decorator. """ # Total hack, but it works, just want an attribute that's always true. 
@skipUnlessDBFeature("__class__") def test_func(): raise ValueError @skipUnlessDBFeature("notprovided") def test_func2(): raise ValueError @skipUnlessDBFeature("__class__", "__class__") def test_func3(): raise ValueError @skipUnlessDBFeature("__class__", "notprovided") def test_func4(): raise ValueError self._assert_skipping(test_func, ValueError) self._assert_skipping(test_func2, unittest.SkipTest) self._assert_skipping(test_func3, ValueError) self._assert_skipping(test_func4, unittest.SkipTest) def test_skip_if_db_feature(self): """ Testing the django.test.skipIfDBFeature decorator. """ @skipIfDBFeature("__class__") def test_func(): raise ValueError @skipIfDBFeature("notprovided") def test_func2(): raise ValueError @skipIfDBFeature("__class__", "__class__") def test_func3(): raise ValueError @skipIfDBFeature("__class__", "notprovided") def test_func4(): raise ValueError @skipIfDBFeature("notprovided", "notprovided") def test_func5(): raise ValueError self._assert_skipping(test_func, unittest.SkipTest) self._assert_skipping(test_func2, ValueError) self._assert_skipping(test_func3, unittest.SkipTest) self._assert_skipping(test_func4, unittest.SkipTest) self._assert_skipping(test_func5, ValueError) class SkippingClassTestCase(SimpleTestCase): def test_skip_class_unless_db_feature(self): @skipUnlessDBFeature("__class__") class NotSkippedTests(unittest.TestCase): def test_dummy(self): return @skipIfDBFeature("__class__") class SkippedTests(unittest.TestCase): def test_will_be_skipped(self): self.fail("We should never arrive here.") test_suite = unittest.TestSuite() test_suite.addTest(NotSkippedTests('test_dummy')) try: test_suite.addTest(SkippedTests('test_will_be_skipped')) except unittest.SkipTest: self.fail("SkipTest should not be raised at this stage") result = unittest.TextTestRunner(stream=six.StringIO()).run(test_suite) self.assertEqual(result.testsRun, 2) self.assertEqual(len(result.skipped), 1) @override_settings(ROOT_URLCONF='test_utils.urls') class 
AssertNumQueriesTests(TestCase): def test_assert_num_queries(self): def test_func(): raise ValueError with self.assertRaises(ValueError): self.assertNumQueries(2, test_func) def test_assert_num_queries_with_client(self): person = Person.objects.create(name='test') self.assertNumQueries( 1, self.client.get, "/test_utils/get_person/%s/" % person.pk ) self.assertNumQueries( 1, self.client.get, "/test_utils/get_person/%s/" % person.pk ) def test_func(): self.client.get("/test_utils/get_person/%s/" % person.pk) self.client.get("/test_utils/get_person/%s/" % person.pk) self.assertNumQueries(2, test_func) class AssertQuerysetEqualTests(TestCase): def setUp(self): self.p1 = Person.objects.create(name='p1') self.p2 = Person.objects.create(name='p2') def test_ordered(self): self.assertQuerysetEqual( Person.objects.all().order_by('name'), [repr(self.p1), repr(self.p2)] ) def test_unordered(self): self.assertQuerysetEqual( Person.objects.all().order_by('name'), [repr(self.p2), repr(self.p1)], ordered=False ) def test_transform(self): self.assertQuerysetEqual( Person.objects.all().order_by('name'), [self.p1.pk, self.p2.pk], transform=lambda x: x.pk ) def test_undefined_order(self): # Using an unordered queryset with more than one ordered value # is an error. with self.assertRaises(ValueError): self.assertQuerysetEqual( Person.objects.all(), [repr(self.p1), repr(self.p2)] ) # No error for one value. self.assertQuerysetEqual( Person.objects.filter(name='p1'), [repr(self.p1)] ) def test_repeated_values(self): """ Test that assertQuerysetEqual checks the number of appearance of each item when used with option ordered=False. 
""" batmobile = Car.objects.create(name='Batmobile') k2000 = Car.objects.create(name='K 2000') PossessedCar.objects.bulk_create([ PossessedCar(car=batmobile, belongs_to=self.p1), PossessedCar(car=batmobile, belongs_to=self.p1), PossessedCar(car=k2000, belongs_to=self.p1), PossessedCar(car=k2000, belongs_to=self.p1), PossessedCar(car=k2000, belongs_to=self.p1), PossessedCar(car=k2000, belongs_to=self.p1), ]) with self.assertRaises(AssertionError): self.assertQuerysetEqual( self.p1.cars.all(), [repr(batmobile), repr(k2000)], ordered=False ) self.assertQuerysetEqual( self.p1.cars.all(), [repr(batmobile)] * 2 + [repr(k2000)] * 4, ordered=False ) @override_settings(ROOT_URLCONF='test_utils.urls') class CaptureQueriesContextManagerTests(TestCase): def setUp(self): self.person_pk = six.text_type(Person.objects.create(name='test').pk) def test_simple(self): with CaptureQueriesContext(connection) as captured_queries: Person.objects.get(pk=self.person_pk) self.assertEqual(len(captured_queries), 1) self.assertIn(self.person_pk, captured_queries[0]['sql']) with CaptureQueriesContext(connection) as captured_queries: pass self.assertEqual(0, len(captured_queries)) def test_within(self): with CaptureQueriesContext(connection) as captured_queries: Person.objects.get(pk=self.person_pk) self.assertEqual(len(captured_queries), 1) self.assertIn(self.person_pk, captured_queries[0]['sql']) def test_nested(self): with CaptureQueriesContext(connection) as captured_queries: Person.objects.count() with CaptureQueriesContext(connection) as nested_captured_queries: Person.objects.count() self.assertEqual(1, len(nested_captured_queries)) self.assertEqual(2, len(captured_queries)) def test_failure(self): with self.assertRaises(TypeError): with CaptureQueriesContext(connection): raise TypeError def test_with_client(self): with CaptureQueriesContext(connection) as captured_queries: self.client.get("/test_utils/get_person/%s/" % self.person_pk) self.assertEqual(len(captured_queries), 1) 
self.assertIn(self.person_pk, captured_queries[0]['sql']) with CaptureQueriesContext(connection) as captured_queries: self.client.get("/test_utils/get_person/%s/" % self.person_pk) self.assertEqual(len(captured_queries), 1) self.assertIn(self.person_pk, captured_queries[0]['sql']) with CaptureQueriesContext(connection) as captured_queries: self.client.get("/test_utils/get_person/%s/" % self.person_pk) self.client.get("/test_utils/get_person/%s/" % self.person_pk) self.assertEqual(len(captured_queries), 2) self.assertIn(self.person_pk, captured_queries[0]['sql']) self.assertIn(self.person_pk, captured_queries[1]['sql']) @override_settings(ROOT_URLCONF='test_utils.urls') class AssertNumQueriesContextManagerTests(TestCase): def test_simple(self): with self.assertNumQueries(0): pass with self.assertNumQueries(1): Person.objects.count() with self.assertNumQueries(2): Person.objects.count() Person.objects.count() def test_failure(self): with self.assertRaises(AssertionError) as exc_info: with self.assertNumQueries(2): Person.objects.count() self.assertIn("1 queries executed, 2 expected", str(exc_info.exception)) self.assertIn("Captured queries were", str(exc_info.exception)) with self.assertRaises(TypeError): with self.assertNumQueries(4000): raise TypeError def test_with_client(self): person = Person.objects.create(name="test") with self.assertNumQueries(1): self.client.get("/test_utils/get_person/%s/" % person.pk) with self.assertNumQueries(1): self.client.get("/test_utils/get_person/%s/" % person.pk) with self.assertNumQueries(2): self.client.get("/test_utils/get_person/%s/" % person.pk) self.client.get("/test_utils/get_person/%s/" % person.pk) @override_settings(ROOT_URLCONF='test_utils.urls') class AssertTemplateUsedContextManagerTests(SimpleTestCase): def test_usage(self): with self.assertTemplateUsed('template_used/base.html'): render_to_string('template_used/base.html') with self.assertTemplateUsed(template_name='template_used/base.html'): 
render_to_string('template_used/base.html') with self.assertTemplateUsed('template_used/base.html'): render_to_string('template_used/include.html') with self.assertTemplateUsed('template_used/base.html'): render_to_string('template_used/extends.html') with self.assertTemplateUsed('template_used/base.html'): render_to_string('template_used/base.html') render_to_string('template_used/base.html') def test_nested_usage(self): with self.assertTemplateUsed('template_used/base.html'): with self.assertTemplateUsed('template_used/include.html'): render_to_string('template_used/include.html') with self.assertTemplateUsed('template_used/extends.html'): with self.assertTemplateUsed('template_used/base.html'): render_to_string('template_used/extends.html') with self.assertTemplateUsed('template_used/base.html'): with self.assertTemplateUsed('template_used/alternative.html'): render_to_string('template_used/alternative.html') render_to_string('template_used/base.html') with self.assertTemplateUsed('template_used/base.html'): render_to_string('template_used/extends.html') with self.assertTemplateNotUsed('template_used/base.html'): render_to_string('template_used/alternative.html') render_to_string('template_used/base.html') def test_not_used(self): with self.assertTemplateNotUsed('template_used/base.html'): pass with self.assertTemplateNotUsed('template_used/alternative.html'): pass def test_error_message(self): with six.assertRaisesRegex(self, AssertionError, r'^template_used/base\.html'): with self.assertTemplateUsed('template_used/base.html'): pass with six.assertRaisesRegex(self, AssertionError, r'^template_used/base\.html'): with self.assertTemplateUsed(template_name='template_used/base.html'): pass with six.assertRaisesRegex( self, AssertionError, r'^template_used/base\.html.*template_used/alternative\.html$'): with self.assertTemplateUsed('template_used/base.html'): render_to_string('template_used/alternative.html') with self.assertRaises(AssertionError) as cm: response = 
self.client.get('/test_utils/no_template_used/') self.assertTemplateUsed(response, 'template_used/base.html') self.assertEqual(cm.exception.args[0], "No templates used to render the response") def test_failure(self): with self.assertRaises(TypeError): with self.assertTemplateUsed(): pass with self.assertRaises(AssertionError): with self.assertTemplateUsed(''): pass with self.assertRaises(AssertionError): with self.assertTemplateUsed(''): render_to_string('template_used/base.html') with self.assertRaises(AssertionError): with self.assertTemplateUsed(template_name=''): pass with self.assertRaises(AssertionError): with self.assertTemplateUsed('template_used/base.html'): render_to_string('template_used/alternative.html') def test_assert_used_on_http_response(self): response = HttpResponse() error_msg = ( 'assertTemplateUsed() and assertTemplateNotUsed() are only ' 'usable on responses fetched using the Django test Client.' ) with self.assertRaisesMessage(ValueError, error_msg): self.assertTemplateUsed(response, 'template.html') with self.assertRaisesMessage(ValueError, error_msg): self.assertTemplateNotUsed(response, 'template.html') class HTMLEqualTests(SimpleTestCase): def test_html_parser(self): element = parse_html('<div><p>Hello</p></div>') self.assertEqual(len(element.children), 1) self.assertEqual(element.children[0].name, 'p') self.assertEqual(element.children[0].children[0], 'Hello') parse_html('<p>') parse_html('<p attr>') dom = parse_html('<p>foo') self.assertEqual(len(dom.children), 1) self.assertEqual(dom.name, 'p') self.assertEqual(dom[0], 'foo') def test_parse_html_in_script(self): parse_html('<script>var a = "<p" + ">";</script>') parse_html(''' <script> var js_sha_link='<p>***</p>'; </script> ''') # script content will be parsed to text dom = parse_html(''' <script><p>foo</p> '</scr'+'ipt>' <span>bar</span></script> ''') self.assertEqual(len(dom.children), 1) self.assertEqual(dom.children[0], "<p>foo</p> '</scr'+'ipt>' <span>bar</span>") def 
test_self_closing_tags(self): self_closing_tags = ( 'br', 'hr', 'input', 'img', 'meta', 'spacer', 'link', 'frame', 'base', 'col', ) for tag in self_closing_tags: dom = parse_html('<p>Hello <%s> world</p>' % tag) self.assertEqual(len(dom.children), 3) self.assertEqual(dom[0], 'Hello') self.assertEqual(dom[1].name, tag) self.assertEqual(dom[2], 'world') dom = parse_html('<p>Hello <%s /> world</p>' % tag) self.assertEqual(len(dom.children), 3) self.assertEqual(dom[0], 'Hello') self.assertEqual(dom[1].name, tag) self.assertEqual(dom[2], 'world') def test_simple_equal_html(self): self.assertHTMLEqual('', '') self.assertHTMLEqual('<p></p>', '<p></p>') self.assertHTMLEqual('<p></p>', ' <p> </p> ') self.assertHTMLEqual( '<div><p>Hello</p></div>', '<div><p>Hello</p></div>') self.assertHTMLEqual( '<div><p>Hello</p></div>', '<div> <p>Hello</p> </div>') self.assertHTMLEqual( '<div>\n<p>Hello</p></div>', '<div><p>Hello</p></div>\n') self.assertHTMLEqual( '<div><p>Hello\nWorld !</p></div>', '<div><p>Hello World\n!</p></div>') self.assertHTMLEqual( '<div><p>Hello\nWorld !</p></div>', '<div><p>Hello World\n!</p></div>') self.assertHTMLEqual( '<p>Hello World !</p>', '<p>Hello World\n\n!</p>') self.assertHTMLEqual('<p> </p>', '<p></p>') self.assertHTMLEqual('<p/>', '<p></p>') self.assertHTMLEqual('<p />', '<p></p>') self.assertHTMLEqual('<input checked>', '<input checked="checked">') self.assertHTMLEqual('<p>Hello', '<p> Hello') self.assertHTMLEqual('<p>Hello</p>World', '<p>Hello</p> World') def test_ignore_comments(self): self.assertHTMLEqual( '<div>Hello<!-- this is a comment --> World!</div>', '<div>Hello World!</div>') def test_unequal_html(self): self.assertHTMLNotEqual('<p>Hello</p>', '<p>Hello!</p>') self.assertHTMLNotEqual('<p>foo&#20;bar</p>', '<p>foo&nbsp;bar</p>') self.assertHTMLNotEqual('<p>foo bar</p>', '<p>foo &nbsp;bar</p>') self.assertHTMLNotEqual('<p>foo nbsp</p>', '<p>foo &nbsp;</p>') self.assertHTMLNotEqual('<p>foo #20</p>', '<p>foo &#20;</p>') 
self.assertHTMLNotEqual( '<p><span>Hello</span><span>World</span></p>', '<p><span>Hello</span>World</p>') self.assertHTMLNotEqual( '<p><span>Hello</span>World</p>', '<p><span>Hello</span><span>World</span></p>') def test_attributes(self): self.assertHTMLEqual( '<input type="text" id="id_name" />', '<input id="id_name" type="text" />') self.assertHTMLEqual( '''<input type='text' id="id_name" />''', '<input id="id_name" type="text" />') self.assertHTMLNotEqual( '<input type="text" id="id_name" />', '<input type="password" id="id_name" />') def test_complex_examples(self): self.assertHTMLEqual( """<tr><th><label for="id_first_name">First name:</label></th> <td><input type="text" name="first_name" value="John" id="id_first_name" /></td></tr> <tr><th><label for="id_last_name">Last name:</label></th> <td><input type="text" id="id_last_name" name="last_name" value="Lennon" /></td></tr> <tr><th><label for="id_birthday">Birthday:</label></th> <td><input type="text" value="1940-10-9" name="birthday" id="id_birthday" /></td></tr>""", """ <tr><th> <label for="id_first_name">First name:</label></th><td> <input type="text" name="first_name" value="John" id="id_first_name" /> </td></tr> <tr><th> <label for="id_last_name">Last name:</label></th><td> <input type="text" name="last_name" value="Lennon" id="id_last_name" /> </td></tr> <tr><th> <label for="id_birthday">Birthday:</label></th><td> <input type="text" name="birthday" value="1940-10-9" id="id_birthday" /> </td></tr> """) self.assertHTMLEqual( """<!DOCTYPE html> <html> <head> <link rel="stylesheet"> <title>Document</title> <meta attribute="value"> </head> <body> <p> This is a valid paragraph <div> this is a div AFTER the p</div> </body> </html>""", """ <html> <head> <link rel="stylesheet"> <title>Document</title> <meta attribute="value"> </head> <body> <p> This is a valid paragraph <!-- browsers would close the p tag here --> <div> this is a div AFTER the p</div> </p> <!-- this is invalid HTML parsing, but it should make no 
difference in most cases --> </body> </html>""") def test_html_contain(self): # equal html contains each other dom1 = parse_html('<p>foo') dom2 = parse_html('<p>foo</p>') self.assertIn(dom1, dom2) self.assertIn(dom2, dom1) dom2 = parse_html('<div><p>foo</p></div>') self.assertIn(dom1, dom2) self.assertNotIn(dom2, dom1) self.assertNotIn('<p>foo</p>', dom2) self.assertIn('foo', dom2) # when a root element is used ... dom1 = parse_html('<p>foo</p><p>bar</p>') dom2 = parse_html('<p>foo</p><p>bar</p>') self.assertIn(dom1, dom2) dom1 = parse_html('<p>foo</p>') self.assertIn(dom1, dom2) dom1 = parse_html('<p>bar</p>') self.assertIn(dom1, dom2) def test_count(self): # equal html contains each other one time dom1 = parse_html('<p>foo') dom2 = parse_html('<p>foo</p>') self.assertEqual(dom1.count(dom2), 1) self.assertEqual(dom2.count(dom1), 1) dom2 = parse_html('<p>foo</p><p>bar</p>') self.assertEqual(dom2.count(dom1), 1) dom2 = parse_html('<p>foo foo</p><p>foo</p>') self.assertEqual(dom2.count('foo'), 3) dom2 = parse_html('<p class="bar">foo</p>') self.assertEqual(dom2.count('bar'), 0) self.assertEqual(dom2.count('class'), 0) self.assertEqual(dom2.count('p'), 0) self.assertEqual(dom2.count('o'), 2) dom2 = parse_html('<p>foo</p><p>foo</p>') self.assertEqual(dom2.count(dom1), 2) dom2 = parse_html('<div><p>foo<input type=""></p><p>foo</p></div>') self.assertEqual(dom2.count(dom1), 1) dom2 = parse_html('<div><div><p>foo</p></div></div>') self.assertEqual(dom2.count(dom1), 1) dom2 = parse_html('<p>foo<p>foo</p></p>') self.assertEqual(dom2.count(dom1), 1) dom2 = parse_html('<p>foo<p>bar</p></p>') self.assertEqual(dom2.count(dom1), 0) def test_parsing_errors(self): with self.assertRaises(AssertionError): self.assertHTMLEqual('<p>', '') with self.assertRaises(AssertionError): self.assertHTMLEqual('', '<p>') with self.assertRaises(HTMLParseError): parse_html('</p>') def test_contains_html(self): response = HttpResponse('''<body> This is a form: <form action="" method="get"> <input 
type="text" name="Hello" /> </form></body>''') self.assertNotContains(response, "<input name='Hello' type='text'>") self.assertContains(response, '<form action="" method="get">') self.assertContains(response, "<input name='Hello' type='text'>", html=True) self.assertNotContains(response, '<form action="" method="get">', html=True) invalid_response = HttpResponse('''<body <bad>>''') with self.assertRaises(AssertionError): self.assertContains(invalid_response, '<p></p>') with self.assertRaises(AssertionError): self.assertContains(response, '<p "whats" that>') def test_unicode_handling(self): response = HttpResponse('<p class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</p>') self.assertContains( response, '<p class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</p>', html=True ) class JSONEqualTests(SimpleTestCase): def test_simple_equal(self): json1 = '{"attr1": "foo", "attr2":"baz"}' json2 = '{"attr1": "foo", "attr2":"baz"}' self.assertJSONEqual(json1, json2) def test_simple_equal_unordered(self): json1 = '{"attr1": "foo", "attr2":"baz"}' json2 = '{"attr2":"baz", "attr1": "foo"}' self.assertJSONEqual(json1, json2) def test_simple_equal_raise(self): json1 = '{"attr1": "foo", "attr2":"baz"}' json2 = '{"attr2":"baz"}' with self.assertRaises(AssertionError): self.assertJSONEqual(json1, json2) def test_equal_parsing_errors(self): invalid_json = '{"attr1": "foo, "attr2":"baz"}' valid_json = '{"attr1": "foo", "attr2":"baz"}' with self.assertRaises(AssertionError): self.assertJSONEqual(invalid_json, valid_json) with self.assertRaises(AssertionError): self.assertJSONEqual(valid_json, invalid_json) def test_simple_not_equal(self): json1 = '{"attr1": "foo", "attr2":"baz"}' json2 = '{"attr2":"baz"}' self.assertJSONNotEqual(json1, json2) def test_simple_not_equal_raise(self): json1 = '{"attr1": "foo", "attr2":"baz"}' json2 = '{"attr1": "foo", "attr2":"baz"}' with self.assertRaises(AssertionError): self.assertJSONNotEqual(json1, json2) def 
test_not_equal_parsing_errors(self): invalid_json = '{"attr1": "foo, "attr2":"baz"}' valid_json = '{"attr1": "foo", "attr2":"baz"}' with self.assertRaises(AssertionError): self.assertJSONNotEqual(invalid_json, valid_json) with self.assertRaises(AssertionError): self.assertJSONNotEqual(valid_json, invalid_json) class XMLEqualTests(SimpleTestCase): def test_simple_equal(self): xml1 = "<elem attr1='a' attr2='b' />" xml2 = "<elem attr1='a' attr2='b' />" self.assertXMLEqual(xml1, xml2) def test_simple_equal_unordered(self): xml1 = "<elem attr1='a' attr2='b' />" xml2 = "<elem attr2='b' attr1='a' />" self.assertXMLEqual(xml1, xml2) def test_simple_equal_raise(self): xml1 = "<elem attr1='a' />" xml2 = "<elem attr2='b' attr1='a' />" with self.assertRaises(AssertionError): self.assertXMLEqual(xml1, xml2) def test_simple_equal_raises_message(self): xml1 = "<elem attr1='a' />" xml2 = "<elem attr2='b' attr1='a' />" msg = '''{xml1} != {xml2} - <elem attr1='a' /> + <elem attr2='b' attr1='a' /> ? ++++++++++ '''.format(xml1=repr(xml1), xml2=repr(xml2)) with self.assertRaisesMessage(AssertionError, msg): self.assertXMLEqual(xml1, xml2) def test_simple_not_equal(self): xml1 = "<elem attr1='a' attr2='c' />" xml2 = "<elem attr1='a' attr2='b' />" self.assertXMLNotEqual(xml1, xml2) def test_simple_not_equal_raise(self): xml1 = "<elem attr1='a' attr2='b' />" xml2 = "<elem attr2='b' attr1='a' />" with self.assertRaises(AssertionError): self.assertXMLNotEqual(xml1, xml2) def test_parsing_errors(self): xml_unvalid = "<elem attr1='a attr2='b' />" xml2 = "<elem attr2='b' attr1='a' />" with self.assertRaises(AssertionError): self.assertXMLNotEqual(xml_unvalid, xml2) def test_comment_root(self): xml1 = "<?xml version='1.0'?><!-- comment1 --><elem attr1='a' attr2='b' />" xml2 = "<?xml version='1.0'?><!-- comment2 --><elem attr2='b' attr1='a' />" self.assertXMLEqual(xml1, xml2) def test_simple_equal_with_leading_or_trailing_whitespace(self): xml1 = "<elem>foo</elem> \t\n" xml2 = " 
\t\n<elem>foo</elem>" self.assertXMLEqual(xml1, xml2) def test_simple_not_equal_with_whitespace_in_the_middle(self): xml1 = "<elem>foo</elem><elem>bar</elem>" xml2 = "<elem>foo</elem> <elem>bar</elem>" self.assertXMLNotEqual(xml1, xml2) class SkippingExtraTests(TestCase): fixtures = ['should_not_be_loaded.json'] # HACK: This depends on internals of our TestCase subclasses def __call__(self, result=None): # Detect fixture loading by counting SQL queries, should be zero with self.assertNumQueries(0): super(SkippingExtraTests, self).__call__(result) @unittest.skip("Fixture loading should not be performed for skipped tests.") def test_fixtures_are_skipped(self): pass class AssertRaisesMsgTest(SimpleTestCase): def test_assert_raises_message(self): msg = "'Expected message' not found in 'Unexpected message'" # context manager form of assertRaisesMessage() with self.assertRaisesMessage(AssertionError, msg): with self.assertRaisesMessage(ValueError, "Expected message"): raise ValueError("Unexpected message") # callable form def func(): raise ValueError("Unexpected message") with self.assertRaisesMessage(AssertionError, msg): self.assertRaisesMessage(ValueError, "Expected message", func) def test_special_re_chars(self): """assertRaisesMessage shouldn't interpret RE special chars.""" def func1(): raise ValueError("[.*x+]y?") with self.assertRaisesMessage(ValueError, "[.*x+]y?"): func1() @ignore_warnings(category=RemovedInDjango20Warning) def test_callable_obj_param(self): # callable_obj was a documented kwarg in Django 1.8 and older. def func1(): raise ValueError("[.*x+]y?") with warnings.catch_warnings(record=True) as warns: warnings.simplefilter('always') self.assertRaisesMessage(ValueError, "[.*x+]y?", callable_obj=func1) self.assertEqual(len(warns), 1) self.assertEqual( str(warns[0].message), 'The callable_obj kwarg is deprecated. Pass the callable ' 'as a positional argument instead.' 
) class AssertFieldOutputTests(SimpleTestCase): def test_assert_field_output(self): error_invalid = ['Enter a valid email address.'] self.assertFieldOutput(EmailField, {'a@a.com': 'a@a.com'}, {'aaa': error_invalid}) with self.assertRaises(AssertionError): self.assertFieldOutput(EmailField, {'a@a.com': 'a@a.com'}, {'aaa': error_invalid + ['Another error']}) with self.assertRaises(AssertionError): self.assertFieldOutput(EmailField, {'a@a.com': 'Wrong output'}, {'aaa': error_invalid}) with self.assertRaises(AssertionError): self.assertFieldOutput( EmailField, {'a@a.com': 'a@a.com'}, {'aaa': ['Come on, gimme some well formatted data, dude.']} ) def test_custom_required_message(self): class MyCustomField(IntegerField): default_error_messages = { 'required': 'This is really required.', } self.assertFieldOutput(MyCustomField, {}, {}, empty_value=None) class FirstUrls: urlpatterns = [url(r'first/$', empty_response, name='first')] class SecondUrls: urlpatterns = [url(r'second/$', empty_response, name='second')] class OverrideSettingsTests(SimpleTestCase): # #21518 -- If neither override_settings nor a setting_changed receiver # clears the URL cache between tests, then one of test_first or # test_second will fail. 
@override_settings(ROOT_URLCONF=FirstUrls) def test_urlconf_first(self): reverse('first') @override_settings(ROOT_URLCONF=SecondUrls) def test_urlconf_second(self): reverse('second') def test_urlconf_cache(self): with self.assertRaises(NoReverseMatch): reverse('first') with self.assertRaises(NoReverseMatch): reverse('second') with override_settings(ROOT_URLCONF=FirstUrls): self.client.get(reverse('first')) with self.assertRaises(NoReverseMatch): reverse('second') with override_settings(ROOT_URLCONF=SecondUrls): with self.assertRaises(NoReverseMatch): reverse('first') self.client.get(reverse('second')) self.client.get(reverse('first')) with self.assertRaises(NoReverseMatch): reverse('second') with self.assertRaises(NoReverseMatch): reverse('first') with self.assertRaises(NoReverseMatch): reverse('second') def test_override_media_root(self): """ Overriding the MEDIA_ROOT setting should be reflected in the base_location attribute of django.core.files.storage.default_storage. """ self.assertEqual(default_storage.base_location, '') with self.settings(MEDIA_ROOT='test_value'): self.assertEqual(default_storage.base_location, 'test_value') def test_override_media_url(self): """ Overriding the MEDIA_URL setting should be reflected in the base_url attribute of django.core.files.storage.default_storage. """ self.assertEqual(default_storage.base_location, '') with self.settings(MEDIA_URL='/test_value/'): self.assertEqual(default_storage.base_url, '/test_value/') def test_override_file_upload_permissions(self): """ Overriding the FILE_UPLOAD_PERMISSIONS setting should be reflected in the file_permissions_mode attribute of django.core.files.storage.default_storage. 
""" self.assertIsNone(default_storage.file_permissions_mode) with self.settings(FILE_UPLOAD_PERMISSIONS=0o777): self.assertEqual(default_storage.file_permissions_mode, 0o777) def test_override_file_upload_directory_permissions(self): """ Overriding the FILE_UPLOAD_DIRECTORY_PERMISSIONS setting should be reflected in the directory_permissions_mode attribute of django.core.files.storage.default_storage. """ self.assertIsNone(default_storage.directory_permissions_mode) with self.settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o777): self.assertEqual(default_storage.directory_permissions_mode, 0o777) def test_override_database_routers(self): """ Overriding DATABASE_ROUTERS should update the master router. """ test_routers = (object(),) with self.settings(DATABASE_ROUTERS=test_routers): self.assertSequenceEqual(router.routers, test_routers) def test_override_static_url(self): """ Overriding the STATIC_URL setting should be reflected in the base_url attribute of django.contrib.staticfiles.storage.staticfiles_storage. """ with self.settings(STATIC_URL='/test/'): self.assertEqual(staticfiles_storage.base_url, '/test/') def test_override_static_root(self): """ Overriding the STATIC_ROOT setting should be reflected in the location attribute of django.contrib.staticfiles.storage.staticfiles_storage. """ with self.settings(STATIC_ROOT='/tmp/test'): self.assertEqual(staticfiles_storage.location, abspathu('/tmp/test')) def test_override_staticfiles_storage(self): """ Overriding the STATICFILES_STORAGE setting should be reflected in the value of django.contrib.staticfiles.storage.staticfiles_storage. """ new_class = 'CachedStaticFilesStorage' new_storage = 'django.contrib.staticfiles.storage.' 
+ new_class with self.settings(STATICFILES_STORAGE=new_storage): self.assertEqual(staticfiles_storage.__class__.__name__, new_class) def test_override_staticfiles_finders(self): """ Overriding the STATICFILES_FINDERS setting should be reflected in the return value of django.contrib.staticfiles.finders.get_finders. """ current = get_finders() self.assertGreater(len(list(current)), 1) finders = ['django.contrib.staticfiles.finders.FileSystemFinder'] with self.settings(STATICFILES_FINDERS=finders): self.assertEqual(len(list(get_finders())), len(finders)) def test_override_staticfiles_dirs(self): """ Overriding the STATICFILES_DIRS setting should be reflected in the locations attribute of the django.contrib.staticfiles.finders.FileSystemFinder instance. """ finder = get_finder('django.contrib.staticfiles.finders.FileSystemFinder') test_path = '/tmp/test' expected_location = ('', test_path) self.assertNotIn(expected_location, finder.locations) with self.settings(STATICFILES_DIRS=[test_path]): finder = get_finder('django.contrib.staticfiles.finders.FileSystemFinder') self.assertIn(expected_location, finder.locations) class TestBadSetUpTestData(TestCase): """ An exception in setUpTestData() shouldn't leak a transaction which would cascade across the rest of the test suite. """ class MyException(Exception): pass @classmethod def setUpClass(cls): try: super(TestBadSetUpTestData, cls).setUpClass() except cls.MyException: cls._in_atomic_block = connection.in_atomic_block @classmethod def tearDownClass(Cls): # override to avoid a second cls._rollback_atomics() which would fail. # Normal setUpClass() methods won't have exception handling so this # method wouldn't typically be run. pass @classmethod def setUpTestData(cls): # Simulate a broken setUpTestData() method. raise cls.MyException() def test_failure_in_setUpTestData_should_rollback_transaction(self): # setUpTestData() should call _rollback_atomics() so that the # transaction doesn't leak. 
self.assertFalse(self._in_atomic_block) class DisallowedDatabaseQueriesTests(SimpleTestCase): def test_disallowed_database_queries(self): expected_message = ( "Database queries aren't allowed in SimpleTestCase. " "Either use TestCase or TransactionTestCase to ensure proper test isolation or " "set DisallowedDatabaseQueriesTests.allow_database_queries to True to silence this failure." ) with self.assertRaisesMessage(AssertionError, expected_message): Car.objects.first() class AllowedDatabaseQueriesTests(SimpleTestCase): allow_database_queries = True def test_allowed_database_queries(self): Car.objects.first() @isolate_apps('test_utils', attr_name='class_apps') class IsolatedAppsTests(SimpleTestCase): def test_installed_apps(self): self.assertEqual([app_config.label for app_config in self.class_apps.get_app_configs()], ['test_utils']) def test_class_decoration(self): class ClassDecoration(models.Model): pass self.assertEqual(ClassDecoration._meta.apps, self.class_apps) @isolate_apps('test_utils', kwarg_name='method_apps') def test_method_decoration(self, method_apps): class MethodDecoration(models.Model): pass self.assertEqual(MethodDecoration._meta.apps, method_apps) def test_context_manager(self): with isolate_apps('test_utils') as context_apps: class ContextManager(models.Model): pass self.assertEqual(ContextManager._meta.apps, context_apps) @isolate_apps('test_utils', kwarg_name='method_apps') def test_nested(self, method_apps): class MethodDecoration(models.Model): pass with isolate_apps('test_utils') as context_apps: class ContextManager(models.Model): pass with isolate_apps('test_utils') as nested_context_apps: class NestedContextManager(models.Model): pass self.assertEqual(MethodDecoration._meta.apps, method_apps) self.assertEqual(ContextManager._meta.apps, context_apps) self.assertEqual(NestedContextManager._meta.apps, nested_context_apps)
bsd-3-clause
googleapis/python-compute
google/cloud/compute_v1/services/region_instances/transports/__init__.py
1
1024
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import RegionInstancesTransport
from .rest import RegionInstancesRestTransport

# Registry mapping each supported transport name to its implementing class.
# Only the REST transport exists for the RegionInstances service.
_transport_registry = OrderedDict(
    [("rest", RegionInstancesRestTransport)]
)  # type: Dict[str, Type[RegionInstancesTransport]]

__all__ = (
    "RegionInstancesTransport",
    "RegionInstancesRestTransport",
)
apache-2.0
sgerhart/ansible
contrib/inventory/ec2.py
21
72932
#!/usr/bin/env python ''' EC2 external inventory script ================================= Generates inventory that Ansible can understand by making API requests to AWS EC2 using the Boto library. NOTE: This script assumes Ansible is being executed where the environment variables needed for Boto have already been set: export AWS_ACCESS_KEY_ID='AK123' export AWS_SECRET_ACCESS_KEY='abc123' Optional region environment variable if region is 'auto' This script also assumes that there is an ec2.ini file alongside it. To specify a different path to ec2.ini, define the EC2_INI_PATH environment variable: export EC2_INI_PATH=/path/to/my_ec2.ini If you're using eucalyptus you need to set the above variables and you need to define: export EC2_URL=http://hostname_of_your_cc:port/services/Eucalyptus If you're using boto profiles (requires boto>=2.24.0) you can choose a profile using the --boto-profile command line argument (e.g. ec2.py --boto-profile prod) or using the AWS_PROFILE variable: AWS_PROFILE=prod ansible-playbook -i ec2.py myplaybook.yml For more details, see: http://docs.pythonboto.org/en/latest/boto_config_tut.html You can filter for specific EC2 instances by creating an environment variable named EC2_INSTANCE_FILTERS, which has the same format as the instance_filters entry documented in ec2.ini. 
For example, to find all hosts whose name begins with 'webserver', one might use: export EC2_INSTANCE_FILTERS='tag:Name=webserver*' When run against a specific host, this script returns the following variables: - ec2_ami_launch_index - ec2_architecture - ec2_association - ec2_attachTime - ec2_attachment - ec2_attachmentId - ec2_block_devices - ec2_client_token - ec2_deleteOnTermination - ec2_description - ec2_deviceIndex - ec2_dns_name - ec2_eventsSet - ec2_group_name - ec2_hypervisor - ec2_id - ec2_image_id - ec2_instanceState - ec2_instance_type - ec2_ipOwnerId - ec2_ip_address - ec2_item - ec2_kernel - ec2_key_name - ec2_launch_time - ec2_monitored - ec2_monitoring - ec2_networkInterfaceId - ec2_ownerId - ec2_persistent - ec2_placement - ec2_platform - ec2_previous_state - ec2_private_dns_name - ec2_private_ip_address - ec2_publicIp - ec2_public_dns_name - ec2_ramdisk - ec2_reason - ec2_region - ec2_requester_id - ec2_root_device_name - ec2_root_device_type - ec2_security_group_ids - ec2_security_group_names - ec2_shutdown_state - ec2_sourceDestCheck - ec2_spot_instance_request_id - ec2_state - ec2_state_code - ec2_state_reason - ec2_status - ec2_subnet_id - ec2_tenancy - ec2_virtualization_type - ec2_vpc_id These variables are pulled out of a boto.ec2.instance object. There is a lack of consistency with variable spellings (camelCase and underscores) since this just loops through all variables the object exposes. It is preferred to use the ones with underscores when multiple exist. In addition, if an instance has AWS tags associated with it, each tag is a new variable named: - ec2_tag_[Key] = [Value] Security groups are comma-separated in 'ec2_security_group_ids' and 'ec2_security_group_names'. When destination_format and destination_format_tags are specified the destination_format can be built from the instance tags and attributes. 
The behavior will first check the user defined tags, then proceed to check instance attributes, and finally if neither are found 'nil' will be used instead. 'my_instance': { 'region': 'us-east-1', # attribute 'availability_zone': 'us-east-1a', # attribute 'private_dns_name': '172.31.0.1', # attribute 'ec2_tag_deployment': 'blue', # tag 'ec2_tag_clusterid': 'ansible', # tag 'ec2_tag_Name': 'webserver', # tag ... } Inside of the ec2.ini file the following settings are specified: ... destination_format: {0}-{1}-{2}-{3} destination_format_tags: Name,clusterid,deployment,private_dns_name ... These settings would produce a destination_format as the following: 'webserver-ansible-blue-172.31.0.1' ''' # (c) 2012, Peter Sankauskas # # This file is part of Ansible, # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. 
###################################################################### import sys import os import argparse import re from time import time from copy import deepcopy import boto from boto import ec2 from boto import rds from boto import elasticache from boto import route53 from boto import sts import six from ansible.module_utils import ec2 as ec2_utils HAS_BOTO3 = False try: import boto3 # noqa HAS_BOTO3 = True except ImportError: pass from six.moves import configparser from collections import defaultdict import json DEFAULTS = { 'all_elasticache_clusters': 'False', 'all_elasticache_nodes': 'False', 'all_elasticache_replication_groups': 'False', 'all_instances': 'False', 'all_rds_instances': 'False', 'aws_access_key_id': '', 'aws_secret_access_key': '', 'aws_security_token': '', 'boto_profile': '', 'cache_max_age': '300', 'cache_path': '~/.ansible/tmp', 'destination_variable': 'public_dns_name', 'elasticache': 'True', 'eucalyptus': 'False', 'eucalyptus_host': '', 'expand_csv_tags': 'False', 'group_by_ami_id': 'True', 'group_by_availability_zone': 'True', 'group_by_aws_account': 'False', 'group_by_elasticache_cluster': 'True', 'group_by_elasticache_engine': 'True', 'group_by_elasticache_parameter_group': 'True', 'group_by_elasticache_replication_group': 'True', 'group_by_instance_id': 'True', 'group_by_instance_state': 'False', 'group_by_instance_type': 'True', 'group_by_key_pair': 'True', 'group_by_platform': 'True', 'group_by_rds_engine': 'True', 'group_by_rds_parameter_group': 'True', 'group_by_region': 'True', 'group_by_route53_names': 'True', 'group_by_security_group': 'True', 'group_by_tag_keys': 'True', 'group_by_tag_none': 'True', 'group_by_vpc_id': 'True', 'hostname_variable': '', 'iam_role': '', 'include_rds_clusters': 'False', 'nested_groups': 'False', 'pattern_exclude': '', 'pattern_include': '', 'rds': 'False', 'regions': 'all', 'regions_exclude': 'us-gov-west-1, cn-north-1', 'replace_dash_in_groups': 'True', 'route53': 'False', 
'route53_excluded_zones': '', 'route53_hostnames': '', 'stack_filters': 'False', 'vpc_destination_variable': 'ip_address' } class Ec2Inventory(object): def _empty_inventory(self): return {"_meta": {"hostvars": {}}} def __init__(self): ''' Main execution path ''' # Inventory grouped by instance IDs, tags, security groups, regions, # and availability zones self.inventory = self._empty_inventory() self.aws_account_id = None # Index of hostname (address) to instance ID self.index = {} # Boto profile to use (if any) self.boto_profile = None # AWS credentials. self.credentials = {} # Read settings and parse CLI arguments self.parse_cli_args() self.read_settings() # Make sure that profile_name is not passed at all if not set # as pre 2.24 boto will fall over otherwise if self.boto_profile: if not hasattr(boto.ec2.EC2Connection, 'profile_name'): self.fail_with_error("boto version must be >= 2.24 to use profile") # Cache if self.args.refresh_cache: self.do_api_calls_update_cache() elif not self.is_cache_valid(): self.do_api_calls_update_cache() # Data to print if self.args.host: data_to_print = self.get_host_info() elif self.args.list: # Display list of instances for inventory if self.inventory == self._empty_inventory(): data_to_print = self.get_inventory_from_cache() else: data_to_print = self.json_format_dict(self.inventory, True) print(data_to_print) def is_cache_valid(self): ''' Determines if the cache files have expired, or if it is still valid ''' if os.path.isfile(self.cache_path_cache): mod_time = os.path.getmtime(self.cache_path_cache) current_time = time() if (mod_time + self.cache_max_age) > current_time: if os.path.isfile(self.cache_path_index): return True return False def read_settings(self): ''' Reads the settings from the ec2.ini file ''' scriptbasename = __file__ scriptbasename = os.path.basename(scriptbasename) scriptbasename = scriptbasename.replace('.py', '') defaults = { 'ec2': { 'ini_fallback': os.path.join(os.path.dirname(__file__), 'ec2.ini'), 
'ini_path': os.path.join(os.path.dirname(__file__), '%s.ini' % scriptbasename) } } if six.PY3: config = configparser.ConfigParser(DEFAULTS) else: config = configparser.SafeConfigParser(DEFAULTS) ec2_ini_path = os.environ.get('EC2_INI_PATH', defaults['ec2']['ini_path']) ec2_ini_path = os.path.expanduser(os.path.expandvars(ec2_ini_path)) if not os.path.isfile(ec2_ini_path): ec2_ini_path = os.path.expanduser(defaults['ec2']['ini_fallback']) if os.path.isfile(ec2_ini_path): config.read(ec2_ini_path) # Add empty sections if they don't exist try: config.add_section('ec2') except configparser.DuplicateSectionError: pass try: config.add_section('credentials') except configparser.DuplicateSectionError: pass # is eucalyptus? self.eucalyptus = config.getboolean('ec2', 'eucalyptus') self.eucalyptus_host = config.get('ec2', 'eucalyptus_host') # Regions self.regions = [] config_regions = config.get('ec2', 'regions') if (config_regions == 'all'): if self.eucalyptus_host: self.regions.append(boto.connect_euca(host=self.eucalyptus_host).region.name, **self.credentials) else: config_regions_exclude = config.get('ec2', 'regions_exclude') for region_info in ec2.regions(): if region_info.name not in config_regions_exclude: self.regions.append(region_info.name) else: self.regions = config_regions.split(",") if 'auto' in self.regions: env_region = os.environ.get('AWS_REGION') if env_region is None: env_region = os.environ.get('AWS_DEFAULT_REGION') self.regions = [env_region] # Destination addresses self.destination_variable = config.get('ec2', 'destination_variable') self.vpc_destination_variable = config.get('ec2', 'vpc_destination_variable') self.hostname_variable = config.get('ec2', 'hostname_variable') if config.has_option('ec2', 'destination_format') and \ config.has_option('ec2', 'destination_format_tags'): self.destination_format = config.get('ec2', 'destination_format') self.destination_format_tags = config.get('ec2', 'destination_format_tags').split(',') else: 
self.destination_format = None self.destination_format_tags = None # Route53 self.route53_enabled = config.getboolean('ec2', 'route53') self.route53_hostnames = config.get('ec2', 'route53_hostnames') self.route53_excluded_zones = [] self.route53_excluded_zones = [a for a in config.get('ec2', 'route53_excluded_zones').split(',') if a] # Include RDS instances? self.rds_enabled = config.getboolean('ec2', 'rds') # Include RDS cluster instances? self.include_rds_clusters = config.getboolean('ec2', 'include_rds_clusters') # Include ElastiCache instances? self.elasticache_enabled = config.getboolean('ec2', 'elasticache') # Return all EC2 instances? self.all_instances = config.getboolean('ec2', 'all_instances') # Instance states to be gathered in inventory. Default is 'running'. # Setting 'all_instances' to 'yes' overrides this option. ec2_valid_instance_states = [ 'pending', 'running', 'shutting-down', 'terminated', 'stopping', 'stopped' ] self.ec2_instance_states = [] if self.all_instances: self.ec2_instance_states = ec2_valid_instance_states elif config.has_option('ec2', 'instance_states'): for instance_state in config.get('ec2', 'instance_states').split(','): instance_state = instance_state.strip() if instance_state not in ec2_valid_instance_states: continue self.ec2_instance_states.append(instance_state) else: self.ec2_instance_states = ['running'] # Return all RDS instances? (if RDS is enabled) self.all_rds_instances = config.getboolean('ec2', 'all_rds_instances') # Return all ElastiCache replication groups? (if ElastiCache is enabled) self.all_elasticache_replication_groups = config.getboolean('ec2', 'all_elasticache_replication_groups') # Return all ElastiCache clusters? (if ElastiCache is enabled) self.all_elasticache_clusters = config.getboolean('ec2', 'all_elasticache_clusters') # Return all ElastiCache nodes? 
(if ElastiCache is enabled) self.all_elasticache_nodes = config.getboolean('ec2', 'all_elasticache_nodes') # boto configuration profile (prefer CLI argument then environment variables then config file) self.boto_profile = self.args.boto_profile or \ os.environ.get('AWS_PROFILE') or \ config.get('ec2', 'boto_profile') # AWS credentials (prefer environment variables) if not (self.boto_profile or os.environ.get('AWS_ACCESS_KEY_ID') or os.environ.get('AWS_PROFILE')): aws_access_key_id = config.get('credentials', 'aws_access_key_id') aws_secret_access_key = config.get('credentials', 'aws_secret_access_key') aws_security_token = config.get('credentials', 'aws_security_token') if aws_access_key_id: self.credentials = { 'aws_access_key_id': aws_access_key_id, 'aws_secret_access_key': aws_secret_access_key } if aws_security_token: self.credentials['security_token'] = aws_security_token # Cache related cache_dir = os.path.expanduser(config.get('ec2', 'cache_path')) if self.boto_profile: cache_dir = os.path.join(cache_dir, 'profile_' + self.boto_profile) if not os.path.exists(cache_dir): os.makedirs(cache_dir) cache_name = 'ansible-ec2' cache_id = self.boto_profile or os.environ.get('AWS_ACCESS_KEY_ID', self.credentials.get('aws_access_key_id')) if cache_id: cache_name = '%s-%s' % (cache_name, cache_id) cache_name += '-' + str(abs(hash(__file__)))[1:7] self.cache_path_cache = os.path.join(cache_dir, "%s.cache" % cache_name) self.cache_path_index = os.path.join(cache_dir, "%s.index" % cache_name) self.cache_max_age = config.getint('ec2', 'cache_max_age') self.expand_csv_tags = config.getboolean('ec2', 'expand_csv_tags') # Configure nested groups instead of flat namespace. self.nested_groups = config.getboolean('ec2', 'nested_groups') # Replace dash or not in group names self.replace_dash_in_groups = config.getboolean('ec2', 'replace_dash_in_groups') # IAM role to assume for connection self.iam_role = config.get('ec2', 'iam_role') # Configure which groups should be created. 
        # Every 'group_by_*' key from DEFAULTS becomes a boolean attribute on
        # self, read from the [ec2] section of the config.
        group_by_options = [a for a in DEFAULTS if a.startswith('group_by')]
        for option in group_by_options:
            setattr(self, option, config.getboolean('ec2', option))

        # Do we need to just include hosts that match a pattern?
        self.pattern_include = config.get('ec2', 'pattern_include')
        if self.pattern_include:
            self.pattern_include = re.compile(self.pattern_include)

        # Do we need to exclude hosts that match a pattern?
        self.pattern_exclude = config.get('ec2', 'pattern_exclude')
        if self.pattern_exclude:
            self.pattern_exclude = re.compile(self.pattern_exclude)

        # Do we want to stack multiple filters?
        self.stack_filters = config.getboolean('ec2', 'stack_filters')

        # Instance filters (see boto and EC2 API docs). Ignore invalid filters.
        # Filters come from the EC2_INSTANCE_FILTERS env var (taking priority)
        # or the config; ','-separated sets, each set '&'-joined key=value pairs.
        self.ec2_instance_filters = []
        if config.has_option('ec2', 'instance_filters') or 'EC2_INSTANCE_FILTERS' in os.environ:
            filters = os.getenv('EC2_INSTANCE_FILTERS',
                                config.get('ec2', 'instance_filters') if config.has_option('ec2', 'instance_filters') else '')

            # '&' (AND) filters only make sense without stack_filters (OR semantics).
            if self.stack_filters and '&' in filters:
                self.fail_with_error("AND filters along with stack_filter enabled is not supported.\n")

            filter_sets = [f for f in filters.split(',') if f]

            for filter_set in filter_sets:
                filters = {}
                filter_set = filter_set.strip()
                for instance_filter in filter_set.split("&"):
                    instance_filter = instance_filter.strip()
                    # skip malformed entries (empty or missing '=')
                    if not instance_filter or '=' not in instance_filter:
                        continue
                    filter_key, filter_value = [x.strip() for x in instance_filter.split('=', 1)]
                    if not filter_key:
                        continue
                    filters[filter_key] = filter_value
                self.ec2_instance_filters.append(filters.copy())

    def parse_cli_args(self):
        ''' Command line argument processing '''

        parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on EC2')
        parser.add_argument('--list', action='store_true', default=True,
                            help='List instances (default: True)')
        parser.add_argument('--host', action='store',
                            help='Get all the variables about a specific instance')
        parser.add_argument('--refresh-cache',
                            action='store_true', default=False,
                            help='Force refresh of cache by making API requests to EC2 (default: False - use cache files)')
        parser.add_argument('--profile', '--boto-profile', action='store', dest='boto_profile',
                            help='Use boto profile for connections to EC2')
        self.args = parser.parse_args()

    def do_api_calls_update_cache(self):
        ''' Do API calls to each region, and save data in cache files '''

        # Route53 records are global, so fetch them once before the per-region loop.
        if self.route53_enabled:
            self.get_route53_records()

        for region in self.regions:
            self.get_instances_by_region(region)
            if self.rds_enabled:
                self.get_rds_instances_by_region(region)
            if self.elasticache_enabled:
                self.get_elasticache_clusters_by_region(region)
                self.get_elasticache_replication_groups_by_region(region)
            if self.include_rds_clusters:
                self.include_rds_clusters_by_region(region)

        self.write_to_cache(self.inventory, self.cache_path_cache)
        self.write_to_cache(self.index, self.cache_path_index)

    def connect(self, region):
        ''' create connection to api server'''
        if self.eucalyptus:
            # Eucalyptus endpoints speak an older EC2 API revision.
            conn = boto.connect_euca(host=self.eucalyptus_host, **self.credentials)
            conn.APIVersion = '2010-08-31'
        else:
            conn = self.connect_to_aws(ec2, region)
        return conn

    def boto_fix_security_token_in_profile(self, connect_args):
        ''' monkey patch for boto issue boto/boto#2100: copy aws_security_token
        out of the boto "profile <name>" config section into connect_args '''
        profile = 'profile ' + self.boto_profile
        if boto.config.has_option(profile, 'aws_security_token'):
            connect_args['security_token'] = boto.config.get(profile, 'aws_security_token')
        return connect_args

    def connect_to_aws(self, module, region):
        ''' Connect the given boto service module to a region, applying the
        configured profile, credentials and (optionally) an assumed IAM role. '''
        connect_args = deepcopy(self.credentials)

        # only pass the profile name if it's set (as it is not supported by older boto versions)
        if self.boto_profile:
            connect_args['profile_name'] = self.boto_profile
            self.boto_fix_security_token_in_profile(connect_args)

        if self.iam_role:
            # Swap the base credentials for temporary ones from STS AssumeRole.
            sts_conn = sts.connect_to_region(region, **connect_args)
            role = sts_conn.assume_role(self.iam_role, 'ansible_dynamic_inventory')
            connect_args['aws_access_key_id'] = role.credentials.access_key
            connect_args['aws_secret_access_key'] = role.credentials.secret_key
            connect_args['security_token'] = role.credentials.session_token

        conn = module.connect_to_region(region, **connect_args)
        # connect_to_region will fail "silently" by returning None if the region name is wrong or not supported
        if conn is None:
            self.fail_with_error("region name: %s likely not supported, or AWS is down. connection to region failed." % region)
        return conn

    def get_instances_by_region(self, region):
        ''' Makes an AWS EC2 API call to the list of instances in a particular
        region '''

        try:
            conn = self.connect(region)
            reservations = []
            if self.ec2_instance_filters:
                if self.stack_filters:
                    # stack_filters: merge all filter sets into one AND query
                    filters_dict = {}
                    for filters in self.ec2_instance_filters:
                        filters_dict.update(filters)
                    reservations.extend(conn.get_all_instances(filters=filters_dict))
                else:
                    # otherwise: one query per filter set (OR semantics)
                    for filters in self.ec2_instance_filters:
                        reservations.extend(conn.get_all_instances(filters=filters))
            else:
                reservations = conn.get_all_instances()

            # Pull the tags back in a second step
            # AWS are on record as saying that the tags fetched in the first `get_all_instances` request are not
            # reliable and may be missing, and the only way to guarantee they are there is by calling `get_all_tags`
            instance_ids = []
            for reservation in reservations:
                instance_ids.extend([instance.id for instance in reservation.instances])

            # Batch the tag lookup: at most 199 resource-ids per request
            # (stays under the EC2 filter-value limit).
            max_filter_value = 199
            tags = []
            for i in range(0, len(instance_ids), max_filter_value):
                tags.extend(conn.get_all_tags(filters={'resource-type': 'instance', 'resource-id': instance_ids[i:i + max_filter_value]}))

            tags_by_instance_id = defaultdict(dict)
            for tag in tags:
                tags_by_instance_id[tag.res_id][tag.name] = tag.value

            # Remember the account id from the first reservation seen.
            if (not self.aws_account_id) and reservations:
                self.aws_account_id = reservations[0].owner_id

            for reservation in reservations:
                for instance in reservation.instances:
                    instance.tags = tags_by_instance_id[instance.id]
                    self.add_instance(instance, region)

        except boto.exception.BotoServerError as e:
            if e.error_code == 'AuthFailure':
                error = self.get_auth_error_message()
            else:
                backend = 'Eucalyptus' if self.eucalyptus else 'AWS'
                # NOTE(review): e.message is Python-2-only exception API — verify on Python 3.
                error = "Error connecting to %s backend.\n%s" % (backend, e.message)
            self.fail_with_error(error, 'getting EC2 instances')

    def tags_match_filters(self, tags):
        ''' return True if given tags match configured filters '''
        if not self.ec2_instance_filters:
            return True

        for filters in self.ec2_instance_filters:
            for filter_name, filter_value in filters.items():
                # only 'tag:<name>' filters are evaluated here
                if filter_name[:4] != 'tag:':
                    continue
                filter_name = filter_name[4:]
                if filter_name not in tags:
                    # AND mode: a missing tag fails the whole match;
                    # OR mode: just try the next filter.
                    if self.stack_filters:
                        return False
                    continue

                if isinstance(filter_value, list):
                    if self.stack_filters and tags[filter_name] not in filter_value:
                        return False
                    if not self.stack_filters and tags[filter_name] in filter_value:
                        return True

                if isinstance(filter_value, six.string_types):
                    if self.stack_filters and tags[filter_name] != filter_value:
                        return False
                    if not self.stack_filters and tags[filter_name] == filter_value:
                        return True

        # AND mode reaching here means nothing failed -> True;
        # OR mode reaching here means nothing matched -> False.
        return self.stack_filters

    def get_rds_instances_by_region(self, region):
        ''' Makes an AWS API call to the list of RDS instances in a particular
        region '''

        if not HAS_BOTO3:
            self.fail_with_error("Working with RDS instances requires boto3 - please install boto3 and try again",
                                 "getting RDS instances")

        # boto3 client is used only for ARNs and tags; the instance list itself
        # still comes from legacy boto below.
        client = ec2_utils.boto3_inventory_conn('client', 'rds', region, **self.credentials)
        db_instances = client.describe_db_instances()

        try:
            conn = self.connect_to_aws(rds, region)
            if conn:
                marker = None
                while True:
                    instances = conn.get_all_dbinstances(marker=marker)
                    marker = instances.marker
                    # NOTE(review): pairing boto results with boto3 results by
                    # index assumes both APIs return instances in the same
                    # order — confirm.
                    for index, instance in enumerate(instances):
                        # Add tags to instances.
                        instance.arn = db_instances['DBInstances'][index]['DBInstanceArn']
                        tags = client.list_tags_for_resource(ResourceName=instance.arn)['TagList']
                        instance.tags = {}
                        for tag in tags:
                            instance.tags[tag['Key']] = tag['Value']
                        if self.tags_match_filters(instance.tags):
                            self.add_rds_instance(instance, region)
                    # no marker means we've paged through all instances
                    if not marker:
                        break
        except boto.exception.BotoServerError as e:
            error = e.reason

            if e.error_code == 'AuthFailure':
                error = self.get_auth_error_message()
            elif e.error_code == "OptInRequired":
                error = "RDS hasn't been enabled for this account yet. " \
                    "You must either log in to the RDS service through the AWS console to enable it, " \
                    "or set 'rds = False' in ec2.ini"
            elif not e.reason == "Forbidden":
                # NOTE(review): e.message is Python-2-only exception API — verify on Python 3.
                error = "Looks like AWS RDS is down:\n%s" % e.message
            self.fail_with_error(error, 'getting RDS instances')

    def include_rds_clusters_by_region(self, region):
        ''' Fetch RDS (Aurora) clusters for a region via boto3, filter them by
        the configured tag filters, and store them under
        inventory['db_clusters']. '''
        if not HAS_BOTO3:
            self.fail_with_error("Working with RDS clusters requires boto3 - please install boto3 and try again",
                                 "getting RDS clusters")

        client = ec2_utils.boto3_inventory_conn('client', 'rds', region, **self.credentials)

        # Page through all clusters; the empty-string marker only triggers the
        # first request, then we follow the returned Marker until it is absent.
        marker, clusters = '', []
        while marker is not None:
            resp = client.describe_db_clusters(Marker=marker)
            clusters.extend(resp["DBClusters"])
            marker = resp.get('Marker', None)

        # Account id (field 4 of the user ARN) is needed to build cluster ARNs.
        account_id = boto.connect_iam().get_user().arn.split(':')[4]
        c_dict = {}
        for c in clusters:
            # remove these datetime objects as there is no serialisation to json
            # currently in place and we don't need the data yet
            if 'EarliestRestorableTime' in c:
                del c['EarliestRestorableTime']
            if 'LatestRestorableTime' in c:
                del c['LatestRestorableTime']

            # With no filters configured every cluster matches.
            if not self.ec2_instance_filters:
                matches_filter = True
            else:
                matches_filter = False

            try:
                # arn:aws:rds:<region>:<account number>:<resourcetype>:<name>
                tags = client.list_tags_for_resource(
                    ResourceName='arn:aws:rds:' + region + ':' + account_id + ':cluster:' + c['DBClusterIdentifier'])
                c['Tags'] = tags['TagList']

                if self.ec2_instance_filters:
                    for filters in self.ec2_instance_filters:
                        for filter_key, filter_values in filters.items():
                            # get AWS tag key e.g. tag:env will be 'env'
                            tag_name = filter_key.split(":", 1)[1]
                            # Filter values is a list (if you put multiple values for the same tag name)
                            matches_filter = any(d['Key'] == tag_name and d['Value'] in filter_values for d in c['Tags'])

                            if matches_filter:
                                # it matches a filter, so stop looking for further matches
                                break

                        if matches_filter:
                            break

            except Exception as e:
                # NOTE(review): e.message is Python-2-only and this broad
                # except silently swallows any other failure here — verify.
                if e.message.find('DBInstanceNotFound') >= 0:
                    # AWS RDS bug (2016-01-06) means deletion does not fully complete and leave an 'empty' cluster.
                    # Ignore errors when trying to find tags for these
                    pass

            # ignore empty clusters caused by AWS bug
            if len(c['DBClusterMembers']) == 0:
                continue
            elif matches_filter:
                c_dict[c['DBClusterIdentifier']] = c

        self.inventory['db_clusters'] = c_dict

    def get_elasticache_clusters_by_region(self, region):
        ''' Makes an AWS API call to the list of ElastiCache clusters (with
        nodes' info) in a particular region.'''

        # ElastiCache boto module doesn't provide a get_all_instances method,
        # that's why we need to call describe directly (it would be called by
        # the shorthand method anyway...)
        clusters = []
        try:
            conn = self.connect_to_aws(elasticache, region)
            if conn:
                # show_cache_node_info = True
                # because we also want nodes' information
                # _marker = 1 is a sentinel so the loop body runs at least once;
                # it is reset to None before the first describe call.
                _marker = 1
                while _marker:
                    if _marker == 1:
                        _marker = None
                    response = conn.describe_cache_clusters(None, None, _marker, True)
                    _marker = response['DescribeCacheClustersResponse']['DescribeCacheClustersResult']['Marker']
                    try:
                        # Boto also doesn't provide wrapper classes to CacheClusters or
                        # CacheNodes. Because of that we can't make use of the get_list
                        # method in the AWSQueryConnection. Let's do the work manually
                        clusters = clusters + response['DescribeCacheClustersResponse']['DescribeCacheClustersResult']['CacheClusters']
                    except KeyError as e:
                        error = "ElastiCache query to AWS failed (unexpected format)."
                        self.fail_with_error(error, 'getting ElastiCache clusters')

        except boto.exception.BotoServerError as e:
            error = e.reason

            if e.error_code == 'AuthFailure':
                error = self.get_auth_error_message()
            elif e.error_code == "OptInRequired":
                error = "ElastiCache hasn't been enabled for this account yet. " \
                    "You must either log in to the ElastiCache service through the AWS console to enable it, " \
                    "or set 'elasticache = False' in ec2.ini"
            elif not e.reason == "Forbidden":
                # NOTE(review): e.message is Python-2-only exception API — verify on Python 3.
                error = "Looks like AWS ElastiCache is down:\n%s" % e.message
            self.fail_with_error(error, 'getting ElastiCache clusters')

        for cluster in clusters:
            self.add_elasticache_cluster(cluster, region)

    def get_elasticache_replication_groups_by_region(self, region):
        ''' Makes an AWS API call to the list of ElastiCache replication groups
        in a particular region.'''

        # ElastiCache boto module doesn't provide a get_all_instances method,
        # that's why we need to call describe directly (it would be called by
        # the shorthand method anyway...)
        try:
            conn = self.connect_to_aws(elasticache, region)
            if conn:
                response = conn.describe_replication_groups()

        except boto.exception.BotoServerError as e:
            error = e.reason

            if e.error_code == 'AuthFailure':
                error = self.get_auth_error_message()
            if not e.reason == "Forbidden":
                # NOTE(review): e.message is Python-2-only exception API — verify on Python 3.
                error = "Looks like AWS ElastiCache [Replication Groups] is down:\n%s" % e.message
            self.fail_with_error(error, 'getting ElastiCache clusters')

        try:
            # Boto also doesn't provide wrapper classes to ReplicationGroups
            # Because of that we can't make use of the get_list method in the
            # AWSQueryConnection. Let's do the work manually
            replication_groups = response['DescribeReplicationGroupsResponse']['DescribeReplicationGroupsResult']['ReplicationGroups']

        except KeyError as e:
            error = "ElastiCache [Replication Groups] query to AWS failed (unexpected format)."
            self.fail_with_error(error, 'getting ElastiCache clusters')

        for replication_group in replication_groups:
            self.add_elasticache_replication_group(replication_group, region)

    def get_auth_error_message(self):
        ''' create an informative error message if there is an issue authenticating'''
        errors = ["Authentication error retrieving ec2 inventory."]
        if None in [os.environ.get('AWS_ACCESS_KEY_ID'), os.environ.get('AWS_SECRET_ACCESS_KEY')]:
            errors.append(' - No AWS_ACCESS_KEY_ID or AWS_SECRET_ACCESS_KEY environment vars found')
        else:
            errors.append(' - AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment vars found but may not be correct')

        # Point the user at whichever boto config files actually exist.
        boto_paths = ['/etc/boto.cfg', '~/.boto', '~/.aws/credentials']
        boto_config_found = [p for p in boto_paths if os.path.isfile(os.path.expanduser(p))]
        if len(boto_config_found) > 0:
            errors.append(" - Boto configs found at '%s', but the credentials contained may not be correct" % ', '.join(boto_config_found))
        else:
            errors.append(" - No Boto config found at any expected location '%s'" % ', '.join(boto_paths))

        return '\n'.join(errors)

    def fail_with_error(self, err_msg, err_operation=None):
        '''log an error to std err for ansible-playbook to consume and exit'''
        if err_operation:
            err_msg = 'ERROR: "{err_msg}", while: {err_operation}'.format(
                err_msg=err_msg, err_operation=err_operation)
        sys.stderr.write(err_msg)
        # exit non-zero so the caller (ansible) treats this as a failure
        sys.exit(1)

    def get_instance(self, region, instance_id):
        ''' Return the first (only) EC2 instance matching instance_id in the
        given region, or None if it does not exist. '''
        conn = self.connect(region)

        reservations = conn.get_all_instances([instance_id])
        for reservation in reservations:
            for instance in reservation.instances:
                return instance

    def add_instance(self, instance, region):
        ''' Adds an instance to the inventory and index, as long as it is
        addressable '''

        # Only return instances with desired instance states
        if instance.state not in self.ec2_instance_states:
            return

        # Select the best destination address
        # When destination_format and destination_format_tags are specified
        # the following code will attempt to find the instance tags first,
        # then the
        # instance attributes next, and finally if neither are found
        # assign nil for the desired destination format attribute.
        if self.destination_format and self.destination_format_tags:
            dest_vars = []
            inst_tags = getattr(instance, 'tags')
            for tag in self.destination_format_tags:
                if tag in inst_tags:
                    dest_vars.append(inst_tags[tag])
                elif hasattr(instance, tag):
                    dest_vars.append(getattr(instance, tag))
                else:
                    dest_vars.append('nil')

            dest = self.destination_format.format(*dest_vars)
        elif instance.subnet_id:
            # VPC instance: use the VPC destination variable, falling back to a tag
            dest = getattr(instance, self.vpc_destination_variable, None)
            if dest is None:
                dest = getattr(instance, 'tags').get(self.vpc_destination_variable, None)
        else:
            # EC2-classic instance: use the plain destination variable
            dest = getattr(instance, self.destination_variable, None)
            if dest is None:
                dest = getattr(instance, 'tags').get(self.destination_variable, None)

        if not dest:
            # Skip instances we cannot address (e.g. private VPC subnet)
            return

        # Set the inventory name
        hostname = None
        if self.hostname_variable:
            if self.hostname_variable.startswith('tag_'):
                hostname = instance.tags.get(self.hostname_variable[4:], None)
            else:
                hostname = getattr(instance, self.hostname_variable)

        # set the hostname from route53
        if self.route53_enabled and self.route53_hostnames:
            route53_names = self.get_instance_route53_names(instance)
            for name in route53_names:
                if name.endswith(self.route53_hostnames):
                    hostname = name

        # If we can't get a nice hostname, use the destination address
        if not hostname:
            hostname = dest
        # to_safe strips hostname characters like dots, so don't strip route53 hostnames
        elif self.route53_enabled and self.route53_hostnames and hostname.endswith(self.route53_hostnames):
            hostname = hostname.lower()
        else:
            hostname = self.to_safe(hostname).lower()

        # if we only want to include hosts that match a pattern, skip those that don't
        if self.pattern_include and not self.pattern_include.match(hostname):
            return

        # if we need to exclude hosts that match a pattern, skip those
        if self.pattern_exclude and self.pattern_exclude.match(hostname):
            return

        # Add to index
        self.index[hostname] = [region, instance.id]

        # Inventory: Group by instance ID (always a group of 1)
        if self.group_by_instance_id:
            self.inventory[instance.id] = [hostname]
            if self.nested_groups:
                self.push_group(self.inventory, 'instances', instance.id)

        # Inventory: Group by region
        if self.group_by_region:
            self.push(self.inventory, region, hostname)
            if self.nested_groups:
                self.push_group(self.inventory, 'regions', region)

        # Inventory: Group by availability zone
        if self.group_by_availability_zone:
            self.push(self.inventory, instance.placement, hostname)
            if self.nested_groups:
                if self.group_by_region:
                    self.push_group(self.inventory, region, instance.placement)
                self.push_group(self.inventory, 'zones', instance.placement)

        # Inventory: Group by Amazon Machine Image (AMI) ID
        if self.group_by_ami_id:
            ami_id = self.to_safe(instance.image_id)
            self.push(self.inventory, ami_id, hostname)
            if self.nested_groups:
                self.push_group(self.inventory, 'images', ami_id)

        # Inventory: Group by instance type
        if self.group_by_instance_type:
            type_name = self.to_safe('type_' + instance.instance_type)
            self.push(self.inventory, type_name, hostname)
            if self.nested_groups:
                self.push_group(self.inventory, 'types', type_name)

        # Inventory: Group by instance state
        if self.group_by_instance_state:
            state_name = self.to_safe('instance_state_' + instance.state)
            self.push(self.inventory, state_name, hostname)
            if self.nested_groups:
                self.push_group(self.inventory, 'instance_states', state_name)

        # Inventory: Group by platform
        if self.group_by_platform:
            if instance.platform:
                platform = self.to_safe('platform_' + instance.platform)
            else:
                platform = self.to_safe('platform_undefined')
            self.push(self.inventory, platform, hostname)
            if self.nested_groups:
                self.push_group(self.inventory, 'platforms', platform)

        # Inventory: Group by key pair
        if self.group_by_key_pair and instance.key_name:
            key_name = self.to_safe('key_' + instance.key_name)
            self.push(self.inventory, key_name, hostname)
            if self.nested_groups:
                self.push_group(self.inventory, 'keys', key_name)

        # Inventory: Group by VPC
        if self.group_by_vpc_id and instance.vpc_id:
            vpc_id_name = self.to_safe('vpc_id_' + instance.vpc_id)
            self.push(self.inventory, vpc_id_name, hostname)
            if self.nested_groups:
                self.push_group(self.inventory, 'vpcs', vpc_id_name)

        # Inventory: Group by security group
        if self.group_by_security_group:
            try:
                for group in instance.groups:
                    key = self.to_safe("security_group_" + group.name)
                    self.push(self.inventory, key, hostname)
                    if self.nested_groups:
                        self.push_group(self.inventory, 'security_groups', key)
            except AttributeError:
                # old boto versions lack instance.groups
                self.fail_with_error('\n'.join(['Package boto seems a bit older.',
                                                'Please upgrade boto >= 2.3.0.']))

        # Inventory: Group by AWS account ID
        if self.group_by_aws_account:
            self.push(self.inventory, self.aws_account_id, hostname)
            if self.nested_groups:
                self.push_group(self.inventory, 'accounts', self.aws_account_id)

        # Inventory: Group by tag keys
        if self.group_by_tag_keys:
            for k, v in instance.tags.items():
                # optionally expand comma-separated tag values into several groups
                if self.expand_csv_tags and v and ',' in v:
                    values = map(lambda x: x.strip(), v.split(','))
                else:
                    values = [v]

                for v in values:
                    if v:
                        key = self.to_safe("tag_" + k + "=" + v)
                    else:
                        key = self.to_safe("tag_" + k)
                    self.push(self.inventory, key, hostname)
                    if self.nested_groups:
                        self.push_group(self.inventory, 'tags', self.to_safe("tag_" + k))
                        if v:
                            self.push_group(self.inventory, self.to_safe("tag_" + k), key)

        # Inventory: Group by Route53 domain names if enabled
        if self.route53_enabled and self.group_by_route53_names:
            route53_names = self.get_instance_route53_names(instance)
            for name in route53_names:
                self.push(self.inventory, name, hostname)
                if self.nested_groups:
                    self.push_group(self.inventory, 'route53', name)

        # Global Tag: instances without tags
        if self.group_by_tag_none and len(instance.tags) == 0:
            self.push(self.inventory, 'tag_none', hostname)
            if self.nested_groups:
                self.push_group(self.inventory, 'tags', 'tag_none')

        # Global Tag: tag all EC2 instances
        self.push(self.inventory, 'ec2', hostname)

        self.inventory["_meta"]["hostvars"][hostname] = self.get_host_info_dict_from_instance(instance)
        self.inventory["_meta"]["hostvars"][hostname]['ansible_host'] = dest

    def add_rds_instance(self, instance, region):
        ''' Adds an RDS instance to the inventory and index, as long as it is
        addressable '''

        # Only want available instances unless all_rds_instances is True
        if not self.all_rds_instances and instance.status != 'available':
            return

        # Select the best destination address
        dest = instance.endpoint[0]

        if not dest:
            # Skip instances we cannot address (e.g. private VPC subnet)
            return

        # Set the inventory name
        hostname = None
        if self.hostname_variable:
            if self.hostname_variable.startswith('tag_'):
                hostname = instance.tags.get(self.hostname_variable[4:], None)
            else:
                hostname = getattr(instance, self.hostname_variable)

        # If we can't get a nice hostname, use the destination address
        if not hostname:
            hostname = dest

        hostname = self.to_safe(hostname).lower()

        # Add to index
        self.index[hostname] = [region, instance.id]

        # Inventory: Group by instance ID (always a group of 1)
        if self.group_by_instance_id:
            self.inventory[instance.id] = [hostname]
            if self.nested_groups:
                self.push_group(self.inventory, 'instances', instance.id)

        # Inventory: Group by region
        if self.group_by_region:
            self.push(self.inventory, region, hostname)
            if self.nested_groups:
                self.push_group(self.inventory, 'regions', region)

        # Inventory: Group by availability zone
        if self.group_by_availability_zone:
            self.push(self.inventory, instance.availability_zone, hostname)
            if self.nested_groups:
                if self.group_by_region:
                    self.push_group(self.inventory, region, instance.availability_zone)
                self.push_group(self.inventory, 'zones', instance.availability_zone)

        # Inventory: Group by instance type
        if self.group_by_instance_type:
            type_name = self.to_safe('type_' + instance.instance_class)
            self.push(self.inventory, type_name, hostname)
            if self.nested_groups:
                self.push_group(self.inventory, 'types', type_name)

        # Inventory: Group by VPC
        if self.group_by_vpc_id and instance.subnet_group and instance.subnet_group.vpc_id:
            vpc_id_name = self.to_safe('vpc_id_' + instance.subnet_group.vpc_id)
            self.push(self.inventory, vpc_id_name, hostname)
            if self.nested_groups:
                self.push_group(self.inventory, 'vpcs', vpc_id_name)

        # Inventory: Group by security group
        if self.group_by_security_group:
            try:
                if instance.security_group:
                    key = self.to_safe("security_group_" + instance.security_group.name)
                    self.push(self.inventory, key, hostname)
                    if self.nested_groups:
                        self.push_group(self.inventory, 'security_groups', key)

            except AttributeError:
                # old boto versions lack instance.security_group
                self.fail_with_error('\n'.join(['Package boto seems a bit older.',
                                                'Please upgrade boto >= 2.3.0.']))

        # Inventory: Group by tag keys
        if self.group_by_tag_keys:
            for k, v in instance.tags.items():
                if self.expand_csv_tags and v and ',' in v:
                    values = map(lambda x: x.strip(), v.split(','))
                else:
                    values = [v]

                for v in values:
                    if v:
                        key = self.to_safe("tag_" + k + "=" + v)
                    else:
                        key = self.to_safe("tag_" + k)
                    self.push(self.inventory, key, hostname)
                    if self.nested_groups:
                        self.push_group(self.inventory, 'tags', self.to_safe("tag_" + k))
                        if v:
                            self.push_group(self.inventory, self.to_safe("tag_" + k), key)

        # Inventory: Group by engine
        if self.group_by_rds_engine:
            self.push(self.inventory, self.to_safe("rds_" + instance.engine), hostname)
            if self.nested_groups:
                self.push_group(self.inventory, 'rds_engines', self.to_safe("rds_" + instance.engine))

        # Inventory: Group by parameter group
        if self.group_by_rds_parameter_group:
            self.push(self.inventory, self.to_safe("rds_parameter_group_" + instance.parameter_group.name), hostname)
            if self.nested_groups:
                self.push_group(self.inventory, 'rds_parameter_groups', self.to_safe("rds_parameter_group_" + instance.parameter_group.name))

        # Global Tag: instances without tags
        if self.group_by_tag_none and len(instance.tags) == 0:
            self.push(self.inventory, 'tag_none', hostname)
            if self.nested_groups:
                self.push_group(self.inventory, 'tags', 'tag_none')

        # Global Tag: all RDS instances
        self.push(self.inventory, 'rds', hostname)

        self.inventory["_meta"]["hostvars"][hostname] = self.get_host_info_dict_from_instance(instance)
        self.inventory["_meta"]["hostvars"][hostname]['ansible_host'] = dest

    def add_elasticache_cluster(self, cluster, region):
        ''' Adds an ElastiCache cluster to the inventory and index, as long as
        it's nodes are addressable '''

        # Only want available clusters unless all_elasticache_clusters is True
        if not self.all_elasticache_clusters and cluster['CacheClusterStatus'] != 'available':
            return

        # Select the best destination address
        if 'ConfigurationEndpoint' in cluster and cluster['ConfigurationEndpoint']:
            # Memcached cluster
            dest = cluster['ConfigurationEndpoint']['Address']
            is_redis = False
        else:
            # Redis sigle node cluster
            # Because all Redis clusters are single nodes, we'll merge the
            # info from the cluster with info about the node
            dest = cluster['CacheNodes'][0]['Endpoint']['Address']
            is_redis = True

        if not dest:
            # Skip clusters we cannot address (e.g. private VPC subnet)
            return

        # Add to index
        self.index[dest] = [region, cluster['CacheClusterId']]

        # Inventory: Group by instance ID (always a group of 1)
        if self.group_by_instance_id:
            self.inventory[cluster['CacheClusterId']] = [dest]
            if self.nested_groups:
                self.push_group(self.inventory, 'instances', cluster['CacheClusterId'])

        # Inventory: Group by region
        # (Redis single-node clusters are grouped at node level instead.)
        if self.group_by_region and not is_redis:
            self.push(self.inventory, region, dest)
            if self.nested_groups:
                self.push_group(self.inventory, 'regions', region)

        # Inventory: Group by availability zone
        if self.group_by_availability_zone and not is_redis:
            self.push(self.inventory, cluster['PreferredAvailabilityZone'], dest)
            if self.nested_groups:
                if self.group_by_region:
                    self.push_group(self.inventory, region, cluster['PreferredAvailabilityZone'])
                self.push_group(self.inventory, 'zones', cluster['PreferredAvailabilityZone'])

        # Inventory: Group by node type
        if self.group_by_instance_type and not is_redis:
            type_name = self.to_safe('type_' + cluster['CacheNodeType'])
            self.push(self.inventory, type_name, dest)
            if self.nested_groups:
                self.push_group(self.inventory, 'types', type_name)

        # Inventory: Group by VPC (information not available in the current
        # AWS API version for ElastiCache)

        # Inventory: Group by security group
        if self.group_by_security_group and not is_redis:

            # Check for the existence of the 'SecurityGroups' key and also if
            # this key has some value. When the cluster is not placed in a SG
            # the query can return None here and cause an error.
            if 'SecurityGroups' in cluster and cluster['SecurityGroups'] is not None:
                for security_group in cluster['SecurityGroups']:
                    key = self.to_safe("security_group_" + security_group['SecurityGroupId'])
                    self.push(self.inventory, key, dest)
                    if self.nested_groups:
                        self.push_group(self.inventory, 'security_groups', key)

        # Inventory: Group by engine
        if self.group_by_elasticache_engine and not is_redis:
            self.push(self.inventory, self.to_safe("elasticache_" + cluster['Engine']), dest)
            if self.nested_groups:
                self.push_group(self.inventory, 'elasticache_engines', self.to_safe(cluster['Engine']))

        # Inventory: Group by parameter group
        if self.group_by_elasticache_parameter_group:
            self.push(self.inventory, self.to_safe("elasticache_parameter_group_" + cluster['CacheParameterGroup']['CacheParameterGroupName']), dest)
            if self.nested_groups:
                self.push_group(self.inventory, 'elasticache_parameter_groups', self.to_safe(cluster['CacheParameterGroup']['CacheParameterGroupName']))

        # Inventory: Group by replication group
        if self.group_by_elasticache_replication_group and 'ReplicationGroupId' in cluster and cluster['ReplicationGroupId']:
            self.push(self.inventory, self.to_safe("elasticache_replication_group_" + cluster['ReplicationGroupId']), dest)
            if self.nested_groups:
                self.push_group(self.inventory, 'elasticache_replication_groups', self.to_safe(cluster['ReplicationGroupId']))

        # Global Tag: all ElastiCache clusters
        self.push(self.inventory, 'elasticache_clusters', cluster['CacheClusterId'])

        host_info = self.get_host_info_dict_from_describe_dict(cluster)

        self.inventory["_meta"]["hostvars"][dest] = host_info

        # Add the nodes
        for node in cluster['CacheNodes']:
            self.add_elasticache_node(node, cluster, region)

    def add_elasticache_node(self, node, cluster, region):
        ''' Adds an ElastiCache node to the inventory and index, as long as
        it is addressable '''

        # Only want available nodes unless all_elasticache_nodes is True
        if not self.all_elasticache_nodes and node['CacheNodeStatus'] != 'available':
            return

        # Select the best destination address
        dest = node['Endpoint']['Address']

        if not dest:
            # Skip nodes we cannot address (e.g. private VPC subnet)
            return

        # Node id combines the parent cluster id and the node id.
        node_id = self.to_safe(cluster['CacheClusterId'] + '_' + node['CacheNodeId'])

        # Add to index
        self.index[dest] = [region, node_id]

        # Inventory: Group by node ID (always a group of 1)
        if self.group_by_instance_id:
            self.inventory[node_id] = [dest]
            if self.nested_groups:
                self.push_group(self.inventory, 'instances', node_id)

        # Inventory: Group by region
        if self.group_by_region:
            self.push(self.inventory, region, dest)
            if self.nested_groups:
                self.push_group(self.inventory, 'regions', region)

        # Inventory: Group by availability zone
        if self.group_by_availability_zone:
            self.push(self.inventory, cluster['PreferredAvailabilityZone'], dest)
            if self.nested_groups:
                if self.group_by_region:
                    self.push_group(self.inventory, region, cluster['PreferredAvailabilityZone'])
                self.push_group(self.inventory, 'zones', cluster['PreferredAvailabilityZone'])

        # Inventory: Group by node type
        if self.group_by_instance_type:
            type_name = self.to_safe('type_' + cluster['CacheNodeType'])
            self.push(self.inventory, type_name, dest)
            if self.nested_groups:
                self.push_group(self.inventory, 'types', type_name)

        # Inventory: Group by VPC (information not available in the current
        # AWS API version for ElastiCache)

        # Inventory: Group by security group
        if self.group_by_security_group:

            # Check for the existence of the 'SecurityGroups' key and also if
            # this key has some value. When the cluster is not placed in a SG
            # the query can return None here and cause an error.
            if 'SecurityGroups' in cluster and cluster['SecurityGroups'] is not None:
                for security_group in cluster['SecurityGroups']:
                    key = self.to_safe("security_group_" + security_group['SecurityGroupId'])
                    self.push(self.inventory, key, dest)
                    if self.nested_groups:
                        self.push_group(self.inventory, 'security_groups', key)

        # Inventory: Group by engine
        if self.group_by_elasticache_engine:
            self.push(self.inventory, self.to_safe("elasticache_" + cluster['Engine']), dest)
            if self.nested_groups:
                self.push_group(self.inventory, 'elasticache_engines', self.to_safe("elasticache_" + cluster['Engine']))

        # Inventory: Group by parameter group (done at cluster level)

        # Inventory: Group by replication group (done at cluster level)

        # Inventory: Group by ElastiCache Cluster
        if self.group_by_elasticache_cluster:
            self.push(self.inventory, self.to_safe("elasticache_cluster_" + cluster['CacheClusterId']), dest)

        # Global Tag: all ElastiCache nodes
        self.push(self.inventory, 'elasticache_nodes', dest)

        host_info = self.get_host_info_dict_from_describe_dict(node)

        # Merge with any hostvars already recorded for this address
        # (the parent cluster may have registered the same endpoint).
        if dest in self.inventory["_meta"]["hostvars"]:
            self.inventory["_meta"]["hostvars"][dest].update(host_info)
        else:
            self.inventory["_meta"]["hostvars"][dest] = host_info

    def add_elasticache_replication_group(self, replication_group, region):
        ''' Adds an ElastiCache replication group to the inventory and index '''

        # Only want available clusters unless all_elasticache_replication_groups is True
        if not self.all_elasticache_replication_groups and replication_group['Status'] != 'available':
            return

        # Skip clusters we cannot address (e.g. private VPC subnet or clustered redis)
        if replication_group['NodeGroups'][0]['PrimaryEndpoint'] is None or \
           replication_group['NodeGroups'][0]['PrimaryEndpoint']['Address'] is None:
            return

        # Select the best destination address (PrimaryEndpoint)
        dest = replication_group['NodeGroups'][0]['PrimaryEndpoint']['Address']

        # Add to index
        self.index[dest] = [region, replication_group['ReplicationGroupId']]

        # Inventory: Group by ID (always a group of 1)
        if self.group_by_instance_id:
            self.inventory[replication_group['ReplicationGroupId']] = [dest]
            if self.nested_groups:
                self.push_group(self.inventory, 'instances', replication_group['ReplicationGroupId'])

        # Inventory: Group by region
        if self.group_by_region:
            self.push(self.inventory, region, dest)
            if self.nested_groups:
                self.push_group(self.inventory, 'regions', region)

        # Inventory: Group by availability zone (doesn't apply to replication groups)

        # Inventory: Group by node type (doesn't apply to replication groups)

        # Inventory: Group by VPC (information not available in the current
        # AWS API version for replication groups

        # Inventory: Group by security group (doesn't apply to replication groups)
        # Check this value in cluster level

        # Inventory: Group by engine (replication groups are always Redis)
        if self.group_by_elasticache_engine:
            self.push(self.inventory, 'elasticache_redis', dest)
            if self.nested_groups:
                self.push_group(self.inventory, 'elasticache_engines', 'redis')

        # Global Tag: all ElastiCache clusters
        self.push(self.inventory, 'elasticache_replication_groups', replication_group['ReplicationGroupId'])

        host_info = self.get_host_info_dict_from_describe_dict(replication_group)

        self.inventory["_meta"]["hostvars"][dest] = host_info

    def get_route53_records(self):
        ''' Get and store the map of resource records to domain names that
        point to them. '''

        if self.boto_profile:
            r53_conn = route53.Route53Connection(profile_name=self.boto_profile)
        else:
            r53_conn = route53.Route53Connection()
        all_zones = r53_conn.get_zones()

        # zone.name carries a trailing '.', strip it before matching exclusions
        route53_zones = [zone for zone in all_zones if zone.name[:-1] not in self.route53_excluded_zones]

        # Maps a resource record value -> set of domain names pointing at it.
        self.route53_records = {}

        for zone in route53_zones:
            rrsets = r53_conn.get_all_rrsets(zone.id)

            for record_set in rrsets:
                record_name = record_set.name

                if record_name.endswith('.'):
                    record_name = record_name[:-1]

                for resource in record_set.resource_records:
                    self.route53_records.setdefault(resource, set())
                    self.route53_records[resource].add(record_name)

    def get_instance_route53_names(self, instance):
        ''' Check if an instance is referenced in the records we have from
        Route53. If it is, return the list of domain names pointing to said
        instance. If nothing points to it, return an empty list. '''

        # Any of these instance addresses may appear as a record value.
        instance_attributes = ['public_dns_name', 'private_dns_name',
                               'ip_address', 'private_ip_address']

        name_list = set()

        for attrib in instance_attributes:
            try:
                value = getattr(instance, attrib)
            except AttributeError:
                continue

            if value in self.route53_records:
                name_list.update(self.route53_records[value])

        return list(name_list)

    def get_host_info_dict_from_instance(self, instance):
        ''' Build an 'ec2_*'-prefixed hostvars dict from a boto instance object. '''
        instance_vars = {}
        for key in vars(instance):
            value = getattr(instance, key)
            key = self.to_safe('ec2_' + key)

            # Handle complex types
            # state/previous_state changed to properties in boto in https://github.com/boto/boto/commit/a23c379837f698212252720d2af8dec0325c9518
            if key == 'ec2__state':
                instance_vars['ec2_state'] = instance.state or ''
                instance_vars['ec2_state_code'] = instance.state_code
            elif key == 'ec2__previous_state':
                instance_vars['ec2_previous_state'] = instance.previous_state or ''
                instance_vars['ec2_previous_state_code'] = instance.previous_state_code
            elif isinstance(value, (int, bool)):
                instance_vars[key] = value
            elif isinstance(value, six.string_types):
                instance_vars[key] = value.strip()
            elif value is None:
                # NOTE(review): statement truncated at this chunk boundary —
                # the right-hand side continues in the next chunk of the file.
                instance_vars[key] =
'' elif key == 'ec2_region': instance_vars[key] = value.name elif key == 'ec2__placement': instance_vars['ec2_placement'] = value.zone elif key == 'ec2_tags': for k, v in value.items(): if self.expand_csv_tags and ',' in v: v = list(map(lambda x: x.strip(), v.split(','))) key = self.to_safe('ec2_tag_' + k) instance_vars[key] = v elif key == 'ec2_groups': group_ids = [] group_names = [] for group in value: group_ids.append(group.id) group_names.append(group.name) instance_vars["ec2_security_group_ids"] = ','.join([str(i) for i in group_ids]) instance_vars["ec2_security_group_names"] = ','.join([str(i) for i in group_names]) elif key == 'ec2_block_device_mapping': instance_vars["ec2_block_devices"] = {} for k, v in value.items(): instance_vars["ec2_block_devices"][os.path.basename(k)] = v.volume_id else: pass # TODO Product codes if someone finds them useful # print key # print type(value) # print value instance_vars[self.to_safe('ec2_account_id')] = self.aws_account_id return instance_vars def get_host_info_dict_from_describe_dict(self, describe_dict): ''' Parses the dictionary returned by the API call into a flat list of parameters. This method should be used only when 'describe' is used directly because Boto doesn't provide specific classes. ''' # I really don't agree with prefixing everything with 'ec2' # because EC2, RDS and ElastiCache are different services. # I'm just following the pattern used until now to not break any # compatibility. 
host_info = {} for key in describe_dict: value = describe_dict[key] key = self.to_safe('ec2_' + self.uncammelize(key)) # Handle complex types # Target: Memcached Cache Clusters if key == 'ec2_configuration_endpoint' and value: host_info['ec2_configuration_endpoint_address'] = value['Address'] host_info['ec2_configuration_endpoint_port'] = value['Port'] # Target: Cache Nodes and Redis Cache Clusters (single node) if key == 'ec2_endpoint' and value: host_info['ec2_endpoint_address'] = value['Address'] host_info['ec2_endpoint_port'] = value['Port'] # Target: Redis Replication Groups if key == 'ec2_node_groups' and value: host_info['ec2_endpoint_address'] = value[0]['PrimaryEndpoint']['Address'] host_info['ec2_endpoint_port'] = value[0]['PrimaryEndpoint']['Port'] replica_count = 0 for node in value[0]['NodeGroupMembers']: if node['CurrentRole'] == 'primary': host_info['ec2_primary_cluster_address'] = node['ReadEndpoint']['Address'] host_info['ec2_primary_cluster_port'] = node['ReadEndpoint']['Port'] host_info['ec2_primary_cluster_id'] = node['CacheClusterId'] elif node['CurrentRole'] == 'replica': host_info['ec2_replica_cluster_address_' + str(replica_count)] = node['ReadEndpoint']['Address'] host_info['ec2_replica_cluster_port_' + str(replica_count)] = node['ReadEndpoint']['Port'] host_info['ec2_replica_cluster_id_' + str(replica_count)] = node['CacheClusterId'] replica_count += 1 # Target: Redis Replication Groups if key == 'ec2_member_clusters' and value: host_info['ec2_member_clusters'] = ','.join([str(i) for i in value]) # Target: All Cache Clusters elif key == 'ec2_cache_parameter_group': host_info["ec2_cache_node_ids_to_reboot"] = ','.join([str(i) for i in value['CacheNodeIdsToReboot']]) host_info['ec2_cache_parameter_group_name'] = value['CacheParameterGroupName'] host_info['ec2_cache_parameter_apply_status'] = value['ParameterApplyStatus'] # Target: Almost everything elif key == 'ec2_security_groups': # Skip if SecurityGroups is None # (it is possible to have 
the key defined but no value in it). if value is not None: sg_ids = [] for sg in value: sg_ids.append(sg['SecurityGroupId']) host_info["ec2_security_group_ids"] = ','.join([str(i) for i in sg_ids]) # Target: Everything # Preserve booleans and integers elif isinstance(value, (int, bool)): host_info[key] = value # Target: Everything # Sanitize string values elif isinstance(value, six.string_types): host_info[key] = value.strip() # Target: Everything # Replace None by an empty string elif value is None: host_info[key] = '' else: # Remove non-processed complex types pass return host_info def get_host_info(self): ''' Get variables about a specific host ''' if len(self.index) == 0: # Need to load index from cache self.load_index_from_cache() if self.args.host not in self.index: # try updating the cache self.do_api_calls_update_cache() if self.args.host not in self.index: # host might not exist anymore return self.json_format_dict({}, True) (region, instance_id) = self.index[self.args.host] instance = self.get_instance(region, instance_id) return self.json_format_dict(self.get_host_info_dict_from_instance(instance), True) def push(self, my_dict, key, element): ''' Push an element onto an array that may not have been defined in the dict ''' group_info = my_dict.setdefault(key, []) if isinstance(group_info, dict): host_list = group_info.setdefault('hosts', []) host_list.append(element) else: group_info.append(element) def push_group(self, my_dict, key, element): ''' Push a group as a child of another group. 
''' parent_group = my_dict.setdefault(key, {}) if not isinstance(parent_group, dict): parent_group = my_dict[key] = {'hosts': parent_group} child_groups = parent_group.setdefault('children', []) if element not in child_groups: child_groups.append(element) def get_inventory_from_cache(self): ''' Reads the inventory from the cache file and returns it as a JSON object ''' with open(self.cache_path_cache, 'r') as f: json_inventory = f.read() return json_inventory def load_index_from_cache(self): ''' Reads the index from the cache file sets self.index ''' with open(self.cache_path_index, 'rb') as f: self.index = json.load(f) def write_to_cache(self, data, filename): ''' Writes data in JSON format to a file ''' json_data = self.json_format_dict(data, True) with open(filename, 'w') as f: f.write(json_data) def uncammelize(self, key): temp = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', key) return re.sub('([a-z0-9])([A-Z])', r'\1_\2', temp).lower() def to_safe(self, word): ''' Converts 'bad' characters in a string to underscores so they can be used as Ansible groups ''' regex = r"[^A-Za-z0-9\_" if not self.replace_dash_in_groups: regex += r"\-" return re.sub(regex + "]", "_", word) def json_format_dict(self, data, pretty=False): ''' Converts a dict to a JSON object and dumps it as a formatted string ''' if pretty: return json.dumps(data, sort_keys=True, indent=2) else: return json.dumps(data) if __name__ == '__main__': # Run the script Ec2Inventory()
mit
bennyhat/me-benbrewer-academic-thesis-ns-3
src/propagation/bindings/modulegen__gcc_LP64.py
10
158617
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers import pybindgen.settings import warnings class ErrorHandler(pybindgen.settings.ErrorHandler): def handle_error(self, wrapper, exception, traceback_): warnings.warn("exception %r in wrapper %s" % (exception, wrapper)) return True pybindgen.settings.error_handler = ErrorHandler() import sys def module_init(): root_module = Module('ns.propagation', cpp_namespace='::ns3') return root_module def register_types(module): root_module = module.get_root() ## propagation-environment.h (module 'propagation'): ns3::CitySize [enumeration] module.add_enum('CitySize', ['SmallCity', 'MediumCity', 'LargeCity']) ## propagation-environment.h (module 'propagation'): ns3::EnvironmentType [enumeration] module.add_enum('EnvironmentType', ['UrbanEnvironment', 'SubUrbanEnvironment', 'OpenAreasEnvironment']) ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class] module.add_class('AttributeConstructionList', import_from_module='ns.core') ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct] module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList']) ## callback.h (module 'core'): ns3::CallbackBase [class] module.add_class('CallbackBase', import_from_module='ns.core') ## object-base.h (module 'core'): ns3::ObjectBase [class] module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core') ## object.h (module 'core'): ns3::ObjectDeleter [struct] module.add_class('ObjectDeleter', import_from_module='ns.core') ## propagation-cache.h (module 'propagation'): ns3::PropagationCache<ns3::JakesProcess> [class] module.add_class('PropagationCache', template_parameters=['ns3::JakesProcess']) ## random-variable.h (module 'core'): ns3::RandomVariable [class] module.add_class('RandomVariable', import_from_module='ns.core') ## random-variable.h (module 'core'): ns3::SeedManager 
[class] module.add_class('SeedManager', import_from_module='ns.core') ## random-variable.h (module 'core'): ns3::SequentialVariable [class] module.add_class('SequentialVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable']) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## random-variable.h (module 'core'): ns3::TriangularVariable [class] module.add_class('TriangularVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable']) ## type-id.h (module 'core'): ns3::TypeId [class] module.add_class('TypeId', import_from_module='ns.core') ## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration] module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core') ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct] module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId']) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct] module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId']) ## random-variable.h (module 'core'): ns3::UniformVariable [class] module.add_class('UniformVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable']) ## vector.h (module 'core'): ns3::Vector2D [class] module.add_class('Vector2D', import_from_module='ns.core') ## vector.h (module 'core'): ns3::Vector3D [class] module.add_class('Vector3D', 
import_from_module='ns.core') ## random-variable.h (module 'core'): ns3::WeibullVariable [class] module.add_class('WeibullVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable']) ## random-variable.h (module 'core'): ns3::ZetaVariable [class] module.add_class('ZetaVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable']) ## random-variable.h (module 'core'): ns3::ZipfVariable [class] module.add_class('ZipfVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable']) ## empty.h (module 'core'): ns3::empty [class] module.add_class('empty', import_from_module='ns.core') ## int64x64-double.h (module 'core'): ns3::int64x64_t [class] module.add_class('int64x64_t', import_from_module='ns.core') ## random-variable.h (module 'core'): ns3::ConstantVariable [class] module.add_class('ConstantVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable']) ## random-variable.h (module 'core'): ns3::DeterministicVariable [class] module.add_class('DeterministicVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable']) ## random-variable.h (module 'core'): ns3::EmpiricalVariable [class] module.add_class('EmpiricalVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable']) ## random-variable.h (module 'core'): ns3::ErlangVariable [class] module.add_class('ErlangVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable']) ## random-variable.h (module 'core'): ns3::ExponentialVariable [class] module.add_class('ExponentialVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable']) ## random-variable.h (module 'core'): ns3::GammaVariable [class] module.add_class('GammaVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable']) ## random-variable.h (module 'core'): ns3::IntEmpiricalVariable [class] module.add_class('IntEmpiricalVariable', import_from_module='ns.core', 
parent=root_module['ns3::EmpiricalVariable']) ## random-variable.h (module 'core'): ns3::LogNormalVariable [class] module.add_class('LogNormalVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable']) ## random-variable.h (module 'core'): ns3::NormalVariable [class] module.add_class('NormalVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable']) ## object.h (module 'core'): ns3::Object [class] module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >']) ## object.h (module 'core'): ns3::Object::AggregateIterator [class] module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object']) ## random-variable.h (module 'core'): ns3::ParetoVariable [class] module.add_class('ParetoVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable']) ## propagation-delay-model.h (module 'propagation'): ns3::PropagationDelayModel [class] module.add_class('PropagationDelayModel', parent=root_module['ns3::Object']) ## propagation-loss-model.h (module 'propagation'): ns3::PropagationLossModel [class] module.add_class('PropagationLossModel', parent=root_module['ns3::Object']) ## propagation-delay-model.h (module 'propagation'): ns3::RandomPropagationDelayModel [class] module.add_class('RandomPropagationDelayModel', parent=root_module['ns3::PropagationDelayModel']) ## propagation-loss-model.h (module 'propagation'): ns3::RandomPropagationLossModel [class] module.add_class('RandomPropagationLossModel', parent=root_module['ns3::PropagationLossModel']) ## propagation-loss-model.h (module 'propagation'): ns3::RangePropagationLossModel [class] module.add_class('RangePropagationLossModel', parent=root_module['ns3::PropagationLossModel']) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class] 
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): 
ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## propagation-loss-model.h (module 'propagation'): ns3::ThreeLogDistancePropagationLossModel [class] module.add_class('ThreeLogDistancePropagationLossModel', parent=root_module['ns3::PropagationLossModel']) ## nstime.h (module 'core'): ns3::Time [class] module.add_class('Time', import_from_module='ns.core') ## nstime.h (module 'core'): ns3::Time::Unit [enumeration] module.add_enum('Unit', ['S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core') ## nstime.h (module 'core'): ns3::Time [class] root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t']) ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class] module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >']) ## propagation-loss-model.h (module 'propagation'): ns3::TwoRayGroundPropagationLossModel [class] module.add_class('TwoRayGroundPropagationLossModel', parent=root_module['ns3::PropagationLossModel']) ## attribute.h (module 'core'): ns3::AttributeAccessor [class] module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >']) ## attribute.h (module 'core'): ns3::AttributeChecker [class] module.add_class('AttributeChecker', allow_subclassing=False, 
automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >']) ## attribute.h (module 'core'): ns3::AttributeValue [class] module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >']) ## callback.h (module 'core'): ns3::CallbackChecker [class] module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) ## callback.h (module 'core'): ns3::CallbackImplBase [class] module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >']) ## callback.h (module 'core'): ns3::CallbackValue [class] module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) ## propagation-delay-model.h (module 'propagation'): ns3::ConstantSpeedPropagationDelayModel [class] module.add_class('ConstantSpeedPropagationDelayModel', parent=root_module['ns3::PropagationDelayModel']) ## cost231-propagation-loss-model.h (module 'propagation'): ns3::Cost231PropagationLossModel [class] module.add_class('Cost231PropagationLossModel', parent=root_module['ns3::PropagationLossModel']) ## cost231-propagation-loss-model.h (module 'propagation'): ns3::Cost231PropagationLossModel::Environment [enumeration] module.add_enum('Environment', ['SubUrban', 'MediumCity', 'Metropolitan'], outer_class=root_module['ns3::Cost231PropagationLossModel']) ## attribute.h (module 'core'): ns3::EmptyAttributeValue [class] module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) ## propagation-loss-model.h (module 'propagation'): ns3::FixedRssLossModel [class] 
module.add_class('FixedRssLossModel', parent=root_module['ns3::PropagationLossModel']) ## propagation-loss-model.h (module 'propagation'): ns3::FriisPropagationLossModel [class] module.add_class('FriisPropagationLossModel', parent=root_module['ns3::PropagationLossModel']) ## itu-r-1411-los-propagation-loss-model.h (module 'propagation'): ns3::ItuR1411LosPropagationLossModel [class] module.add_class('ItuR1411LosPropagationLossModel', parent=root_module['ns3::PropagationLossModel']) ## itu-r-1411-nlos-over-rooftop-propagation-loss-model.h (module 'propagation'): ns3::ItuR1411NlosOverRooftopPropagationLossModel [class] module.add_class('ItuR1411NlosOverRooftopPropagationLossModel', parent=root_module['ns3::PropagationLossModel']) ## jakes-process.h (module 'propagation'): ns3::JakesProcess [class] module.add_class('JakesProcess', parent=root_module['ns3::Object']) ## jakes-propagation-loss-model.h (module 'propagation'): ns3::JakesPropagationLossModel [class] module.add_class('JakesPropagationLossModel', parent=root_module['ns3::PropagationLossModel']) ## kun-2600-mhz-propagation-loss-model.h (module 'propagation'): ns3::Kun2600MhzPropagationLossModel [class] module.add_class('Kun2600MhzPropagationLossModel', parent=root_module['ns3::PropagationLossModel']) ## propagation-loss-model.h (module 'propagation'): ns3::LogDistancePropagationLossModel [class] module.add_class('LogDistancePropagationLossModel', parent=root_module['ns3::PropagationLossModel']) ## propagation-loss-model.h (module 'propagation'): ns3::MatrixPropagationLossModel [class] module.add_class('MatrixPropagationLossModel', parent=root_module['ns3::PropagationLossModel']) ## mobility-model.h (module 'mobility'): ns3::MobilityModel [class] module.add_class('MobilityModel', import_from_module='ns.mobility', parent=root_module['ns3::Object']) ## propagation-loss-model.h (module 'propagation'): ns3::NakagamiPropagationLossModel [class] module.add_class('NakagamiPropagationLossModel', 
parent=root_module['ns3::PropagationLossModel']) ## okumura-hata-propagation-loss-model.h (module 'propagation'): ns3::OkumuraHataPropagationLossModel [class] module.add_class('OkumuraHataPropagationLossModel', parent=root_module['ns3::PropagationLossModel']) ## random-variable.h (module 'core'): ns3::RandomVariableChecker [class] module.add_class('RandomVariableChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) ## random-variable.h (module 'core'): ns3::RandomVariableValue [class] module.add_class('RandomVariableValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) ## nstime.h (module 'core'): ns3::TimeChecker [class] module.add_class('TimeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) ## nstime.h (module 'core'): ns3::TimeValue [class] module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) ## type-id.h (module 'core'): ns3::TypeIdChecker [class] module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) ## type-id.h (module 'core'): ns3::TypeIdValue [class] module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) ## vector.h (module 'core'): ns3::Vector2DChecker [class] module.add_class('Vector2DChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) ## vector.h (module 'core'): ns3::Vector2DValue [class] module.add_class('Vector2DValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) ## vector.h (module 'core'): ns3::Vector3DChecker [class] module.add_class('Vector3DChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) ## vector.h (module 'core'): ns3::Vector3DValue [class] module.add_class('Vector3DValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) typehandlers.add_type_alias('ns3::Vector3DValue', 
'ns3::VectorValue') typehandlers.add_type_alias('ns3::Vector3DValue*', 'ns3::VectorValue*') typehandlers.add_type_alias('ns3::Vector3DValue&', 'ns3::VectorValue&') module.add_typedef(root_module['ns3::Vector3DValue'], 'VectorValue') typehandlers.add_type_alias('ns3::Vector3D', 'ns3::Vector') typehandlers.add_type_alias('ns3::Vector3D*', 'ns3::Vector*') typehandlers.add_type_alias('ns3::Vector3D&', 'ns3::Vector&') module.add_typedef(root_module['ns3::Vector3D'], 'Vector') typehandlers.add_type_alias('ns3::Vector3DChecker', 'ns3::VectorChecker') typehandlers.add_type_alias('ns3::Vector3DChecker*', 'ns3::VectorChecker*') typehandlers.add_type_alias('ns3::Vector3DChecker&', 'ns3::VectorChecker&') module.add_typedef(root_module['ns3::Vector3DChecker'], 'VectorChecker') ## Register a nested module for the namespace FatalImpl nested_module = module.add_cpp_namespace('FatalImpl') register_types_ns3_FatalImpl(nested_module) def register_types_ns3_FatalImpl(module): root_module = module.get_root() def register_methods(root_module): register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList']) register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item']) register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase']) register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase']) register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter']) register_Ns3PropagationCache__Ns3JakesProcess_methods(root_module, root_module['ns3::PropagationCache< ns3::JakesProcess >']) register_Ns3RandomVariable_methods(root_module, root_module['ns3::RandomVariable']) register_Ns3SeedManager_methods(root_module, root_module['ns3::SeedManager']) register_Ns3SequentialVariable_methods(root_module, root_module['ns3::SequentialVariable']) register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, 
root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >']) register_Ns3TriangularVariable_methods(root_module, root_module['ns3::TriangularVariable']) register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId']) register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation']) register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation']) register_Ns3UniformVariable_methods(root_module, root_module['ns3::UniformVariable']) register_Ns3Vector2D_methods(root_module, root_module['ns3::Vector2D']) register_Ns3Vector3D_methods(root_module, root_module['ns3::Vector3D']) register_Ns3WeibullVariable_methods(root_module, root_module['ns3::WeibullVariable']) register_Ns3ZetaVariable_methods(root_module, root_module['ns3::ZetaVariable']) register_Ns3ZipfVariable_methods(root_module, root_module['ns3::ZipfVariable']) register_Ns3Empty_methods(root_module, root_module['ns3::empty']) register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t']) register_Ns3ConstantVariable_methods(root_module, root_module['ns3::ConstantVariable']) register_Ns3DeterministicVariable_methods(root_module, root_module['ns3::DeterministicVariable']) register_Ns3EmpiricalVariable_methods(root_module, root_module['ns3::EmpiricalVariable']) register_Ns3ErlangVariable_methods(root_module, root_module['ns3::ErlangVariable']) register_Ns3ExponentialVariable_methods(root_module, root_module['ns3::ExponentialVariable']) register_Ns3GammaVariable_methods(root_module, root_module['ns3::GammaVariable']) register_Ns3IntEmpiricalVariable_methods(root_module, root_module['ns3::IntEmpiricalVariable']) register_Ns3LogNormalVariable_methods(root_module, root_module['ns3::LogNormalVariable']) register_Ns3NormalVariable_methods(root_module, root_module['ns3::NormalVariable']) register_Ns3Object_methods(root_module, root_module['ns3::Object']) 
register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator']) register_Ns3ParetoVariable_methods(root_module, root_module['ns3::ParetoVariable']) register_Ns3PropagationDelayModel_methods(root_module, root_module['ns3::PropagationDelayModel']) register_Ns3PropagationLossModel_methods(root_module, root_module['ns3::PropagationLossModel']) register_Ns3RandomPropagationDelayModel_methods(root_module, root_module['ns3::RandomPropagationDelayModel']) register_Ns3RandomPropagationLossModel_methods(root_module, root_module['ns3::RandomPropagationLossModel']) register_Ns3RangePropagationLossModel_methods(root_module, root_module['ns3::RangePropagationLossModel']) register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >']) register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >']) register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >']) register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >']) register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >']) 
register_Ns3ThreeLogDistancePropagationLossModel_methods(root_module, root_module['ns3::ThreeLogDistancePropagationLossModel']) register_Ns3Time_methods(root_module, root_module['ns3::Time']) register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor']) register_Ns3TwoRayGroundPropagationLossModel_methods(root_module, root_module['ns3::TwoRayGroundPropagationLossModel']) register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor']) register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker']) register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue']) register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker']) register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase']) register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue']) register_Ns3ConstantSpeedPropagationDelayModel_methods(root_module, root_module['ns3::ConstantSpeedPropagationDelayModel']) register_Ns3Cost231PropagationLossModel_methods(root_module, root_module['ns3::Cost231PropagationLossModel']) register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue']) register_Ns3FixedRssLossModel_methods(root_module, root_module['ns3::FixedRssLossModel']) register_Ns3FriisPropagationLossModel_methods(root_module, root_module['ns3::FriisPropagationLossModel']) register_Ns3ItuR1411LosPropagationLossModel_methods(root_module, root_module['ns3::ItuR1411LosPropagationLossModel']) register_Ns3ItuR1411NlosOverRooftopPropagationLossModel_methods(root_module, root_module['ns3::ItuR1411NlosOverRooftopPropagationLossModel']) register_Ns3JakesProcess_methods(root_module, root_module['ns3::JakesProcess']) register_Ns3JakesPropagationLossModel_methods(root_module, root_module['ns3::JakesPropagationLossModel']) register_Ns3Kun2600MhzPropagationLossModel_methods(root_module, 
root_module['ns3::Kun2600MhzPropagationLossModel']) register_Ns3LogDistancePropagationLossModel_methods(root_module, root_module['ns3::LogDistancePropagationLossModel']) register_Ns3MatrixPropagationLossModel_methods(root_module, root_module['ns3::MatrixPropagationLossModel']) register_Ns3MobilityModel_methods(root_module, root_module['ns3::MobilityModel']) register_Ns3NakagamiPropagationLossModel_methods(root_module, root_module['ns3::NakagamiPropagationLossModel']) register_Ns3OkumuraHataPropagationLossModel_methods(root_module, root_module['ns3::OkumuraHataPropagationLossModel']) register_Ns3RandomVariableChecker_methods(root_module, root_module['ns3::RandomVariableChecker']) register_Ns3RandomVariableValue_methods(root_module, root_module['ns3::RandomVariableValue']) register_Ns3TimeChecker_methods(root_module, root_module['ns3::TimeChecker']) register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue']) register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker']) register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue']) register_Ns3Vector2DChecker_methods(root_module, root_module['ns3::Vector2DChecker']) register_Ns3Vector2DValue_methods(root_module, root_module['ns3::Vector2DValue']) register_Ns3Vector3DChecker_methods(root_module, root_module['ns3::Vector3DChecker']) register_Ns3Vector3DValue_methods(root_module, root_module['ns3::Vector3DValue']) return def register_Ns3AttributeConstructionList_methods(root_module, cls): ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor] cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')]) ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor] cls.add_constructor([]) ## attribute-construction-list.h (module 'core'): void 
ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function] cls.add_method('Add', 'void', [param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')]) ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function] cls.add_method('Begin', 'std::_List_const_iterator< ns3::AttributeConstructionList::Item >', [], is_const=True) ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function] cls.add_method('End', 'std::_List_const_iterator< ns3::AttributeConstructionList::Item >', [], is_const=True) ## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('Find', 'ns3::Ptr< ns3::AttributeValue >', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True) return def register_Ns3AttributeConstructionListItem_methods(root_module, cls): ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor] cls.add_constructor([]) ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor] cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')]) ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable] cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False) ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable] 
cls.add_instance_attribute('name', 'std::string', is_const=False) ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable] cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False) return def register_Ns3CallbackBase_methods(root_module, cls): ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor] cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')]) ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor] cls.add_constructor([]) ## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function] cls.add_method('GetImpl', 'ns3::Ptr< ns3::CallbackImplBase >', [], is_const=True) ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor] cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')], visibility='protected') ## callback.h (module 'core'): static std::string ns3::CallbackBase::Demangle(std::string const & mangled) [member function] cls.add_method('Demangle', 'std::string', [param('std::string const &', 'mangled')], is_static=True, visibility='protected') return def register_Ns3ObjectBase_methods(root_module, cls): ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor] cls.add_constructor([]) ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor] cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')]) ## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function] cls.add_method('GetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True) ## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & attribute) 
const [member function] cls.add_method('GetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue &', 'attribute')], is_const=True) ## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function] cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function] cls.add_method('SetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) ## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function] cls.add_method('SetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function] cls.add_method('TraceConnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')]) ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function] cls.add_method('TraceConnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')]) ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function] cls.add_method('TraceDisconnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')]) ## object-base.h (module 'core'): bool 
ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function] cls.add_method('TraceDisconnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')]) ## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function] cls.add_method('ConstructSelf', 'void', [param('ns3::AttributeConstructionList const &', 'attributes')], visibility='protected') ## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function] cls.add_method('NotifyConstructionCompleted', 'void', [], visibility='protected', is_virtual=True) return def register_Ns3ObjectDeleter_methods(root_module, cls): ## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor] cls.add_constructor([]) ## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor] cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')]) ## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function] cls.add_method('Delete', 'void', [param('ns3::Object *', 'object')], is_static=True) return def register_Ns3PropagationCache__Ns3JakesProcess_methods(root_module, cls): ## propagation-cache.h (module 'propagation'): ns3::PropagationCache<ns3::JakesProcess>::PropagationCache(ns3::PropagationCache<ns3::JakesProcess> const & arg0) [copy constructor] cls.add_constructor([param('ns3::PropagationCache< ns3::JakesProcess > const &', 'arg0')]) ## propagation-cache.h (module 'propagation'): ns3::PropagationCache<ns3::JakesProcess>::PropagationCache() [constructor] cls.add_constructor([]) ## propagation-cache.h (module 'propagation'): ns3::Ptr<ns3::JakesProcess> ns3::PropagationCache<ns3::JakesProcess>::GetPathData(ns3::Ptr<ns3::MobilityModel const> a, ns3::Ptr<ns3::MobilityModel const> b, uint32_t modelUid) [member function] 
cls.add_method('GetPathData', 'ns3::Ptr< ns3::JakesProcess >', [param('ns3::Ptr< ns3::MobilityModel const >', 'a'), param('ns3::Ptr< ns3::MobilityModel const >', 'b'), param('uint32_t', 'modelUid')]) return def register_Ns3RandomVariable_methods(root_module, cls): cls.add_output_stream_operator() ## random-variable.h (module 'core'): ns3::RandomVariable::RandomVariable() [constructor] cls.add_constructor([]) ## random-variable.h (module 'core'): ns3::RandomVariable::RandomVariable(ns3::RandomVariable const & o) [copy constructor] cls.add_constructor([param('ns3::RandomVariable const &', 'o')]) ## random-variable.h (module 'core'): uint32_t ns3::RandomVariable::GetInteger() const [member function] cls.add_method('GetInteger', 'uint32_t', [], is_const=True) ## random-variable.h (module 'core'): double ns3::RandomVariable::GetValue() const [member function] cls.add_method('GetValue', 'double', [], is_const=True) return def register_Ns3SeedManager_methods(root_module, cls): ## random-variable.h (module 'core'): ns3::SeedManager::SeedManager() [constructor] cls.add_constructor([]) ## random-variable.h (module 'core'): ns3::SeedManager::SeedManager(ns3::SeedManager const & arg0) [copy constructor] cls.add_constructor([param('ns3::SeedManager const &', 'arg0')]) ## random-variable.h (module 'core'): static bool ns3::SeedManager::CheckSeed(uint32_t seed) [member function] cls.add_method('CheckSeed', 'bool', [param('uint32_t', 'seed')], is_static=True) ## random-variable.h (module 'core'): static uint32_t ns3::SeedManager::GetRun() [member function] cls.add_method('GetRun', 'uint32_t', [], is_static=True) ## random-variable.h (module 'core'): static uint32_t ns3::SeedManager::GetSeed() [member function] cls.add_method('GetSeed', 'uint32_t', [], is_static=True) ## random-variable.h (module 'core'): static void ns3::SeedManager::SetRun(uint32_t run) [member function] cls.add_method('SetRun', 'void', [param('uint32_t', 'run')], is_static=True) ## random-variable.h (module 
'core'): static void ns3::SeedManager::SetSeed(uint32_t seed) [member function] cls.add_method('SetSeed', 'void', [param('uint32_t', 'seed')], is_static=True) return def register_Ns3SequentialVariable_methods(root_module, cls): ## random-variable.h (module 'core'): ns3::SequentialVariable::SequentialVariable(ns3::SequentialVariable const & arg0) [copy constructor] cls.add_constructor([param('ns3::SequentialVariable const &', 'arg0')]) ## random-variable.h (module 'core'): ns3::SequentialVariable::SequentialVariable(double f, double l, double i=1, uint32_t c=1) [constructor] cls.add_constructor([param('double', 'f'), param('double', 'l'), param('double', 'i', default_value='1'), param('uint32_t', 'c', default_value='1')]) ## random-variable.h (module 'core'): ns3::SequentialVariable::SequentialVariable(double f, double l, ns3::RandomVariable const & i, uint32_t c=1) [constructor] cls.add_constructor([param('double', 'f'), param('double', 'l'), param('ns3::RandomVariable const &', 'i'), param('uint32_t', 'c', default_value='1')]) return def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls): ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor] cls.add_constructor([]) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor] cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')]) ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function] cls.add_method('Cleanup', 'void', [], is_static=True) return def register_Ns3TriangularVariable_methods(root_module, cls): ## random-variable.h (module 'core'): 
ns3::TriangularVariable::TriangularVariable(ns3::TriangularVariable const & arg0) [copy constructor] cls.add_constructor([param('ns3::TriangularVariable const &', 'arg0')]) ## random-variable.h (module 'core'): ns3::TriangularVariable::TriangularVariable() [constructor] cls.add_constructor([]) ## random-variable.h (module 'core'): ns3::TriangularVariable::TriangularVariable(double s, double l, double mean) [constructor] cls.add_constructor([param('double', 's'), param('double', 'l'), param('double', 'mean')]) return def register_Ns3TypeId_methods(root_module, cls): cls.add_binary_comparison_operator('<') cls.add_binary_comparison_operator('!=') cls.add_output_stream_operator() cls.add_binary_comparison_operator('==') ## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor] cls.add_constructor([param('char const *', 'name')]) ## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor] cls.add_constructor([param('ns3::TypeId const &', 'o')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 
'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function] cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')]) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function] cls.add_method('GetAttribute', 'ns3::TypeId::AttributeInformation', [param('uint32_t', 'i')], is_const=True) ## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function] cls.add_method('GetAttributeFullName', 'std::string', [param('uint32_t', 'i')], is_const=True) ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function] cls.add_method('GetAttributeN', 'uint32_t', [], is_const=True) ## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function] cls.add_method('GetConstructor', 'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', [], is_const=True) ## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function] cls.add_method('GetGroupName', 'std::string', [], is_const=True) ## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function] cls.add_method('GetName', 'std::string', [], is_const=True) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member 
function] cls.add_method('GetParent', 'ns3::TypeId', [], is_const=True) ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function] cls.add_method('GetRegistered', 'ns3::TypeId', [param('uint32_t', 'i')], is_static=True) ## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function] cls.add_method('GetRegisteredN', 'uint32_t', [], is_static=True) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function] cls.add_method('GetTraceSource', 'ns3::TypeId::TraceSourceInformation', [param('uint32_t', 'i')], is_const=True) ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function] cls.add_method('GetTraceSourceN', 'uint32_t', [], is_const=True) ## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function] cls.add_method('GetUid', 'uint16_t', [], is_const=True) ## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function] cls.add_method('HasConstructor', 'bool', [], is_const=True) ## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function] cls.add_method('HasParent', 'bool', [], is_const=True) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function] cls.add_method('HideFromDocumentation', 'ns3::TypeId', []) ## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function] cls.add_method('IsChildOf', 'bool', [param('ns3::TypeId', 'other')], is_const=True) ## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function] cls.add_method('LookupAttributeByName', 'bool', [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)], is_const=True) ## type-id.h (module 'core'): static ns3::TypeId 
ns3::TypeId::LookupByName(std::string name) [member function] cls.add_method('LookupByName', 'ns3::TypeId', [param('std::string', 'name')], is_static=True) ## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function] cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name')], is_const=True) ## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function] cls.add_method('MustHideFromDocumentation', 'bool', [], is_const=True) ## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function] cls.add_method('SetAttributeInitialValue', 'bool', [param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function] cls.add_method('SetGroupName', 'ns3::TypeId', [param('std::string', 'groupName')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function] cls.add_method('SetParent', 'ns3::TypeId', [param('ns3::TypeId', 'tid')]) ## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function] cls.add_method('SetUid', 'void', [param('uint16_t', 'tid')]) return def register_Ns3TypeIdAttributeInformation_methods(root_module, cls): ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor] cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')]) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable] cls.add_instance_attribute('accessor', 'ns3::Ptr< 
ns3::AttributeAccessor const >', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable] cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable] cls.add_instance_attribute('flags', 'uint32_t', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable] cls.add_instance_attribute('help', 'std::string', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable] cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable] cls.add_instance_attribute('name', 'std::string', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable] cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False) return def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls): ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor] cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')]) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable] cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable] cls.add_instance_attribute('help', 'std::string', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable] cls.add_instance_attribute('name', 'std::string', 
is_const=False) return def register_Ns3UniformVariable_methods(root_module, cls): ## random-variable.h (module 'core'): ns3::UniformVariable::UniformVariable(ns3::UniformVariable const & arg0) [copy constructor] cls.add_constructor([param('ns3::UniformVariable const &', 'arg0')]) ## random-variable.h (module 'core'): ns3::UniformVariable::UniformVariable() [constructor] cls.add_constructor([]) ## random-variable.h (module 'core'): ns3::UniformVariable::UniformVariable(double s, double l) [constructor] cls.add_constructor([param('double', 's'), param('double', 'l')]) ## random-variable.h (module 'core'): uint32_t ns3::UniformVariable::GetInteger(uint32_t s, uint32_t l) [member function] cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 's'), param('uint32_t', 'l')]) ## random-variable.h (module 'core'): double ns3::UniformVariable::GetValue() const [member function] cls.add_method('GetValue', 'double', [], is_const=True) ## random-variable.h (module 'core'): double ns3::UniformVariable::GetValue(double s, double l) [member function] cls.add_method('GetValue', 'double', [param('double', 's'), param('double', 'l')]) return def register_Ns3Vector2D_methods(root_module, cls): cls.add_output_stream_operator() ## vector.h (module 'core'): ns3::Vector2D::Vector2D(ns3::Vector2D const & arg0) [copy constructor] cls.add_constructor([param('ns3::Vector2D const &', 'arg0')]) ## vector.h (module 'core'): ns3::Vector2D::Vector2D(double _x, double _y) [constructor] cls.add_constructor([param('double', '_x'), param('double', '_y')]) ## vector.h (module 'core'): ns3::Vector2D::Vector2D() [constructor] cls.add_constructor([]) ## vector.h (module 'core'): ns3::Vector2D::x [variable] cls.add_instance_attribute('x', 'double', is_const=False) ## vector.h (module 'core'): ns3::Vector2D::y [variable] cls.add_instance_attribute('y', 'double', is_const=False) return def register_Ns3Vector3D_methods(root_module, cls): cls.add_output_stream_operator() ## vector.h (module 'core'): 
ns3::Vector3D::Vector3D(ns3::Vector3D const & arg0) [copy constructor] cls.add_constructor([param('ns3::Vector3D const &', 'arg0')]) ## vector.h (module 'core'): ns3::Vector3D::Vector3D(double _x, double _y, double _z) [constructor] cls.add_constructor([param('double', '_x'), param('double', '_y'), param('double', '_z')]) ## vector.h (module 'core'): ns3::Vector3D::Vector3D() [constructor] cls.add_constructor([]) ## vector.h (module 'core'): ns3::Vector3D::x [variable] cls.add_instance_attribute('x', 'double', is_const=False) ## vector.h (module 'core'): ns3::Vector3D::y [variable] cls.add_instance_attribute('y', 'double', is_const=False) ## vector.h (module 'core'): ns3::Vector3D::z [variable] cls.add_instance_attribute('z', 'double', is_const=False) return def register_Ns3WeibullVariable_methods(root_module, cls): ## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable(ns3::WeibullVariable const & arg0) [copy constructor] cls.add_constructor([param('ns3::WeibullVariable const &', 'arg0')]) ## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable() [constructor] cls.add_constructor([]) ## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable(double m) [constructor] cls.add_constructor([param('double', 'm')]) ## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable(double m, double s) [constructor] cls.add_constructor([param('double', 'm'), param('double', 's')]) ## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable(double m, double s, double b) [constructor] cls.add_constructor([param('double', 'm'), param('double', 's'), param('double', 'b')]) return def register_Ns3ZetaVariable_methods(root_module, cls): ## random-variable.h (module 'core'): ns3::ZetaVariable::ZetaVariable(ns3::ZetaVariable const & arg0) [copy constructor] cls.add_constructor([param('ns3::ZetaVariable const &', 'arg0')]) ## random-variable.h (module 'core'): ns3::ZetaVariable::ZetaVariable(double 
    ## random-variable.h (module 'core'): ns3::ZetaVariable::ZetaVariable(double alpha) [constructor]
    cls.add_constructor([param('double', 'alpha')])
    ## random-variable.h (module 'core'): ns3::ZetaVariable::ZetaVariable() [constructor]
    cls.add_constructor([])
    return

def register_Ns3ZipfVariable_methods(root_module, cls):
    # Registers the wrapped constructors of ns3::ZipfVariable.
    # NOTE(review): this module appears to be pybindgen-generated — do not hand-edit logic.
    ## random-variable.h (module 'core'): ns3::ZipfVariable::ZipfVariable(ns3::ZipfVariable const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ZipfVariable const &', 'arg0')])
    ## random-variable.h (module 'core'): ns3::ZipfVariable::ZipfVariable(long int N, double alpha) [constructor]
    cls.add_constructor([param('long int', 'N'), param('double', 'alpha')])
    ## random-variable.h (module 'core'): ns3::ZipfVariable::ZipfVariable() [constructor]
    cls.add_constructor([])
    return

def register_Ns3Empty_methods(root_module, cls):
    # Registers the wrapped constructors of ns3::empty (helper tag type).
    ## empty.h (module 'core'): ns3::empty::empty() [constructor]
    cls.add_constructor([])
    ## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::empty const &', 'arg0')])
    return

def register_Ns3Int64x64_t_methods(root_module, cls):
    # Registers the arithmetic operator overloads of ns3::int64x64_t, one
    # registration per C++ right-hand-side type ('*', then '+', then '-').
    # (Registration of this class continues on the following lines.)
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
    cls.add_unary_numeric_operator('-')
    # (Continuation of register_Ns3Int64x64_t_methods: remaining operators and
    # the wrapped constructors of ns3::int64x64_t.)
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', 'right'))
    cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', 'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', 'right'))
    cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', 'right'))
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>=')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
    cls.add_constructor([])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(double v) [constructor]
    cls.add_constructor([param('double', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int v) [constructor]
    cls.add_constructor([param('int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long int v) [constructor]
    cls.add_constructor([param('long int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int v) [constructor]
    cls.add_constructor([param('long long int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int v) [constructor]
    cls.add_constructor([param('unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int v) [constructor]
    cls.add_constructor([param('long unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int v) [constructor]
    cls.add_constructor([param('long long unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t hi, uint64_t lo) [constructor]
    cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [copy constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
    ## int64x64-double.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
    cls.add_method('GetDouble',
                   'double',
                   [],
                   is_const=True)
    ## int64x64-double.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
    cls.add_method('GetHigh',
                   'int64_t',
                   [],
                   is_const=True)
    ## int64x64-double.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
    cls.add_method('GetLow',
                   'uint64_t',
                   [],
                   is_const=True)
    ## int64x64-double.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t v) [member function]
    cls.add_method('Invert',
                   'ns3::int64x64_t',
                   [param('uint64_t', 'v')],
                   is_static=True)
    ## int64x64-double.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
    cls.add_method('MulByInvert',
                   'void',
                   [param('ns3::int64x64_t const &', 'o')])
    return

def register_Ns3ConstantVariable_methods(root_module, cls):
    # Registers the wrapped constructors and methods of ns3::ConstantVariable.
    ## random-variable.h (module 'core'): ns3::ConstantVariable::ConstantVariable(ns3::ConstantVariable const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ConstantVariable const &', 'arg0')])
    ## random-variable.h (module 'core'): ns3::ConstantVariable::ConstantVariable() [constructor]
    cls.add_constructor([])
    ## random-variable.h (module 'core'): ns3::ConstantVariable::ConstantVariable(double c) [constructor]
    cls.add_constructor([param('double', 'c')])
    ## random-variable.h (module 'core'): void ns3::ConstantVariable::SetConstant(double c) [member function]
    cls.add_method('SetConstant',
                   'void',
                   [param('double', 'c')])
    return

def register_Ns3DeterministicVariable_methods(root_module, cls):
    # Registers the wrapped constructors of ns3::DeterministicVariable.
    ## random-variable.h (module 'core'): ns3::DeterministicVariable::DeterministicVariable(ns3::DeterministicVariable const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::DeterministicVariable const &', 'arg0')])
    ## random-variable.h (module 'core'): ns3::DeterministicVariable::DeterministicVariable(double * d, uint32_t c) [constructor]
    cls.add_constructor([param('double *', 'd'), param('uint32_t', 'c')])
    return

def register_Ns3EmpiricalVariable_methods(root_module, cls):
    # Registers the wrapped constructors and CDF method of ns3::EmpiricalVariable.
    ## random-variable.h (module 'core'): ns3::EmpiricalVariable::EmpiricalVariable(ns3::EmpiricalVariable const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EmpiricalVariable const &', 'arg0')])
    ## random-variable.h (module 'core'): ns3::EmpiricalVariable::EmpiricalVariable() [constructor]
    cls.add_constructor([])
    ## random-variable.h (module 'core'): void ns3::EmpiricalVariable::CDF(double v, double c) [member function]
    cls.add_method('CDF',
                   'void',
                   [param('double', 'v'), param('double', 'c')])
    return

def register_Ns3ErlangVariable_methods(root_module, cls):
    # Registers the wrapped constructors and GetValue overloads of ns3::ErlangVariable.
    ## random-variable.h (module 'core'): ns3::ErlangVariable::ErlangVariable(ns3::ErlangVariable const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ErlangVariable const &', 'arg0')])
    ## random-variable.h (module 'core'): ns3::ErlangVariable::ErlangVariable() [constructor]
    cls.add_constructor([])
    ## random-variable.h (module 'core'): ns3::ErlangVariable::ErlangVariable(unsigned int k, double lambda) [constructor]
    cls.add_constructor([param('unsigned int', 'k'), param('double', 'lambda')])
    ## random-variable.h (module 'core'): double ns3::ErlangVariable::GetValue() const [member function]
    cls.add_method('GetValue',
                   'double',
                   [],
                   is_const=True)
    ## random-variable.h (module 'core'): double ns3::ErlangVariable::GetValue(unsigned int k, double lambda) const [member function]
    cls.add_method('GetValue',
                   'double',
                   [param('unsigned int', 'k'), param('double', 'lambda')],
                   is_const=True)
    return

def register_Ns3ExponentialVariable_methods(root_module, cls):
    # Registers the wrapped constructors of ns3::ExponentialVariable.
    ## random-variable.h (module 'core'): ns3::ExponentialVariable::ExponentialVariable(ns3::ExponentialVariable const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ExponentialVariable const &', 'arg0')])
    ## random-variable.h (module 'core'): ns3::ExponentialVariable::ExponentialVariable() [constructor]
    cls.add_constructor([])
    ## random-variable.h (module 'core'): ns3::ExponentialVariable::ExponentialVariable(double m) [constructor]
    cls.add_constructor([param('double', 'm')])
    ## random-variable.h (module 'core'): ns3::ExponentialVariable::ExponentialVariable(double m, double b) [constructor]
    cls.add_constructor([param('double', 'm'), param('double', 'b')])
    return

def register_Ns3GammaVariable_methods(root_module, cls):
    # Registers the wrapped constructors and GetValue overloads of ns3::GammaVariable.
    ## random-variable.h (module 'core'): ns3::GammaVariable::GammaVariable(ns3::GammaVariable const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::GammaVariable const &', 'arg0')])
    ## random-variable.h (module 'core'): ns3::GammaVariable::GammaVariable() [constructor]
    cls.add_constructor([])
    ## random-variable.h (module 'core'): ns3::GammaVariable::GammaVariable(double alpha, double beta) [constructor]
    cls.add_constructor([param('double', 'alpha'), param('double', 'beta')])
    ## random-variable.h (module 'core'): double ns3::GammaVariable::GetValue() const [member function]
    cls.add_method('GetValue',
                   'double',
                   [],
                   is_const=True)
    ## random-variable.h (module 'core'): double ns3::GammaVariable::GetValue(double alpha, double beta) const [member function]
    cls.add_method('GetValue',
                   'double',
                   [param('double', 'alpha'), param('double', 'beta')],
                   is_const=True)
    return

def register_Ns3IntEmpiricalVariable_methods(root_module, cls):
    # Registers the wrapped constructors of ns3::IntEmpiricalVariable.
    ## random-variable.h (module 'core'): ns3::IntEmpiricalVariable::IntEmpiricalVariable(ns3::IntEmpiricalVariable const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntEmpiricalVariable const &', 'arg0')])
    ## random-variable.h (module 'core'): ns3::IntEmpiricalVariable::IntEmpiricalVariable() [constructor]
    cls.add_constructor([])
    return

def register_Ns3LogNormalVariable_methods(root_module, cls):
    # Registers the wrapped constructors of ns3::LogNormalVariable.
    ## random-variable.h (module 'core'): ns3::LogNormalVariable::LogNormalVariable(ns3::LogNormalVariable const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::LogNormalVariable const &', 'arg0')])
    ## random-variable.h (module 'core'): ns3::LogNormalVariable::LogNormalVariable(double mu, double sigma) [constructor]
    cls.add_constructor([param('double', 'mu'), param('double', 'sigma')])
    return

def register_Ns3NormalVariable_methods(root_module, cls):
    # Registers the wrapped constructors of ns3::NormalVariable.
    ## random-variable.h (module 'core'): ns3::NormalVariable::NormalVariable(ns3::NormalVariable const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::NormalVariable const &', 'arg0')])
    ## random-variable.h (module 'core'): ns3::NormalVariable::NormalVariable() [constructor]
    cls.add_constructor([])
    ## random-variable.h (module 'core'): ns3::NormalVariable::NormalVariable(double m, double v) [constructor]
    cls.add_constructor([param('double', 'm'), param('double', 'v')])
    ## random-variable.h (module 'core'): ns3::NormalVariable::NormalVariable(double m, double v, double b) [constructor]
    cls.add_constructor([param('double', 'm'), param('double', 'v'), param('double', 'b')])
    return

def register_Ns3Object_methods(root_module, cls):
    # Registers the wrapped constructors and (public + protected virtual)
    # methods of ns3::Object. (Registration continues on the following lines.)
    ## object.h (module 'core'): ns3::Object::Object() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
    cls.add_method('AggregateObject',
                   'void',
                   [param('ns3::Ptr< ns3::Object >', 'other')])
    ## object.h (module 'core'): void ns3::Object::Dispose() [member function]
    cls.add_method('Dispose',
                   'void',
                   [])
    ## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
    cls.add_method('GetAggregateIterator',
                   'ns3::Object::AggregateIterator',
                   [],
                   is_const=True)
    ## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## object.h (module 'core'): void ns3::Object::Start() [member function]
    cls.add_method('Start',
                   'void',
                   [])
    ## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
    cls.add_constructor([param('ns3::Object const &', 'o')],
                        visibility='protected')
    ## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::DoStart() [member function]
    cls.add_method('DoStart',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
    cls.add_method('NotifyNewAggregate',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return

def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    # Registers the wrapped constructors and methods of ns3::Object::AggregateIterator.
    ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
    ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
    cls.add_method('HasNext',
                   'bool',
                   [],
                   is_const=True)
    ## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
    cls.add_method('Next',
                   'ns3::Ptr< ns3::Object const >',
                   [])
    return

def register_Ns3ParetoVariable_methods(root_module, cls):
    # Registers the wrapped constructors of ns3::ParetoVariable.
    ## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(ns3::ParetoVariable const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ParetoVariable const &', 'arg0')])
    ## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable() [constructor]
    cls.add_constructor([])
    ## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(double m) [constructor]
    cls.add_constructor([param('double', 'm')])
    ## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(double m, double s) [constructor]
    cls.add_constructor([param('double', 'm'), param('double', 's')])
    ## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(double m, double s, double b) [constructor]
    cls.add_constructor([param('double', 'm'), param('double', 's'), param('double', 'b')])
    ## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(std::pair<double,double> params) [constructor]
    cls.add_constructor([param('std::pair< double, double >', 'params')])
    ## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(std::pair<double,double> params, double b) [constructor]
    cls.add_constructor([param('std::pair< double, double >', 'params'), param('double', 'b')])
    return

def register_Ns3PropagationDelayModel_methods(root_module, cls):
    # Registers the wrapped constructors and (pure virtual) methods of the
    # abstract base ns3::PropagationDelayModel.
    ## propagation-delay-model.h (module 'propagation'): ns3::PropagationDelayModel::PropagationDelayModel() [constructor]
    cls.add_constructor([])
    ## propagation-delay-model.h (module 'propagation'): ns3::PropagationDelayModel::PropagationDelayModel(ns3::PropagationDelayModel const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::PropagationDelayModel const &', 'arg0')])
    ## propagation-delay-model.h (module 'propagation'): ns3::Time ns3::PropagationDelayModel::GetDelay(ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
    cls.add_method('GetDelay',
                   'ns3::Time',
                   [param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## propagation-delay-model.h (module 'propagation'): static ns3::TypeId ns3::PropagationDelayModel::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    return

def register_Ns3PropagationLossModel_methods(root_module, cls):
    # Registers the wrapped constructors and methods of the abstract base
    # ns3::PropagationLossModel. (Registration continues on the following lines.)
    ## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::PropagationLossModel::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## propagation-loss-model.h (module 'propagation'): ns3::PropagationLossModel::PropagationLossModel() [constructor]
    cls.add_constructor([])
    ## propagation-loss-model.h (module 'propagation'): void ns3::PropagationLossModel::SetNext(ns3::Ptr<ns3::PropagationLossModel> next) [member function]
    cls.add_method('SetNext',
                   'void',
                   [param('ns3::Ptr< ns3::PropagationLossModel >', 'next')])
    ## propagation-loss-model.h (module 'propagation'): double ns3::PropagationLossModel::CalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
    cls.add_method('CalcRxPower',
                   'double',
                   [param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
                   is_const=True)
    ## propagation-loss-model.h (module 'propagation'): double ns3::PropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
    cls.add_method('DoCalcRxPower',
                   'double',
                   [param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    return

def register_Ns3RandomPropagationDelayModel_methods(root_module, cls):
    # Registers the wrapped constructors and methods of ns3::RandomPropagationDelayModel.
    ## propagation-delay-model.h (module 'propagation'): ns3::RandomPropagationDelayModel::RandomPropagationDelayModel(ns3::RandomPropagationDelayModel const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::RandomPropagationDelayModel const &', 'arg0')])
    ## propagation-delay-model.h (module 'propagation'): ns3::RandomPropagationDelayModel::RandomPropagationDelayModel() [constructor]
    cls.add_constructor([])
    ## propagation-delay-model.h (module 'propagation'): ns3::Time ns3::RandomPropagationDelayModel::GetDelay(ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
    cls.add_method('GetDelay',
                   'ns3::Time',
                   [param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
                   is_const=True, is_virtual=True)
    ## propagation-delay-model.h (module 'propagation'): static ns3::TypeId ns3::RandomPropagationDelayModel::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    return

def register_Ns3RandomPropagationLossModel_methods(root_module, cls):
    # Registers the wrapped constructors and methods of ns3::RandomPropagationLossModel.
    ## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::RandomPropagationLossModel::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## propagation-loss-model.h (module 'propagation'): ns3::RandomPropagationLossModel::RandomPropagationLossModel() [constructor]
    cls.add_constructor([])
    ## propagation-loss-model.h (module 'propagation'): double ns3::RandomPropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
    cls.add_method('DoCalcRxPower',
                   'double',
                   [param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
                   is_const=True, visibility='private', is_virtual=True)
    return

def register_Ns3RangePropagationLossModel_methods(root_module, cls):
    # Registers the wrapped constructors and methods of ns3::RangePropagationLossModel.
    ## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::RangePropagationLossModel::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## propagation-loss-model.h (module 'propagation'): ns3::RangePropagationLossModel::RangePropagationLossModel() [constructor]
    cls.add_constructor([])
    ## propagation-loss-model.h (module 'propagation'): double ns3::RangePropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
    cls.add_method('DoCalcRxPower',
                   'double',
                   [param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
                   is_const=True, visibility='private', is_virtual=True)
    return

def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    # Registers the SimpleRefCount<ns3::AttributeAccessor, ...> template instantiation.
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    # Registers the SimpleRefCount<ns3::AttributeChecker, ...> template instantiation.
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    # Registers the SimpleRefCount<ns3::AttributeValue, ...> template instantiation.
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    # Registers the SimpleRefCount<ns3::CallbackImplBase, ...> template instantiation.
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    # Registers the SimpleRefCount<ns3::TraceSourceAccessor, ...> template instantiation.
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return

def register_Ns3ThreeLogDistancePropagationLossModel_methods(root_module, cls):
    # Registers the wrapped constructors and methods of ns3::ThreeLogDistancePropagationLossModel.
    ## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::ThreeLogDistancePropagationLossModel::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## propagation-loss-model.h (module 'propagation'): ns3::ThreeLogDistancePropagationLossModel::ThreeLogDistancePropagationLossModel() [constructor]
    cls.add_constructor([])
    ## propagation-loss-model.h (module 'propagation'): double ns3::ThreeLogDistancePropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function]
    cls.add_method('DoCalcRxPower',
                   'double',
                   [param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')],
                   is_const=True, visibility='private', is_virtual=True)
    return

def register_Ns3Time_methods(root_module, cls):
    # Registers the operator overloads, wrapped constructors, and methods of
    # ns3::Time (operators first, then constructors, then member functions).
    cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', 'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', 'right'))
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', 'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', 'right'))
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>=')
    ## nstime.h (module 'core'): ns3::Time::Time() [constructor]
    cls.add_constructor([])
    ## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [copy constructor]
    cls.add_constructor([param('ns3::Time const &', 'o')])
    ## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor]
    cls.add_constructor([param('double', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor]
    cls.add_constructor([param('int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor]
    cls.add_constructor([param('long int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor]
    cls.add_constructor([param('long long int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor]
    cls.add_constructor([param('unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor]
    cls.add_constructor([param('long unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor]
    cls.add_constructor([param('long long unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor]
    cls.add_constructor([param('std::string const &', 's')])
    ## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & value) [constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'value')])
    ## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function]
    cls.add_method('Compare',
                   'int',
                   [param('ns3::Time const &', 'o')],
                   is_const=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & from, ns3::Time::Unit timeUnit) [member function]
    cls.add_method('From',
                   'ns3::Time',
                   [param('ns3::int64x64_t const &', 'from'), param('ns3::Time::Unit', 'timeUnit')],
                   is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function]
    cls.add_method('From',
                   'ns3::Time',
                   [param('ns3::int64x64_t const &', 'value')],
                   is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit timeUnit) [member function]
    cls.add_method('FromDouble',
                   'ns3::Time',
                   [param('double', 'value'), param('ns3::Time::Unit', 'timeUnit')],
                   is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit timeUnit) [member function]
    cls.add_method('FromInteger',
                   'ns3::Time',
                   [param('uint64_t', 'value'), param('ns3::Time::Unit', 'timeUnit')],
                   is_static=True)
    ## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function]
    cls.add_method('GetDouble',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function]
    cls.add_method('GetFemtoSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function]
    cls.add_method('GetInteger',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function]
    cls.add_method('GetMicroSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function]
    cls.add_method('GetMilliSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function]
    cls.add_method('GetNanoSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function]
    cls.add_method('GetPicoSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function]
    cls.add_method('GetResolution',
                   'ns3::Time::Unit',
                   [],
                   is_static=True)
    ## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function]
    cls.add_method('GetSeconds',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function]
    cls.add_method('GetTimeStep',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function]
    cls.add_method('IsNegative',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function]
    cls.add_method('IsPositive',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function]
    cls.add_method('IsStrictlyNegative',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function]
    cls.add_method('IsStrictlyPositive',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsZero() const [member function]
    cls.add_method('IsZero',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
    cls.add_method('SetResolution',
                   'void',
                   [param('ns3::Time::Unit', 'resolution')],
                   is_static=True)
    ## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit timeUnit) const [member function]
    cls.add_method('To',
                   'ns3::int64x64_t',
                   [param('ns3::Time::Unit', 'timeUnit')],
                   is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit timeUnit) const [member function]
    cls.add_method('ToDouble',
                   'double',
                   [param('ns3::Time::Unit', 'timeUnit')],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit timeUnit) const [member function]
    cls.add_method('ToInteger',
                   'int64_t',
                   [param('ns3::Time::Unit', 'timeUnit')],
                   is_const=True)
    return

def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    # Registers the wrapped constructors and (pure virtual) connect/disconnect
    # methods of the abstract base ns3::TraceSourceAccessor.
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
    cls.add_constructor([])
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Connect',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('ConnectWithoutContext',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Disconnect',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('DisconnectWithoutContext',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3TwoRayGroundPropagationLossModel_methods(root_module, cls):
    # Registers the wrapped constructors and methods of
    # ns3::TwoRayGroundPropagationLossModel.
    # (Registration of this class continues past this chunk.)
    ## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::TwoRayGroundPropagationLossModel::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## propagation-loss-model.h (module 'propagation'): ns3::TwoRayGroundPropagationLossModel::TwoRayGroundPropagationLossModel() [constructor]
    cls.add_constructor([])
    ## propagation-loss-model.h (module 'propagation'): void ns3::TwoRayGroundPropagationLossModel::SetLambda(double frequency, double speed) [member function]
    cls.add_method('SetLambda',
                   'void',
                   [param('double', 'frequency'), param('double', 'speed')])
    ## propagation-loss-model.h (module 'propagation'): void ns3::TwoRayGroundPropagationLossModel::SetLambda(double lambda) [member function]
    cls.add_method('SetLambda',
                   'void',
                   [param('double', 'lambda')])
    ## propagation-loss-model.h (module 'propagation'): void ns3::TwoRayGroundPropagationLossModel::SetSystemLoss(double systemLoss) [member function]
    cls.add_method('SetSystemLoss',
                   'void',
                   [param('double', 'systemLoss')])
    ## propagation-loss-model.h (module 'propagation'): void ns3::TwoRayGroundPropagationLossModel::SetMinDistance(double minDistance) [member function]
    cls.add_method('SetMinDistance',
                   'void',
                   [param('double', 'minDistance')])
    ## propagation-loss-model.h (module 'propagation'): double ns3::TwoRayGroundPropagationLossModel::GetMinDistance() const [member function]
    cls.add_method('GetMinDistance',
                   'double',
                   [],
                   is_const=True)
    ## propagation-loss-model.h (module 'propagation'): double
ns3::TwoRayGroundPropagationLossModel::GetLambda() const [member function] cls.add_method('GetLambda', 'double', [], is_const=True) ## propagation-loss-model.h (module 'propagation'): double ns3::TwoRayGroundPropagationLossModel::GetSystemLoss() const [member function] cls.add_method('GetSystemLoss', 'double', [], is_const=True) ## propagation-loss-model.h (module 'propagation'): void ns3::TwoRayGroundPropagationLossModel::SetHeightAboveZ(double heightAboveZ) [member function] cls.add_method('SetHeightAboveZ', 'void', [param('double', 'heightAboveZ')]) ## propagation-loss-model.h (module 'propagation'): double ns3::TwoRayGroundPropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('DoCalcRxPower', 'double', [param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True, visibility='private', is_virtual=True) return def register_Ns3AttributeAccessor_methods(root_module, cls): ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor] cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')]) ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor] cls.add_constructor([]) ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function] cls.add_method('Get', 'bool', [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function] cls.add_method('HasGetter', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function] 
cls.add_method('HasSetter', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function] cls.add_method('Set', 'bool', [param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')], is_pure_virtual=True, is_const=True, is_virtual=True) return def register_Ns3AttributeChecker_methods(root_module, cls): ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')]) ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor] cls.add_constructor([]) ## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function] cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function] cls.add_method('Copy', 'bool', [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function] cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function] cls.add_method('CreateValidValue', 'ns3::Ptr< ns3::AttributeValue >', [param('ns3::AttributeValue const &', 'value')], is_const=True) ## attribute.h (module 'core'): std::string 
ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function] cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function] cls.add_method('GetValueTypeName', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function] cls.add_method('HasUnderlyingTypeInformation', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True) return def register_Ns3AttributeValue_methods(root_module, cls): ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')]) ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor] cls.add_constructor([]) ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_pure_virtual=True, is_virtual=True) ## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_pure_virtual=True, is_const=True, is_virtual=True) return def register_Ns3CallbackChecker_methods(root_module, cls): ## callback.h (module 'core'): 
ns3::CallbackChecker::CallbackChecker() [constructor] cls.add_constructor([]) ## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')]) return def register_Ns3CallbackImplBase_methods(root_module, cls): ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor] cls.add_constructor([]) ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor] cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')]) ## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function] cls.add_method('IsEqual', 'bool', [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')], is_pure_virtual=True, is_const=True, is_virtual=True) return def register_Ns3CallbackValue_methods(root_module, cls): ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')]) ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor] cls.add_constructor([]) ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor] cls.add_constructor([param('ns3::CallbackBase const &', 'base')]) ## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## callback.h 
(module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function] cls.add_method('Set', 'void', [param('ns3::CallbackBase', 'base')]) return def register_Ns3ConstantSpeedPropagationDelayModel_methods(root_module, cls): ## propagation-delay-model.h (module 'propagation'): ns3::ConstantSpeedPropagationDelayModel::ConstantSpeedPropagationDelayModel(ns3::ConstantSpeedPropagationDelayModel const & arg0) [copy constructor] cls.add_constructor([param('ns3::ConstantSpeedPropagationDelayModel const &', 'arg0')]) ## propagation-delay-model.h (module 'propagation'): ns3::ConstantSpeedPropagationDelayModel::ConstantSpeedPropagationDelayModel() [constructor] cls.add_constructor([]) ## propagation-delay-model.h (module 'propagation'): ns3::Time ns3::ConstantSpeedPropagationDelayModel::GetDelay(ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('GetDelay', 'ns3::Time', [param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True, is_virtual=True) ## propagation-delay-model.h (module 'propagation'): double ns3::ConstantSpeedPropagationDelayModel::GetSpeed() const [member function] cls.add_method('GetSpeed', 'double', [], is_const=True) ## propagation-delay-model.h (module 'propagation'): static ns3::TypeId ns3::ConstantSpeedPropagationDelayModel::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## propagation-delay-model.h (module 'propagation'): void ns3::ConstantSpeedPropagationDelayModel::SetSpeed(double speed) [member function] cls.add_method('SetSpeed', 'void', [param('double', 'speed')]) return def 
register_Ns3Cost231PropagationLossModel_methods(root_module, cls): ## cost231-propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::Cost231PropagationLossModel::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## cost231-propagation-loss-model.h (module 'propagation'): ns3::Cost231PropagationLossModel::Cost231PropagationLossModel() [constructor] cls.add_constructor([]) ## cost231-propagation-loss-model.h (module 'propagation'): double ns3::Cost231PropagationLossModel::GetLoss(ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('GetLoss', 'double', [param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True) ## cost231-propagation-loss-model.h (module 'propagation'): void ns3::Cost231PropagationLossModel::SetBSAntennaHeight(double height) [member function] cls.add_method('SetBSAntennaHeight', 'void', [param('double', 'height')]) ## cost231-propagation-loss-model.h (module 'propagation'): void ns3::Cost231PropagationLossModel::SetSSAntennaHeight(double height) [member function] cls.add_method('SetSSAntennaHeight', 'void', [param('double', 'height')]) ## cost231-propagation-loss-model.h (module 'propagation'): void ns3::Cost231PropagationLossModel::SetEnvironment(ns3::Cost231PropagationLossModel::Environment env) [member function] cls.add_method('SetEnvironment', 'void', [param('ns3::Cost231PropagationLossModel::Environment', 'env')]) ## cost231-propagation-loss-model.h (module 'propagation'): void ns3::Cost231PropagationLossModel::SetLambda(double lambda) [member function] cls.add_method('SetLambda', 'void', [param('double', 'lambda')]) ## cost231-propagation-loss-model.h (module 'propagation'): void ns3::Cost231PropagationLossModel::SetMinDistance(double minDistance) [member function] cls.add_method('SetMinDistance', 'void', [param('double', 'minDistance')]) ## cost231-propagation-loss-model.h (module 
'propagation'): double ns3::Cost231PropagationLossModel::GetBSAntennaHeight() const [member function] cls.add_method('GetBSAntennaHeight', 'double', [], is_const=True) ## cost231-propagation-loss-model.h (module 'propagation'): double ns3::Cost231PropagationLossModel::GetSSAntennaHeight() const [member function] cls.add_method('GetSSAntennaHeight', 'double', [], is_const=True) ## cost231-propagation-loss-model.h (module 'propagation'): ns3::Cost231PropagationLossModel::Environment ns3::Cost231PropagationLossModel::GetEnvironment() const [member function] cls.add_method('GetEnvironment', 'ns3::Cost231PropagationLossModel::Environment', [], is_const=True) ## cost231-propagation-loss-model.h (module 'propagation'): double ns3::Cost231PropagationLossModel::GetMinDistance() const [member function] cls.add_method('GetMinDistance', 'double', [], is_const=True) ## cost231-propagation-loss-model.h (module 'propagation'): double ns3::Cost231PropagationLossModel::GetLambda() const [member function] cls.add_method('GetLambda', 'double', [], is_const=True) ## cost231-propagation-loss-model.h (module 'propagation'): void ns3::Cost231PropagationLossModel::SetLambda(double frequency, double speed) [member function] cls.add_method('SetLambda', 'void', [param('double', 'frequency'), param('double', 'speed')]) ## cost231-propagation-loss-model.h (module 'propagation'): double ns3::Cost231PropagationLossModel::GetShadowing() [member function] cls.add_method('GetShadowing', 'double', []) ## cost231-propagation-loss-model.h (module 'propagation'): void ns3::Cost231PropagationLossModel::SetShadowing(double shadowing) [member function] cls.add_method('SetShadowing', 'void', [param('double', 'shadowing')]) ## cost231-propagation-loss-model.h (module 'propagation'): double ns3::Cost231PropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('DoCalcRxPower', 'double', [param('double', 
'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True, visibility='private', is_virtual=True) return def register_Ns3EmptyAttributeValue_methods(root_module, cls): ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')]) ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor] cls.add_constructor([]) ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, visibility='private', is_virtual=True) ## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], visibility='private', is_virtual=True) ## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, visibility='private', is_virtual=True) return def register_Ns3FixedRssLossModel_methods(root_module, cls): ## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::FixedRssLossModel::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## propagation-loss-model.h (module 'propagation'): ns3::FixedRssLossModel::FixedRssLossModel() [constructor] cls.add_constructor([]) ## propagation-loss-model.h (module 'propagation'): void ns3::FixedRssLossModel::SetRss(double rss) [member function] 
cls.add_method('SetRss', 'void', [param('double', 'rss')]) ## propagation-loss-model.h (module 'propagation'): double ns3::FixedRssLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('DoCalcRxPower', 'double', [param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True, visibility='private', is_virtual=True) return def register_Ns3FriisPropagationLossModel_methods(root_module, cls): ## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::FriisPropagationLossModel::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## propagation-loss-model.h (module 'propagation'): ns3::FriisPropagationLossModel::FriisPropagationLossModel() [constructor] cls.add_constructor([]) ## propagation-loss-model.h (module 'propagation'): void ns3::FriisPropagationLossModel::SetLambda(double frequency, double speed) [member function] cls.add_method('SetLambda', 'void', [param('double', 'frequency'), param('double', 'speed')]) ## propagation-loss-model.h (module 'propagation'): void ns3::FriisPropagationLossModel::SetLambda(double lambda) [member function] cls.add_method('SetLambda', 'void', [param('double', 'lambda')]) ## propagation-loss-model.h (module 'propagation'): void ns3::FriisPropagationLossModel::SetSystemLoss(double systemLoss) [member function] cls.add_method('SetSystemLoss', 'void', [param('double', 'systemLoss')]) ## propagation-loss-model.h (module 'propagation'): void ns3::FriisPropagationLossModel::SetMinDistance(double minDistance) [member function] cls.add_method('SetMinDistance', 'void', [param('double', 'minDistance')]) ## propagation-loss-model.h (module 'propagation'): double ns3::FriisPropagationLossModel::GetMinDistance() const [member function] cls.add_method('GetMinDistance', 'double', [], is_const=True) ## propagation-loss-model.h 
(module 'propagation'): double ns3::FriisPropagationLossModel::GetLambda() const [member function] cls.add_method('GetLambda', 'double', [], is_const=True) ## propagation-loss-model.h (module 'propagation'): double ns3::FriisPropagationLossModel::GetSystemLoss() const [member function] cls.add_method('GetSystemLoss', 'double', [], is_const=True) ## propagation-loss-model.h (module 'propagation'): double ns3::FriisPropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('DoCalcRxPower', 'double', [param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True, visibility='private', is_virtual=True) return def register_Ns3ItuR1411LosPropagationLossModel_methods(root_module, cls): ## itu-r-1411-los-propagation-loss-model.h (module 'propagation'): ns3::ItuR1411LosPropagationLossModel::ItuR1411LosPropagationLossModel() [constructor] cls.add_constructor([]) ## itu-r-1411-los-propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::ItuR1411LosPropagationLossModel::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## itu-r-1411-los-propagation-loss-model.h (module 'propagation'): void ns3::ItuR1411LosPropagationLossModel::SetFrequency(double freq) [member function] cls.add_method('SetFrequency', 'void', [param('double', 'freq')]) ## itu-r-1411-los-propagation-loss-model.h (module 'propagation'): double ns3::ItuR1411LosPropagationLossModel::GetLoss(ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('GetLoss', 'double', [param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True) ## itu-r-1411-los-propagation-loss-model.h (module 'propagation'): double ns3::ItuR1411LosPropagationLossModel::DoCalcRxPower(double txPowerDbm, 
ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('DoCalcRxPower', 'double', [param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True, visibility='private', is_virtual=True) return def register_Ns3ItuR1411NlosOverRooftopPropagationLossModel_methods(root_module, cls): ## itu-r-1411-nlos-over-rooftop-propagation-loss-model.h (module 'propagation'): ns3::ItuR1411NlosOverRooftopPropagationLossModel::ItuR1411NlosOverRooftopPropagationLossModel() [constructor] cls.add_constructor([]) ## itu-r-1411-nlos-over-rooftop-propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::ItuR1411NlosOverRooftopPropagationLossModel::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## itu-r-1411-nlos-over-rooftop-propagation-loss-model.h (module 'propagation'): void ns3::ItuR1411NlosOverRooftopPropagationLossModel::SetFrequency(double freq) [member function] cls.add_method('SetFrequency', 'void', [param('double', 'freq')]) ## itu-r-1411-nlos-over-rooftop-propagation-loss-model.h (module 'propagation'): double ns3::ItuR1411NlosOverRooftopPropagationLossModel::GetLoss(ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('GetLoss', 'double', [param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True) ## itu-r-1411-nlos-over-rooftop-propagation-loss-model.h (module 'propagation'): double ns3::ItuR1411NlosOverRooftopPropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('DoCalcRxPower', 'double', [param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True, visibility='private', is_virtual=True) return def 
register_Ns3JakesProcess_methods(root_module, cls): ## jakes-process.h (module 'propagation'): ns3::JakesProcess::JakesProcess(ns3::JakesProcess const & arg0) [copy constructor] cls.add_constructor([param('ns3::JakesProcess const &', 'arg0')]) ## jakes-process.h (module 'propagation'): ns3::JakesProcess::JakesProcess() [constructor] cls.add_constructor([]) ## jakes-process.h (module 'propagation'): double ns3::JakesProcess::GetChannelGainDb() const [member function] cls.add_method('GetChannelGainDb', 'double', [], is_const=True) ## jakes-process.h (module 'propagation'): std::complex<double> ns3::JakesProcess::GetComplexGain() const [member function] cls.add_method('GetComplexGain', 'std::complex< double >', [], is_const=True) ## jakes-process.h (module 'propagation'): static ns3::TypeId ns3::JakesProcess::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) return def register_Ns3JakesPropagationLossModel_methods(root_module, cls): ## jakes-propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::JakesPropagationLossModel::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## jakes-propagation-loss-model.h (module 'propagation'): ns3::JakesPropagationLossModel::JakesPropagationLossModel() [constructor] cls.add_constructor([]) ## jakes-propagation-loss-model.h (module 'propagation'): double ns3::JakesPropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('DoCalcRxPower', 'double', [param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True, visibility='private', is_virtual=True) return def register_Ns3Kun2600MhzPropagationLossModel_methods(root_module, cls): ## kun-2600-mhz-propagation-loss-model.h (module 'propagation'): ns3::Kun2600MhzPropagationLossModel::Kun2600MhzPropagationLossModel() 
[constructor] cls.add_constructor([]) ## kun-2600-mhz-propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::Kun2600MhzPropagationLossModel::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## kun-2600-mhz-propagation-loss-model.h (module 'propagation'): double ns3::Kun2600MhzPropagationLossModel::GetLoss(ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('GetLoss', 'double', [param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True) ## kun-2600-mhz-propagation-loss-model.h (module 'propagation'): double ns3::Kun2600MhzPropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('DoCalcRxPower', 'double', [param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True, visibility='private', is_virtual=True) return def register_Ns3LogDistancePropagationLossModel_methods(root_module, cls): ## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::LogDistancePropagationLossModel::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## propagation-loss-model.h (module 'propagation'): ns3::LogDistancePropagationLossModel::LogDistancePropagationLossModel() [constructor] cls.add_constructor([]) ## propagation-loss-model.h (module 'propagation'): void ns3::LogDistancePropagationLossModel::SetPathLossExponent(double n) [member function] cls.add_method('SetPathLossExponent', 'void', [param('double', 'n')]) ## propagation-loss-model.h (module 'propagation'): double ns3::LogDistancePropagationLossModel::GetPathLossExponent() const [member function] cls.add_method('GetPathLossExponent', 'double', [], is_const=True) ## propagation-loss-model.h (module 'propagation'): void 
ns3::LogDistancePropagationLossModel::SetReference(double referenceDistance, double referenceLoss) [member function] cls.add_method('SetReference', 'void', [param('double', 'referenceDistance'), param('double', 'referenceLoss')]) ## propagation-loss-model.h (module 'propagation'): double ns3::LogDistancePropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('DoCalcRxPower', 'double', [param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True, visibility='private', is_virtual=True) return def register_Ns3MatrixPropagationLossModel_methods(root_module, cls): ## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::MatrixPropagationLossModel::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## propagation-loss-model.h (module 'propagation'): ns3::MatrixPropagationLossModel::MatrixPropagationLossModel() [constructor] cls.add_constructor([]) ## propagation-loss-model.h (module 'propagation'): void ns3::MatrixPropagationLossModel::SetLoss(ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b, double loss, bool symmetric=true) [member function] cls.add_method('SetLoss', 'void', [param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b'), param('double', 'loss'), param('bool', 'symmetric', default_value='true')]) ## propagation-loss-model.h (module 'propagation'): void ns3::MatrixPropagationLossModel::SetDefaultLoss(double arg0) [member function] cls.add_method('SetDefaultLoss', 'void', [param('double', 'arg0')]) ## propagation-loss-model.h (module 'propagation'): double ns3::MatrixPropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('DoCalcRxPower', 'double', [param('double', 
'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True, visibility='private', is_virtual=True) return def register_Ns3MobilityModel_methods(root_module, cls): ## mobility-model.h (module 'mobility'): ns3::MobilityModel::MobilityModel(ns3::MobilityModel const & arg0) [copy constructor] cls.add_constructor([param('ns3::MobilityModel const &', 'arg0')]) ## mobility-model.h (module 'mobility'): ns3::MobilityModel::MobilityModel() [constructor] cls.add_constructor([]) ## mobility-model.h (module 'mobility'): double ns3::MobilityModel::GetDistanceFrom(ns3::Ptr<ns3::MobilityModel const> position) const [member function] cls.add_method('GetDistanceFrom', 'double', [param('ns3::Ptr< ns3::MobilityModel const >', 'position')], is_const=True) ## mobility-model.h (module 'mobility'): ns3::Vector ns3::MobilityModel::GetPosition() const [member function] cls.add_method('GetPosition', 'ns3::Vector', [], is_const=True) ## mobility-model.h (module 'mobility'): double ns3::MobilityModel::GetRelativeSpeed(ns3::Ptr<ns3::MobilityModel const> other) const [member function] cls.add_method('GetRelativeSpeed', 'double', [param('ns3::Ptr< ns3::MobilityModel const >', 'other')], is_const=True) ## mobility-model.h (module 'mobility'): static ns3::TypeId ns3::MobilityModel::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## mobility-model.h (module 'mobility'): ns3::Vector ns3::MobilityModel::GetVelocity() const [member function] cls.add_method('GetVelocity', 'ns3::Vector', [], is_const=True) ## mobility-model.h (module 'mobility'): void ns3::MobilityModel::SetPosition(ns3::Vector const & position) [member function] cls.add_method('SetPosition', 'void', [param('ns3::Vector const &', 'position')]) ## mobility-model.h (module 'mobility'): void ns3::MobilityModel::NotifyCourseChange() const [member function] cls.add_method('NotifyCourseChange', 'void', [], is_const=True, 
visibility='protected') ## mobility-model.h (module 'mobility'): ns3::Vector ns3::MobilityModel::DoGetPosition() const [member function] cls.add_method('DoGetPosition', 'ns3::Vector', [], is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True) ## mobility-model.h (module 'mobility'): ns3::Vector ns3::MobilityModel::DoGetVelocity() const [member function] cls.add_method('DoGetVelocity', 'ns3::Vector', [], is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True) ## mobility-model.h (module 'mobility'): void ns3::MobilityModel::DoSetPosition(ns3::Vector const & position) [member function] cls.add_method('DoSetPosition', 'void', [param('ns3::Vector const &', 'position')], is_pure_virtual=True, visibility='private', is_virtual=True) return def register_Ns3NakagamiPropagationLossModel_methods(root_module, cls): ## propagation-loss-model.h (module 'propagation'): static ns3::TypeId ns3::NakagamiPropagationLossModel::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## propagation-loss-model.h (module 'propagation'): ns3::NakagamiPropagationLossModel::NakagamiPropagationLossModel() [constructor] cls.add_constructor([]) ## propagation-loss-model.h (module 'propagation'): double ns3::NakagamiPropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('DoCalcRxPower', 'double', [param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True, visibility='private', is_virtual=True) return def register_Ns3OkumuraHataPropagationLossModel_methods(root_module, cls): ## okumura-hata-propagation-loss-model.h (module 'propagation'): ns3::OkumuraHataPropagationLossModel::OkumuraHataPropagationLossModel() [constructor] cls.add_constructor([]) ## okumura-hata-propagation-loss-model.h (module 'propagation'): static ns3::TypeId 
ns3::OkumuraHataPropagationLossModel::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## okumura-hata-propagation-loss-model.h (module 'propagation'): double ns3::OkumuraHataPropagationLossModel::GetLoss(ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('GetLoss', 'double', [param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True) ## okumura-hata-propagation-loss-model.h (module 'propagation'): double ns3::OkumuraHataPropagationLossModel::DoCalcRxPower(double txPowerDbm, ns3::Ptr<ns3::MobilityModel> a, ns3::Ptr<ns3::MobilityModel> b) const [member function] cls.add_method('DoCalcRxPower', 'double', [param('double', 'txPowerDbm'), param('ns3::Ptr< ns3::MobilityModel >', 'a'), param('ns3::Ptr< ns3::MobilityModel >', 'b')], is_const=True, visibility='private', is_virtual=True) return def register_Ns3RandomVariableChecker_methods(root_module, cls): ## random-variable.h (module 'core'): ns3::RandomVariableChecker::RandomVariableChecker() [constructor] cls.add_constructor([]) ## random-variable.h (module 'core'): ns3::RandomVariableChecker::RandomVariableChecker(ns3::RandomVariableChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::RandomVariableChecker const &', 'arg0')]) return def register_Ns3RandomVariableValue_methods(root_module, cls): ## random-variable.h (module 'core'): ns3::RandomVariableValue::RandomVariableValue() [constructor] cls.add_constructor([]) ## random-variable.h (module 'core'): ns3::RandomVariableValue::RandomVariableValue(ns3::RandomVariableValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::RandomVariableValue const &', 'arg0')]) ## random-variable.h (module 'core'): ns3::RandomVariableValue::RandomVariableValue(ns3::RandomVariable const & value) [constructor] cls.add_constructor([param('ns3::RandomVariable const &', 'value')]) ## random-variable.h (module 
'core'): ns3::Ptr<ns3::AttributeValue> ns3::RandomVariableValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## random-variable.h (module 'core'): bool ns3::RandomVariableValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## random-variable.h (module 'core'): ns3::RandomVariable ns3::RandomVariableValue::Get() const [member function] cls.add_method('Get', 'ns3::RandomVariable', [], is_const=True) ## random-variable.h (module 'core'): std::string ns3::RandomVariableValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## random-variable.h (module 'core'): void ns3::RandomVariableValue::Set(ns3::RandomVariable const & value) [member function] cls.add_method('Set', 'void', [param('ns3::RandomVariable const &', 'value')]) return def register_Ns3TimeChecker_methods(root_module, cls): ## nstime.h (module 'core'): ns3::TimeChecker::TimeChecker() [constructor] cls.add_constructor([]) ## nstime.h (module 'core'): ns3::TimeChecker::TimeChecker(ns3::TimeChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::TimeChecker const &', 'arg0')]) return def register_Ns3TimeValue_methods(root_module, cls): ## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor] cls.add_constructor([]) ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::TimeValue const &', 'arg0')]) ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor] 
cls.add_constructor([param('ns3::Time const &', 'value')]) ## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function] cls.add_method('Get', 'ns3::Time', [], is_const=True) ## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Time const &', 'value')]) return def register_Ns3TypeIdChecker_methods(root_module, cls): ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')]) return def register_Ns3TypeIdValue_methods(root_module, cls): ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')]) ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor] cls.add_constructor([param('ns3::TypeId 
const &', 'value')]) ## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function] cls.add_method('Get', 'ns3::TypeId', [], is_const=True) ## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function] cls.add_method('Set', 'void', [param('ns3::TypeId const &', 'value')]) return def register_Ns3Vector2DChecker_methods(root_module, cls): ## vector.h (module 'core'): ns3::Vector2DChecker::Vector2DChecker() [constructor] cls.add_constructor([]) ## vector.h (module 'core'): ns3::Vector2DChecker::Vector2DChecker(ns3::Vector2DChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::Vector2DChecker const &', 'arg0')]) return def register_Ns3Vector2DValue_methods(root_module, cls): ## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue() [constructor] cls.add_constructor([]) ## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue(ns3::Vector2DValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::Vector2DValue const &', 'arg0')]) ## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue(ns3::Vector2D const & value) [constructor] 
cls.add_constructor([param('ns3::Vector2D const &', 'value')]) ## vector.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::Vector2DValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## vector.h (module 'core'): bool ns3::Vector2DValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## vector.h (module 'core'): ns3::Vector2D ns3::Vector2DValue::Get() const [member function] cls.add_method('Get', 'ns3::Vector2D', [], is_const=True) ## vector.h (module 'core'): std::string ns3::Vector2DValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## vector.h (module 'core'): void ns3::Vector2DValue::Set(ns3::Vector2D const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Vector2D const &', 'value')]) return def register_Ns3Vector3DChecker_methods(root_module, cls): ## vector.h (module 'core'): ns3::Vector3DChecker::Vector3DChecker() [constructor] cls.add_constructor([]) ## vector.h (module 'core'): ns3::Vector3DChecker::Vector3DChecker(ns3::Vector3DChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::Vector3DChecker const &', 'arg0')]) return def register_Ns3Vector3DValue_methods(root_module, cls): ## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue() [constructor] cls.add_constructor([]) ## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue(ns3::Vector3DValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::Vector3DValue const &', 'arg0')]) ## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue(ns3::Vector3D 
const & value) [constructor] cls.add_constructor([param('ns3::Vector3D const &', 'value')]) ## vector.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::Vector3DValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## vector.h (module 'core'): bool ns3::Vector3DValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## vector.h (module 'core'): ns3::Vector3D ns3::Vector3DValue::Get() const [member function] cls.add_method('Get', 'ns3::Vector3D', [], is_const=True) ## vector.h (module 'core'): std::string ns3::Vector3DValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## vector.h (module 'core'): void ns3::Vector3DValue::Set(ns3::Vector3D const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Vector3D const &', 'value')]) return def register_functions(root_module): module = root_module register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module) return def register_functions_ns3_FatalImpl(module, root_module): return def main(): out = FileCodeSink(sys.stdout) root_module = module_init() register_types(root_module) register_methods(root_module) register_functions(root_module) root_module.generate(out) if __name__ == '__main__': main()
gpl-2.0
BrandonY/gsutil
gslib/commands/compose.py
7
7297
# -*- coding: utf-8 -*- # Copyright 2013 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Implementation of compose command for Google Cloud Storage.""" from __future__ import absolute_import from gslib.bucket_listing_ref import BucketListingObject from gslib.command import Command from gslib.command_argument import CommandArgument from gslib.cs_api_map import ApiSelector from gslib.encryption_helper import GetEncryptionTuple from gslib.exception import CommandException from gslib.storage_url import ContainsWildcard from gslib.storage_url import StorageUrlFromString from gslib.third_party.storage_apitools import storage_v1_messages as apitools_messages from gslib.translation_helper import PreconditionsFromHeaders MAX_COMPONENT_COUNT = 1024 MAX_COMPOSE_ARITY = 32 MAX_COMPONENT_RATE = 200 _SYNOPSIS = """ gsutil compose gs://bucket/obj1 [gs://bucket/obj2 ...] gs://bucket/composite """ _DETAILED_HELP_TEXT = (""" <B>SYNOPSIS</B> """ + _SYNOPSIS + """ <B>DESCRIPTION</B> The compose command creates a new object whose content is the concatenation of a given sequence of component objects under the same bucket. gsutil uses the content type of the first source object to determine the destination object's content type. 
For more information, please see: https://cloud.google.com/storage/docs/composite-objects Note also that the gsutil cp command can automatically split uploads for large files into multiple component objects, upload them in parallel, and compose them into a final object (which will be subject to the component count limit). This will still perform all uploads from a single machine. For extremely large files and/or very low per-machine bandwidth, you may want to split the file and upload it from multiple machines, and later compose these parts of the file manually. See the 'PARALLEL COMPOSITE UPLOADS' section under 'gsutil help cp' for details. Appending simply entails uploading your new data to a temporary object, composing it with the growing append-target, and deleting the temporary object: $ echo 'new data' | gsutil cp - gs://bucket/data-to-append $ gsutil compose gs://bucket/append-target gs://bucket/data-to-append \\ gs://bucket/append-target $ gsutil rm gs://bucket/data-to-append Note that there is a limit (currently %d) to the number of components that can be composed in a single operation. There is a limit (currently %d) to the total number of components for a given composite object. This means you can append to each object at most %d times. There is a per-project rate limit (currently %d) to the number of components you can compose per second. This rate counts both the components being appended to a composite object as well as the components being copied when the composite object of which they are a part is copied. """ % (MAX_COMPOSE_ARITY, MAX_COMPONENT_COUNT, MAX_COMPONENT_COUNT - 1, MAX_COMPONENT_RATE)) class ComposeCommand(Command): """Implementation of gsutil compose command.""" # Command specification. See base class for documentation. 
command_spec = Command.CreateCommandSpec( 'compose', command_name_aliases=['concat'], usage_synopsis=_SYNOPSIS, min_args=1, max_args=MAX_COMPOSE_ARITY + 1, supported_sub_args='', # Not files, just object names without gs:// prefix. file_url_ok=False, provider_url_ok=False, urls_start_arg=1, gs_api_support=[ApiSelector.XML, ApiSelector.JSON], gs_default_api=ApiSelector.JSON, argparse_arguments=[ CommandArgument.MakeZeroOrMoreCloudURLsArgument() ] ) # Help specification. See help_provider.py for documentation. help_spec = Command.HelpSpec( help_name='compose', help_name_aliases=['concat'], help_type='command_help', help_one_line_summary=( 'Concatenate a sequence of objects into a new composite object.'), help_text=_DETAILED_HELP_TEXT, subcommand_help_text={}, ) def CheckProvider(self, url): if url.scheme != 'gs': raise CommandException( '"compose" called on URL with unsupported provider (%s).' % str(url)) # Command entry point. def RunCommand(self): """Command entry point for the compose command.""" target_url_str = self.args[-1] self.args = self.args[:-1] target_url = StorageUrlFromString(target_url_str) self.CheckProvider(target_url) if target_url.HasGeneration(): raise CommandException('A version-specific URL (%s) cannot be ' 'the destination for gsutil compose - abort.' % target_url) dst_obj_metadata = apitools_messages.Object(name=target_url.object_name, bucket=target_url.bucket_name) components = [] # Remember the first source object so we can get its content type. 
first_src_url = None for src_url_str in self.args: if ContainsWildcard(src_url_str): src_url_iter = self.WildcardIterator(src_url_str).IterObjects() else: src_url_iter = [BucketListingObject(StorageUrlFromString(src_url_str))] for blr in src_url_iter: src_url = blr.storage_url self.CheckProvider(src_url) if src_url.bucket_name != target_url.bucket_name: raise CommandException( 'GCS does not support inter-bucket composing.') if not first_src_url: first_src_url = src_url src_obj_metadata = ( apitools_messages.ComposeRequest.SourceObjectsValueListEntry( name=src_url.object_name)) if src_url.HasGeneration(): src_obj_metadata.generation = src_url.generation components.append(src_obj_metadata) # Avoid expanding too many components, and sanity check each name # expansion result. if len(components) > MAX_COMPOSE_ARITY: raise CommandException('"compose" called with too many component ' 'objects. Limit is %d.' % MAX_COMPOSE_ARITY) if not components: raise CommandException('"compose" requires at least 1 component object.') dst_obj_metadata.contentType = self.gsutil_api.GetObjectMetadata( first_src_url.bucket_name, first_src_url.object_name, provider=first_src_url.scheme, fields=['contentType']).contentType preconditions = PreconditionsFromHeaders(self.headers or {}) self.logger.info( 'Composing %s from %d component object(s).', target_url, len(components)) self.gsutil_api.ComposeObject( components, dst_obj_metadata, preconditions=preconditions, provider=target_url.scheme, encryption_tuple=GetEncryptionTuple())
apache-2.0
phuongtg/zerorpc-python
zerorpc/socket.py
134
1737
# -*- coding: utf-8 -*- # Open Source Initiative OSI - The MIT License (MIT):Licensing # # The MIT License (MIT) # Copyright (c) 2012 DotCloud Inc (opensource@dotcloud.com) # # Permission is hereby granted, free of charge, to any person obtaining a copy of # this software and associated documentation files (the "Software"), to deal in # the Software without restriction, including without limitation the rights to # use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies # of the Software, and to permit persons to whom the Software is furnished to do # so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from .context import Context from .events import Events class SocketBase(object): def __init__(self, zmq_socket_type, context=None): self._context = context or Context.get_instance() self._events = Events(zmq_socket_type, context) def close(self): self._events.close() def connect(self, endpoint, resolve=True): return self._events.connect(endpoint, resolve) def bind(self, endpoint, resolve=True): return self._events.bind(endpoint, resolve)
mit
insomnia-lab/calibre
src/calibre/utils/bibtex.py
19
100975
""" Collection of python utility-methodes commonly used by other bibliograph packages. From http://pypi.python.org/pypi/bibliograph.core/ from Tom Gross <itconsense@gmail.com> Adapted for calibre use Zope Public License (ZPL) Version 2.1 A copyright notice accompanies this license document that identifies the copyright holders. This license has been certified as open source. It has also been designated as GPL compatible by the Free Software Foundation (FSF). Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions in source code must retain the accompanying copyright notice, this list of conditions, and the following disclaimer. 2. Redistributions in binary form must reproduce the accompanying copyright notice, this list of conditions, and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Names of the copyright holders must not be used to endorse or promote products derived from this software without prior written permission from the copyright holders. 4. The right to distribute this software or to use it for any purpose does not give you the right to use Servicemarks (sm) or Trademarks (tm) of the copyright holders. Use of them is covered by separate agreement with the copyright holders. 5. If any files are modified, you must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. Disclaimer THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ __author__ = 'sengian <sengian1 at gmail.com>' __docformat__ = 'restructuredtext en' import re, string from calibre.constants import preferred_encoding from calibre.utils.mreplace import MReplace utf8enc2latex_mapping = { # This is a mapping of Unicode characters to LaTeX equivalents. # The information has been extracted from # <http://www.w3.org/2003/entities/xml/unicode.xml>, written by # David Carlisle and Sebastian Rahtz. # # The extraction has been done by the "create_unimap.py" script # located at <http://docutils.sf.net/tools/dev/create_unimap.py>. 
# Fix some encoding problem between cp1252 and latin1 # from http://www.microsoft.com/typography/unicode/1252.htm u'\x80': '{\\texteuro}', # EURO SIGN u'\x82': '{,}', # SINGLE LOW-9 QUOTATION MARK u'\x83': '$f$', # LATIN SMALL LETTER F WITH HOOK u'\x84': '{,,}', # DOUBLE LOW-9 QUOTATION MARK u'\x85': '{\\ldots}', # HORIZONTAL ELLIPSIS u'\x86': '{\\textdagger}', # DAGGER u'\x87': '{\\textdaggerdbl}', # DOUBLE DAGGER u'\x88': '{\textasciicircum}', # MODIFIER LETTER CIRCUMFLEX ACCENT u'\x89': '{\\textperthousand}', # PER MILLE SIGN u'\x8A': '{\\v{S}}', # LATIN CAPITAL LETTER S WITH CARON u'\x8B': '{\\guilsinglleft}', # SINGLE LEFT-POINTING ANGLE QUOTATION MARK u'\x8C': '{\\OE}', # LATIN CAPITAL LIGATURE OE u'\x8E': '{\\v{Z}}', # LATIN CAPITAL LETTER Z WITH CARON u'\x91': '{`}', # LEFT SINGLE QUOTATION MARK u'\x92': "{'}", # RIGHT SINGLE QUOTATION MARK u'\x93': '{\\textquotedblleft}', # LEFT DOUBLE QUOTATION MARK u'\x94': '{\\textquotedblright}', # RIGHT DOUBLE QUOTATION MARK u'\x95': '{\\textbullet}', # BULLET u'\x96': '{\\textendash}', # EN DASH u'\x97': '{\\textemdash}', # EM DASH u'\x98': '{\\texttildelow}', # SMALL TILDE u'\x99': '{\\texttrademark}', # TRADE MARK SIGN u'\x9A': '{\\v{s}}', # LATIN SMALL LETTER S WITH CARON u'\x9B': '{\\guilsinglright}', # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK u'\x9C': '{\\oe}', # LATIN SMALL LIGATURE OE u'\x9E': '{\\v{z}}', # LATIN SMALL LETTER Z WITH CARON u'\x9F': '{\\"{Y}}', # LATIN CAPITAL LETTER Y WITH DIAERESIS u'\xa0': '$~$', u'\xa1': '{\\textexclamdown}', u'\xa2': '{\\textcent}', u'\xa3': '{\\textsterling}', u'\xa4': '{\\textcurrency}', u'\xa5': '{\\textyen}', u'\xa6': '{\\textbrokenbar}', u'\xa7': '{\\textsection}', u'\xa8': '{\\textasciidieresis}', u'\xa9': '{\\textcopyright}', u'\xaa': '{\\textordfeminine}', u'\xab': '{\\guillemotleft}', u'\xac': '$\\lnot$', u'\xad': '$\\-$', u'\xae': '{\\textregistered}', u'\xaf': '{\\textasciimacron}', u'\xb0': '{\\textdegree}', u'\xb1': '$\\pm$', u'\xb2': '${^2}$', u'\xb3': 
'${^3}$', u'\xb4': '{\\textasciiacute}', u'\xb5': '$\\mathrm{\\mu}$', u'\xb6': '{\\textparagraph}', u'\xb7': '$\\cdot$', u'\xb8': '{\\c{}}', u'\xb9': '${^1}$', u'\xba': '{\\textordmasculine}', u'\xbb': '{\\guillemotright}', u'\xbc': '{\\textonequarter}', u'\xbd': '{\\textonehalf}', u'\xbe': '{\\textthreequarters}', u'\xbf': '{\\textquestiondown}', u'\xc0': '{\\`{A}}', u'\xc1': "{\\'{A}}", u'\xc2': '{\\^{A}}', u'\xc3': '{\\~{A}}', u'\xc4': '{\\"{A}}', u'\xc5': '{\\AA}', u'\xc6': '{\\AE}', u'\xc7': '{\\c{C}}', u'\xc8': '{\\`{E}}', u'\xc9': "{\\'{E}}", u'\xca': '{\\^{E}}', u'\xcb': '{\\"{E}}', u'\xcc': '{\\`{I}}', u'\xcd': "{\\'{I}}", u'\xce': '{\\^{I}}', u'\xcf': '{\\"{I}}', u'\xd0': '{\\DH}', u'\xd1': '{\\~{N}}', u'\xd2': '{\\`{O}}', u'\xd3': "{\\'{O}}", u'\xd4': '{\\^{O}}', u'\xd5': '{\\~{O}}', u'\xd6': '{\\"{O}}', u'\xd7': '{\\texttimes}', u'\xd8': '{\\O}', u'\xd9': '{\\`{U}}', u'\xda': "{\\'{U}}", u'\xdb': '{\\^{U}}', u'\xdc': '{\\"{U}}', u'\xdd': "{\\'{Y}}", u'\xde': '{\\TH}', u'\xdf': '{\\ss}', u'\xe0': '{\\`{a}}', u'\xe1': "{\\'{a}}", u'\xe2': '{\\^{a}}', u'\xe3': '{\\~{a}}', u'\xe4': '{\\"{a}}', u'\xe5': '{\\aa}', u'\xe6': '{\\ae}', u'\xe7': '{\\c{c}}', u'\xe8': '{\\`{e}}', u'\xe9': "{\\'{e}}", u'\xea': '{\\^{e}}', u'\xeb': '{\\"{e}}', u'\xec': '{\\`{\\i}}', u'\xed': "{\\'{\\i}}", u'\xee': '{\\^{\\i}}', u'\xef': '{\\"{\\i}}', u'\xf0': '{\\dh}', u'\xf1': '{\\~{n}}', u'\xf2': '{\\`{o}}', u'\xf3': "{\\'{o}}", u'\xf4': '{\\^{o}}', u'\xf5': '{\\~{o}}', u'\xf6': '{\\"{o}}', u'\xf7': '$\\div$', u'\xf8': '{\\o}', u'\xf9': '{\\`{u}}', u'\xfa': "{\\'{u}}", u'\xfb': '{\\^{u}}', u'\xfc': '{\\"{u}}', u'\xfd': "{\\'{y}}", u'\xfe': '{\\th}', u'\xff': '{\\"{y}}', u'\u0100': '{\\={A}}', u'\u0101': '{\\={a}}', u'\u0102': '{\\u{A}}', u'\u0103': '{\\u{a}}', u'\u0104': '{\\k{A}}', u'\u0105': '{\\k{a}}', u'\u0106': "{\\'{C}}", u'\u0107': "{\\'{c}}", u'\u0108': '{\\^{C}}', u'\u0109': '{\\^{c}}', u'\u010a': '{\\.{C}}', u'\u010b': '{\\.{c}}', u'\u010c': '{\\v{C}}', u'\u010d': 
'{\\v{c}}', u'\u010e': '{\\v{D}}', u'\u010f': '{\\v{d}}', u'\u0110': '{\\DJ}', u'\u0111': '{\\dj}', u'\u0112': '{\\={E}}', u'\u0113': '{\\={e}}', u'\u0114': '{\\u{E}}', u'\u0115': '{\\u{e}}', u'\u0116': '{\\.{E}}', u'\u0117': '{\\.{e}}', u'\u0118': '{\\k{E}}', u'\u0119': '{\\k{e}}', u'\u011a': '{\\v{E}}', u'\u011b': '{\\v{e}}', u'\u011c': '{\\^{G}}', u'\u011d': '{\\^{g}}', u'\u011e': '{\\u{G}}', u'\u011f': '{\\u{g}}', u'\u0120': '{\\.{G}}', u'\u0121': '{\\.{g}}', u'\u0122': '{\\c{G}}', u'\u0123': '{\\c{g}}', u'\u0124': '{\\^{H}}', u'\u0125': '{\\^{h}}', u'\u0126': '{{\\fontencoding{LELA}\\selectfont\\char40}}', u'\u0127': '$\\Elzxh$', u'\u0128': '{\\~{I}}', u'\u0129': '{\\~{\\i}}', u'\u012a': '{\\={I}}', u'\u012b': '{\\={\\i}}', u'\u012c': '{\\u{I}}', u'\u012d': '{\\u{\\i}}', u'\u012e': '{\\k{I}}', u'\u012f': '{\\k{i}}', u'\u0130': '{\\.{I}}', u'\u0131': '{\\i}', u'\u0132': '{IJ}', u'\u0133': '{ij}', u'\u0134': '{\\^{J}}', u'\u0135': '{\\^{\\j}}', u'\u0136': '{\\c{K}}', u'\u0137': '{\\c{k}}', u'\u0138': '{{\\fontencoding{LELA}\\selectfont\\char91}}', u'\u0139': "{\\'{L}}", u'\u013a': "{\\'{l}}", u'\u013b': '{\\c{L}}', u'\u013c': '{\\c{l}}', u'\u013d': '{\\v{L}}', u'\u013e': '{\\v{l}}', u'\u013f': '{{\\fontencoding{LELA}\\selectfont\\char201}}', u'\u0140': '{{\\fontencoding{LELA}\\selectfont\\char202}}', u'\u0141': '{\\L}', u'\u0142': '{\\l}', u'\u0143': "{\\'{N}}", u'\u0144': "{\\'{n}}", u'\u0145': '{\\c{N}}', u'\u0146': '{\\c{n}}', u'\u0147': '{\\v{N}}', u'\u0148': '{\\v{n}}', u'\u0149': "{'n}", u'\u014a': '{\\NG}', u'\u014b': '{\\ng}', u'\u014c': '{\\={O}}', u'\u014d': '{\\={o}}', u'\u014e': '{\\u{O}}', u'\u014f': '{\\u{o}}', u'\u0150': '{\\H{O}}', u'\u0151': '{\\H{o}}', u'\u0152': '{\\OE}', u'\u0153': '{\\oe}', u'\u0154': "{\\'{R}}", u'\u0155': "{\\'{r}}", u'\u0156': '{\\c{R}}', u'\u0157': '{\\c{r}}', u'\u0158': '{\\v{R}}', u'\u0159': '{\\v{r}}', u'\u015a': "{\\'{S}}", u'\u015b': "{\\'{s}}", u'\u015c': '{\\^{S}}', u'\u015d': '{\\^{s}}', u'\u015e': '{\\c{S}}', 
u'\u015f': '{\\c{s}}', u'\u0160': '{\\v{S}}', u'\u0161': '{\\v{s}}', u'\u0162': '{\\c{T}}', u'\u0163': '{\\c{t}}', u'\u0164': '{\\v{T}}', u'\u0165': '{\\v{t}}', u'\u0166': '{{\\fontencoding{LELA}\\selectfont\\char47}}', u'\u0167': '{{\\fontencoding{LELA}\\selectfont\\char63}}', u'\u0168': '{\\~{U}}', u'\u0169': '{\\~{u}}', u'\u016a': '{\\={U}}', u'\u016b': '{\\={u}}', u'\u016c': '{\\u{U}}', u'\u016d': '{\\u{u}}', u'\u016e': '{\\r{U}}', u'\u016f': '{\\r{u}}', u'\u0170': '{\\H{U}}', u'\u0171': '{\\H{u}}', u'\u0172': '{\\k{U}}', u'\u0173': '{\\k{u}}', u'\u0174': '{\\^{W}}', u'\u0175': '{\\^{w}}', u'\u0176': '{\\^{Y}}', u'\u0177': '{\\^{y}}', u'\u0178': '{\\"{Y}}', u'\u0179': "{\\'{Z}}", u'\u017a': "{\\'{z}}", u'\u017b': '{\\.{Z}}', u'\u017c': '{\\.{z}}', u'\u017d': '{\\v{Z}}', u'\u017e': '{\\v{z}}', u'\u0192': '$f$', u'\u0195': '{\\texthvlig}', u'\u019e': '{\\textnrleg}', u'\u01aa': '$\\eth$', u'\u01ba': '{{\\fontencoding{LELA}\\selectfont\\char195}}', u'\u01c2': '{\\textdoublepipe}', u'\u01f5': "{\\'{g}}", u'\u0250': '$\\Elztrna$', u'\u0252': '$\\Elztrnsa$', u'\u0254': '$\\Elzopeno$', u'\u0256': '$\\Elzrtld$', u'\u0258': '{{\\fontencoding{LEIP}\\selectfont\\char61}}', u'\u0259': '$\\Elzschwa$', u'\u025b': '$\\varepsilon$', u'\u0261': '{g}', u'\u0263': '$\\Elzpgamma$', u'\u0264': '$\\Elzpbgam$', u'\u0265': '$\\Elztrnh$', u'\u026c': '$\\Elzbtdl$', u'\u026d': '$\\Elzrtll$', u'\u026f': '$\\Elztrnm$', u'\u0270': '$\\Elztrnmlr$', u'\u0271': '$\\Elzltlmr$', u'\u0272': '{\\Elzltln}', u'\u0273': '$\\Elzrtln$', u'\u0277': '$\\Elzclomeg$', u'\u0278': '{\\textphi}', u'\u0279': '$\\Elztrnr$', u'\u027a': '$\\Elztrnrl$', u'\u027b': '$\\Elzrttrnr$', u'\u027c': '$\\Elzrl$', u'\u027d': '$\\Elzrtlr$', u'\u027e': '$\\Elzfhr$', u'\u027f': '{{\\fontencoding{LEIP}\\selectfont\\char202}}', u'\u0282': '$\\Elzrtls$', u'\u0283': '$\\Elzesh$', u'\u0287': '$\\Elztrnt$', u'\u0288': '$\\Elzrtlt$', u'\u028a': '$\\Elzpupsil$', u'\u028b': '$\\Elzpscrv$', u'\u028c': '$\\Elzinvv$', u'\u028d': 
'$\\Elzinvw$', u'\u028e': '$\\Elztrny$', u'\u0290': '$\\Elzrtlz$', u'\u0292': '$\\Elzyogh$', u'\u0294': '$\\Elzglst$', u'\u0295': '$\\Elzreglst$', u'\u0296': '$\\Elzinglst$', u'\u029e': '{\\textturnk}', u'\u02a4': '$\\Elzdyogh$', u'\u02a7': '$\\Elztesh$', u'\u02bc': "{'}", u'\u02c7': '{\\textasciicaron}', u'\u02c8': '$\\Elzverts$', u'\u02cc': '$\\Elzverti$', u'\u02d0': '$\\Elzlmrk$', u'\u02d1': '$\\Elzhlmrk$', u'\u02d2': '$\\Elzsbrhr$', u'\u02d3': '$\\Elzsblhr$', u'\u02d4': '$\\Elzrais$', u'\u02d5': '$\\Elzlow$', u'\u02d8': '{\\textasciibreve}', u'\u02d9': '{\\textperiodcentered}', u'\u02da': '{\\r{}}', u'\u02db': '{\\k{}}', u'\u02dc': '{\\texttildelow}', u'\u02dd': '{\\H{}}', u'\u02e5': '{\\tone{55}}', u'\u02e6': '{\\tone{44}}', u'\u02e7': '{\\tone{33}}', u'\u02e8': '{\\tone{22}}', u'\u02e9': '{\\tone{11}}', u'\u0300': '{\\`}', u'\u0301': "{\\'}", u'\u0302': '{\\^}', u'\u0303': '{\\~}', u'\u0304': '{\\=}', u'\u0306': '{\\u}', u'\u0307': '{\\.}', u'\u0308': '{\\"}', u'\u030a': '{\\r}', u'\u030b': '{\\H}', u'\u030c': '{\\v}', u'\u030f': '{\\cyrchar\\C}', u'\u0311': '{{\\fontencoding{LECO}\\selectfont\\char177}}', u'\u0318': '{{\\fontencoding{LECO}\\selectfont\\char184}}', u'\u0319': '{{\\fontencoding{LECO}\\selectfont\\char185}}', u'\u0321': '$\\Elzpalh$', u'\u0322': '{\\Elzrh}', u'\u0327': '{\\c}', u'\u0328': '{\\k}', u'\u032a': '$\\Elzsbbrg$', u'\u032b': '{{\\fontencoding{LECO}\\selectfont\\char203}}', u'\u032f': '{{\\fontencoding{LECO}\\selectfont\\char207}}', u'\u0335': '{\\Elzxl}', u'\u0336': '{\\Elzbar}', u'\u0337': '{{\\fontencoding{LECO}\\selectfont\\char215}}', u'\u0338': '{{\\fontencoding{LECO}\\selectfont\\char216}}', u'\u033a': '{{\\fontencoding{LECO}\\selectfont\\char218}}', u'\u033b': '{{\\fontencoding{LECO}\\selectfont\\char219}}', u'\u033c': '{{\\fontencoding{LECO}\\selectfont\\char220}}', u'\u033d': '{{\\fontencoding{LECO}\\selectfont\\char221}}', u'\u0361': '{{\\fontencoding{LECO}\\selectfont\\char225}}', u'\u0386': "{\\'{A}}", u'\u0388': 
"{\\'{E}}", u'\u0389': "{\\'{H}}", u'\u038a': "{\\'{}{I}}", u'\u038c': "{\\'{}O}", u'\u038e': "$\\mathrm{'Y}$", u'\u038f': "$\\mathrm{'\\Omega}$", u'\u0390': '$\\acute{\\ddot{\\iota}}$', u'\u0391': '$\\Alpha$', u'\u0392': '$\\Beta$', u'\u0393': '$\\Gamma$', u'\u0394': '$\\Delta$', u'\u0395': '$\\Epsilon$', u'\u0396': '$\\Zeta$', u'\u0397': '$\\Eta$', u'\u0398': '$\\Theta$', u'\u0399': '$\\Iota$', u'\u039a': '$\\Kappa$', u'\u039b': '$\\Lambda$', u'\u039c': '$M$', u'\u039d': '$N$', u'\u039e': '$\\Xi$', u'\u039f': '$O$', u'\u03a0': '$\\Pi$', u'\u03a1': '$\\Rho$', u'\u03a3': '$\\Sigma$', u'\u03a4': '$\\Tau$', u'\u03a5': '$\\Upsilon$', u'\u03a6': '$\\Phi$', u'\u03a7': '$\\Chi$', u'\u03a8': '$\\Psi$', u'\u03a9': '$\\Omega$', u'\u03aa': '$\\mathrm{\\ddot{I}}$', u'\u03ab': '$\\mathrm{\\ddot{Y}}$', u'\u03ac': "{\\'{$\\alpha$}}", u'\u03ad': '$\\acute{\\epsilon}$', u'\u03ae': '$\\acute{\\eta}$', u'\u03af': '$\\acute{\\iota}$', u'\u03b0': '$\\acute{\\ddot{\\upsilon}}$', u'\u03b1': '$\\alpha$', u'\u03b2': '$\\beta$', u'\u03b3': '$\\gamma$', u'\u03b4': '$\\delta$', u'\u03b5': '$\\epsilon$', u'\u03b6': '$\\zeta$', u'\u03b7': '$\\eta$', u'\u03b8': '{\\texttheta}', u'\u03b9': '$\\iota$', u'\u03ba': '$\\kappa$', u'\u03bb': '$\\lambda$', u'\u03bc': '$\\mu$', u'\u03bd': '$\\nu$', u'\u03be': '$\\xi$', u'\u03bf': '$o$', u'\u03c0': '$\\pi$', u'\u03c1': '$\\rho$', u'\u03c2': '$\\varsigma$', u'\u03c3': '$\\sigma$', u'\u03c4': '$\\tau$', u'\u03c5': '$\\upsilon$', u'\u03c6': '$\\varphi$', u'\u03c7': '$\\chi$', u'\u03c8': '$\\psi$', u'\u03c9': '$\\omega$', u'\u03ca': '$\\ddot{\\iota}$', u'\u03cb': '$\\ddot{\\upsilon}$', u'\u03cc': "{\\'{o}}", u'\u03cd': '$\\acute{\\upsilon}$', u'\u03ce': '$\\acute{\\omega}$', u'\u03d0': '{\\Pisymbol{ppi022}{87}}', u'\u03d1': '{\\textvartheta}', u'\u03d2': '$\\Upsilon$', u'\u03d5': '$\\phi$', u'\u03d6': '$\\varpi$', u'\u03da': '$\\Stigma$', u'\u03dc': '$\\Digamma$', u'\u03dd': '$\\digamma$', u'\u03de': '$\\Koppa$', u'\u03e0': '$\\Sampi$', u'\u03f0': 
'$\\varkappa$', u'\u03f1': '$\\varrho$', u'\u03f4': '{\\textTheta}', u'\u03f6': '$\\backepsilon$', u'\u0401': '{\\cyrchar\\CYRYO}', u'\u0402': '{\\cyrchar\\CYRDJE}', u'\u0403': "{\\cyrchar{\\'\\CYRG}}", u'\u0404': '{\\cyrchar\\CYRIE}', u'\u0405': '{\\cyrchar\\CYRDZE}', u'\u0406': '{\\cyrchar\\CYRII}', u'\u0407': '{\\cyrchar\\CYRYI}', u'\u0408': '{\\cyrchar\\CYRJE}', u'\u0409': '{\\cyrchar\\CYRLJE}', u'\u040a': '{\\cyrchar\\CYRNJE}', u'\u040b': '{\\cyrchar\\CYRTSHE}', u'\u040c': "{\\cyrchar{\\'\\CYRK}}", u'\u040e': '{\\cyrchar\\CYRUSHRT}', u'\u040f': '{\\cyrchar\\CYRDZHE}', u'\u0410': '{\\cyrchar\\CYRA}', u'\u0411': '{\\cyrchar\\CYRB}', u'\u0412': '{\\cyrchar\\CYRV}', u'\u0413': '{\\cyrchar\\CYRG}', u'\u0414': '{\\cyrchar\\CYRD}', u'\u0415': '{\\cyrchar\\CYRE}', u'\u0416': '{\\cyrchar\\CYRZH}', u'\u0417': '{\\cyrchar\\CYRZ}', u'\u0418': '{\\cyrchar\\CYRI}', u'\u0419': '{\\cyrchar\\CYRISHRT}', u'\u041a': '{\\cyrchar\\CYRK}', u'\u041b': '{\\cyrchar\\CYRL}', u'\u041c': '{\\cyrchar\\CYRM}', u'\u041d': '{\\cyrchar\\CYRN}', u'\u041e': '{\\cyrchar\\CYRO}', u'\u041f': '{\\cyrchar\\CYRP}', u'\u0420': '{\\cyrchar\\CYRR}', u'\u0421': '{\\cyrchar\\CYRS}', u'\u0422': '{\\cyrchar\\CYRT}', u'\u0423': '{\\cyrchar\\CYRU}', u'\u0424': '{\\cyrchar\\CYRF}', u'\u0425': '{\\cyrchar\\CYRH}', u'\u0426': '{\\cyrchar\\CYRC}', u'\u0427': '{\\cyrchar\\CYRCH}', u'\u0428': '{\\cyrchar\\CYRSH}', u'\u0429': '{\\cyrchar\\CYRSHCH}', u'\u042a': '{\\cyrchar\\CYRHRDSN}', u'\u042b': '{\\cyrchar\\CYRERY}', u'\u042c': '{\\cyrchar\\CYRSFTSN}', u'\u042d': '{\\cyrchar\\CYREREV}', u'\u042e': '{\\cyrchar\\CYRYU}', u'\u042f': '{\\cyrchar\\CYRYA}', u'\u0430': '{\\cyrchar\\cyra}', u'\u0431': '{\\cyrchar\\cyrb}', u'\u0432': '{\\cyrchar\\cyrv}', u'\u0433': '{\\cyrchar\\cyrg}', u'\u0434': '{\\cyrchar\\cyrd}', u'\u0435': '{\\cyrchar\\cyre}', u'\u0436': '{\\cyrchar\\cyrzh}', u'\u0437': '{\\cyrchar\\cyrz}', u'\u0438': '{\\cyrchar\\cyri}', u'\u0439': '{\\cyrchar\\cyrishrt}', u'\u043a': '{\\cyrchar\\cyrk}', u'\u043b': 
'{\\cyrchar\\cyrl}', u'\u043c': '{\\cyrchar\\cyrm}', u'\u043d': '{\\cyrchar\\cyrn}', u'\u043e': '{\\cyrchar\\cyro}', u'\u043f': '{\\cyrchar\\cyrp}', u'\u0440': '{\\cyrchar\\cyrr}', u'\u0441': '{\\cyrchar\\cyrs}', u'\u0442': '{\\cyrchar\\cyrt}', u'\u0443': '{\\cyrchar\\cyru}', u'\u0444': '{\\cyrchar\\cyrf}', u'\u0445': '{\\cyrchar\\cyrh}', u'\u0446': '{\\cyrchar\\cyrc}', u'\u0447': '{\\cyrchar\\cyrch}', u'\u0448': '{\\cyrchar\\cyrsh}', u'\u0449': '{\\cyrchar\\cyrshch}', u'\u044a': '{\\cyrchar\\cyrhrdsn}', u'\u044b': '{\\cyrchar\\cyrery}', u'\u044c': '{\\cyrchar\\cyrsftsn}', u'\u044d': '{\\cyrchar\\cyrerev}', u'\u044e': '{\\cyrchar\\cyryu}', u'\u044f': '{\\cyrchar\\cyrya}', u'\u0451': '{\\cyrchar\\cyryo}', u'\u0452': '{\\cyrchar\\cyrdje}', u'\u0453': "{\\cyrchar{\\'\\cyrg}}", u'\u0454': '{\\cyrchar\\cyrie}', u'\u0455': '{\\cyrchar\\cyrdze}', u'\u0456': '{\\cyrchar\\cyrii}', u'\u0457': '{\\cyrchar\\cyryi}', u'\u0458': '{\\cyrchar\\cyrje}', u'\u0459': '{\\cyrchar\\cyrlje}', u'\u045a': '{\\cyrchar\\cyrnje}', u'\u045b': '{\\cyrchar\\cyrtshe}', u'\u045c': "{\\cyrchar{\\'\\cyrk}}", u'\u045e': '{\\cyrchar\\cyrushrt}', u'\u045f': '{\\cyrchar\\cyrdzhe}', u'\u0460': '{\\cyrchar\\CYROMEGA}', u'\u0461': '{\\cyrchar\\cyromega}', u'\u0462': '{\\cyrchar\\CYRYAT}', u'\u0464': '{\\cyrchar\\CYRIOTE}', u'\u0465': '{\\cyrchar\\cyriote}', u'\u0466': '{\\cyrchar\\CYRLYUS}', u'\u0467': '{\\cyrchar\\cyrlyus}', u'\u0468': '{\\cyrchar\\CYRIOTLYUS}', u'\u0469': '{\\cyrchar\\cyriotlyus}', u'\u046a': '{\\cyrchar\\CYRBYUS}', u'\u046c': '{\\cyrchar\\CYRIOTBYUS}', u'\u046d': '{\\cyrchar\\cyriotbyus}', u'\u046e': '{\\cyrchar\\CYRKSI}', u'\u046f': '{\\cyrchar\\cyrksi}', u'\u0470': '{\\cyrchar\\CYRPSI}', u'\u0471': '{\\cyrchar\\cyrpsi}', u'\u0472': '{\\cyrchar\\CYRFITA}', u'\u0474': '{\\cyrchar\\CYRIZH}', u'\u0478': '{\\cyrchar\\CYRUK}', u'\u0479': '{\\cyrchar\\cyruk}', u'\u047a': '{\\cyrchar\\CYROMEGARND}', u'\u047b': '{\\cyrchar\\cyromegarnd}', u'\u047c': '{\\cyrchar\\CYROMEGATITLO}', u'\u047d': 
'{\\cyrchar\\cyromegatitlo}', u'\u047e': '{\\cyrchar\\CYROT}', u'\u047f': '{\\cyrchar\\cyrot}', u'\u0480': '{\\cyrchar\\CYRKOPPA}', u'\u0481': '{\\cyrchar\\cyrkoppa}', u'\u0482': '{\\cyrchar\\cyrthousands}', u'\u0488': '{\\cyrchar\\cyrhundredthousands}', u'\u0489': '{\\cyrchar\\cyrmillions}', u'\u048c': '{\\cyrchar\\CYRSEMISFTSN}', u'\u048d': '{\\cyrchar\\cyrsemisftsn}', u'\u048e': '{\\cyrchar\\CYRRTICK}', u'\u048f': '{\\cyrchar\\cyrrtick}', u'\u0490': '{\\cyrchar\\CYRGUP}', u'\u0491': '{\\cyrchar\\cyrgup}', u'\u0492': '{\\cyrchar\\CYRGHCRS}', u'\u0493': '{\\cyrchar\\cyrghcrs}', u'\u0494': '{\\cyrchar\\CYRGHK}', u'\u0495': '{\\cyrchar\\cyrghk}', u'\u0496': '{\\cyrchar\\CYRZHDSC}', u'\u0497': '{\\cyrchar\\cyrzhdsc}', u'\u0498': '{\\cyrchar\\CYRZDSC}', u'\u0499': '{\\cyrchar\\cyrzdsc}', u'\u049a': '{\\cyrchar\\CYRKDSC}', u'\u049b': '{\\cyrchar\\cyrkdsc}', u'\u049c': '{\\cyrchar\\CYRKVCRS}', u'\u049d': '{\\cyrchar\\cyrkvcrs}', u'\u049e': '{\\cyrchar\\CYRKHCRS}', u'\u049f': '{\\cyrchar\\cyrkhcrs}', u'\u04a0': '{\\cyrchar\\CYRKBEAK}', u'\u04a1': '{\\cyrchar\\cyrkbeak}', u'\u04a2': '{\\cyrchar\\CYRNDSC}', u'\u04a3': '{\\cyrchar\\cyrndsc}', u'\u04a4': '{\\cyrchar\\CYRNG}', u'\u04a5': '{\\cyrchar\\cyrng}', u'\u04a6': '{\\cyrchar\\CYRPHK}', u'\u04a7': '{\\cyrchar\\cyrphk}', u'\u04a8': '{\\cyrchar\\CYRABHHA}', u'\u04a9': '{\\cyrchar\\cyrabhha}', u'\u04aa': '{\\cyrchar\\CYRSDSC}', u'\u04ab': '{\\cyrchar\\cyrsdsc}', u'\u04ac': '{\\cyrchar\\CYRTDSC}', u'\u04ad': '{\\cyrchar\\cyrtdsc}', u'\u04ae': '{\\cyrchar\\CYRY}', u'\u04af': '{\\cyrchar\\cyry}', u'\u04b0': '{\\cyrchar\\CYRYHCRS}', u'\u04b1': '{\\cyrchar\\cyryhcrs}', u'\u04b2': '{\\cyrchar\\CYRHDSC}', u'\u04b3': '{\\cyrchar\\cyrhdsc}', u'\u04b4': '{\\cyrchar\\CYRTETSE}', u'\u04b5': '{\\cyrchar\\cyrtetse}', u'\u04b6': '{\\cyrchar\\CYRCHRDSC}', u'\u04b7': '{\\cyrchar\\cyrchrdsc}', u'\u04b8': '{\\cyrchar\\CYRCHVCRS}', u'\u04b9': '{\\cyrchar\\cyrchvcrs}', u'\u04ba': '{\\cyrchar\\CYRSHHA}', u'\u04bb': '{\\cyrchar\\cyrshha}', 
u'\u04bc': '{\\cyrchar\\CYRABHCH}', u'\u04bd': '{\\cyrchar\\cyrabhch}', u'\u04be': '{\\cyrchar\\CYRABHCHDSC}', u'\u04bf': '{\\cyrchar\\cyrabhchdsc}', u'\u04c0': '{\\cyrchar\\CYRpalochka}', u'\u04c3': '{\\cyrchar\\CYRKHK}', u'\u04c4': '{\\cyrchar\\cyrkhk}', u'\u04c7': '{\\cyrchar\\CYRNHK}', u'\u04c8': '{\\cyrchar\\cyrnhk}', u'\u04cb': '{\\cyrchar\\CYRCHLDSC}', u'\u04cc': '{\\cyrchar\\cyrchldsc}', u'\u04d4': '{\\cyrchar\\CYRAE}', u'\u04d5': '{\\cyrchar\\cyrae}', u'\u04d8': '{\\cyrchar\\CYRSCHWA}', u'\u04d9': '{\\cyrchar\\cyrschwa}', u'\u04e0': '{\\cyrchar\\CYRABHDZE}', u'\u04e1': '{\\cyrchar\\cyrabhdze}', u'\u04e8': '{\\cyrchar\\CYROTLD}', u'\u04e9': '{\\cyrchar\\cyrotld}', u'\u2002': '{\\hspace{0.6em}}', u'\u2003': '{\\hspace{1em}}', u'\u2004': '{\\hspace{0.33em}}', u'\u2005': '{\\hspace{0.25em}}', u'\u2006': '{\\hspace{0.166em}}', u'\u2007': '{\\hphantom{0}}', u'\u2008': '{\\hphantom{,}}', u'\u2009': '{\\hspace{0.167em}}', u'\u200a': '$\\mkern1mu$', u'\u2010': '{-}', u'\u2013': '{\\textendash}', u'\u2014': '{\\textemdash}', u'\u2015': '{\\rule{1em}{1pt}}', u'\u2016': '$\\Vert$', u'\u2018': '{`}', u'\u2019': "{'}", u'\u201a': '{,}', u'\u201b': '$\\Elzreapos$', u'\u201c': '{\\textquotedblleft}', u'\u201d': '{\\textquotedblright}', u'\u201e': '{,,}', u'\u2020': '{\\textdagger}', u'\u2021': '{\\textdaggerdbl}', u'\u2022': '{\\textbullet}', u'\u2024': '{.}', u'\u2025': '{..}', u'\u2026': '{\\ldots}', u'\u2030': '{\\textperthousand}', u'\u2031': '{\\textpertenthousand}', u'\u2032': "${'}$", u'\u2033': "${''}$", u'\u2034': "${'''}$", u'\u2035': '$\\backprime$', u'\u2039': '{\\guilsinglleft}', u'\u203a': '{\\guilsinglright}', u'\u2057': "$''''$", u'\u205f': '{\\mkern4mu}', u'\u2060': '{\\nolinebreak}', u'\u20a7': '{\\ensuremath{\\Elzpes}}', u'\u20ac': '{\\texteuro}', u'\u20db': '$\\dddot$', u'\u20dc': '$\\ddddot$', u'\u2102': '$\\mathbb{C}$', u'\u210a': '{\\mathscr{g}}', u'\u210b': '$\\mathscr{H}$', u'\u210c': '$\\mathfrak{H}$', u'\u210d': '$\\mathbb{H}$', u'\u210f': 
'$\\hslash$', u'\u2110': '$\\mathscr{I}$', u'\u2111': '$\\mathfrak{I}$', u'\u2112': '$\\mathscr{L}$', u'\u2113': '$\\mathscr{l}$', u'\u2115': '$\\mathbb{N}$', u'\u2116': '{\\cyrchar\\textnumero}', u'\u2118': '$\\wp$', u'\u2119': '$\\mathbb{P}$', u'\u211a': '$\\mathbb{Q}$', u'\u211b': '$\\mathscr{R}$', u'\u211c': '$\\mathfrak{R}$', u'\u211d': '$\\mathbb{R}$', u'\u211e': '$\\Elzxrat$', u'\u2122': '{\\texttrademark}', u'\u2124': '$\\mathbb{Z}$', u'\u2126': '$\\Omega$', u'\u2127': '$\\mho$', u'\u2128': '$\\mathfrak{Z}$', u'\u2129': '$\\ElsevierGlyph{2129}$', u'\u212b': '{\\AA}', u'\u212c': '$\\mathscr{B}$', u'\u212d': '$\\mathfrak{C}$', u'\u212f': '$\\mathscr{e}$', u'\u2130': '$\\mathscr{E}$', u'\u2131': '$\\mathscr{F}$', u'\u2133': '$\\mathscr{M}$', u'\u2134': '$\\mathscr{o}$', u'\u2135': '$\\aleph$', u'\u2136': '$\\beth$', u'\u2137': '$\\gimel$', u'\u2138': '$\\daleth$', u'\u2153': '$\\textfrac{1}{3}$', u'\u2154': '$\\textfrac{2}{3}$', u'\u2155': '$\\textfrac{1}{5}$', u'\u2156': '$\\textfrac{2}{5}$', u'\u2157': '$\\textfrac{3}{5}$', u'\u2158': '$\\textfrac{4}{5}$', u'\u2159': '$\\textfrac{1}{6}$', u'\u215a': '$\\textfrac{5}{6}$', u'\u215b': '$\\textfrac{1}{8}$', u'\u215c': '$\\textfrac{3}{8}$', u'\u215d': '$\\textfrac{5}{8}$', u'\u215e': '$\\textfrac{7}{8}$', u'\u2190': '$\\leftarrow$', u'\u2191': '$\\uparrow$', u'\u2192': '$\\rightarrow$', u'\u2193': '$\\downarrow$', u'\u2194': '$\\leftrightarrow$', u'\u2195': '$\\updownarrow$', u'\u2196': '$\\nwarrow$', u'\u2197': '$\\nearrow$', u'\u2198': '$\\searrow$', u'\u2199': '$\\swarrow$', u'\u219a': '$\\nleftarrow$', u'\u219b': '$\\nrightarrow$', u'\u219c': '$\\arrowwaveright$', u'\u219d': '$\\arrowwaveright$', u'\u219e': '$\\twoheadleftarrow$', u'\u21a0': '$\\twoheadrightarrow$', u'\u21a2': '$\\leftarrowtail$', u'\u21a3': '$\\rightarrowtail$', u'\u21a6': '$\\mapsto$', u'\u21a9': '$\\hookleftarrow$', u'\u21aa': '$\\hookrightarrow$', u'\u21ab': '$\\looparrowleft$', u'\u21ac': '$\\looparrowright$', u'\u21ad': 
'$\\leftrightsquigarrow$', u'\u21ae': '$\\nleftrightarrow$', u'\u21b0': '$\\Lsh$', u'\u21b1': '$\\Rsh$', u'\u21b3': '$\\ElsevierGlyph{21B3}$', u'\u21b6': '$\\curvearrowleft$', u'\u21b7': '$\\curvearrowright$', u'\u21ba': '$\\circlearrowleft$', u'\u21bb': '$\\circlearrowright$', u'\u21bc': '$\\leftharpoonup$', u'\u21bd': '$\\leftharpoondown$', u'\u21be': '$\\upharpoonright$', u'\u21bf': '$\\upharpoonleft$', u'\u21c0': '$\\rightharpoonup$', u'\u21c1': '$\\rightharpoondown$', u'\u21c2': '$\\downharpoonright$', u'\u21c3': '$\\downharpoonleft$', u'\u21c4': '$\\rightleftarrows$', u'\u21c5': '$\\dblarrowupdown$', u'\u21c6': '$\\leftrightarrows$', u'\u21c7': '$\\leftleftarrows$', u'\u21c8': '$\\upuparrows$', u'\u21c9': '$\\rightrightarrows$', u'\u21ca': '$\\downdownarrows$', u'\u21cb': '$\\leftrightharpoons$', u'\u21cc': '$\\rightleftharpoons$', u'\u21cd': '$\\nLeftarrow$', u'\u21ce': '$\\nLeftrightarrow$', u'\u21cf': '$\\nRightarrow$', u'\u21d0': '$\\Leftarrow$', u'\u21d1': '$\\Uparrow$', u'\u21d2': '$\\Rightarrow$', u'\u21d3': '$\\Downarrow$', u'\u21d4': '$\\Leftrightarrow$', u'\u21d5': '$\\Updownarrow$', u'\u21da': '$\\Lleftarrow$', u'\u21db': '$\\Rrightarrow$', u'\u21dd': '$\\rightsquigarrow$', u'\u21f5': '$\\DownArrowUpArrow$', u'\u2200': '$\\forall$', u'\u2201': '$\\complement$', u'\u2202': '$\\partial$', u'\u2203': '$\\exists$', u'\u2204': '$\\nexists$', u'\u2205': '$\\varnothing$', u'\u2207': '$\\nabla$', u'\u2208': '$\\in$', u'\u2209': '$\\not\\in$', u'\u220b': '$\\ni$', u'\u220c': '$\\not\\ni$', u'\u220f': '$\\prod$', u'\u2210': '$\\coprod$', u'\u2211': '$\\sum$', u'\u2212': '{-}', u'\u2213': '$\\mp$', u'\u2214': '$\\dotplus$', u'\u2216': '$\\setminus$', u'\u2217': '${_\\ast}$', u'\u2218': '$\\circ$', u'\u2219': '$\\bullet$', u'\u221a': '$\\surd$', u'\u221d': '$\\propto$', u'\u221e': '$\\infty$', u'\u221f': '$\\rightangle$', u'\u2220': '$\\angle$', u'\u2221': '$\\measuredangle$', u'\u2222': '$\\sphericalangle$', u'\u2223': '$\\mid$', u'\u2224': '$\\nmid$', 
u'\u2225': '$\\parallel$', u'\u2226': '$\\nparallel$', u'\u2227': '$\\wedge$', u'\u2228': '$\\vee$', u'\u2229': '$\\cap$', u'\u222a': '$\\cup$', u'\u222b': '$\\int$', u'\u222c': '$\\int\\!\\int$', u'\u222d': '$\\int\\!\\int\\!\\int$', u'\u222e': '$\\oint$', u'\u222f': '$\\surfintegral$', u'\u2230': '$\\volintegral$', u'\u2231': '$\\clwintegral$', u'\u2232': '$\\ElsevierGlyph{2232}$', u'\u2233': '$\\ElsevierGlyph{2233}$', u'\u2234': '$\\therefore$', u'\u2235': '$\\because$', u'\u2237': '$\\Colon$', u'\u2238': '$\\ElsevierGlyph{2238}$', u'\u223a': '$\\mathbin{{:}\\!\\!{-}\\!\\!{:}}$', u'\u223b': '$\\homothetic$', u'\u223c': '$\\sim$', u'\u223d': '$\\backsim$', u'\u223e': '$\\lazysinv$', u'\u2240': '$\\wr$', u'\u2241': '$\\not\\sim$', u'\u2242': '$\\ElsevierGlyph{2242}$', u'\u2243': '$\\simeq$', u'\u2244': '$\\not\\simeq$', u'\u2245': '$\\cong$', u'\u2246': '$\\approxnotequal$', u'\u2247': '$\\not\\cong$', u'\u2248': '$\\approx$', u'\u2249': '$\\not\\approx$', u'\u224a': '$\\approxeq$', u'\u224b': '$\\tildetrpl$', u'\u224c': '$\\allequal$', u'\u224d': '$\\asymp$', u'\u224e': '$\\Bumpeq$', u'\u224f': '$\\bumpeq$', u'\u2250': '$\\doteq$', u'\u2251': '$\\doteqdot$', u'\u2252': '$\\fallingdotseq$', u'\u2253': '$\\risingdotseq$', u'\u2254': '{:=}', u'\u2255': '$=:$', u'\u2256': '$\\eqcirc$', u'\u2257': '$\\circeq$', u'\u2259': '$\\estimates$', u'\u225a': '$\\ElsevierGlyph{225A}$', u'\u225b': '$\\starequal$', u'\u225c': '$\\triangleq$', u'\u225f': '$\\ElsevierGlyph{225F}$', u'\u2260': '$\\not =$', u'\u2261': '$\\equiv$', u'\u2262': '$\\not\\equiv$', u'\u2264': '$\\leq$', u'\u2265': '$\\geq$', u'\u2266': '$\\leqq$', u'\u2267': '$\\geqq$', u'\u2268': '$\\lneqq$', u'\u2269': '$\\gneqq$', u'\u226a': '$\\ll$', u'\u226b': '$\\gg$', u'\u226c': '$\\between$', u'\u226d': '$\\not\\kern-0.3em\\times$', u'\u226e': '$\\not<$', u'\u226f': '$\\not>$', u'\u2270': '$\\not\\leq$', u'\u2271': '$\\not\\geq$', u'\u2272': '$\\lessequivlnt$', u'\u2273': '$\\greaterequivlnt$', u'\u2274': 
'$\\ElsevierGlyph{2274}$', u'\u2275': '$\\ElsevierGlyph{2275}$', u'\u2276': '$\\lessgtr$', u'\u2277': '$\\gtrless$', u'\u2278': '$\\notlessgreater$', u'\u2279': '$\\notgreaterless$', u'\u227a': '$\\prec$', u'\u227b': '$\\succ$', u'\u227c': '$\\preccurlyeq$', u'\u227d': '$\\succcurlyeq$', u'\u227e': '$\\precapprox$', u'\u227f': '$\\succapprox$', u'\u2280': '$\\not\\prec$', u'\u2281': '$\\not\\succ$', u'\u2282': '$\\subset$', u'\u2283': '$\\supset$', u'\u2284': '$\\not\\subset$', u'\u2285': '$\\not\\supset$', u'\u2286': '$\\subseteq$', u'\u2287': '$\\supseteq$', u'\u2288': '$\\not\\subseteq$', u'\u2289': '$\\not\\supseteq$', u'\u228a': '$\\subsetneq$', u'\u228b': '$\\supsetneq$', u'\u228e': '$\\uplus$', u'\u228f': '$\\sqsubset$', u'\u2290': '$\\sqsupset$', u'\u2291': '$\\sqsubseteq$', u'\u2292': '$\\sqsupseteq$', u'\u2293': '$\\sqcap$', u'\u2294': '$\\sqcup$', u'\u2295': '$\\oplus$', u'\u2296': '$\\ominus$', u'\u2297': '$\\otimes$', u'\u2298': '$\\oslash$', u'\u2299': '$\\odot$', u'\u229a': '$\\circledcirc$', u'\u229b': '$\\circledast$', u'\u229d': '$\\circleddash$', u'\u229e': '$\\boxplus$', u'\u229f': '$\\boxminus$', u'\u22a0': '$\\boxtimes$', u'\u22a1': '$\\boxdot$', u'\u22a2': '$\\vdash$', u'\u22a3': '$\\dashv$', u'\u22a4': '$\\top$', u'\u22a5': '$\\perp$', u'\u22a7': '$\\truestate$', u'\u22a8': '$\\forcesextra$', u'\u22a9': '$\\Vdash$', u'\u22aa': '$\\Vvdash$', u'\u22ab': '$\\VDash$', u'\u22ac': '$\\nvdash$', u'\u22ad': '$\\nvDash$', u'\u22ae': '$\\nVdash$', u'\u22af': '$\\nVDash$', u'\u22b2': '$\\vartriangleleft$', u'\u22b3': '$\\vartriangleright$', u'\u22b4': '$\\trianglelefteq$', u'\u22b5': '$\\trianglerighteq$', u'\u22b6': '$\\original$', u'\u22b7': '$\\image$', u'\u22b8': '$\\multimap$', u'\u22b9': '$\\hermitconjmatrix$', u'\u22ba': '$\\intercal$', u'\u22bb': '$\\veebar$', u'\u22be': '$\\rightanglearc$', u'\u22c0': '$\\ElsevierGlyph{22C0}$', u'\u22c1': '$\\ElsevierGlyph{22C1}$', u'\u22c2': '$\\bigcap$', u'\u22c3': '$\\bigcup$', u'\u22c4': '$\\diamond$', 
u'\u22c5': '$\\cdot$', u'\u22c6': '$\\star$', u'\u22c7': '$\\divideontimes$', u'\u22c8': '$\\bowtie$', u'\u22c9': '$\\ltimes$', u'\u22ca': '$\\rtimes$', u'\u22cb': '$\\leftthreetimes$', u'\u22cc': '$\\rightthreetimes$', u'\u22cd': '$\\backsimeq$', u'\u22ce': '$\\curlyvee$', u'\u22cf': '$\\curlywedge$', u'\u22d0': '$\\Subset$', u'\u22d1': '$\\Supset$', u'\u22d2': '$\\Cap$', u'\u22d3': '$\\Cup$', u'\u22d4': '$\\pitchfork$', u'\u22d6': '$\\lessdot$', u'\u22d7': '$\\gtrdot$', u'\u22d8': '$\\verymuchless$', u'\u22d9': '$\\verymuchgreater$', u'\u22da': '$\\lesseqgtr$', u'\u22db': '$\\gtreqless$', u'\u22de': '$\\curlyeqprec$', u'\u22df': '$\\curlyeqsucc$', u'\u22e2': '$\\not\\sqsubseteq$', u'\u22e3': '$\\not\\sqsupseteq$', u'\u22e5': '$\\Elzsqspne$', u'\u22e6': '$\\lnsim$', u'\u22e7': '$\\gnsim$', u'\u22e8': '$\\precedesnotsimilar$', u'\u22e9': '$\\succnsim$', u'\u22ea': '$\\ntriangleleft$', u'\u22eb': '$\\ntriangleright$', u'\u22ec': '$\\ntrianglelefteq$', u'\u22ed': '$\\ntrianglerighteq$', u'\u22ee': '$\\vdots$', u'\u22ef': '$\\cdots$', u'\u22f0': '$\\upslopeellipsis$', u'\u22f1': '$\\downslopeellipsis$', u'\u2305': '{\\barwedge}', u'\u2306': '$\\perspcorrespond$', u'\u2308': '$\\lceil$', u'\u2309': '$\\rceil$', u'\u230a': '$\\lfloor$', u'\u230b': '$\\rfloor$', u'\u2315': '$\\recorder$', u'\u2316': '$\\mathchar"2208$', u'\u231c': '$\\ulcorner$', u'\u231d': '$\\urcorner$', u'\u231e': '$\\llcorner$', u'\u231f': '$\\lrcorner$', u'\u2322': '$\\frown$', u'\u2323': '$\\smile$', u'\u2329': '$\\langle$', u'\u232a': '$\\rangle$', u'\u233d': '$\\ElsevierGlyph{E838}$', u'\u23a3': '$\\Elzdlcorn$', u'\u23b0': '$\\lmoustache$', u'\u23b1': '$\\rmoustache$', u'\u2423': '{\\textvisiblespace}', u'\u2460': '{\\ding{172}}', u'\u2461': '{\\ding{173}}', u'\u2462': '{\\ding{174}}', u'\u2463': '{\\ding{175}}', u'\u2464': '{\\ding{176}}', u'\u2465': '{\\ding{177}}', u'\u2466': '{\\ding{178}}', u'\u2467': '{\\ding{179}}', u'\u2468': '{\\ding{180}}', u'\u2469': '{\\ding{181}}', u'\u24c8': 
'$\\circledS$', u'\u2506': '$\\Elzdshfnc$', u'\u2519': '$\\Elzsqfnw$', u'\u2571': '$\\diagup$', u'\u25a0': '{\\ding{110}}', u'\u25a1': '$\\square$', u'\u25aa': '$\\blacksquare$', u'\u25ad': '$\\fbox{~~}$', u'\u25af': '$\\Elzvrecto$', u'\u25b1': '$\\ElsevierGlyph{E381}$', u'\u25b2': '{\\ding{115}}', u'\u25b3': '$\\bigtriangleup$', u'\u25b4': '$\\blacktriangle$', u'\u25b5': '$\\vartriangle$', u'\u25b8': '$\\blacktriangleright$', u'\u25b9': '$\\triangleright$', u'\u25bc': '{\\ding{116}}', u'\u25bd': '$\\bigtriangledown$', u'\u25be': '$\\blacktriangledown$', u'\u25bf': '$\\triangledown$', u'\u25c2': '$\\blacktriangleleft$', u'\u25c3': '$\\triangleleft$', u'\u25c6': '{\\ding{117}}', u'\u25ca': '$\\lozenge$', u'\u25cb': '$\\bigcirc$', u'\u25cf': '{\\ding{108}}', u'\u25d0': '$\\Elzcirfl$', u'\u25d1': '$\\Elzcirfr$', u'\u25d2': '$\\Elzcirfb$', u'\u25d7': '{\\ding{119}}', u'\u25d8': '$\\Elzrvbull$', u'\u25e7': '$\\Elzsqfl$', u'\u25e8': '$\\Elzsqfr$', u'\u25ea': '$\\Elzsqfse$', u'\u25ef': '$\\bigcirc$', u'\u2605': '{\\ding{72}}', u'\u2606': '{\\ding{73}}', u'\u260e': '{\\ding{37}}', u'\u261b': '{\\ding{42}}', u'\u261e': '{\\ding{43}}', u'\u263e': '{\\rightmoon}', u'\u263f': '{\\mercury}', u'\u2640': '{\\venus}', u'\u2642': '{\\male}', u'\u2643': '{\\jupiter}', u'\u2644': '{\\saturn}', u'\u2645': '{\\uranus}', u'\u2646': '{\\neptune}', u'\u2647': '{\\pluto}', u'\u2648': '{\\aries}', u'\u2649': '{\\taurus}', u'\u264a': '{\\gemini}', u'\u264b': '{\\cancer}', u'\u264c': '{\\leo}', u'\u264d': '{\\virgo}', u'\u264e': '{\\libra}', u'\u264f': '{\\scorpio}', u'\u2650': '{\\sagittarius}', u'\u2651': '{\\capricornus}', u'\u2652': '{\\aquarius}', u'\u2653': '{\\pisces}', u'\u2660': '{\\ding{171}}', u'\u2662': '$\\diamond$', u'\u2663': '{\\ding{168}}', u'\u2665': '{\\ding{170}}', u'\u2666': '{\\ding{169}}', u'\u2669': '{\\quarternote}', u'\u266a': '{\\eighthnote}', u'\u266d': '$\\flat$', u'\u266e': '$\\natural$', u'\u266f': '$\\sharp$', u'\u2701': '{\\ding{33}}', u'\u2702': 
'{\\ding{34}}', u'\u2703': '{\\ding{35}}', u'\u2704': '{\\ding{36}}', u'\u2706': '{\\ding{38}}', u'\u2707': '{\\ding{39}}', u'\u2708': '{\\ding{40}}', u'\u2709': '{\\ding{41}}', u'\u270c': '{\\ding{44}}', u'\u270d': '{\\ding{45}}', u'\u270e': '{\\ding{46}}', u'\u270f': '{\\ding{47}}', u'\u2710': '{\\ding{48}}', u'\u2711': '{\\ding{49}}', u'\u2712': '{\\ding{50}}', u'\u2713': '{\\ding{51}}', u'\u2714': '{\\ding{52}}', u'\u2715': '{\\ding{53}}', u'\u2716': '{\\ding{54}}', u'\u2717': '{\\ding{55}}', u'\u2718': '{\\ding{56}}', u'\u2719': '{\\ding{57}}', u'\u271a': '{\\ding{58}}', u'\u271b': '{\\ding{59}}', u'\u271c': '{\\ding{60}}', u'\u271d': '{\\ding{61}}', u'\u271e': '{\\ding{62}}', u'\u271f': '{\\ding{63}}', u'\u2720': '{\\ding{64}}', u'\u2721': '{\\ding{65}}', u'\u2722': '{\\ding{66}}', u'\u2723': '{\\ding{67}}', u'\u2724': '{\\ding{68}}', u'\u2725': '{\\ding{69}}', u'\u2726': '{\\ding{70}}', u'\u2727': '{\\ding{71}}', u'\u2729': '{\\ding{73}}', u'\u272a': '{\\ding{74}}', u'\u272b': '{\\ding{75}}', u'\u272c': '{\\ding{76}}', u'\u272d': '{\\ding{77}}', u'\u272e': '{\\ding{78}}', u'\u272f': '{\\ding{79}}', u'\u2730': '{\\ding{80}}', u'\u2731': '{\\ding{81}}', u'\u2732': '{\\ding{82}}', u'\u2733': '{\\ding{83}}', u'\u2734': '{\\ding{84}}', u'\u2735': '{\\ding{85}}', u'\u2736': '{\\ding{86}}', u'\u2737': '{\\ding{87}}', u'\u2738': '{\\ding{88}}', u'\u2739': '{\\ding{89}}', u'\u273a': '{\\ding{90}}', u'\u273b': '{\\ding{91}}', u'\u273c': '{\\ding{92}}', u'\u273d': '{\\ding{93}}', u'\u273e': '{\\ding{94}}', u'\u273f': '{\\ding{95}}', u'\u2740': '{\\ding{96}}', u'\u2741': '{\\ding{97}}', u'\u2742': '{\\ding{98}}', u'\u2743': '{\\ding{99}}', u'\u2744': '{\\ding{100}}', u'\u2745': '{\\ding{101}}', u'\u2746': '{\\ding{102}}', u'\u2747': '{\\ding{103}}', u'\u2748': '{\\ding{104}}', u'\u2749': '{\\ding{105}}', u'\u274a': '{\\ding{106}}', u'\u274b': '{\\ding{107}}', u'\u274d': '{\\ding{109}}', u'\u274f': '{\\ding{111}}', u'\u2750': '{\\ding{112}}', u'\u2751': '{\\ding{113}}', 
u'\u2752': '{\\ding{114}}', u'\u2756': '{\\ding{118}}', u'\u2758': '{\\ding{120}}', u'\u2759': '{\\ding{121}}', u'\u275a': '{\\ding{122}}', u'\u275b': '{\\ding{123}}', u'\u275c': '{\\ding{124}}', u'\u275d': '{\\ding{125}}', u'\u275e': '{\\ding{126}}', u'\u2761': '{\\ding{161}}', u'\u2762': '{\\ding{162}}', u'\u2763': '{\\ding{163}}', u'\u2764': '{\\ding{164}}', u'\u2765': '{\\ding{165}}', u'\u2766': '{\\ding{166}}', u'\u2767': '{\\ding{167}}', u'\u2776': '{\\ding{182}}', u'\u2777': '{\\ding{183}}', u'\u2778': '{\\ding{184}}', u'\u2779': '{\\ding{185}}', u'\u277a': '{\\ding{186}}', u'\u277b': '{\\ding{187}}', u'\u277c': '{\\ding{188}}', u'\u277d': '{\\ding{189}}', u'\u277e': '{\\ding{190}}', u'\u277f': '{\\ding{191}}', u'\u2780': '{\\ding{192}}', u'\u2781': '{\\ding{193}}', u'\u2782': '{\\ding{194}}', u'\u2783': '{\\ding{195}}', u'\u2784': '{\\ding{196}}', u'\u2785': '{\\ding{197}}', u'\u2786': '{\\ding{198}}', u'\u2787': '{\\ding{199}}', u'\u2788': '{\\ding{200}}', u'\u2789': '{\\ding{201}}', u'\u278a': '{\\ding{202}}', u'\u278b': '{\\ding{203}}', u'\u278c': '{\\ding{204}}', u'\u278d': '{\\ding{205}}', u'\u278e': '{\\ding{206}}', u'\u278f': '{\\ding{207}}', u'\u2790': '{\\ding{208}}', u'\u2791': '{\\ding{209}}', u'\u2792': '{\\ding{210}}', u'\u2793': '{\\ding{211}}', u'\u2794': '{\\ding{212}}', u'\u2798': '{\\ding{216}}', u'\u2799': '{\\ding{217}}', u'\u279a': '{\\ding{218}}', u'\u279b': '{\\ding{219}}', u'\u279c': '{\\ding{220}}', u'\u279d': '{\\ding{221}}', u'\u279e': '{\\ding{222}}', u'\u279f': '{\\ding{223}}', u'\u27a0': '{\\ding{224}}', u'\u27a1': '{\\ding{225}}', u'\u27a2': '{\\ding{226}}', u'\u27a3': '{\\ding{227}}', u'\u27a4': '{\\ding{228}}', u'\u27a5': '{\\ding{229}}', u'\u27a6': '{\\ding{230}}', u'\u27a7': '{\\ding{231}}', u'\u27a8': '{\\ding{232}}', u'\u27a9': '{\\ding{233}}', u'\u27aa': '{\\ding{234}}', u'\u27ab': '{\\ding{235}}', u'\u27ac': '{\\ding{236}}', u'\u27ad': '{\\ding{237}}', u'\u27ae': '{\\ding{238}}', u'\u27af': '{\\ding{239}}', u'\u27b1': 
'{\\ding{241}}', u'\u27b2': '{\\ding{242}}', u'\u27b3': '{\\ding{243}}', u'\u27b4': '{\\ding{244}}', u'\u27b5': '{\\ding{245}}', u'\u27b6': '{\\ding{246}}', u'\u27b7': '{\\ding{247}}', u'\u27b8': '{\\ding{248}}', u'\u27b9': '{\\ding{249}}', u'\u27ba': '{\\ding{250}}', u'\u27bb': '{\\ding{251}}', u'\u27bc': '{\\ding{252}}', u'\u27bd': '{\\ding{253}}', u'\u27be': '{\\ding{254}}', u'\u27f5': '$\\longleftarrow$', u'\u27f6': '$\\longrightarrow$', u'\u27f7': '$\\longleftrightarrow$', u'\u27f8': '$\\Longleftarrow$', u'\u27f9': '$\\Longrightarrow$', u'\u27fa': '$\\Longleftrightarrow$', u'\u27fc': '$\\longmapsto$', u'\u27ff': '$\\sim\\joinrel\\leadsto$', u'\u2905': '$\\ElsevierGlyph{E212}$', u'\u2912': '$\\UpArrowBar$', u'\u2913': '$\\DownArrowBar$', u'\u2923': '$\\ElsevierGlyph{E20C}$', u'\u2924': '$\\ElsevierGlyph{E20D}$', u'\u2925': '$\\ElsevierGlyph{E20B}$', u'\u2926': '$\\ElsevierGlyph{E20A}$', u'\u2927': '$\\ElsevierGlyph{E211}$', u'\u2928': '$\\ElsevierGlyph{E20E}$', u'\u2929': '$\\ElsevierGlyph{E20F}$', u'\u292a': '$\\ElsevierGlyph{E210}$', u'\u2933': '$\\ElsevierGlyph{E21C}$', u'\u2936': '$\\ElsevierGlyph{E21A}$', u'\u2937': '$\\ElsevierGlyph{E219}$', u'\u2940': '$\\Elolarr$', u'\u2941': '$\\Elorarr$', u'\u2942': '$\\ElzRlarr$', u'\u2944': '$\\ElzrLarr$', u'\u2947': '$\\Elzrarrx$', u'\u294e': '$\\LeftRightVector$', u'\u294f': '$\\RightUpDownVector$', u'\u2950': '$\\DownLeftRightVector$', u'\u2951': '$\\LeftUpDownVector$', u'\u2952': '$\\LeftVectorBar$', u'\u2953': '$\\RightVectorBar$', u'\u2954': '$\\RightUpVectorBar$', u'\u2955': '$\\RightDownVectorBar$', u'\u2956': '$\\DownLeftVectorBar$', u'\u2957': '$\\DownRightVectorBar$', u'\u2958': '$\\LeftUpVectorBar$', u'\u2959': '$\\LeftDownVectorBar$', u'\u295a': '$\\LeftTeeVector$', u'\u295b': '$\\RightTeeVector$', u'\u295c': '$\\RightUpTeeVector$', u'\u295d': '$\\RightDownTeeVector$', u'\u295e': '$\\DownLeftTeeVector$', u'\u295f': '$\\DownRightTeeVector$', u'\u2960': '$\\LeftUpTeeVector$', u'\u2961': 
'$\\LeftDownTeeVector$', u'\u296e': '$\\UpEquilibrium$', u'\u296f': '$\\ReverseUpEquilibrium$', u'\u2970': '$\\RoundImplies$', u'\u297c': '$\\ElsevierGlyph{E214}$', u'\u297d': '$\\ElsevierGlyph{E215}$', u'\u2980': '$\\Elztfnc$', u'\u2985': '$\\ElsevierGlyph{3018}$', u'\u2986': '$\\Elroang$', u'\u2993': '$<\\kern-0.58em($', u'\u2994': '$\\ElsevierGlyph{E291}$', u'\u2999': '$\\Elzddfnc$', u'\u299c': '$\\Angle$', u'\u29a0': '$\\Elzlpargt$', u'\u29b5': '$\\ElsevierGlyph{E260}$', u'\u29b6': '$\\ElsevierGlyph{E61B}$', u'\u29ca': '$\\ElzLap$', u'\u29cb': '$\\Elzdefas$', u'\u29cf': '$\\LeftTriangleBar$', u'\u29d0': '$\\RightTriangleBar$', u'\u29dc': '$\\ElsevierGlyph{E372}$', u'\u29eb': '$\\blacklozenge$', u'\u29f4': '$\\RuleDelayed$', u'\u2a04': '$\\Elxuplus$', u'\u2a05': '$\\ElzThr$', u'\u2a06': '$\\Elxsqcup$', u'\u2a07': '$\\ElzInf$', u'\u2a08': '$\\ElzSup$', u'\u2a0d': '$\\ElzCint$', u'\u2a0f': '$\\clockoint$', u'\u2a10': '$\\ElsevierGlyph{E395}$', u'\u2a16': '$\\sqrint$', u'\u2a25': '$\\ElsevierGlyph{E25A}$', u'\u2a2a': '$\\ElsevierGlyph{E25B}$', u'\u2a2d': '$\\ElsevierGlyph{E25C}$', u'\u2a2e': '$\\ElsevierGlyph{E25D}$', u'\u2a2f': '$\\ElzTimes$', u'\u2a34': '$\\ElsevierGlyph{E25E}$', u'\u2a35': '$\\ElsevierGlyph{E25E}$', u'\u2a3c': '$\\ElsevierGlyph{E259}$', u'\u2a3f': '$\\amalg$', u'\u2a53': '$\\ElzAnd$', u'\u2a54': '$\\ElzOr$', u'\u2a55': '$\\ElsevierGlyph{E36E}$', u'\u2a56': '$\\ElOr$', u'\u2a5e': '$\\perspcorrespond$', u'\u2a5f': '$\\Elzminhat$', u'\u2a63': '$\\ElsevierGlyph{225A}$', u'\u2a6e': '$\\stackrel{*}{=}$', u'\u2a75': '$\\Equal$', u'\u2a7d': '$\\leqslant$', u'\u2a7e': '$\\geqslant$', u'\u2a85': '$\\lessapprox$', u'\u2a86': '$\\gtrapprox$', u'\u2a87': '$\\lneq$', u'\u2a88': '$\\gneq$', u'\u2a89': '$\\lnapprox$', u'\u2a8a': '$\\gnapprox$', u'\u2a8b': '$\\lesseqqgtr$', u'\u2a8c': '$\\gtreqqless$', u'\u2a95': '$\\eqslantless$', u'\u2a96': '$\\eqslantgtr$', u'\u2a9d': '$\\Pisymbol{ppi020}{117}$', u'\u2a9e': '$\\Pisymbol{ppi020}{105}$', u'\u2aa1': 
'$\\NestedLessLess$', u'\u2aa2': '$\\NestedGreaterGreater$', u'\u2aaf': '$\\preceq$', u'\u2ab0': '$\\succeq$', u'\u2ab5': '$\\precneqq$', u'\u2ab6': '$\\succneqq$', u'\u2ab7': '$\\precapprox$', u'\u2ab8': '$\\succapprox$', u'\u2ab9': '$\\precnapprox$', u'\u2aba': '$\\succnapprox$', u'\u2ac5': '$\\subseteqq$', u'\u2ac6': '$\\supseteqq$', u'\u2acb': '$\\subsetneqq$', u'\u2acc': '$\\supsetneqq$', u'\u2aeb': '$\\ElsevierGlyph{E30D}$', u'\u2af6': '$\\Elztdcol$', u'\u2afd': '${{/}\\!\\!{/}}$', u'\u300a': '$\\ElsevierGlyph{300A}$', u'\u300b': '$\\ElsevierGlyph{300B}$', u'\u3018': '$\\ElsevierGlyph{3018}$', u'\u3019': '$\\ElsevierGlyph{3019}$', u'\u301a': '$\\openbracketleft$', u'\u301b': '$\\openbracketright$', u'\ufb00': '{ff}', u'\ufb01': '{fi}', u'\ufb02': '{fl}', u'\ufb03': '{ffi}', u'\ufb04': '{ffl}', u'\U0001d400': '$\\mathbf{A}$', u'\U0001d401': '$\\mathbf{B}$', u'\U0001d402': '$\\mathbf{C}$', u'\U0001d403': '$\\mathbf{D}$', u'\U0001d404': '$\\mathbf{E}$', u'\U0001d405': '$\\mathbf{F}$', u'\U0001d406': '$\\mathbf{G}$', u'\U0001d407': '$\\mathbf{H}$', u'\U0001d408': '$\\mathbf{I}$', u'\U0001d409': '$\\mathbf{J}$', u'\U0001d40a': '$\\mathbf{K}$', u'\U0001d40b': '$\\mathbf{L}$', u'\U0001d40c': '$\\mathbf{M}$', u'\U0001d40d': '$\\mathbf{N}$', u'\U0001d40e': '$\\mathbf{O}$', u'\U0001d40f': '$\\mathbf{P}$', u'\U0001d410': '$\\mathbf{Q}$', u'\U0001d411': '$\\mathbf{R}$', u'\U0001d412': '$\\mathbf{S}$', u'\U0001d413': '$\\mathbf{T}$', u'\U0001d414': '$\\mathbf{U}$', u'\U0001d415': '$\\mathbf{V}$', u'\U0001d416': '$\\mathbf{W}$', u'\U0001d417': '$\\mathbf{X}$', u'\U0001d418': '$\\mathbf{Y}$', u'\U0001d419': '$\\mathbf{Z}$', u'\U0001d41a': '$\\mathbf{a}$', u'\U0001d41b': '$\\mathbf{b}$', u'\U0001d41c': '$\\mathbf{c}$', u'\U0001d41d': '$\\mathbf{d}$', u'\U0001d41e': '$\\mathbf{e}$', u'\U0001d41f': '$\\mathbf{f}$', u'\U0001d420': '$\\mathbf{g}$', u'\U0001d421': '$\\mathbf{h}$', u'\U0001d422': '$\\mathbf{i}$', u'\U0001d423': '$\\mathbf{j}$', u'\U0001d424': '$\\mathbf{k}$', 
u'\U0001d425': '$\\mathbf{l}$', u'\U0001d426': '$\\mathbf{m}$', u'\U0001d427': '$\\mathbf{n}$', u'\U0001d428': '$\\mathbf{o}$', u'\U0001d429': '$\\mathbf{p}$', u'\U0001d42a': '$\\mathbf{q}$', u'\U0001d42b': '$\\mathbf{r}$', u'\U0001d42c': '$\\mathbf{s}$', u'\U0001d42d': '$\\mathbf{t}$', u'\U0001d42e': '$\\mathbf{u}$', u'\U0001d42f': '$\\mathbf{v}$', u'\U0001d430': '$\\mathbf{w}$', u'\U0001d431': '$\\mathbf{x}$', u'\U0001d432': '$\\mathbf{y}$', u'\U0001d433': '$\\mathbf{z}$', u'\U0001d434': '$\\mathsl{A}$', u'\U0001d435': '$\\mathsl{B}$', u'\U0001d436': '$\\mathsl{C}$', u'\U0001d437': '$\\mathsl{D}$', u'\U0001d438': '$\\mathsl{E}$', u'\U0001d439': '$\\mathsl{F}$', u'\U0001d43a': '$\\mathsl{G}$', u'\U0001d43b': '$\\mathsl{H}$', u'\U0001d43c': '$\\mathsl{I}$', u'\U0001d43d': '$\\mathsl{J}$', u'\U0001d43e': '$\\mathsl{K}$', u'\U0001d43f': '$\\mathsl{L}$', u'\U0001d440': '$\\mathsl{M}$', u'\U0001d441': '$\\mathsl{N}$', u'\U0001d442': '$\\mathsl{O}$', u'\U0001d443': '$\\mathsl{P}$', u'\U0001d444': '$\\mathsl{Q}$', u'\U0001d445': '$\\mathsl{R}$', u'\U0001d446': '$\\mathsl{S}$', u'\U0001d447': '$\\mathsl{T}$', u'\U0001d448': '$\\mathsl{U}$', u'\U0001d449': '$\\mathsl{V}$', u'\U0001d44a': '$\\mathsl{W}$', u'\U0001d44b': '$\\mathsl{X}$', u'\U0001d44c': '$\\mathsl{Y}$', u'\U0001d44d': '$\\mathsl{Z}$', u'\U0001d44e': '$\\mathsl{a}$', u'\U0001d44f': '$\\mathsl{b}$', u'\U0001d450': '$\\mathsl{c}$', u'\U0001d451': '$\\mathsl{d}$', u'\U0001d452': '$\\mathsl{e}$', u'\U0001d453': '$\\mathsl{f}$', u'\U0001d454': '$\\mathsl{g}$', u'\U0001d456': '$\\mathsl{i}$', u'\U0001d457': '$\\mathsl{j}$', u'\U0001d458': '$\\mathsl{k}$', u'\U0001d459': '$\\mathsl{l}$', u'\U0001d45a': '$\\mathsl{m}$', u'\U0001d45b': '$\\mathsl{n}$', u'\U0001d45c': '$\\mathsl{o}$', u'\U0001d45d': '$\\mathsl{p}$', u'\U0001d45e': '$\\mathsl{q}$', u'\U0001d45f': '$\\mathsl{r}$', u'\U0001d460': '$\\mathsl{s}$', u'\U0001d461': '$\\mathsl{t}$', u'\U0001d462': '$\\mathsl{u}$', u'\U0001d463': '$\\mathsl{v}$', u'\U0001d464': 
'$\\mathsl{w}$', u'\U0001d465': '$\\mathsl{x}$', u'\U0001d466': '$\\mathsl{y}$', u'\U0001d467': '$\\mathsl{z}$', u'\U0001d468': '$\\mathbit{A}$', u'\U0001d469': '$\\mathbit{B}$', u'\U0001d46a': '$\\mathbit{C}$', u'\U0001d46b': '$\\mathbit{D}$', u'\U0001d46c': '$\\mathbit{E}$', u'\U0001d46d': '$\\mathbit{F}$', u'\U0001d46e': '$\\mathbit{G}$', u'\U0001d46f': '$\\mathbit{H}$', u'\U0001d470': '$\\mathbit{I}$', u'\U0001d471': '$\\mathbit{J}$', u'\U0001d472': '$\\mathbit{K}$', u'\U0001d473': '$\\mathbit{L}$', u'\U0001d474': '$\\mathbit{M}$', u'\U0001d475': '$\\mathbit{N}$', u'\U0001d476': '$\\mathbit{O}$', u'\U0001d477': '$\\mathbit{P}$', u'\U0001d478': '$\\mathbit{Q}$', u'\U0001d479': '$\\mathbit{R}$', u'\U0001d47a': '$\\mathbit{S}$', u'\U0001d47b': '$\\mathbit{T}$', u'\U0001d47c': '$\\mathbit{U}$', u'\U0001d47d': '$\\mathbit{V}$', u'\U0001d47e': '$\\mathbit{W}$', u'\U0001d47f': '$\\mathbit{X}$', u'\U0001d480': '$\\mathbit{Y}$', u'\U0001d481': '$\\mathbit{Z}$', u'\U0001d482': '$\\mathbit{a}$', u'\U0001d483': '$\\mathbit{b}$', u'\U0001d484': '$\\mathbit{c}$', u'\U0001d485': '$\\mathbit{d}$', u'\U0001d486': '$\\mathbit{e}$', u'\U0001d487': '$\\mathbit{f}$', u'\U0001d488': '$\\mathbit{g}$', u'\U0001d489': '$\\mathbit{h}$', u'\U0001d48a': '$\\mathbit{i}$', u'\U0001d48b': '$\\mathbit{j}$', u'\U0001d48c': '$\\mathbit{k}$', u'\U0001d48d': '$\\mathbit{l}$', u'\U0001d48e': '$\\mathbit{m}$', u'\U0001d48f': '$\\mathbit{n}$', u'\U0001d490': '$\\mathbit{o}$', u'\U0001d491': '$\\mathbit{p}$', u'\U0001d492': '$\\mathbit{q}$', u'\U0001d493': '$\\mathbit{r}$', u'\U0001d494': '$\\mathbit{s}$', u'\U0001d495': '$\\mathbit{t}$', u'\U0001d496': '$\\mathbit{u}$', u'\U0001d497': '$\\mathbit{v}$', u'\U0001d498': '$\\mathbit{w}$', u'\U0001d499': '$\\mathbit{x}$', u'\U0001d49a': '$\\mathbit{y}$', u'\U0001d49b': '$\\mathbit{z}$', u'\U0001d49c': '$\\mathscr{A}$', u'\U0001d49e': '$\\mathscr{C}$', u'\U0001d49f': '$\\mathscr{D}$', u'\U0001d4a2': '$\\mathscr{G}$', u'\U0001d4a5': '$\\mathscr{J}$', 
u'\U0001d4a6': '$\\mathscr{K}$', u'\U0001d4a9': '$\\mathscr{N}$', u'\U0001d4aa': '$\\mathscr{O}$', u'\U0001d4ab': '$\\mathscr{P}$', u'\U0001d4ac': '$\\mathscr{Q}$', u'\U0001d4ae': '$\\mathscr{S}$', u'\U0001d4af': '$\\mathscr{T}$', u'\U0001d4b0': '$\\mathscr{U}$', u'\U0001d4b1': '$\\mathscr{V}$', u'\U0001d4b2': '$\\mathscr{W}$', u'\U0001d4b3': '$\\mathscr{X}$', u'\U0001d4b4': '$\\mathscr{Y}$', u'\U0001d4b5': '$\\mathscr{Z}$', u'\U0001d4b6': '$\\mathscr{a}$', u'\U0001d4b7': '$\\mathscr{b}$', u'\U0001d4b8': '$\\mathscr{c}$', u'\U0001d4b9': '$\\mathscr{d}$', u'\U0001d4bb': '$\\mathscr{f}$', u'\U0001d4bd': '$\\mathscr{h}$', u'\U0001d4be': '$\\mathscr{i}$', u'\U0001d4bf': '$\\mathscr{j}$', u'\U0001d4c0': '$\\mathscr{k}$', u'\U0001d4c1': '$\\mathscr{l}$', u'\U0001d4c2': '$\\mathscr{m}$', u'\U0001d4c3': '$\\mathscr{n}$', u'\U0001d4c5': '$\\mathscr{p}$', u'\U0001d4c6': '$\\mathscr{q}$', u'\U0001d4c7': '$\\mathscr{r}$', u'\U0001d4c8': '$\\mathscr{s}$', u'\U0001d4c9': '$\\mathscr{t}$', u'\U0001d4ca': '$\\mathscr{u}$', u'\U0001d4cb': '$\\mathscr{v}$', u'\U0001d4cc': '$\\mathscr{w}$', u'\U0001d4cd': '$\\mathscr{x}$', u'\U0001d4ce': '$\\mathscr{y}$', u'\U0001d4cf': '$\\mathscr{z}$', u'\U0001d4d0': '$\\mathmit{A}$', u'\U0001d4d1': '$\\mathmit{B}$', u'\U0001d4d2': '$\\mathmit{C}$', u'\U0001d4d3': '$\\mathmit{D}$', u'\U0001d4d4': '$\\mathmit{E}$', u'\U0001d4d5': '$\\mathmit{F}$', u'\U0001d4d6': '$\\mathmit{G}$', u'\U0001d4d7': '$\\mathmit{H}$', u'\U0001d4d8': '$\\mathmit{I}$', u'\U0001d4d9': '$\\mathmit{J}$', u'\U0001d4da': '$\\mathmit{K}$', u'\U0001d4db': '$\\mathmit{L}$', u'\U0001d4dc': '$\\mathmit{M}$', u'\U0001d4dd': '$\\mathmit{N}$', u'\U0001d4de': '$\\mathmit{O}$', u'\U0001d4df': '$\\mathmit{P}$', u'\U0001d4e0': '$\\mathmit{Q}$', u'\U0001d4e1': '$\\mathmit{R}$', u'\U0001d4e2': '$\\mathmit{S}$', u'\U0001d4e3': '$\\mathmit{T}$', u'\U0001d4e4': '$\\mathmit{U}$', u'\U0001d4e5': '$\\mathmit{V}$', u'\U0001d4e6': '$\\mathmit{W}$', u'\U0001d4e7': '$\\mathmit{X}$', u'\U0001d4e8': 
'$\\mathmit{Y}$', u'\U0001d4e9': '$\\mathmit{Z}$', u'\U0001d4ea': '$\\mathmit{a}$', u'\U0001d4eb': '$\\mathmit{b}$', u'\U0001d4ec': '$\\mathmit{c}$', u'\U0001d4ed': '$\\mathmit{d}$', u'\U0001d4ee': '$\\mathmit{e}$', u'\U0001d4ef': '$\\mathmit{f}$', u'\U0001d4f0': '$\\mathmit{g}$', u'\U0001d4f1': '$\\mathmit{h}$', u'\U0001d4f2': '$\\mathmit{i}$', u'\U0001d4f3': '$\\mathmit{j}$', u'\U0001d4f4': '$\\mathmit{k}$', u'\U0001d4f5': '$\\mathmit{l}$', u'\U0001d4f6': '$\\mathmit{m}$', u'\U0001d4f7': '$\\mathmit{n}$', u'\U0001d4f8': '$\\mathmit{o}$', u'\U0001d4f9': '$\\mathmit{p}$', u'\U0001d4fa': '$\\mathmit{q}$', u'\U0001d4fb': '$\\mathmit{r}$', u'\U0001d4fc': '$\\mathmit{s}$', u'\U0001d4fd': '$\\mathmit{t}$', u'\U0001d4fe': '$\\mathmit{u}$', u'\U0001d4ff': '$\\mathmit{v}$', u'\U0001d500': '$\\mathmit{w}$', u'\U0001d501': '$\\mathmit{x}$', u'\U0001d502': '$\\mathmit{y}$', u'\U0001d503': '$\\mathmit{z}$', u'\U0001d504': '$\\mathfrak{A}$', u'\U0001d505': '$\\mathfrak{B}$', u'\U0001d507': '$\\mathfrak{D}$', u'\U0001d508': '$\\mathfrak{E}$', u'\U0001d509': '$\\mathfrak{F}$', u'\U0001d50a': '$\\mathfrak{G}$', u'\U0001d50d': '$\\mathfrak{J}$', u'\U0001d50e': '$\\mathfrak{K}$', u'\U0001d50f': '$\\mathfrak{L}$', u'\U0001d510': '$\\mathfrak{M}$', u'\U0001d511': '$\\mathfrak{N}$', u'\U0001d512': '$\\mathfrak{O}$', u'\U0001d513': '$\\mathfrak{P}$', u'\U0001d514': '$\\mathfrak{Q}$', u'\U0001d516': '$\\mathfrak{S}$', u'\U0001d517': '$\\mathfrak{T}$', u'\U0001d518': '$\\mathfrak{U}$', u'\U0001d519': '$\\mathfrak{V}$', u'\U0001d51a': '$\\mathfrak{W}$', u'\U0001d51b': '$\\mathfrak{X}$', u'\U0001d51c': '$\\mathfrak{Y}$', u'\U0001d51e': '$\\mathfrak{a}$', u'\U0001d51f': '$\\mathfrak{b}$', u'\U0001d520': '$\\mathfrak{c}$', u'\U0001d521': '$\\mathfrak{d}$', u'\U0001d522': '$\\mathfrak{e}$', u'\U0001d523': '$\\mathfrak{f}$', u'\U0001d524': '$\\mathfrak{g}$', u'\U0001d525': '$\\mathfrak{h}$', u'\U0001d526': '$\\mathfrak{i}$', u'\U0001d527': '$\\mathfrak{j}$', u'\U0001d528': '$\\mathfrak{k}$', 
u'\U0001d529': '$\\mathfrak{l}$', u'\U0001d52a': '$\\mathfrak{m}$', u'\U0001d52b': '$\\mathfrak{n}$', u'\U0001d52c': '$\\mathfrak{o}$', u'\U0001d52d': '$\\mathfrak{p}$', u'\U0001d52e': '$\\mathfrak{q}$', u'\U0001d52f': '$\\mathfrak{r}$', u'\U0001d530': '$\\mathfrak{s}$', u'\U0001d531': '$\\mathfrak{t}$', u'\U0001d532': '$\\mathfrak{u}$', u'\U0001d533': '$\\mathfrak{v}$', u'\U0001d534': '$\\mathfrak{w}$', u'\U0001d535': '$\\mathfrak{x}$', u'\U0001d536': '$\\mathfrak{y}$', u'\U0001d537': '$\\mathfrak{z}$', u'\U0001d538': '$\\mathbb{A}$', u'\U0001d539': '$\\mathbb{B}$', u'\U0001d53b': '$\\mathbb{D}$', u'\U0001d53c': '$\\mathbb{E}$', u'\U0001d53d': '$\\mathbb{F}$', u'\U0001d53e': '$\\mathbb{G}$', u'\U0001d540': '$\\mathbb{I}$', u'\U0001d541': '$\\mathbb{J}$', u'\U0001d542': '$\\mathbb{K}$', u'\U0001d543': '$\\mathbb{L}$', u'\U0001d544': '$\\mathbb{M}$', u'\U0001d546': '$\\mathbb{O}$', u'\U0001d54a': '$\\mathbb{S}$', u'\U0001d54b': '$\\mathbb{T}$', u'\U0001d54c': '$\\mathbb{U}$', u'\U0001d54d': '$\\mathbb{V}$', u'\U0001d54e': '$\\mathbb{W}$', u'\U0001d54f': '$\\mathbb{X}$', u'\U0001d550': '$\\mathbb{Y}$', u'\U0001d552': '$\\mathbb{a}$', u'\U0001d553': '$\\mathbb{b}$', u'\U0001d554': '$\\mathbb{c}$', u'\U0001d555': '$\\mathbb{d}$', u'\U0001d556': '$\\mathbb{e}$', u'\U0001d557': '$\\mathbb{f}$', u'\U0001d558': '$\\mathbb{g}$', u'\U0001d559': '$\\mathbb{h}$', u'\U0001d55a': '$\\mathbb{i}$', u'\U0001d55b': '$\\mathbb{j}$', u'\U0001d55c': '$\\mathbb{k}$', u'\U0001d55d': '$\\mathbb{l}$', u'\U0001d55e': '$\\mathbb{m}$', u'\U0001d55f': '$\\mathbb{n}$', u'\U0001d560': '$\\mathbb{o}$', u'\U0001d561': '$\\mathbb{p}$', u'\U0001d562': '$\\mathbb{q}$', u'\U0001d563': '$\\mathbb{r}$', u'\U0001d564': '$\\mathbb{s}$', u'\U0001d565': '$\\mathbb{t}$', u'\U0001d566': '$\\mathbb{u}$', u'\U0001d567': '$\\mathbb{v}$', u'\U0001d568': '$\\mathbb{w}$', u'\U0001d569': '$\\mathbb{x}$', u'\U0001d56a': '$\\mathbb{y}$', u'\U0001d56b': '$\\mathbb{z}$', u'\U0001d56c': '$\\mathslbb{A}$', u'\U0001d56d': 
'$\\mathslbb{B}$', u'\U0001d56e': '$\\mathslbb{C}$', u'\U0001d56f': '$\\mathslbb{D}$', u'\U0001d570': '$\\mathslbb{E}$', u'\U0001d571': '$\\mathslbb{F}$', u'\U0001d572': '$\\mathslbb{G}$', u'\U0001d573': '$\\mathslbb{H}$', u'\U0001d574': '$\\mathslbb{I}$', u'\U0001d575': '$\\mathslbb{J}$', u'\U0001d576': '$\\mathslbb{K}$', u'\U0001d577': '$\\mathslbb{L}$', u'\U0001d578': '$\\mathslbb{M}$', u'\U0001d579': '$\\mathslbb{N}$', u'\U0001d57a': '$\\mathslbb{O}$', u'\U0001d57b': '$\\mathslbb{P}$', u'\U0001d57c': '$\\mathslbb{Q}$', u'\U0001d57d': '$\\mathslbb{R}$', u'\U0001d57e': '$\\mathslbb{S}$', u'\U0001d57f': '$\\mathslbb{T}$', u'\U0001d580': '$\\mathslbb{U}$', u'\U0001d581': '$\\mathslbb{V}$', u'\U0001d582': '$\\mathslbb{W}$', u'\U0001d583': '$\\mathslbb{X}$', u'\U0001d584': '$\\mathslbb{Y}$', u'\U0001d585': '$\\mathslbb{Z}$', u'\U0001d586': '$\\mathslbb{a}$', u'\U0001d587': '$\\mathslbb{b}$', u'\U0001d588': '$\\mathslbb{c}$', u'\U0001d589': '$\\mathslbb{d}$', u'\U0001d58a': '$\\mathslbb{e}$', u'\U0001d58b': '$\\mathslbb{f}$', u'\U0001d58c': '$\\mathslbb{g}$', u'\U0001d58d': '$\\mathslbb{h}$', u'\U0001d58e': '$\\mathslbb{i}$', u'\U0001d58f': '$\\mathslbb{j}$', u'\U0001d590': '$\\mathslbb{k}$', u'\U0001d591': '$\\mathslbb{l}$', u'\U0001d592': '$\\mathslbb{m}$', u'\U0001d593': '$\\mathslbb{n}$', u'\U0001d594': '$\\mathslbb{o}$', u'\U0001d595': '$\\mathslbb{p}$', u'\U0001d596': '$\\mathslbb{q}$', u'\U0001d597': '$\\mathslbb{r}$', u'\U0001d598': '$\\mathslbb{s}$', u'\U0001d599': '$\\mathslbb{t}$', u'\U0001d59a': '$\\mathslbb{u}$', u'\U0001d59b': '$\\mathslbb{v}$', u'\U0001d59c': '$\\mathslbb{w}$', u'\U0001d59d': '$\\mathslbb{x}$', u'\U0001d59e': '$\\mathslbb{y}$', u'\U0001d59f': '$\\mathslbb{z}$', u'\U0001d5a0': '$\\mathsf{A}$', u'\U0001d5a1': '$\\mathsf{B}$', u'\U0001d5a2': '$\\mathsf{C}$', u'\U0001d5a3': '$\\mathsf{D}$', u'\U0001d5a4': '$\\mathsf{E}$', u'\U0001d5a5': '$\\mathsf{F}$', u'\U0001d5a6': '$\\mathsf{G}$', u'\U0001d5a7': '$\\mathsf{H}$', u'\U0001d5a8': 
'$\\mathsf{I}$', u'\U0001d5a9': '$\\mathsf{J}$', u'\U0001d5aa': '$\\mathsf{K}$', u'\U0001d5ab': '$\\mathsf{L}$', u'\U0001d5ac': '$\\mathsf{M}$', u'\U0001d5ad': '$\\mathsf{N}$', u'\U0001d5ae': '$\\mathsf{O}$', u'\U0001d5af': '$\\mathsf{P}$', u'\U0001d5b0': '$\\mathsf{Q}$', u'\U0001d5b1': '$\\mathsf{R}$', u'\U0001d5b2': '$\\mathsf{S}$', u'\U0001d5b3': '$\\mathsf{T}$', u'\U0001d5b4': '$\\mathsf{U}$', u'\U0001d5b5': '$\\mathsf{V}$', u'\U0001d5b6': '$\\mathsf{W}$', u'\U0001d5b7': '$\\mathsf{X}$', u'\U0001d5b8': '$\\mathsf{Y}$', u'\U0001d5b9': '$\\mathsf{Z}$', u'\U0001d5ba': '$\\mathsf{a}$', u'\U0001d5bb': '$\\mathsf{b}$', u'\U0001d5bc': '$\\mathsf{c}$', u'\U0001d5bd': '$\\mathsf{d}$', u'\U0001d5be': '$\\mathsf{e}$', u'\U0001d5bf': '$\\mathsf{f}$', u'\U0001d5c0': '$\\mathsf{g}$', u'\U0001d5c1': '$\\mathsf{h}$', u'\U0001d5c2': '$\\mathsf{i}$', u'\U0001d5c3': '$\\mathsf{j}$', u'\U0001d5c4': '$\\mathsf{k}$', u'\U0001d5c5': '$\\mathsf{l}$', u'\U0001d5c6': '$\\mathsf{m}$', u'\U0001d5c7': '$\\mathsf{n}$', u'\U0001d5c8': '$\\mathsf{o}$', u'\U0001d5c9': '$\\mathsf{p}$', u'\U0001d5ca': '$\\mathsf{q}$', u'\U0001d5cb': '$\\mathsf{r}$', u'\U0001d5cc': '$\\mathsf{s}$', u'\U0001d5cd': '$\\mathsf{t}$', u'\U0001d5ce': '$\\mathsf{u}$', u'\U0001d5cf': '$\\mathsf{v}$', u'\U0001d5d0': '$\\mathsf{w}$', u'\U0001d5d1': '$\\mathsf{x}$', u'\U0001d5d2': '$\\mathsf{y}$', u'\U0001d5d3': '$\\mathsf{z}$', u'\U0001d5d4': '$\\mathsfbf{A}$', u'\U0001d5d5': '$\\mathsfbf{B}$', u'\U0001d5d6': '$\\mathsfbf{C}$', u'\U0001d5d7': '$\\mathsfbf{D}$', u'\U0001d5d8': '$\\mathsfbf{E}$', u'\U0001d5d9': '$\\mathsfbf{F}$', u'\U0001d5da': '$\\mathsfbf{G}$', u'\U0001d5db': '$\\mathsfbf{H}$', u'\U0001d5dc': '$\\mathsfbf{I}$', u'\U0001d5dd': '$\\mathsfbf{J}$', u'\U0001d5de': '$\\mathsfbf{K}$', u'\U0001d5df': '$\\mathsfbf{L}$', u'\U0001d5e0': '$\\mathsfbf{M}$', u'\U0001d5e1': '$\\mathsfbf{N}$', u'\U0001d5e2': '$\\mathsfbf{O}$', u'\U0001d5e3': '$\\mathsfbf{P}$', u'\U0001d5e4': '$\\mathsfbf{Q}$', u'\U0001d5e5': 
'$\\mathsfbf{R}$', u'\U0001d5e6': '$\\mathsfbf{S}$', u'\U0001d5e7': '$\\mathsfbf{T}$', u'\U0001d5e8': '$\\mathsfbf{U}$', u'\U0001d5e9': '$\\mathsfbf{V}$', u'\U0001d5ea': '$\\mathsfbf{W}$', u'\U0001d5eb': '$\\mathsfbf{X}$', u'\U0001d5ec': '$\\mathsfbf{Y}$', u'\U0001d5ed': '$\\mathsfbf{Z}$', u'\U0001d5ee': '$\\mathsfbf{a}$', u'\U0001d5ef': '$\\mathsfbf{b}$', u'\U0001d5f0': '$\\mathsfbf{c}$', u'\U0001d5f1': '$\\mathsfbf{d}$', u'\U0001d5f2': '$\\mathsfbf{e}$', u'\U0001d5f3': '$\\mathsfbf{f}$', u'\U0001d5f4': '$\\mathsfbf{g}$', u'\U0001d5f5': '$\\mathsfbf{h}$', u'\U0001d5f6': '$\\mathsfbf{i}$', u'\U0001d5f7': '$\\mathsfbf{j}$', u'\U0001d5f8': '$\\mathsfbf{k}$', u'\U0001d5f9': '$\\mathsfbf{l}$', u'\U0001d5fa': '$\\mathsfbf{m}$', u'\U0001d5fb': '$\\mathsfbf{n}$', u'\U0001d5fc': '$\\mathsfbf{o}$', u'\U0001d5fd': '$\\mathsfbf{p}$', u'\U0001d5fe': '$\\mathsfbf{q}$', u'\U0001d5ff': '$\\mathsfbf{r}$', u'\U0001d600': '$\\mathsfbf{s}$', u'\U0001d601': '$\\mathsfbf{t}$', u'\U0001d602': '$\\mathsfbf{u}$', u'\U0001d603': '$\\mathsfbf{v}$', u'\U0001d604': '$\\mathsfbf{w}$', u'\U0001d605': '$\\mathsfbf{x}$', u'\U0001d606': '$\\mathsfbf{y}$', u'\U0001d607': '$\\mathsfbf{z}$', u'\U0001d608': '$\\mathsfsl{A}$', u'\U0001d609': '$\\mathsfsl{B}$', u'\U0001d60a': '$\\mathsfsl{C}$', u'\U0001d60b': '$\\mathsfsl{D}$', u'\U0001d60c': '$\\mathsfsl{E}$', u'\U0001d60d': '$\\mathsfsl{F}$', u'\U0001d60e': '$\\mathsfsl{G}$', u'\U0001d60f': '$\\mathsfsl{H}$', u'\U0001d610': '$\\mathsfsl{I}$', u'\U0001d611': '$\\mathsfsl{J}$', u'\U0001d612': '$\\mathsfsl{K}$', u'\U0001d613': '$\\mathsfsl{L}$', u'\U0001d614': '$\\mathsfsl{M}$', u'\U0001d615': '$\\mathsfsl{N}$', u'\U0001d616': '$\\mathsfsl{O}$', u'\U0001d617': '$\\mathsfsl{P}$', u'\U0001d618': '$\\mathsfsl{Q}$', u'\U0001d619': '$\\mathsfsl{R}$', u'\U0001d61a': '$\\mathsfsl{S}$', u'\U0001d61b': '$\\mathsfsl{T}$', u'\U0001d61c': '$\\mathsfsl{U}$', u'\U0001d61d': '$\\mathsfsl{V}$', u'\U0001d61e': '$\\mathsfsl{W}$', u'\U0001d61f': '$\\mathsfsl{X}$', 
u'\U0001d620': '$\\mathsfsl{Y}$', u'\U0001d621': '$\\mathsfsl{Z}$', u'\U0001d622': '$\\mathsfsl{a}$', u'\U0001d623': '$\\mathsfsl{b}$', u'\U0001d624': '$\\mathsfsl{c}$', u'\U0001d625': '$\\mathsfsl{d}$', u'\U0001d626': '$\\mathsfsl{e}$', u'\U0001d627': '$\\mathsfsl{f}$', u'\U0001d628': '$\\mathsfsl{g}$', u'\U0001d629': '$\\mathsfsl{h}$', u'\U0001d62a': '$\\mathsfsl{i}$', u'\U0001d62b': '$\\mathsfsl{j}$', u'\U0001d62c': '$\\mathsfsl{k}$', u'\U0001d62d': '$\\mathsfsl{l}$', u'\U0001d62e': '$\\mathsfsl{m}$', u'\U0001d62f': '$\\mathsfsl{n}$', u'\U0001d630': '$\\mathsfsl{o}$', u'\U0001d631': '$\\mathsfsl{p}$', u'\U0001d632': '$\\mathsfsl{q}$', u'\U0001d633': '$\\mathsfsl{r}$', u'\U0001d634': '$\\mathsfsl{s}$', u'\U0001d635': '$\\mathsfsl{t}$', u'\U0001d636': '$\\mathsfsl{u}$', u'\U0001d637': '$\\mathsfsl{v}$', u'\U0001d638': '$\\mathsfsl{w}$', u'\U0001d639': '$\\mathsfsl{x}$', u'\U0001d63a': '$\\mathsfsl{y}$', u'\U0001d63b': '$\\mathsfsl{z}$', u'\U0001d63c': '$\\mathsfbfsl{A}$', u'\U0001d63d': '$\\mathsfbfsl{B}$', u'\U0001d63e': '$\\mathsfbfsl{C}$', u'\U0001d63f': '$\\mathsfbfsl{D}$', u'\U0001d640': '$\\mathsfbfsl{E}$', u'\U0001d641': '$\\mathsfbfsl{F}$', u'\U0001d642': '$\\mathsfbfsl{G}$', u'\U0001d643': '$\\mathsfbfsl{H}$', u'\U0001d644': '$\\mathsfbfsl{I}$', u'\U0001d645': '$\\mathsfbfsl{J}$', u'\U0001d646': '$\\mathsfbfsl{K}$', u'\U0001d647': '$\\mathsfbfsl{L}$', u'\U0001d648': '$\\mathsfbfsl{M}$', u'\U0001d649': '$\\mathsfbfsl{N}$', u'\U0001d64a': '$\\mathsfbfsl{O}$', u'\U0001d64b': '$\\mathsfbfsl{P}$', u'\U0001d64c': '$\\mathsfbfsl{Q}$', u'\U0001d64d': '$\\mathsfbfsl{R}$', u'\U0001d64e': '$\\mathsfbfsl{S}$', u'\U0001d64f': '$\\mathsfbfsl{T}$', u'\U0001d650': '$\\mathsfbfsl{U}$', u'\U0001d651': '$\\mathsfbfsl{V}$', u'\U0001d652': '$\\mathsfbfsl{W}$', u'\U0001d653': '$\\mathsfbfsl{X}$', u'\U0001d654': '$\\mathsfbfsl{Y}$', u'\U0001d655': '$\\mathsfbfsl{Z}$', u'\U0001d656': '$\\mathsfbfsl{a}$', u'\U0001d657': '$\\mathsfbfsl{b}$', u'\U0001d658': '$\\mathsfbfsl{c}$', 
u'\U0001d659': '$\\mathsfbfsl{d}$', u'\U0001d65a': '$\\mathsfbfsl{e}$', u'\U0001d65b': '$\\mathsfbfsl{f}$', u'\U0001d65c': '$\\mathsfbfsl{g}$', u'\U0001d65d': '$\\mathsfbfsl{h}$', u'\U0001d65e': '$\\mathsfbfsl{i}$', u'\U0001d65f': '$\\mathsfbfsl{j}$', u'\U0001d660': '$\\mathsfbfsl{k}$', u'\U0001d661': '$\\mathsfbfsl{l}$', u'\U0001d662': '$\\mathsfbfsl{m}$', u'\U0001d663': '$\\mathsfbfsl{n}$', u'\U0001d664': '$\\mathsfbfsl{o}$', u'\U0001d665': '$\\mathsfbfsl{p}$', u'\U0001d666': '$\\mathsfbfsl{q}$', u'\U0001d667': '$\\mathsfbfsl{r}$', u'\U0001d668': '$\\mathsfbfsl{s}$', u'\U0001d669': '$\\mathsfbfsl{t}$', u'\U0001d66a': '$\\mathsfbfsl{u}$', u'\U0001d66b': '$\\mathsfbfsl{v}$', u'\U0001d66c': '$\\mathsfbfsl{w}$', u'\U0001d66d': '$\\mathsfbfsl{x}$', u'\U0001d66e': '$\\mathsfbfsl{y}$', u'\U0001d66f': '$\\mathsfbfsl{z}$', u'\U0001d670': '$\\mathtt{A}$', u'\U0001d671': '$\\mathtt{B}$', u'\U0001d672': '$\\mathtt{C}$', u'\U0001d673': '$\\mathtt{D}$', u'\U0001d674': '$\\mathtt{E}$', u'\U0001d675': '$\\mathtt{F}$', u'\U0001d676': '$\\mathtt{G}$', u'\U0001d677': '$\\mathtt{H}$', u'\U0001d678': '$\\mathtt{I}$', u'\U0001d679': '$\\mathtt{J}$', u'\U0001d67a': '$\\mathtt{K}$', u'\U0001d67b': '$\\mathtt{L}$', u'\U0001d67c': '$\\mathtt{M}$', u'\U0001d67d': '$\\mathtt{N}$', u'\U0001d67e': '$\\mathtt{O}$', u'\U0001d67f': '$\\mathtt{P}$', u'\U0001d680': '$\\mathtt{Q}$', u'\U0001d681': '$\\mathtt{R}$', u'\U0001d682': '$\\mathtt{S}$', u'\U0001d683': '$\\mathtt{T}$', u'\U0001d684': '$\\mathtt{U}$', u'\U0001d685': '$\\mathtt{V}$', u'\U0001d686': '$\\mathtt{W}$', u'\U0001d687': '$\\mathtt{X}$', u'\U0001d688': '$\\mathtt{Y}$', u'\U0001d689': '$\\mathtt{Z}$', u'\U0001d68a': '$\\mathtt{a}$', u'\U0001d68b': '$\\mathtt{b}$', u'\U0001d68c': '$\\mathtt{c}$', u'\U0001d68d': '$\\mathtt{d}$', u'\U0001d68e': '$\\mathtt{e}$', u'\U0001d68f': '$\\mathtt{f}$', u'\U0001d690': '$\\mathtt{g}$', u'\U0001d691': '$\\mathtt{h}$', u'\U0001d692': '$\\mathtt{i}$', u'\U0001d693': '$\\mathtt{j}$', u'\U0001d694': 
'$\\mathtt{k}$', u'\U0001d695': '$\\mathtt{l}$', u'\U0001d696': '$\\mathtt{m}$', u'\U0001d697': '$\\mathtt{n}$', u'\U0001d698': '$\\mathtt{o}$', u'\U0001d699': '$\\mathtt{p}$', u'\U0001d69a': '$\\mathtt{q}$', u'\U0001d69b': '$\\mathtt{r}$', u'\U0001d69c': '$\\mathtt{s}$', u'\U0001d69d': '$\\mathtt{t}$', u'\U0001d69e': '$\\mathtt{u}$', u'\U0001d69f': '$\\mathtt{v}$', u'\U0001d6a0': '$\\mathtt{w}$', u'\U0001d6a1': '$\\mathtt{x}$', u'\U0001d6a2': '$\\mathtt{y}$', u'\U0001d6a3': '$\\mathtt{z}$', u'\U0001d6a8': '$\\mathbf{\\Alpha}$', u'\U0001d6a9': '$\\mathbf{\\Beta}$', u'\U0001d6aa': '$\\mathbf{\\Gamma}$', u'\U0001d6ab': '$\\mathbf{\\Delta}$', u'\U0001d6ac': '$\\mathbf{\\Epsilon}$', u'\U0001d6ad': '$\\mathbf{\\Zeta}$', u'\U0001d6ae': '$\\mathbf{\\Eta}$', u'\U0001d6af': '$\\mathbf{\\Theta}$', u'\U0001d6b0': '$\\mathbf{\\Iota}$', u'\U0001d6b1': '$\\mathbf{\\Kappa}$', u'\U0001d6b2': '$\\mathbf{\\Lambda}$', u'\U0001d6b3': '$M$', u'\U0001d6b4': '$N$', u'\U0001d6b5': '$\\mathbf{\\Xi}$', u'\U0001d6b6': '$O$', u'\U0001d6b7': '$\\mathbf{\\Pi}$', u'\U0001d6b8': '$\\mathbf{\\Rho}$', u'\U0001d6b9': '{\\mathbf{\\vartheta}}', u'\U0001d6ba': '$\\mathbf{\\Sigma}$', u'\U0001d6bb': '$\\mathbf{\\Tau}$', u'\U0001d6bc': '$\\mathbf{\\Upsilon}$', u'\U0001d6bd': '$\\mathbf{\\Phi}$', u'\U0001d6be': '$\\mathbf{\\Chi}$', u'\U0001d6bf': '$\\mathbf{\\Psi}$', u'\U0001d6c0': '$\\mathbf{\\Omega}$', u'\U0001d6c1': '$\\mathbf{\\nabla}$', u'\U0001d6c2': '$\\mathbf{\\Alpha}$', u'\U0001d6c3': '$\\mathbf{\\Beta}$', u'\U0001d6c4': '$\\mathbf{\\Gamma}$', u'\U0001d6c5': '$\\mathbf{\\Delta}$', u'\U0001d6c6': '$\\mathbf{\\Epsilon}$', u'\U0001d6c7': '$\\mathbf{\\Zeta}$', u'\U0001d6c8': '$\\mathbf{\\Eta}$', u'\U0001d6c9': '$\\mathbf{\\theta}$', u'\U0001d6ca': '$\\mathbf{\\Iota}$', u'\U0001d6cb': '$\\mathbf{\\Kappa}$', u'\U0001d6cc': '$\\mathbf{\\Lambda}$', u'\U0001d6cd': '$M$', u'\U0001d6ce': '$N$', u'\U0001d6cf': '$\\mathbf{\\Xi}$', u'\U0001d6d0': '$O$', u'\U0001d6d1': '$\\mathbf{\\Pi}$', u'\U0001d6d2': 
'$\\mathbf{\\Rho}$', u'\U0001d6d3': '$\\mathbf{\\varsigma}$', u'\U0001d6d4': '$\\mathbf{\\Sigma}$', u'\U0001d6d5': '$\\mathbf{\\Tau}$', u'\U0001d6d6': '$\\mathbf{\\Upsilon}$', u'\U0001d6d7': '$\\mathbf{\\Phi}$', u'\U0001d6d8': '$\\mathbf{\\Chi}$', u'\U0001d6d9': '$\\mathbf{\\Psi}$', u'\U0001d6da': '$\\mathbf{\\Omega}$', u'\U0001d6db': '$\\partial$', u'\U0001d6dc': '$\\in$', u'\U0001d6dd': '{\\mathbf{\\vartheta}}', u'\U0001d6de': '{\\mathbf{\\varkappa}}', u'\U0001d6df': '{\\mathbf{\\phi}}', u'\U0001d6e0': '{\\mathbf{\\varrho}}', u'\U0001d6e1': '{\\mathbf{\\varpi}}', u'\U0001d6e2': '$\\mathsl{\\Alpha}$', u'\U0001d6e3': '$\\mathsl{\\Beta}$', u'\U0001d6e4': '$\\mathsl{\\Gamma}$', u'\U0001d6e5': '$\\mathsl{\\Delta}$', u'\U0001d6e6': '$\\mathsl{\\Epsilon}$', u'\U0001d6e7': '$\\mathsl{\\Zeta}$', u'\U0001d6e8': '$\\mathsl{\\Eta}$', u'\U0001d6e9': '$\\mathsl{\\Theta}$', u'\U0001d6ea': '$\\mathsl{\\Iota}$', u'\U0001d6eb': '$\\mathsl{\\Kappa}$', u'\U0001d6ec': '$\\mathsl{\\Lambda}$', u'\U0001d6ed': '$M$', u'\U0001d6ee': '$N$', u'\U0001d6ef': '$\\mathsl{\\Xi}$', u'\U0001d6f0': '$O$', u'\U0001d6f1': '$\\mathsl{\\Pi}$', u'\U0001d6f2': '$\\mathsl{\\Rho}$', u'\U0001d6f3': '{\\mathsl{\\vartheta}}', u'\U0001d6f4': '$\\mathsl{\\Sigma}$', u'\U0001d6f5': '$\\mathsl{\\Tau}$', u'\U0001d6f6': '$\\mathsl{\\Upsilon}$', u'\U0001d6f7': '$\\mathsl{\\Phi}$', u'\U0001d6f8': '$\\mathsl{\\Chi}$', u'\U0001d6f9': '$\\mathsl{\\Psi}$', u'\U0001d6fa': '$\\mathsl{\\Omega}$', u'\U0001d6fb': '$\\mathsl{\\nabla}$', u'\U0001d6fc': '$\\mathsl{\\Alpha}$', u'\U0001d6fd': '$\\mathsl{\\Beta}$', u'\U0001d6fe': '$\\mathsl{\\Gamma}$', u'\U0001d6ff': '$\\mathsl{\\Delta}$', u'\U0001d700': '$\\mathsl{\\Epsilon}$', u'\U0001d701': '$\\mathsl{\\Zeta}$', u'\U0001d702': '$\\mathsl{\\Eta}$', u'\U0001d703': '$\\mathsl{\\Theta}$', u'\U0001d704': '$\\mathsl{\\Iota}$', u'\U0001d705': '$\\mathsl{\\Kappa}$', u'\U0001d706': '$\\mathsl{\\Lambda}$', u'\U0001d707': '$M$', u'\U0001d708': '$N$', u'\U0001d709': '$\\mathsl{\\Xi}$', 
u'\U0001d70a': '$O$', u'\U0001d70b': '$\\mathsl{\\Pi}$', u'\U0001d70c': '$\\mathsl{\\Rho}$', u'\U0001d70d': '$\\mathsl{\\varsigma}$', u'\U0001d70e': '$\\mathsl{\\Sigma}$', u'\U0001d70f': '$\\mathsl{\\Tau}$', u'\U0001d710': '$\\mathsl{\\Upsilon}$', u'\U0001d711': '$\\mathsl{\\Phi}$', u'\U0001d712': '$\\mathsl{\\Chi}$', u'\U0001d713': '$\\mathsl{\\Psi}$', u'\U0001d714': '$\\mathsl{\\Omega}$', u'\U0001d715': '$\\partial$', u'\U0001d716': '$\\in$', u'\U0001d717': '{\\mathsl{\\vartheta}}', u'\U0001d718': '{\\mathsl{\\varkappa}}', u'\U0001d719': '{\\mathsl{\\phi}}', u'\U0001d71a': '{\\mathsl{\\varrho}}', u'\U0001d71b': '{\\mathsl{\\varpi}}', u'\U0001d71c': '$\\mathbit{\\Alpha}$', u'\U0001d71d': '$\\mathbit{\\Beta}$', u'\U0001d71e': '$\\mathbit{\\Gamma}$', u'\U0001d71f': '$\\mathbit{\\Delta}$', u'\U0001d720': '$\\mathbit{\\Epsilon}$', u'\U0001d721': '$\\mathbit{\\Zeta}$', u'\U0001d722': '$\\mathbit{\\Eta}$', u'\U0001d723': '$\\mathbit{\\Theta}$', u'\U0001d724': '$\\mathbit{\\Iota}$', u'\U0001d725': '$\\mathbit{\\Kappa}$', u'\U0001d726': '$\\mathbit{\\Lambda}$', u'\U0001d727': '$M$', u'\U0001d728': '$N$', u'\U0001d729': '$\\mathbit{\\Xi}$', u'\U0001d72a': '$O$', u'\U0001d72b': '$\\mathbit{\\Pi}$', u'\U0001d72c': '$\\mathbit{\\Rho}$', u'\U0001d72d': '{\\mathbit{O}}', u'\U0001d72e': '$\\mathbit{\\Sigma}$', u'\U0001d72f': '$\\mathbit{\\Tau}$', u'\U0001d730': '$\\mathbit{\\Upsilon}$', u'\U0001d731': '$\\mathbit{\\Phi}$', u'\U0001d732': '$\\mathbit{\\Chi}$', u'\U0001d733': '$\\mathbit{\\Psi}$', u'\U0001d734': '$\\mathbit{\\Omega}$', u'\U0001d735': '$\\mathbit{\\nabla}$', u'\U0001d736': '$\\mathbit{\\Alpha}$', u'\U0001d737': '$\\mathbit{\\Beta}$', u'\U0001d738': '$\\mathbit{\\Gamma}$', u'\U0001d739': '$\\mathbit{\\Delta}$', u'\U0001d73a': '$\\mathbit{\\Epsilon}$', u'\U0001d73b': '$\\mathbit{\\Zeta}$', u'\U0001d73c': '$\\mathbit{\\Eta}$', u'\U0001d73d': '$\\mathbit{\\Theta}$', u'\U0001d73e': '$\\mathbit{\\Iota}$', u'\U0001d73f': '$\\mathbit{\\Kappa}$', u'\U0001d740': 
'$\\mathbit{\\Lambda}$', u'\U0001d741': '$M$', u'\U0001d742': '$N$', u'\U0001d743': '$\\mathbit{\\Xi}$', u'\U0001d744': '$O$', u'\U0001d745': '$\\mathbit{\\Pi}$', u'\U0001d746': '$\\mathbit{\\Rho}$', u'\U0001d747': '$\\mathbit{\\varsigma}$', u'\U0001d748': '$\\mathbit{\\Sigma}$', u'\U0001d749': '$\\mathbit{\\Tau}$', u'\U0001d74a': '$\\mathbit{\\Upsilon}$', u'\U0001d74b': '$\\mathbit{\\Phi}$', u'\U0001d74c': '$\\mathbit{\\Chi}$', u'\U0001d74d': '$\\mathbit{\\Psi}$', u'\U0001d74e': '$\\mathbit{\\Omega}$', u'\U0001d74f': '$\\partial$', u'\U0001d750': '$\\in$', u'\U0001d751': '{\\mathbit{\\vartheta}}', u'\U0001d752': '{\\mathbit{\\varkappa}}', u'\U0001d753': '{\\mathbit{\\phi}}', u'\U0001d754': '{\\mathbit{\\varrho}}', u'\U0001d755': '{\\mathbit{\\varpi}}', u'\U0001d756': '$\\mathsfbf{\\Alpha}$', u'\U0001d757': '$\\mathsfbf{\\Beta}$', u'\U0001d758': '$\\mathsfbf{\\Gamma}$', u'\U0001d759': '$\\mathsfbf{\\Delta}$', u'\U0001d75a': '$\\mathsfbf{\\Epsilon}$', u'\U0001d75b': '$\\mathsfbf{\\Zeta}$', u'\U0001d75c': '$\\mathsfbf{\\Eta}$', u'\U0001d75d': '$\\mathsfbf{\\Theta}$', u'\U0001d75e': '$\\mathsfbf{\\Iota}$', u'\U0001d75f': '$\\mathsfbf{\\Kappa}$', u'\U0001d760': '$\\mathsfbf{\\Lambda}$', u'\U0001d761': '$M$', u'\U0001d762': '$N$', u'\U0001d763': '$\\mathsfbf{\\Xi}$', u'\U0001d764': '$O$', u'\U0001d765': '$\\mathsfbf{\\Pi}$', u'\U0001d766': '$\\mathsfbf{\\Rho}$', u'\U0001d767': '{\\mathsfbf{\\vartheta}}', u'\U0001d768': '$\\mathsfbf{\\Sigma}$', u'\U0001d769': '$\\mathsfbf{\\Tau}$', u'\U0001d76a': '$\\mathsfbf{\\Upsilon}$', u'\U0001d76b': '$\\mathsfbf{\\Phi}$', u'\U0001d76c': '$\\mathsfbf{\\Chi}$', u'\U0001d76d': '$\\mathsfbf{\\Psi}$', u'\U0001d76e': '$\\mathsfbf{\\Omega}$', u'\U0001d76f': '$\\mathsfbf{\\nabla}$', u'\U0001d770': '$\\mathsfbf{\\Alpha}$', u'\U0001d771': '$\\mathsfbf{\\Beta}$', u'\U0001d772': '$\\mathsfbf{\\Gamma}$', u'\U0001d773': '$\\mathsfbf{\\Delta}$', u'\U0001d774': '$\\mathsfbf{\\Epsilon}$', u'\U0001d775': '$\\mathsfbf{\\Zeta}$', u'\U0001d776': 
'$\\mathsfbf{\\Eta}$', u'\U0001d777': '$\\mathsfbf{\\Theta}$', u'\U0001d778': '$\\mathsfbf{\\Iota}$', u'\U0001d779': '$\\mathsfbf{\\Kappa}$', u'\U0001d77a': '$\\mathsfbf{\\Lambda}$', u'\U0001d77b': '$M$', u'\U0001d77c': '$N$', u'\U0001d77d': '$\\mathsfbf{\\Xi}$', u'\U0001d77e': '$O$', u'\U0001d77f': '$\\mathsfbf{\\Pi}$', u'\U0001d780': '$\\mathsfbf{\\Rho}$', u'\U0001d781': '$\\mathsfbf{\\varsigma}$', u'\U0001d782': '$\\mathsfbf{\\Sigma}$', u'\U0001d783': '$\\mathsfbf{\\Tau}$', u'\U0001d784': '$\\mathsfbf{\\Upsilon}$', u'\U0001d785': '$\\mathsfbf{\\Phi}$', u'\U0001d786': '$\\mathsfbf{\\Chi}$', u'\U0001d787': '$\\mathsfbf{\\Psi}$', u'\U0001d788': '$\\mathsfbf{\\Omega}$', u'\U0001d789': '$\\partial$', u'\U0001d78a': '$\\in$', u'\U0001d78b': '{\\mathsfbf{\\vartheta}}', u'\U0001d78c': '{\\mathsfbf{\\varkappa}}', u'\U0001d78d': '{\\mathsfbf{\\phi}}', u'\U0001d78e': '{\\mathsfbf{\\varrho}}', u'\U0001d78f': '{\\mathsfbf{\\varpi}}', u'\U0001d790': '$\\mathsfbfsl{\\Alpha}$', u'\U0001d791': '$\\mathsfbfsl{\\Beta}$', u'\U0001d792': '$\\mathsfbfsl{\\Gamma}$', u'\U0001d793': '$\\mathsfbfsl{\\Delta}$', u'\U0001d794': '$\\mathsfbfsl{\\Epsilon}$', u'\U0001d795': '$\\mathsfbfsl{\\Zeta}$', u'\U0001d796': '$\\mathsfbfsl{\\Eta}$', u'\U0001d797': '$\\mathsfbfsl{\\vartheta}$', u'\U0001d798': '$\\mathsfbfsl{\\Iota}$', u'\U0001d799': '$\\mathsfbfsl{\\Kappa}$', u'\U0001d79a': '$\\mathsfbfsl{\\Lambda}$', u'\U0001d79b': '$M$', u'\U0001d79c': '$N$', u'\U0001d79d': '$\\mathsfbfsl{\\Xi}$', u'\U0001d79e': '$O$', u'\U0001d79f': '$\\mathsfbfsl{\\Pi}$', u'\U0001d7a0': '$\\mathsfbfsl{\\Rho}$', u'\U0001d7a1': '{\\mathsfbfsl{\\vartheta}}', u'\U0001d7a2': '$\\mathsfbfsl{\\Sigma}$', u'\U0001d7a3': '$\\mathsfbfsl{\\Tau}$', u'\U0001d7a4': '$\\mathsfbfsl{\\Upsilon}$', u'\U0001d7a5': '$\\mathsfbfsl{\\Phi}$', u'\U0001d7a6': '$\\mathsfbfsl{\\Chi}$', u'\U0001d7a7': '$\\mathsfbfsl{\\Psi}$', u'\U0001d7a8': '$\\mathsfbfsl{\\Omega}$', u'\U0001d7a9': '$\\mathsfbfsl{\\nabla}$', u'\U0001d7aa': 
'$\\mathsfbfsl{\\Alpha}$', u'\U0001d7ab': '$\\mathsfbfsl{\\Beta}$', u'\U0001d7ac': '$\\mathsfbfsl{\\Gamma}$', u'\U0001d7ad': '$\\mathsfbfsl{\\Delta}$', u'\U0001d7ae': '$\\mathsfbfsl{\\Epsilon}$', u'\U0001d7af': '$\\mathsfbfsl{\\Zeta}$', u'\U0001d7b0': '$\\mathsfbfsl{\\Eta}$', u'\U0001d7b1': '$\\mathsfbfsl{\\vartheta}$', u'\U0001d7b2': '$\\mathsfbfsl{\\Iota}$', u'\U0001d7b3': '$\\mathsfbfsl{\\Kappa}$', u'\U0001d7b4': '$\\mathsfbfsl{\\Lambda}$', u'\U0001d7b5': '$M$', u'\U0001d7b6': '$N$', u'\U0001d7b7': '$\\mathsfbfsl{\\Xi}$', u'\U0001d7b8': '$O$', u'\U0001d7b9': '$\\mathsfbfsl{\\Pi}$', u'\U0001d7ba': '$\\mathsfbfsl{\\Rho}$', u'\U0001d7bb': '$\\mathsfbfsl{\\varsigma}$', u'\U0001d7bc': '$\\mathsfbfsl{\\Sigma}$', u'\U0001d7bd': '$\\mathsfbfsl{\\Tau}$', u'\U0001d7be': '$\\mathsfbfsl{\\Upsilon}$', u'\U0001d7bf': '$\\mathsfbfsl{\\Phi}$', u'\U0001d7c0': '$\\mathsfbfsl{\\Chi}$', u'\U0001d7c1': '$\\mathsfbfsl{\\Psi}$', u'\U0001d7c2': '$\\mathsfbfsl{\\Omega}$', u'\U0001d7c3': '$\\partial$', u'\U0001d7c4': '$\\in$', u'\U0001d7c5': '{\\mathsfbfsl{\\vartheta}}', u'\U0001d7c6': '{\\mathsfbfsl{\\varkappa}}', u'\U0001d7c7': '{\\mathsfbfsl{\\phi}}', u'\U0001d7c8': '{\\mathsfbfsl{\\varrho}}', u'\U0001d7c9': '{\\mathsfbfsl{\\varpi}}', u'\U0001d7ce': '$\\mathbf{0}$', u'\U0001d7cf': '$\\mathbf{1}$', u'\U0001d7d0': '$\\mathbf{2}$', u'\U0001d7d1': '$\\mathbf{3}$', u'\U0001d7d2': '$\\mathbf{4}$', u'\U0001d7d3': '$\\mathbf{5}$', u'\U0001d7d4': '$\\mathbf{6}$', u'\U0001d7d5': '$\\mathbf{7}$', u'\U0001d7d6': '$\\mathbf{8}$', u'\U0001d7d7': '$\\mathbf{9}$', u'\U0001d7d8': '$\\mathbb{0}$', u'\U0001d7d9': '$\\mathbb{1}$', u'\U0001d7da': '$\\mathbb{2}$', u'\U0001d7db': '$\\mathbb{3}$', u'\U0001d7dc': '$\\mathbb{4}$', u'\U0001d7dd': '$\\mathbb{5}$', u'\U0001d7de': '$\\mathbb{6}$', u'\U0001d7df': '$\\mathbb{7}$', u'\U0001d7e0': '$\\mathbb{8}$', u'\U0001d7e1': '$\\mathbb{9}$', u'\U0001d7e2': '$\\mathsf{0}$', u'\U0001d7e3': '$\\mathsf{1}$', u'\U0001d7e4': '$\\mathsf{2}$', u'\U0001d7e5': 
'$\\mathsf{3}$', u'\U0001d7e6': '$\\mathsf{4}$', u'\U0001d7e7': '$\\mathsf{5}$', u'\U0001d7e8': '$\\mathsf{6}$', u'\U0001d7e9': '$\\mathsf{7}$', u'\U0001d7ea': '$\\mathsf{8}$', u'\U0001d7eb': '$\\mathsf{9}$', u'\U0001d7ec': '$\\mathsfbf{0}$', u'\U0001d7ed': '$\\mathsfbf{1}$', u'\U0001d7ee': '$\\mathsfbf{2}$', u'\U0001d7ef': '$\\mathsfbf{3}$', u'\U0001d7f0': '$\\mathsfbf{4}$', u'\U0001d7f1': '$\\mathsfbf{5}$', u'\U0001d7f2': '$\\mathsfbf{6}$', u'\U0001d7f3': '$\\mathsfbf{7}$', u'\U0001d7f4': '$\\mathsfbf{8}$', u'\U0001d7f5': '$\\mathsfbf{9}$', u'\U0001d7f6': '$\\mathtt{0}$', u'\U0001d7f7': '$\\mathtt{1}$', u'\U0001d7f8': '$\\mathtt{2}$', u'\U0001d7f9': '$\\mathtt{3}$', u'\U0001d7fa': '$\\mathtt{4}$', u'\U0001d7fb': '$\\mathtt{5}$', u'\U0001d7fc': '$\\mathtt{6}$', u'\U0001d7fd': '$\\mathtt{7}$', u'\U0001d7fe': '$\\mathtt{8}$', u'\U0001d7ff': '$\\mathtt{9}$', # Items from simple list u'\u0106': "{\\a\\'C}", u'\u0408': '{\\CYRJE}', u'\u20ac': '{\\texteuro}', u'\u2191': '{\\textuparrow}', u'\u0493': '{\\cyrghcrs}', u'\u2116': '{\\textnumero}', u'\u0418': '{\\CYRI}', u'\u04a3': '{\\cyrndsc}', u'\u2126': '{\\textohm}', u'\u0428': '{\\CYRSH}', u'\u04b3': '{\\cyrhdsc}', u'\u0438': '{\\cyri}', u'\u03bd': '{$\\nu$}', u'\u04c3': '{\\CYRKHK}', u'\u0448': '{\\cyrsh}', u'\xcb': '{\\"E}', u'\u0458': '{\\cyrje}', u'\xdb': '{\\^U}', u'\xeb': '{\\"e}', u'\xfb': '{\\^u}', u'\u0413': '{\\CYRG}', u'\u0498': '{\\CYRZDSC}', u'\xa0': '{~}', u'\u0423': '{\\CYRU}', u'\u04a8': '{\\CYRABHHA}', u'\u0433': '{\\cyrg}', u'\u04b8': '{\\CYRCHVCRS}', u'\u203b': '{\\textreferencemark}', u'\u211e': '{\\textrecipe}', u'\xc0': '{\\`A}', u'\u0443': '{\\cyru}', u'\u04c8': '{\\cyrnhk}', u'\u0151': '{\\H o}', u'\u04d8': '{\\CYRSCHWA}', u'\u0161': '{\\v s}', u'\xe0': '{\\`a}', u'\u0463': '{\\cyryat}', u'\u04e8': '{\\CYROTLD}', u'\u0171': '{\\H u}', u'\u0473': '{\\cyrfita}', u'\u20ab': '{\\textdong}', u'\u2103': '{\\textcelsius}', u'\u040e': '{\\CYRUSHRT}', u'\u2212': '{\\textminus}', u'\u2016': 
'{\\textbardbl}', u'\u0499': '{\\cyrzdsc}', u'\u041e': '{\\CYRO}', u'\u2120': '{\\textservicemark}', u'\u03a7': '{$\\chi$}', u'\u2026': '{\\textellipsis}', u'\u04a9': '{\\cyrabhha}', u'\u042e': '{\\CYRYU}', u'\xb1': '{\\textpm}', u'\u0130': '{\\.I}', u'\u04b9': '{\\cyrchvcrs}', u'\u043e': '{\\cyro}', u'\xc1': "{\\'A}", u'\u044e': '{\\cyryu}', u'\xd1': '{\\~N}', u'\u0150': '{\\H O}', u'\u04d9': '{\\cyrschwa}', u'\u02dd': '{\\textacutedbl}', u'\u045e': '{\\cyrushrt}', u'\xe1': "{\\'a}", u'\u0160': '{\\v S}', u'\u04e9': '{\\cyrotld}', u'\u266a': '{\\textmusicalnote}', u'\xf1': '{\\~n}', u'\u0170': '{\\H U}', u'\u0409': '{\\CYRLJE}', u'\u048e': '{\\CYRRTICK}', u'\u2190': '{\\leftarrow}', u'\u0419': '{\\CYRISHRT}', u'\u011b': '{\\v e}', u'\u049e': '{\\CYRKHCRS}', u'\u0429': '{\\CYRSHCH}', u'\u04ae': '{\\CYRY}', u'\u0439': '{\\cyrishrt}', u'\u04be': '{\\CYRABHCHDSC}', u'\u0449': '{\\cyrshch}', u'\u04ce': '{\\cyrmdsc}', u'\xd6': '{\\"O}', u'\u0459': '{\\cyrlje}', u'\u015b': "{\\'s}", u'\u20a6': '{\\textnaira}', u'\xf6': '{\\"o}', u'\u017b': '{\\.Z}', u'\u0102': '{\\u A}', u'\u0404': '{\\CYRIE}', u'\u200c': '{\\textcompwordmark}', u'\u048f': '{\\cyrrtick}', u'\u0414': '{\\CYRD}', u'\u049f': '{\\cyrkhcrs}', u'\u0424': '{\\CYRF}', u'\u04af': '{\\cyry}', u'\u0434': '{\\cyrd}', u'\xb7': '{\\textperiodcentered}', u'\u04bf': '{\\cyrabhchdsc}', u'\u0444': '{\\cyrf}', u'\xc7': '{\\c C}', u'\u0454': '{\\cyrie}', u'\u0162': '{\\c T}', u'\xe7': '{\\c c}', u'\u0474': '{\\CYRIZH}', u'\xf7': '{\\textdiv}', u'\u010d': '{\\v c}', u'\u040f': '{\\CYRDZHE}', u'\u0192': '{\\textflorin}', u'\u0494': '{\\CYRGHK}', u'\u041f': '{\\CYRP}', u'\u04a4': '{\\CYRNG}', u'\xac': '{\\textlnot}', u'\u042f': '{\\CYRYA}', u'\u04b4': '{\\CYRTETSE}', u'\u013d': '{\\v L}', u'\u043f': '{\\cyrp}', u'\u04c4': '{\\cyrkhk}', u'\xcc': '{\\`I}', u'\u044f': '{\\cyrya}', u'\u2422': '{\\textblank}', u'\u04d4': '{\\CYRAE}', u'\xdc': '{\\"U}', u'\u045f': '{\\cyrdzhe}', u'\xec': '{\\`i}', u'\u017d': '{\\v Z}', u'\xfc': 
'{\\"u}', u'\u040a': '{\\CYRNJE}', u'\u010c': '{\\v C}', u'\u0495': '{\\cyrghk}', u'\u041a': '{\\CYRK}', u'\u04a5': '{\\cyrng}', u'\u042a': '{\\CYRHRDSN}', u'\u2032': '{$\\prime$}', u'\u04b5': '{\\cyrtetse}', u'\u043a': '{\\cyrk}', u'\u04c5': '{\\CYRLDSC}', u'\u044a': '{\\cyrhrdsn}', u'\xcd': "{\\'I}", u'\u2052': '{\\textdiscount}', u'\u04d5': '{\\cyrae}', u'\u045a': '{\\cyrnje}', u'\xdd': "{\\'Y}", u'\u046a': '{\\CYRBYUS}', u'\xed': "{\\'\\i}", u'\u25ef': '{\\textbigcircle}', u'\xfd': "{\\'y}", u'\u017c': '{\\.z}', u'\u0405': '{\\CYRDZE}', u'\u0107': "{\\'c}", u'\u0415': '{\\CYRE}', u'\u2117': '{\\textcircledP}', u'\u049a': '{\\CYRKDSC}', u'\u0425': '{\\CYRH}', u'\u2127': '{\\textmho}', u'\u04aa': '{\\CYRSDSC}', u'\xb2': '{\\texttwosuperior}', u'\u0435': '{\\cyre}', u'\u04ba': '{\\CYRSHHA}', u'\u203d': '{\\textinterrobang}', u'\xc2': '{\\^A}', u'\u0445': '{\\cyrh}', u'\u0147': '{\\v N}', u'\u02c6': '{\\textasciicircum}', u'\xd2': '{\\`O}', u'\u0455': '{\\cyrdze}', u'\u2261': '{$\\equiv$}', u'\xe2': '{\\^a}', u'\xf2': '{\\`o}', u'\u0475': '{\\cyrizh}', u'\u010e': '{\\v D}', u'\u0410': '{\\CYRA}', u'\u2018': '{\\textquoteleft}', u'\u049b': '{\\cyrkdsc}', u'\u011e': '{\\u G}', u'\u0420': '{\\CYRR}', u'\u04ab': '{\\cyrsdsc}', u'\u232a': '{\\textrangle}', u'\u212e': '{\\textestimated}', u'\u0430': '{\\cyra}', u'\xb3': '{\\textthreesuperior}', u'\u04bb': '{\\cyrshha}', u'\u013e': '{\\v l}', u'\u0440': '{\\cyrr}', u'\xc3': '{\\~A}', u'\u04cb': '{\\CYRCHLDSC}', u'\xd3': "{\\'O}", u'\u015e': '{\\c S}', u'\xe3': '{\\~a}', u'\u016e': '{\\r U}', u'\xf3': "{\\'o}", u'\u017e': '{\\v z}', u'\u040b': '{\\CYRTSHE}', u'\u0490': '{\\CYRGUP}', u'\u0119': '{\\k e}', u'\u041b': '{\\CYRL}', u'\u04a0': '{\\CYRKBEAK}', u'\u042b': '{\\CYRERY}', u'\u04b0': '{\\CYRYHCRS}', u'\u0e37': '{\\textbaht}', u'\u0139': "{\\'L}", u'\xb8': '{\\c\\ }', u'\u043b': '{\\cyrl}', u'\u04c0': '{\\CYRpalochka}', u'\xc8': '{\\`E}', u'\u044b': '{\\cyrery}', u'\u0159': '{\\v r}', u'\u045b': '{\\cyrtshe}', 
u'\u04e0': '{\\CYRABHDZE}', u'\u25e6': '{\\textopenbullet}', u'\xe8': '{\\`e}', u'\u046b': '{\\cyrbyus}', u'\u0179': "{\\'Z}", u'\u0406': '{\\CYRII}', u'\u0491': '{\\cyrgup}', u'\u2193': '{\\textdownarrow}', u'\u2192': '{\\textrightarrow}', u'\u0416': '{\\CYRZH}', u'\u0118': '{\\k E}', u'\u201e': '{\\quotedblbase}', u'\u04a1': '{\\cyrkbeak}', u'\u0426': '{\\CYRC}', u'\u04b1': '{\\cyryhcrs}', u'\u0436': '{\\cyrzh}', u'\xb9': '{\\textonesuperior}', u'\u0446': '{\\cyrc}', u'\xc9': "{\\'E}", u'\u0148': '{\\v n}', u'\u204e': '{\\textasteriskcentered}', u'\u0456': '{\\cyrii}', u'\xd9': '{\\`U}', u'\u0158': '{\\v R}', u'\u04e1': '{\\cyrabhdze}', u'\xe9': "{\\'e}", u'\xf9': '{\\`u}', u'\u0178': '{\\"Y}', u'\u0401': '{\\CYRYO}', u'\u0103': '{\\u a}', u'\u0411': '{\\CYRB}', u'\u0496': '{\\CYRZHDSC}', u'\u2019': '{\\textquoteright}', u'\u0421': '{\\CYRS}', u'\u04a6': '{\\CYRPHK}', u'\u0431': '{\\cyrb}', u'\u04b6': '{\\CYRCHRDSC}', u'\u0441': '{\\cyrs}', u'\u0143': "{\\'N}", u'\u04c6': '{\\cyrldsc}', u'\u02ca': '{\\textasciitilde}', u'\xce': '{\\^I}', u'\u0451': '{\\cyryo}', u'\u0163': '{\\c t}', u'\xee': '{\\^\\i}', u'\u0497': '{\\cyrzhdsc}', u'\u011a': '{\\v E}', u'\u041c': '{\\CYRM}', u'\u04a7': '{\\cyrphk}', u'\u042c': '{\\CYRSFTSN}', u'\u04b7': '{\\cyrchrdsc}', u'\u013a': "{\\'l}", u'\u043c': '{\\cyrm}', u'\u2044': '{\\textfractionsolidus}', u'\u04c7': '{\\CYRNHK}', u'\u044c': '{\\cyrsftsn}', u'\xcf': '{\\"I}', u'\u015a': "{\\'S}", u'\xef': '{\\"\\i}', u'\u017a': "{\\'z}", u'\xff': '{\\"y}', u'\u0105': '{\\k a}', u'\u0407': '{\\CYRYI}', u'\u048c': '{\\CYRSEMISFTSN}', u'\u0417': '{\\CYRZ}', u'\u049c': '{\\CYRKVCRS}', u'\u0427': '{\\CYRCH}', u'\u2329': '{\\textlangle}', u'\u04ac': '{\\CYRTDSC}', u'\u0437': '{\\cyrz}', u'\u04bc': '{\\CYRABHCH}', u'\u20a1': '{\\textcolonmonetary}', u'\xc4': '{\\"A}', u'\u0447': '{\\cyrch}', u'\u04cc': '{\\cyrchldsc}', u'\u0155': "{\\'r}", u'\xd4': '{\\^O}', u'\u0457': '{\\cyryi}', u'\u0165': '{\\v t}', u'\xe4': '{\\"a}', u'\u20a9': 
'{\\textwon}', u'\xf4': '{\\^o}', u'\u0402': '{\\CYRDJE}', u'\u0104': '{\\k A}', u'\u048d': '{\\cyrsemisftsn}', u'\u0412': '{\\CYRV}', u'\u201a': '{\\quotesinglbase}', u'\u049d': '{\\cyrkvcrs}', u'\u20b1': '{\\textpeso}', u'\u0422': '{\\CYRT}', u'\u04ad': '{\\cyrtdsc}', u'\u0432': '{\\cyrv}', u'\xb5': '{\\textmu}', u'\u04bd': '{\\cyrabhch}', u'\u0442': '{\\cyrt}', u'\xc5': '{\\r A}', u'\u0144': "{\\'n}", u'\u04cd': '{\\CYRMDSC}', u'\u0452': '{\\cyrdje}', u'\xd5': '{\\~O}', u'\u0154': "{\\'R}", u'\u0462': '{\\CYRYAT}', u'\xe5': '{\\r a}', u'\u0164': '{\\v T}', u'\u0472': '{\\CYRFITA}', u'\xf5': '{\\~o}', u'\u010f': '{\\v d}', u'\u0492': '{\\CYRGHCRS}', u'\u041d': '{\\CYRN}', u'\u011f': '{\\u g}', u'\u04a2': '{\\CYRNDSC}', u'\u042d': '{\\CYREREV}', u'\u04b2': '{\\CYRHDSC}', u'\u043d': '{\\cyrn}', u'\xca': '{\\^E}', u'\u044d': '{\\cyrerev}', u'\xda': "{\\'U}", u'\u015f': '{\\c s}', u'\u20a4': '{\\textlira}', u'\xea': '{\\^e}', u'\u016f': '{\\r u}', u'\xfa': "{\\'u}" # Items to add at a latter date (check first) # u'\u0000': r'{$\alpha$}', # u'\u0000': r'{$\beta$}', # u'\u0000': r'{$\gamma$}', # u'\u0000': r'{$\delta$}', # u'\u0000': r'{$\epsilon$}', # u'\u0000': r'{$\varepsilon$}', # u'\u0000': r'{$\zeta$}', # u'\u0000': r'{$\eta$}', # u'\u0000': r'{$\theta$}', # u'\u0000': r'{$\vartheta$}', # u'\u0000': r'{$\iota$}', # u'\u0000': r'{$\kappa$}', # u'\u0000': r'{$\lambda$}', # u'\u0000': r'{$\mu$}', # u'\u0000': r'{$\xi$}', # u'\u0000': r'{$\pi$}', # u'\u0000': r'{$\varpi$}', # u'\u0000': r'{$\rho$}', # u'\u0000': r'{$\varrho$}', # u'\u0000': r'{$\sigma$}', # u'\u0000': r'{$\varsigma$}', # u'\u0000': r'{$\tau$}', # u'\u0000': r'{$\upsilon$}', # u'\u0000': r'{$\phi$}', # u'\u0000': r'{$\varphi$}', # u'\u0000': r'{$\psi$}', # u'\u0000': r'{$\omega$}', # u'\u0000': r'{$\Gamma$}', # u'\u0000': r'{$\Delta$}', # u'\u0000': r'{$\Theta$}', # u'\u0000': r'{$\Lambda$}', # u'\u0000': r'{$\Xi$}', # u'\u0000': r'{$\Pi$}', # u'\u0000': r'{$\Sigma$}', # u'\u0000': r'{$\Upsilon$}', # 
u'\u0000': r'{$\Phi$}', # u'\u0000': r'{$\Psi$}', # u'\u0000': r'{$\Omega$}', } entity_mapping = { '&mdash;':'{---}', '&ndash;':'{--}', '"':'{"}', } class BibTeX: def __init__(self): self.rep_utf8 = MReplace(utf8enc2latex_mapping) self.rep_ent = MReplace(entity_mapping) # Set default conversion to ASCII BibTeX self.ascii_bibtex = True # This substitution is based on the description of cite key restrictions at # http://bibdesk.sourceforge.net/manual/BibDesk%20Help_2.html self.invalid_cit = re.compile(u'[ "@\',\\#}{~%&$^]') self.upper = re.compile(u'[' + string.uppercase.decode(preferred_encoding) + u']') self.escape = re.compile(u'[~#&%_]') def ValidateCitationKey(self, text): """ Removes characters not allowed in BibTeX keys """ return self.invalid_cit.sub(u'', text) def braceUppercase(self, text): """ Convert uppercase letters to bibtex encoded uppercase """ return self.upper.sub(lambda m: u'{%s}' % m.group(), text) def resolveEntities(self, text): return self.rep_ent.mreplace(text) def resolveUnicode(self, text): # UTF-8 text as entry text = self.rep_utf8.mreplace(text) return text.replace(u'$}{$', u'') def escapeSpecialCharacters(self, text): """ Latex escaping some (not all) special characters """ text.replace('\\', '\\\\') return self.escape.sub(lambda m: u'\\%s' % m.group(), text) # Calibre functions: Option to go to official ASCII Bibtex or unofficial UTF-8 def utf8ToBibtex(self, text): """ Go from an unicode entry to ASCII Bibtex format without encoding """ if len(text) == 0: return '' text.replace('\\', '\\\\') text = self.resolveEntities(text) if self.ascii_bibtex : text = self.resolveUnicode(text) return self.escapeSpecialCharacters(text) def bibtex_author_format(self, item): """ Format authors for Bibtex compliance (get a list as input) """ return self.utf8ToBibtex(u' and '.join([author for author in item])) def stripUnmatchedSyntax(self, text, open_character, close_character): """ Strips unmatched BibTeX syntax """ stack = [] assert len(open_character) 
== 1 and len(close_character) == 1 remove = [] for i, ch in enumerate(text): if ch == open_character: stack.append(i) elif ch == close_character: try: stack.pop() except IndexError: # Remove unmatched closing char remove.append(i) # Remove unmatched opening chars remove.extend(stack) if remove: text = list(text) for i in sorted(remove, reverse=True): text.pop(i) text = ''.join(text) return text
gpl-3.0
fkazimierczak/flask
examples/minitwit/test_minitwit.py
156
4798
# -*- coding: utf-8 -*-
"""
    MiniTwit Tests
    ~~~~~~~~~~~~~~

    Tests the MiniTwit application.

    :copyright: (c) 2015 by Armin Ronacher.
    :license: BSD, see LICENSE for more details.
"""
import os
import tempfile

import pytest

import minitwit


@pytest.fixture
def client():
    """Yield a test client backed by a fresh temporary database.

    Modernized from ``request.addfinalizer`` to a ``yield`` fixture: the
    statements after ``yield`` run as teardown, which is the idiomatic pytest
    form and removes the need for the ``request`` fixture argument.
    """
    db_fd, minitwit.app.config['DATABASE'] = tempfile.mkstemp()
    client = minitwit.app.test_client()
    with minitwit.app.app_context():
        minitwit.init_db()

    yield client

    # Get rid of the database again after each test.
    os.close(db_fd)
    os.unlink(minitwit.app.config['DATABASE'])


def register(client, username, password, password2=None, email=None):
    """Helper function to register a user"""
    if password2 is None:
        password2 = password
    if email is None:
        email = username + '@example.com'
    return client.post('/register', data={
        'username': username,
        'password': password,
        'password2': password2,
        'email': email,
    }, follow_redirects=True)


def login(client, username, password):
    """Helper function to login"""
    return client.post('/login', data={
        'username': username,
        'password': password
    }, follow_redirects=True)


def register_and_login(client, username, password):
    """Registers and logs in in one go"""
    register(client, username, password)
    return login(client, username, password)


def logout(client):
    """Helper function to logout"""
    return client.get('/logout', follow_redirects=True)


def add_message(client, text):
    """Records a message"""
    rv = client.post('/add_message', data={'text': text},
                     follow_redirects=True)
    if text:
        assert b'Your message was recorded' in rv.data
    return rv


def test_register(client):
    """Make sure registering works"""
    rv = register(client, 'user1', 'default')
    assert b'You were successfully registered ' \
        b'and can login now' in rv.data
    rv = register(client, 'user1', 'default')
    assert b'The username is already taken' in rv.data
    rv = register(client, '', 'default')
    assert b'You have to enter a username' in rv.data
    rv = register(client, 'meh', '')
    assert b'You have to enter a password' in rv.data
    rv = register(client, 'meh', 'x', 'y')
    assert b'The two passwords do not match' in rv.data
    rv = register(client, 'meh', 'foo', email='broken')
    assert b'You have to enter a valid email address' in rv.data


def test_login_logout(client):
    """Make sure logging in and logging out works"""
    rv = register_and_login(client, 'user1', 'default')
    assert b'You were logged in' in rv.data
    rv = logout(client)
    assert b'You were logged out' in rv.data
    rv = login(client, 'user1', 'wrongpassword')
    assert b'Invalid password' in rv.data
    rv = login(client, 'user2', 'wrongpassword')
    assert b'Invalid username' in rv.data


def test_message_recording(client):
    """Check if adding messages works"""
    register_and_login(client, 'foo', 'default')
    add_message(client, 'test message 1')
    add_message(client, '<test message 2>')
    rv = client.get('/')
    assert b'test message 1' in rv.data
    assert b'&lt;test message 2&gt;' in rv.data


def test_timelines(client):
    """Make sure that timelines work"""
    register_and_login(client, 'foo', 'default')
    add_message(client, 'the message by foo')
    logout(client)
    register_and_login(client, 'bar', 'default')
    add_message(client, 'the message by bar')
    rv = client.get('/public')
    assert b'the message by foo' in rv.data
    assert b'the message by bar' in rv.data

    # bar's timeline should just show bar's message
    rv = client.get('/')
    assert b'the message by foo' not in rv.data
    assert b'the message by bar' in rv.data

    # now let's follow foo
    rv = client.get('/foo/follow', follow_redirects=True)
    assert b'You are now following &#34;foo&#34;' in rv.data

    # we should now see foo's message
    rv = client.get('/')
    assert b'the message by foo' in rv.data
    assert b'the message by bar' in rv.data

    # but on the user's page we only want the user's message
    rv = client.get('/bar')
    assert b'the message by foo' not in rv.data
    assert b'the message by bar' in rv.data
    rv = client.get('/foo')
    assert b'the message by foo' in rv.data
    assert b'the message by bar' not in rv.data

    # now unfollow and check if that worked
    rv = client.get('/foo/unfollow', follow_redirects=True)
    assert b'You are no longer following &#34;foo&#34;' in rv.data
    rv = client.get('/')
    assert b'the message by foo' not in rv.data
    assert b'the message by bar' in rv.data
bsd-3-clause
quoniammm/happy-machine-learning
Udacity-ML/boston_housing-master_0/visuals.py
6
5008
###########################################
# Suppress matplotlib user warnings
# Necessary for newer version of matplotlib
import warnings
warnings.filterwarnings("ignore", category=UserWarning, module="matplotlib")
#
# Display inline matplotlib plots with IPython
from IPython import get_ipython
get_ipython().run_line_magic('matplotlib', 'inline')
###########################################

import matplotlib.pyplot as pl
import numpy as np
from sklearn.model_selection import learning_curve, validation_curve
from sklearn.tree import DecisionTreeRegressor
from sklearn.model_selection import ShuffleSplit, train_test_split


def ModelLearning(X, y):
    """ Calculates the performance of several models with varying sizes of training data.
        The learning and testing scores for each model are then plotted. """

    # Create 10 cross-validation sets for training and testing
    cv = ShuffleSplit(n_splits=10, test_size=0.2, random_state=0)

    # Generate the training set sizes increasing by 50
    train_sizes = np.rint(np.linspace(1, X.shape[0] * 0.8 - 1, 9)).astype(int)

    # Create the figure window
    fig = pl.figure(figsize=(10, 7))

    # Create three different models based on max_depth
    for k, depth in enumerate([1, 3, 6, 10]):

        # Create a Decision tree regressor at max_depth = depth
        regressor = DecisionTreeRegressor(max_depth=depth)

        # Calculate the training and testing scores
        sizes, train_scores, test_scores = learning_curve(regressor, X, y, \
            cv=cv, train_sizes=train_sizes, scoring='r2')

        # Find the mean and standard deviation for smoothing
        train_std = np.std(train_scores, axis=1)
        train_mean = np.mean(train_scores, axis=1)
        test_std = np.std(test_scores, axis=1)
        test_mean = np.mean(test_scores, axis=1)

        # Subplot the learning curve
        ax = fig.add_subplot(2, 2, k + 1)
        ax.plot(sizes, train_mean, 'o-', color='r', label='Training Score')
        ax.plot(sizes, test_mean, 'o-', color='g', label='Testing Score')
        ax.fill_between(sizes, train_mean - train_std, \
            train_mean + train_std, alpha=0.15, color='r')
        ax.fill_between(sizes, test_mean - test_std, \
            test_mean + test_std, alpha=0.15, color='g')

        # Labels
        ax.set_title('max_depth = %s' % (depth))
        ax.set_xlabel('Number of Training Points')
        ax.set_ylabel('Score')
        ax.set_xlim([0, X.shape[0] * 0.8])
        ax.set_ylim([-0.05, 1.05])

    # Visual aesthetics
    ax.legend(bbox_to_anchor=(1.05, 2.05), loc='lower left', borderaxespad=0.)
    fig.suptitle('Decision Tree Regressor Learning Performances',
                 fontsize=16, y=1.03)
    fig.tight_layout()
    fig.show()


def ModelComplexity(X, y):
    """ Calculates the performance of the model as model complexity increases.
        The learning and testing errors rates are then plotted. """

    # Create 10 cross-validation sets for training and testing
    cv = ShuffleSplit(n_splits=10, test_size=0.2, random_state=0)

    # Vary the max_depth parameter from 1 to 10
    max_depth = np.arange(1, 11)

    # Calculate the training and testing scores
    train_scores, test_scores = validation_curve(DecisionTreeRegressor(), X, y, \
        param_name="max_depth", param_range=max_depth, cv=cv, scoring='r2')

    # Find the mean and standard deviation for smoothing
    train_mean = np.mean(train_scores, axis=1)
    train_std = np.std(train_scores, axis=1)
    test_mean = np.mean(test_scores, axis=1)
    test_std = np.std(test_scores, axis=1)

    # Plot the validation curve
    pl.figure(figsize=(7, 5))
    pl.title('Decision Tree Regressor Complexity Performance')
    pl.plot(max_depth, train_mean, 'o-', color='r', label='Training Score')
    pl.plot(max_depth, test_mean, 'o-', color='g', label='Validation Score')
    pl.fill_between(max_depth, train_mean - train_std, \
        train_mean + train_std, alpha=0.15, color='r')
    pl.fill_between(max_depth, test_mean - test_std, \
        test_mean + test_std, alpha=0.15, color='g')

    # Visual aesthetics
    pl.legend(loc='lower right')
    pl.xlabel('Maximum Depth')
    pl.ylabel('Score')
    pl.ylim([-0.05, 1.05])
    pl.show()


def PredictTrials(X, y, fitter, data):
    """ Performs trials of fitting and predicting data. """

    # Store the predicted prices
    prices = []

    for k in range(10):
        # Split the data
        X_train, X_test, y_train, y_test = train_test_split(X, y, \
            test_size=0.2, random_state=k)

        # Fit the data
        reg = fitter(X_train, y_train)

        # Make a prediction
        pred = reg.predict([data[0]])[0]
        prices.append(pred)

        # Result
        # BUG FIX: the Python 2 ``print`` statements used here were a
        # SyntaxError on Python 3, even though the module already imports
        # from ``sklearn.model_selection`` (scikit-learn >= 0.18).  The
        # parenthesized single-argument form works on both Python 2 and 3.
        print("Trial {}: ${:,.2f}".format(k + 1, pred))

    # Display price range
    print("\nRange in prices: ${:,.2f}".format(max(prices) - min(prices)))
mit
ZENGXH/scikit-learn
examples/applications/plot_stock_market.py
227
8284
""" ======================================= Visualizing the stock market structure ======================================= This example employs several unsupervised learning techniques to extract the stock market structure from variations in historical quotes. The quantity that we use is the daily variation in quote price: quotes that are linked tend to cofluctuate during a day. .. _stock_market: Learning a graph structure -------------------------- We use sparse inverse covariance estimation to find which quotes are correlated conditionally on the others. Specifically, sparse inverse covariance gives us a graph, that is a list of connection. For each symbol, the symbols that it is connected too are those useful to explain its fluctuations. Clustering ---------- We use clustering to group together quotes that behave similarly. Here, amongst the :ref:`various clustering techniques <clustering>` available in the scikit-learn, we use :ref:`affinity_propagation` as it does not enforce equal-size clusters, and it can choose automatically the number of clusters from the data. Note that this gives us a different indication than the graph, as the graph reflects conditional relations between variables, while the clustering reflects marginal properties: variables clustered together can be considered as having a similar impact at the level of the full stock market. Embedding in 2D space --------------------- For visualization purposes, we need to lay out the different symbols on a 2D canvas. For this we use :ref:`manifold` techniques to retrieve 2D embedding. 
Visualization ------------- The output of the 3 models are combined in a 2D graph where nodes represents the stocks and edges the: - cluster labels are used to define the color of the nodes - the sparse covariance model is used to display the strength of the edges - the 2D embedding is used to position the nodes in the plan This example has a fair amount of visualization-related code, as visualization is crucial here to display the graph. One of the challenge is to position the labels minimizing overlap. For this we use an heuristic based on the direction of the nearest neighbor along each axis. """ print(__doc__) # Author: Gael Varoquaux gael.varoquaux@normalesup.org # License: BSD 3 clause import datetime import numpy as np import matplotlib.pyplot as plt from matplotlib import finance from matplotlib.collections import LineCollection from sklearn import cluster, covariance, manifold ############################################################################### # Retrieve the data from Internet # Choose a time period reasonnably calm (not too long ago so that we get # high-tech firms, and before the 2008 crash) d1 = datetime.datetime(2003, 1, 1) d2 = datetime.datetime(2008, 1, 1) # kraft symbol has now changed from KFT to MDLZ in yahoo symbol_dict = { 'TOT': 'Total', 'XOM': 'Exxon', 'CVX': 'Chevron', 'COP': 'ConocoPhillips', 'VLO': 'Valero Energy', 'MSFT': 'Microsoft', 'IBM': 'IBM', 'TWX': 'Time Warner', 'CMCSA': 'Comcast', 'CVC': 'Cablevision', 'YHOO': 'Yahoo', 'DELL': 'Dell', 'HPQ': 'HP', 'AMZN': 'Amazon', 'TM': 'Toyota', 'CAJ': 'Canon', 'MTU': 'Mitsubishi', 'SNE': 'Sony', 'F': 'Ford', 'HMC': 'Honda', 'NAV': 'Navistar', 'NOC': 'Northrop Grumman', 'BA': 'Boeing', 'KO': 'Coca Cola', 'MMM': '3M', 'MCD': 'Mc Donalds', 'PEP': 'Pepsi', 'MDLZ': 'Kraft Foods', 'K': 'Kellogg', 'UN': 'Unilever', 'MAR': 'Marriott', 'PG': 'Procter Gamble', 'CL': 'Colgate-Palmolive', 'GE': 'General Electrics', 'WFC': 'Wells Fargo', 'JPM': 'JPMorgan Chase', 'AIG': 'AIG', 'AXP': 'American 
express', 'BAC': 'Bank of America', 'GS': 'Goldman Sachs', 'AAPL': 'Apple', 'SAP': 'SAP', 'CSCO': 'Cisco', 'TXN': 'Texas instruments', 'XRX': 'Xerox', 'LMT': 'Lookheed Martin', 'WMT': 'Wal-Mart', 'WBA': 'Walgreen', 'HD': 'Home Depot', 'GSK': 'GlaxoSmithKline', 'PFE': 'Pfizer', 'SNY': 'Sanofi-Aventis', 'NVS': 'Novartis', 'KMB': 'Kimberly-Clark', 'R': 'Ryder', 'GD': 'General Dynamics', 'RTN': 'Raytheon', 'CVS': 'CVS', 'CAT': 'Caterpillar', 'DD': 'DuPont de Nemours'} symbols, names = np.array(list(symbol_dict.items())).T quotes = [finance.quotes_historical_yahoo(symbol, d1, d2, asobject=True) for symbol in symbols] open = np.array([q.open for q in quotes]).astype(np.float) close = np.array([q.close for q in quotes]).astype(np.float) # The daily variations of the quotes are what carry most information variation = close - open ############################################################################### # Learn a graphical structure from the correlations edge_model = covariance.GraphLassoCV() # standardize the time series: using correlations rather than covariance # is more efficient for structure recovery X = variation.copy().T X /= X.std(axis=0) edge_model.fit(X) ############################################################################### # Cluster using affinity propagation _, labels = cluster.affinity_propagation(edge_model.covariance_) n_labels = labels.max() for i in range(n_labels + 1): print('Cluster %i: %s' % ((i + 1), ', '.join(names[labels == i]))) ############################################################################### # Find a low-dimension embedding for visualization: find the best position of # the nodes (the stocks) on a 2D plane # We use a dense eigen_solver to achieve reproducibility (arpack is # initiated with random vectors that we don't control). In addition, we # use a large number of neighbors to capture the large-scale structure. 
node_position_model = manifold.LocallyLinearEmbedding( n_components=2, eigen_solver='dense', n_neighbors=6) embedding = node_position_model.fit_transform(X.T).T ############################################################################### # Visualization plt.figure(1, facecolor='w', figsize=(10, 8)) plt.clf() ax = plt.axes([0., 0., 1., 1.]) plt.axis('off') # Display a graph of the partial correlations partial_correlations = edge_model.precision_.copy() d = 1 / np.sqrt(np.diag(partial_correlations)) partial_correlations *= d partial_correlations *= d[:, np.newaxis] non_zero = (np.abs(np.triu(partial_correlations, k=1)) > 0.02) # Plot the nodes using the coordinates of our embedding plt.scatter(embedding[0], embedding[1], s=100 * d ** 2, c=labels, cmap=plt.cm.spectral) # Plot the edges start_idx, end_idx = np.where(non_zero) #a sequence of (*line0*, *line1*, *line2*), where:: # linen = (x0, y0), (x1, y1), ... (xm, ym) segments = [[embedding[:, start], embedding[:, stop]] for start, stop in zip(start_idx, end_idx)] values = np.abs(partial_correlations[non_zero]) lc = LineCollection(segments, zorder=0, cmap=plt.cm.hot_r, norm=plt.Normalize(0, .7 * values.max())) lc.set_array(values) lc.set_linewidths(15 * values) ax.add_collection(lc) # Add a label to each node. 
The challenge here is that we want to # position the labels to avoid overlap with other labels for index, (name, label, (x, y)) in enumerate( zip(names, labels, embedding.T)): dx = x - embedding[0] dx[index] = 1 dy = y - embedding[1] dy[index] = 1 this_dx = dx[np.argmin(np.abs(dy))] this_dy = dy[np.argmin(np.abs(dx))] if this_dx > 0: horizontalalignment = 'left' x = x + .002 else: horizontalalignment = 'right' x = x - .002 if this_dy > 0: verticalalignment = 'bottom' y = y + .002 else: verticalalignment = 'top' y = y - .002 plt.text(x, y, name, size=10, horizontalalignment=horizontalalignment, verticalalignment=verticalalignment, bbox=dict(facecolor='w', edgecolor=plt.cm.spectral(label / float(n_labels)), alpha=.6)) plt.xlim(embedding[0].min() - .15 * embedding[0].ptp(), embedding[0].max() + .10 * embedding[0].ptp(),) plt.ylim(embedding[1].min() - .03 * embedding[1].ptp(), embedding[1].max() + .03 * embedding[1].ptp()) plt.show()
bsd-3-clause
ifduyue/sentry
src/sentry/filters/manager.py
10
1101
from __future__ import absolute_import, print_function

__all__ = ['FilterManager', 'FilterNotRegistered']


class FilterNotRegistered(Exception):
    """Raised when a filter id is looked up but nothing is registered for it."""
    pass


# TODO(dcramer): a lot of these managers are very similar and should abstracted
# into some kind of base class
class FilterManager(object):
    """Registry mapping a filter class's ``id`` attribute to the class."""

    def __init__(self):
        self.__values = {}

    def __iter__(self):
        # Previously ``six.itervalues(self.__values)``.  ``iter(dict.values())``
        # behaves identically on both Python 2 and 3, which removed the
        # module's only use of six (the now-dead ``import six`` was dropped —
        # this is the whole module, so the import is provably unused).
        return iter(self.__values.values())

    def all(self):
        """Return an iterator over all registered filter classes."""
        return iter(self)

    def get(self, id):
        """Return the filter class registered under ``id``.

        Raises ``FilterNotRegistered`` if no such filter exists.
        """
        try:
            cls = self.__values[id]
        except KeyError:
            raise FilterNotRegistered(id)
        return cls

    def exists(self, id):
        """Return True if a filter is registered under ``id``."""
        return id in self.__values

    def register(self, cls):
        """Register ``cls`` under its ``cls.id``."""
        self.__values[cls.id] = cls

    def unregister(self, cls):
        """Unregister ``cls``; a missing registration is silently ignored."""
        try:
            if self.__values[cls.id] != cls:
                # dont allow unregistering of arbitrary provider
                raise FilterNotRegistered(cls.id)
        except KeyError:
            # we gracefully handle a missing provider
            return
        del self.__values[cls.id]
bsd-3-clause
redhat-openstack/django_openstack_auth
openstack_auth/backend.py
2
10356
#    Licensed under the Apache License, Version 2.0 (the "License");
#    you may not use this file except in compliance with the License.
#    You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS,
#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
#    implied.
#    See the License for the specific language governing permissions and
#    limitations under the License.

""" Module defining the Django auth backend class for the Keystone API. """

import datetime
import logging

import pytz

from django.conf import settings
from django.utils.translation import ugettext_lazy as _

from keystoneclient import exceptions as keystone_exceptions

from openstack_auth import exceptions
from openstack_auth import user as auth_user
from openstack_auth import utils


LOG = logging.getLogger(__name__)

# Attribute name under which the scoped keystone client is cached on the
# request object (see the end of ``authenticate``).
KEYSTONE_CLIENT_ATTR = "_keystoneclient"


class KeystoneBackend(object):
    """Django authentication backend for use with ``django.contrib.auth``."""

    def __init__(self):
        # Lazily populated by the ``auth_plugins`` property below.
        self._auth_plugins = None

    @property
    def auth_plugins(self):
        """Instantiate and cache the configured authentication plugins.

        Reads ``settings.AUTHENTICATION_PLUGINS`` (dotted-path strings),
        defaulting to the password and token plugins shipped with
        openstack_auth.
        """
        if self._auth_plugins is None:
            plugins = getattr(
                settings,
                'AUTHENTICATION_PLUGINS',
                ['openstack_auth.plugin.password.PasswordPlugin',
                 'openstack_auth.plugin.token.TokenPlugin'])

            self._auth_plugins = [utils.import_string(p)() for p in plugins]

        return self._auth_plugins

    def check_auth_expiry(self, auth_ref, margin=None):
        """Raise ``KeystoneAuthException`` if ``auth_ref`` is expired.

        ``margin`` is forwarded to ``utils.is_token_valid``; returns True when
        the token is still valid.
        """
        if not utils.is_token_valid(auth_ref, margin):
            msg = _("The authentication token issued by the Identity service "
                    "has expired.")
            LOG.warning("The authentication token issued by the Identity "
                        "service appears to have expired before it was "
                        "issued. This may indicate a problem with either your "
                        "server or client configuration.")
            raise exceptions.KeystoneAuthException(msg)
        return True

    def get_user(self, user_id):
        """Returns the current user from the session data.

        If authenticated, this return the user object based on the user ID
        and session data.

        Note: this required monkey-patching the ``contrib.auth`` middleware
        to make the ``request`` object available to the auth backend class.
        """
        if (hasattr(self, 'request') and
                user_id == self.request.session["user_id"]):
            token = self.request.session['token']
            endpoint = self.request.session['region_endpoint']
            services_region = self.request.session['services_region']
            user = auth_user.create_user_from_token(self.request, token,
                                                    endpoint, services_region)
            return user
        else:
            return None

    def authenticate(self, auth_url=None, **kwargs):
        """Authenticates a user via the Keystone Identity API."""
        LOG.debug('Beginning user authentication')

        if not auth_url:
            auth_url = settings.OPENSTACK_KEYSTONE_URL

        auth_url = utils.fix_auth_url_version(auth_url)

        # Find the first plugin that can build an unscoped auth object from
        # the supplied credentials; the for/else fires when none can.
        for plugin in self.auth_plugins:
            unscoped_auth = plugin.get_plugin(auth_url=auth_url, **kwargs)

            if unscoped_auth:
                break
        else:
            msg = _('No authentication backend could be determined to '
                    'handle the provided credentials.')
            LOG.warn('No authentication backend could be determined to '
                     'handle the provided credentials. This is likely a '
                     'configuration error that should be addressed.')
            raise exceptions.KeystoneAuthException(msg)

        session = utils.get_session()
        keystone_client_class = utils.get_keystone_client().Client

        try:
            unscoped_auth_ref = unscoped_auth.get_access(session)
        except keystone_exceptions.ConnectionRefused as exc:
            LOG.error(str(exc))
            msg = _('Unable to establish connection to keystone endpoint.')
            raise exceptions.KeystoneAuthException(msg)
        except (keystone_exceptions.Unauthorized,
                keystone_exceptions.Forbidden,
                keystone_exceptions.NotFound) as exc:
            LOG.debug(str(exc))
            raise exceptions.KeystoneAuthException(_('Invalid credentials.'))
        except (keystone_exceptions.ClientException,
                keystone_exceptions.AuthorizationFailure) as exc:
            msg = _("An error occurred authenticating. "
                    "Please try again later.")
            LOG.debug(str(exc))
            raise exceptions.KeystoneAuthException(msg)

        # Check expiry for our unscoped auth ref.
        self.check_auth_expiry(unscoped_auth_ref)

        # NOTE(review): ``plugin`` deliberately carries over from the loop
        # above — it is the plugin that produced ``unscoped_auth``.
        projects = plugin.list_projects(session,
                                        unscoped_auth,
                                        unscoped_auth_ref)

        # Attempt to scope only to enabled projects
        projects = [project for project in projects if project.enabled]

        # Abort if there are no projects for this user
        if not projects:
            msg = _('You are not authorized for any projects.')
            raise exceptions.KeystoneAuthException(msg)

        # the recent project id a user might have set in a cookie
        recent_project = None
        request = kwargs.get('request')

        if request:
            # Grab recent_project found in the cookie, try to scope
            # to the last project used.
            recent_project = request.COOKIES.get('recent_project')

        # if a most recent project was found, try using it first
        if recent_project:
            for pos, project in enumerate(projects):
                if project.id == recent_project:
                    # move recent project to the beginning
                    projects.pop(pos)
                    projects.insert(0, project)
                    break

        # Try scoping to each project in order, keeping the first that works;
        # the for/else fires when none of them could be scoped to.
        for project in projects:
            token = unscoped_auth_ref.auth_token
            scoped_auth = utils.get_token_auth_plugin(auth_url,
                                                      token=token,
                                                      project_id=project.id)

            try:
                scoped_auth_ref = scoped_auth.get_access(session)
            except (keystone_exceptions.ClientException,
                    keystone_exceptions.AuthorizationFailure):
                pass
            else:
                break
        else:
            msg = _("Unable to authenticate to any available projects.")
            raise exceptions.KeystoneAuthException(msg)

        # Check expiry for our new scoped token.
        self.check_auth_expiry(scoped_auth_ref)

        interface = getattr(settings, 'OPENSTACK_ENDPOINT_TYPE', 'public')

        # If we made it here we succeeded. Create our User!
        unscoped_token = unscoped_auth_ref.auth_token
        user = auth_user.create_user_from_token(
            request,
            auth_user.Token(scoped_auth_ref, unscoped_token=unscoped_token),
            scoped_auth_ref.service_catalog.url_for(endpoint_type=interface))

        if request is not None:
            request.session['unscoped_token'] = unscoped_token
            request.user = user

            # Cap the Django session lifetime at the remaining token lifetime.
            timeout = getattr(settings, "SESSION_TIMEOUT", 3600)
            token_life = user.token.expires - datetime.datetime.now(pytz.utc)
            # NOTE(review): ``timedelta.seconds`` ignores the ``days``
            # component — a token valid for more than a day yields a
            # too-short session.  ``total_seconds()`` looks intended;
            # confirm before changing.
            session_time = min(timeout, token_life.seconds)
            request.session.set_expiry(session_time)

            scoped_client = keystone_client_class(session=session,
                                                  auth=scoped_auth)

            # Support client caching to save on auth calls.
            setattr(request, KEYSTONE_CLIENT_ATTR, scoped_client)

        LOG.debug('Authentication completed.')
        return user

    def get_group_permissions(self, user, obj=None):
        """Returns an empty set since Keystone doesn't support "groups"."""
        # Keystone V3 added "groups". The Auth token response includes the
        # roles from the user's Group assignment. It should be fine just
        # returning an empty set here.
        return set()

    def get_all_permissions(self, user, obj=None):
        """Returns a set of permission strings that the user has.

        This permission available to the user is derived from the user's
        Keystone "roles".

        The permissions are returned as ``"openstack.{{ role.name }}"``.
        """
        if user.is_anonymous() or obj is not None:
            return set()

        # TODO(gabrielhurley): Integrate policy-driven RBAC
        # when supported by Keystone.
        role_perms = set(["openstack.roles.%s" % role['name'].lower()
                          for role in user.roles])

        services = []
        for service in user.service_catalog:
            try:
                service_type = service['type']
            except KeyError:
                continue
            service_regions = [utils.get_endpoint_region(endpoint)
                               for endpoint in service.get('endpoints', [])]
            if user.services_region in service_regions:
                services.append(service_type.lower())
        service_perms = set(["openstack.services.%s" % service
                             for service in services])
        return role_perms | service_perms

    def has_perm(self, user, perm, obj=None):
        """Returns True if the given user has the specified permission."""
        if not user.is_active:
            return False
        return perm in self.get_all_permissions(user, obj)

    def has_module_perms(self, user, app_label):
        """Returns True if user has any permissions in the given app_label.

        Currently this matches for the app_label ``"openstack"``.
        """
        if not user.is_active:
            return False
        for perm in self.get_all_permissions(user):
            # perm is "openstack.roles.x" / "openstack.services.x"; the
            # prefix before the first '.' is the app label.
            if perm[:perm.index('.')] == app_label:
                return True
        return False
apache-2.0
buddylindsey/photo-blogger
photob/settings.py
1
6132
# Django settings for photob project. import os ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) path = lambda *a: os.path.join(ROOT, *a) DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', 'your_email@example.com'), ) MANAGERS = ADMINS from postgresify import postgresify DATABASES = postgresify() try: if(os.environ['TRAVIS'] == 'true' ): DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "NAME": "photob", "USER": "postgres", "PASSWORD": "", "HOST": "localhost", "PORT": "", } } except: pass # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # In a Windows environment this must be set to your system time zone. TIME_ZONE = 'America/Chicago' # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en-us' SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True # If you set this to False, Django will not format dates, numbers and # calendars according to the current locale. USE_L10N = True # If you set this to False, Django will not use timezone-aware datetimes. USE_TZ = True # Absolute filesystem path to the directory that will hold user-uploaded files. # Example: "/home/media/media.lawrence.com/media/" MEDIA_ROOT = '' # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash. # Examples: "http://media.lawrence.com/media/", "http://example.com/media/" MEDIA_URL = 'http://dash-media.s3.amazonaws.com' # Absolute path to the directory static files should be collected to. # Don't put anything in this directory yourself; store your static files # in apps' "static/" subdirectories and in STATICFILES_DIRS. 
# Example: "/home/media/media.lawrence.com/static/" STATIC_ROOT = '' # URL prefix for static files. # Example: "http://media.lawrence.com/static/" STATIC_URL = '/static/' # Additional locations of static files STATICFILES_DIRS = ( # Put strings here, like "/home/html/static" or "C:/www/django/static". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. "%s/static" % path('.'), ) # List of finder classes that know how to find static files in # various locations. STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ) # Make this unique, and don't share it with anybody. SECRET_KEY = '8lt)s)y@k&amp;gy-7j6&amp;ds3nm2ho$a&amp;c6mh=ak*-w%lrwu-lfq$%k' # List of callables that know how to import templates from various sources. TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', # 'django.template.loaders.eggs.Loader', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', # Uncomment the next line for simple clickjacking protection: # 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'photob.urls' # Python dotted path to the WSGI application used by Django's runserver. WSGI_APPLICATION = 'photob.wsgi.application' TEMPLATE_DIRS = ( # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. 
"%s/templates" % path('.'), ) INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', # Uncomment the next line to enable the admin: 'django.contrib.admin', # Uncomment the next line to enable admin documentation: # 'django.contrib.admindocs', 'social_auth', 'accounts', 'image', 'home', 'storages' ) AWS_STORAGE_BUCKET_NAME = 'dash-media' STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage' DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage' # django-social-auth settings AUTHENTICATION_BACKENDS = ( 'social_auth.backends.facebook.FacebookBackend', 'django.contrib.auth.backends.ModelBackend', ) SOCIAL_AUTH_ENABLED_BACKENDS = ('github',) SOCIAL_AUTH_DEFAULT_USERNAME = 'new_social_auth_user' FACEBOOK_APP_ID = os.environ['FACEBOOK_APP_ID'] FACEBOOK_API_SECRET = os.environ['FACEBOOK_APP_SECRET'] FACEBOOK_EXTENDED_PERMISSIONS = ['email'] LOGIN_URL = '/accounts/login/' LOGIN_REDIRECT_URL = '/' LOGIN_ERROR_URL = '/accounts/login-error/' # A sample logging configuration. The only tangible logging # performed by this configuration is to send an email to # the site admins on every HTTP 500 error when DEBUG=False. # See http://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, } } try: from local_settings import * except ImportError: pass
bsd-2-clause
laplaceliu/readthedocs.org
readthedocs/builds/filters.py
33
1312
from django.utils.translation import ugettext_lazy as _ import django_filters from readthedocs.builds import constants from readthedocs.builds.models import Build, Version ANY_REPO = ( ('', _('Any')), ) BUILD_TYPES = ANY_REPO + constants.BUILD_TYPES class VersionSlugFilter(django_filters.FilterSet): class Meta: model = Version fields = { 'identifier': ['icontains'], 'slug': ['icontains'], } class VersionFilter(django_filters.FilterSet): project = django_filters.CharFilter(name='project__slug') # Allow filtering on slug= or version= slug = django_filters.CharFilter(label=_("Name"), name='slug', lookup_type='exact') version = django_filters.CharFilter(label=_("Version"), name='slug', lookup_type='exact') class Meta: model = Version fields = ['project', 'slug', 'version'] class BuildFilter(django_filters.FilterSet): date = django_filters.DateRangeFilter(label=_("Build Date"), name="date", lookup_type='range') type = django_filters.ChoiceFilter(label=_("Build Type"), choices=BUILD_TYPES) class Meta: model = Build fields = ['type', 'date', 'success']
mit
yuchangfu/pythonfun
flaskenv/Lib/site-packages/pip/_vendor/html5lib/trie/py.py
1323
1775
from __future__ import absolute_import, division, unicode_literals from pip._vendor.six import text_type from bisect import bisect_left from ._base import Trie as ABCTrie class Trie(ABCTrie): def __init__(self, data): if not all(isinstance(x, text_type) for x in data.keys()): raise TypeError("All keys must be strings") self._data = data self._keys = sorted(data.keys()) self._cachestr = "" self._cachepoints = (0, len(data)) def __contains__(self, key): return key in self._data def __len__(self): return len(self._data) def __iter__(self): return iter(self._data) def __getitem__(self, key): return self._data[key] def keys(self, prefix=None): if prefix is None or prefix == "" or not self._keys: return set(self._keys) if prefix.startswith(self._cachestr): lo, hi = self._cachepoints start = i = bisect_left(self._keys, prefix, lo, hi) else: start = i = bisect_left(self._keys, prefix) keys = set() if start == len(self._keys): return keys while self._keys[i].startswith(prefix): keys.add(self._keys[i]) i += 1 self._cachestr = prefix self._cachepoints = (start, i) return keys def has_keys_with_prefix(self, prefix): if prefix in self._data: return True if prefix.startswith(self._cachestr): lo, hi = self._cachepoints i = bisect_left(self._keys, prefix, lo, hi) else: i = bisect_left(self._keys, prefix) if i == len(self._keys): return False return self._keys[i].startswith(prefix)
gpl-3.0
edgarRd/incubator-airflow
airflow/contrib/operators/bigquery_to_gcs.py
7
4491
# -*- coding: utf-8 -*- # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from airflow.contrib.hooks.bigquery_hook import BigQueryHook from airflow.models import BaseOperator from airflow.utils.decorators import apply_defaults class BigQueryToCloudStorageOperator(BaseOperator): """ Transfers a BigQuery table to a Google Cloud Storage bucket. .. seealso:: For more details about these parameters: https://cloud.google.com/bigquery/docs/reference/v2/jobs :param source_project_dataset_table: The dotted (<project>.|<project>:)<dataset>.<table> BigQuery table to use as the source data. If <project> is not included, project will be the project defined in the connection json. (templated) :type source_project_dataset_table: string :param destination_cloud_storage_uris: The destination Google Cloud Storage URI (e.g. gs://some-bucket/some-file.txt). (templated) Follows convention defined here: https://cloud.google.com/bigquery/exporting-data-from-bigquery#exportingmultiple :type destination_cloud_storage_uris: list :param compression: Type of compression to use. :type compression: string :param export_format: File format to export. :type field_delimiter: string :param field_delimiter: The delimiter to use when extracting to a CSV. 
:type field_delimiter: string :param print_header: Whether to print a header for a CSV file extract. :type print_header: boolean :param bigquery_conn_id: reference to a specific BigQuery hook. :type bigquery_conn_id: string :param delegate_to: The account to impersonate, if any. For this to work, the service account making the request must have domain-wide delegation enabled. :type delegate_to: string :param labels: a dictionary containing labels for the job/query, passed to BigQuery :type labels: dict """ template_fields = ('source_project_dataset_table', 'destination_cloud_storage_uris', 'labels') template_ext = ('.sql',) ui_color = '#e4e6f0' @apply_defaults def __init__(self, source_project_dataset_table, destination_cloud_storage_uris, compression='NONE', export_format='CSV', field_delimiter=',', print_header=True, bigquery_conn_id='bigquery_default', delegate_to=None, labels=None, *args, **kwargs): super(BigQueryToCloudStorageOperator, self).__init__(*args, **kwargs) self.source_project_dataset_table = source_project_dataset_table self.destination_cloud_storage_uris = destination_cloud_storage_uris self.compression = compression self.export_format = export_format self.field_delimiter = field_delimiter self.print_header = print_header self.bigquery_conn_id = bigquery_conn_id self.delegate_to = delegate_to self.labels = labels def execute(self, context): self.log.info('Executing extract of %s into: %s', self.source_project_dataset_table, self.destination_cloud_storage_uris) hook = BigQueryHook(bigquery_conn_id=self.bigquery_conn_id, delegate_to=self.delegate_to) conn = hook.get_conn() cursor = conn.cursor() cursor.run_extract( self.source_project_dataset_table, self.destination_cloud_storage_uris, self.compression, self.export_format, self.field_delimiter, self.print_header, self.labels)
apache-2.0
scripnichenko/nova
nova/tests/unit/api/openstack/compute/test_admin_actions.py
35
3992
# Copyright 2011 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nova.api.openstack.compute import admin_actions as admin_actions_v21 from nova.api.openstack.compute.legacy_v2.contrib import admin_actions \ as admin_actions_v2 from nova import exception from nova import test from nova.tests.unit.api.openstack.compute import admin_only_action_common from nova.tests.unit.api.openstack import fakes class AdminActionsTestV21(admin_only_action_common.CommonTests): admin_actions = admin_actions_v21 _api_version = '2.1' def setUp(self): super(AdminActionsTestV21, self).setUp() self.controller = self.admin_actions.AdminActionsController() self.compute_api = self.controller.compute_api def _fake_controller(*args, **kwargs): return self.controller self.stubs.Set(self.admin_actions, 'AdminActionsController', _fake_controller) self.mox.StubOutWithMock(self.compute_api, 'get') def test_actions(self): actions = ['_reset_network', '_inject_network_info'] method_translations = {'_reset_network': 'reset_network', '_inject_network_info': 'inject_network_info'} self._test_actions(actions, method_translations) def test_actions_with_non_existed_instance(self): actions = ['_reset_network', '_inject_network_info'] self._test_actions_with_non_existed_instance(actions) def test_actions_with_locked_instance(self): actions = ['_reset_network', '_inject_network_info'] method_translations = {'_reset_network': 'reset_network', '_inject_network_info': 'inject_network_info'} 
self._test_actions_with_locked_instance(actions, method_translations=method_translations) class AdminActionsTestV2(AdminActionsTestV21): admin_actions = admin_actions_v2 _api_version = '2' class AdminActionsPolicyEnforcementV21(test.NoDBTestCase): def setUp(self): super(AdminActionsPolicyEnforcementV21, self).setUp() self.controller = admin_actions_v21.AdminActionsController() self.req = fakes.HTTPRequest.blank('') self.fake_id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' def common_policy_check(self, rule, fun_name, *arg, **kwarg): self.policy.set_rules(rule) func = getattr(self.controller, fun_name) exc = self.assertRaises( exception.PolicyNotAuthorized, func, *arg, **kwarg) self.assertEqual( "Policy doesn't allow %s to be performed." % rule.popitem()[0], exc.format_message()) def test_reset_network_policy_failed(self): rule = {"os_compute_api:os-admin-actions:reset_network": "project:non_fake"} self.common_policy_check( rule, "_reset_network", self.req, self.fake_id, body={}) def test_inject_network_info_policy_failed(self): rule = {"os_compute_api:os-admin-actions:inject_network_info": "project:non_fake"} self.common_policy_check( rule, "_inject_network_info", self.req, self.fake_id, body={}) def test_reset_state_policy_failed(self): rule = {"os_compute_api:os-admin-actions:reset_state": "project:non_fake"} self.common_policy_check( rule, "_reset_state", self.req, self.fake_id, body={"os-resetState": {"state": "active"}})
apache-2.0
pattisdr/osf.io
api_tests/comments/views/test_comment_report_detail.py
17
16596
from django.utils import timezone import mock import pytest from addons.wiki.tests.factories import WikiFactory from api.base.settings.defaults import API_BASE from api_tests import utils as test_utils from osf.models import Guid from osf_tests.factories import ( ProjectFactory, AuthUserFactory, CommentFactory, ) @pytest.mark.django_db class ReportDetailViewMixin(object): @pytest.fixture() def user(self): return AuthUserFactory() @pytest.fixture() def contributor(self): return AuthUserFactory() @pytest.fixture() def non_contrib(self): return AuthUserFactory() @pytest.fixture() def payload(self, user): return { 'data': { 'id': user._id, 'type': 'comment_reports', 'attributes': { 'category': 'spam', 'message': 'Spam is delicious.' } } } # check if all necessary features are setup in subclass @pytest.fixture() def private_project(self): raise NotImplementedError @pytest.fixture() def comment(self): raise NotImplementedError @pytest.fixture() def private_url(self): raise NotImplementedError @pytest.fixture() def public_project(self): raise NotImplementedError @pytest.fixture() def public_comment(self): raise NotImplementedError @pytest.fixture() def public_url(self): raise NotImplementedError def test_private_node_view_report_detail_auth_misc( self, app, user, contributor, non_contrib, private_url): # test_private_node_reporting_contributor_can_view_report_detail res = app.get(private_url, auth=user.auth) assert res.status_code == 200 assert res.json['data']['id'] == user._id # test_private_node_reported_contributor_cannot_view_report_detail res = app.get(private_url, auth=contributor.auth, expect_errors=True) assert res.status_code == 403 # test_private_node_logged_in_non_contrib_cannot_view_report_detail res = app.get(private_url, auth=non_contrib.auth, expect_errors=True) assert res.status_code == 403 # test_private_node_logged_out_contributor_cannot_view_report_detail res = app.get(private_url, expect_errors=True) assert res.status_code == 401 def 
test_public_node_view_report_detail_auth_misc( self, app, user, contributor, non_contrib, public_url): # test_public_node_reporting_contributor_can_view_report_detail res = app.get(public_url, auth=user.auth) assert res.status_code == 200 assert res.json['data']['id'] == user._id # test_public_node_reported_contributor_cannot_view_report_detail res = app.get(public_url, auth=contributor.auth, expect_errors=True) assert res.status_code == 403 # test_public_node_logged_in_non_contrib_cannot_view_other_users_report_detail res = app.get(public_url, auth=non_contrib.auth, expect_errors=True) assert res.status_code == 403 # test_public_node_logged_out_contributor_cannot_view_report_detail res = app.get(public_url, expect_errors=True) assert res.status_code == 401 def test_public_node_logged_in_non_contrib_reporter_can_view_own_report_detail( self, app, non_contrib, public_comment): public_comment.reports[non_contrib._id] = { 'category': 'spam', 'text': 'This is spam', 'date': timezone.now(), 'retracted': False, } public_comment.save() url = '/{}comments/{}/reports/{}/'.format( API_BASE, public_comment._id, non_contrib._id) res = app.get(url, auth=non_contrib.auth) assert res.status_code == 200 def test_private_node_update_report_detail_auth_misc( self, app, user, contributor, non_contrib, payload, private_url): # test_private_node_reported_contributor_cannot_update_report_detail res = app.put_json_api( private_url, payload, auth=contributor.auth, expect_errors=True ) assert res.status_code == 403 # test_private_node_logged_in_non_contrib_cannot_update_report_detail res = app.put_json_api( private_url, payload, auth=non_contrib.auth, expect_errors=True ) assert res.status_code == 403 # test_private_node_logged_out_contributor_cannot_update_detail res = app.put_json_api(private_url, payload, expect_errors=True) assert res.status_code == 401 # test_private_node_reporting_contributor_can_update_report_detail res = app.put_json_api(private_url, payload, auth=user.auth) assert 
res.status_code == 200 assert res.json['data']['id'] == user._id assert res.json['data']['attributes']['message'] == payload['data']['attributes']['message'] def test_public_node_update_report_detail_auth_misc( self, app, user, contributor, non_contrib, payload, public_url): # test_public_node_reported_contributor_cannot_update_detail res = app.put_json_api( public_url, payload, auth=contributor.auth, expect_errors=True ) assert res.status_code == 403 # test_public_node_logged_in_non_contrib_cannot_update_other_users_report_detail res = app.put_json_api( public_url, payload, auth=non_contrib.auth, expect_errors=True ) assert res.status_code == 403 # test_public_node_logged_out_contributor_cannot_update_report_detail res = app.put_json_api(public_url, payload, expect_errors=True) assert res.status_code == 401 # test_public_node_reporting_contributor_can_update_detail res = app.put_json_api(public_url, payload, auth=user.auth) assert res.status_code == 200 assert res.json['data']['id'] == user._id assert res.json['data']['attributes']['message'] == payload['data']['attributes']['message'] def test_public_node_logged_in_non_contrib_reporter_can_update_own_report_detail( self, app, non_contrib, public_comment): public_comment.reports[non_contrib._id] = { 'category': 'spam', 'text': 'This is spam', 'date': timezone.now(), 'retracted': False, } public_comment.save() url = '/{}comments/{}/reports/{}/'.format( API_BASE, public_comment._id, non_contrib._id) payload = { 'data': { 'id': non_contrib._id, 'type': 'comment_reports', 'attributes': { 'category': 'spam', 'message': 'Spam is delicious.' 
} } } res = app.put_json_api(url, payload, auth=non_contrib.auth) assert res.status_code == 200 assert res.json['data']['attributes']['message'] == payload['data']['attributes']['message'] def test_private_node_delete_report_detail_auth_misc( self, app, user, contributor, non_contrib, private_project, private_url, comment ): # test_private_node_reported_contributor_cannot_delete_report_detail res = app.delete_json_api( private_url, auth=contributor.auth, expect_errors=True ) assert res.status_code == 403 # test_private_node_logged_in_non_contrib_cannot_delete_report_detail res = app.delete_json_api( private_url, auth=non_contrib.auth, expect_errors=True ) assert res.status_code == 403 # test_private_node_logged_out_contributor_cannot_delete_detail res = app.delete_json_api(private_url, expect_errors=True) assert res.status_code == 401 # test_private_node_reporting_contributor_can_delete_report_detail comment_new = CommentFactory.build( node=private_project, user=contributor, target=comment.target ) comment_new.reports = {user._id: { 'category': 'spam', 'text': 'This is spam', 'date': timezone.now(), 'retracted': False, }} comment_new.save() url = '/{}comments/{}/reports/{}/'.format( API_BASE, comment_new._id, user._id) res = app.delete_json_api(url, auth=user.auth) assert res.status_code == 204 def test_public_node_delete_report_detail_auth_misc( self, app, user, contributor, non_contrib, public_url): # test_public_node_reported_contributor_cannot_delete_detail res = app.delete_json_api( public_url, auth=contributor.auth, expect_errors=True) assert res.status_code == 403 # test_public_node_logged_in_non_contrib_cannot_delete_other_users_report_detail res = app.delete_json_api( public_url, auth=non_contrib.auth, expect_errors=True) assert res.status_code == 403 # test_public_node_logged_out_contributor_cannot_delete_report_detail res = app.delete_json_api(public_url, expect_errors=True) assert res.status_code == 401 # 
test_public_node_reporting_contributor_can_delete_detail res = app.delete_json_api(public_url, auth=user.auth) assert res.status_code == 204 def test_public_node_logged_in_non_contrib_reporter_can_delete_own_report_detail( self, app, non_contrib, public_comment): public_comment.reports[non_contrib._id] = { 'category': 'spam', 'text': 'This is spam', 'date': timezone.now(), 'retracted': False, } public_comment.save() url = '/{}comments/{}/reports/{}/'.format( API_BASE, public_comment._id, non_contrib._id) res = app.delete_json_api(url, auth=non_contrib.auth) assert res.status_code == 204 class TestReportDetailView(ReportDetailViewMixin): # private_project_comment_reports @pytest.fixture() def private_project(self, user, contributor): private_project = ProjectFactory.create(is_public=False, creator=user) private_project.add_contributor(contributor=contributor, save=True) return private_project @pytest.fixture() def comment(self, user, contributor, private_project): comment = CommentFactory(node=private_project, user=contributor) comment.reports = {user._id: { 'category': 'spam', 'text': 'This is spam', 'date': timezone.now(), 'retracted': False, }} comment.save() return comment @pytest.fixture() def private_url(self, user, comment): return '/{}comments/{}/reports/{}/'.format( API_BASE, comment._id, user._id) # public_project_comment_reports @pytest.fixture() def public_project(self, user, contributor): public_project = ProjectFactory.create(is_public=True, creator=user) public_project.add_contributor(contributor=contributor, save=True) return public_project @pytest.fixture() def public_comment(self, user, contributor, public_project): public_comment = CommentFactory(node=public_project, user=contributor) public_comment.reports = {user._id: { 'category': 'spam', 'text': 'This is spam', 'date': timezone.now(), 'retracted': False, }} public_comment.save() return public_comment @pytest.fixture() def public_url(self, user, public_comment): return 
'/{}comments/{}/reports/{}/'.format( API_BASE, public_comment._id, user._id) class TestFileCommentReportDetailView(ReportDetailViewMixin): # private_project_comment_reports @pytest.fixture() def private_project(self, user, contributor): private_project = ProjectFactory.create(is_public=False, creator=user) private_project.add_contributor(contributor=contributor, save=True) return private_project @pytest.fixture() def file(self, user, private_project): return test_utils.create_test_file(private_project, user) @pytest.fixture() def comment(self, user, contributor, private_project, file): comment = CommentFactory( node=private_project, target=file.get_guid(), user=contributor) comment.reports = {user._id: { 'category': 'spam', 'text': 'This is spam', 'date': timezone.now(), 'retracted': False, }} comment.save() return comment @pytest.fixture() def private_url(self, user, comment): return '/{}comments/{}/reports/{}/'.format( API_BASE, comment._id, user._id) # public_project_comment_reports @pytest.fixture() def public_project(self, user, contributor): public_project = ProjectFactory.create(is_public=True, creator=user) public_project.add_contributor(contributor=contributor, save=True) return public_project @pytest.fixture() def public_file(self, user, public_project): return test_utils.create_test_file(public_project, user) @pytest.fixture() def public_comment(self, user, contributor, public_project, public_file): public_comment = CommentFactory( node=public_project, target=public_file.get_guid(), user=contributor) public_comment.reports = {user._id: { 'category': 'spam', 'text': 'This is spam', 'date': timezone.now(), 'retracted': False, }} public_comment.save() return public_comment @pytest.fixture() def public_url(self, user, public_comment): return '/{}comments/{}/reports/{}/'.format( API_BASE, public_comment._id, user._id) class TestWikiCommentReportDetailView(ReportDetailViewMixin): # private_project_comment_reports @pytest.fixture() def private_project(self, 
user, contributor): private_project = ProjectFactory.create(is_public=False, creator=user) private_project.add_contributor(contributor=contributor, save=True) return private_project @pytest.fixture() def wiki(self, user, private_project): with mock.patch('osf.models.AbstractNode.update_search'): return WikiFactory( user=user, node=private_project, ) @pytest.fixture() def comment(self, user, contributor, private_project, wiki): comment = CommentFactory( node=private_project, target=Guid.load(wiki._id), user=contributor ) comment.reports = {user._id: { 'category': 'spam', 'text': 'This is spam', 'date': timezone.now(), 'retracted': False, }} comment.save() return comment @pytest.fixture() def private_url(self, user, comment): return '/{}comments/{}/reports/{}/'.format( API_BASE, comment._id, user._id) # public_project_comment_reports @pytest.fixture() def public_project(self, user, contributor): public_project = ProjectFactory.create(is_public=True, creator=user) public_project.add_contributor(contributor=contributor, save=True) return public_project @pytest.fixture() def public_wiki(self, user, public_project): with mock.patch('osf.models.AbstractNode.update_search'): return WikiFactory( user=user, node=public_project, ) @pytest.fixture() def public_comment(self, user, contributor, public_project, public_wiki): public_comment = CommentFactory( node=public_project, target=Guid.load(public_wiki._id), user=contributor ) public_comment.reports = {user._id: { 'category': 'spam', 'text': 'This is spam', 'date': timezone.now(), 'retracted': False, }} public_comment.save() return public_comment @pytest.fixture() def public_url(self, user, public_comment): return '/{}comments/{}/reports/{}/'.format( API_BASE, public_comment._id, user._id)
apache-2.0
lyft/incubator-airflow
airflow/contrib/hooks/gcs_hook.py
5
1514
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """ This module is deprecated. Please use `airflow.providers.google.cloud.hooks.gcs`. """ import warnings from airflow.providers.google.cloud.hooks.gcs import GCSHook warnings.warn( "This module is deprecated. Please use `airflow.providers.google.cloud.hooks.gcs`.", DeprecationWarning, stacklevel=2 ) class GoogleCloudStorageHook(GCSHook): """ This class is deprecated. Please use `airflow.providers.google.cloud.hooks.gcs.GCSHook`. """ def __init__(self, *args, **kwargs): warnings.warn( "This class is deprecated. Please use `airflow.providers.google.cloud.hooks.gcs.GCSHook`.", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs)
apache-2.0
jjas0nn/solvem
tensorflow/lib/python2.7/site-packages/setuptools/sandbox.py
86
14317
import os import sys import tempfile import operator import functools import itertools import re import contextlib import pickle import six from six.moves import builtins, map import pkg_resources if sys.platform.startswith('java'): import org.python.modules.posix.PosixModule as _os else: _os = sys.modules[os.name] try: _file = file except NameError: _file = None _open = open from distutils.errors import DistutilsError from pkg_resources import working_set __all__ = [ "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup", ] def _execfile(filename, globals, locals=None): """ Python 3 implementation of execfile. """ mode = 'rb' with open(filename, mode) as stream: script = stream.read() # compile() function in Python 2.6 and 3.1 requires LF line endings. if sys.version_info[:2] < (2, 7) or sys.version_info[:2] >= (3, 0) and sys.version_info[:2] < (3, 2): script = script.replace(b'\r\n', b'\n') script = script.replace(b'\r', b'\n') if locals is None: locals = globals code = compile(script, filename, 'exec') exec(code, globals, locals) @contextlib.contextmanager def save_argv(repl=None): saved = sys.argv[:] if repl is not None: sys.argv[:] = repl try: yield saved finally: sys.argv[:] = saved @contextlib.contextmanager def save_path(): saved = sys.path[:] try: yield saved finally: sys.path[:] = saved @contextlib.contextmanager def override_temp(replacement): """ Monkey-patch tempfile.tempdir with replacement, ensuring it exists """ if not os.path.isdir(replacement): os.makedirs(replacement) saved = tempfile.tempdir tempfile.tempdir = replacement try: yield finally: tempfile.tempdir = saved @contextlib.contextmanager def pushd(target): saved = os.getcwd() os.chdir(target) try: yield saved finally: os.chdir(saved) class UnpickleableException(Exception): """ An exception representing another Exception that could not be pickled. """ @staticmethod def dump(type, exc): """ Always return a dumped (pickled) type and exc. 
If exc can't be pickled, wrap it in UnpickleableException first. """ try: return pickle.dumps(type), pickle.dumps(exc) except Exception: # get UnpickleableException inside the sandbox from setuptools.sandbox import UnpickleableException as cls return cls.dump(cls, cls(repr(exc))) class ExceptionSaver: """ A Context Manager that will save an exception, serialized, and restore it later. """ def __enter__(self): return self def __exit__(self, type, exc, tb): if not exc: return # dump the exception self._saved = UnpickleableException.dump(type, exc) self._tb = tb # suppress the exception return True def resume(self): "restore and re-raise any exception" if '_saved' not in vars(self): return type, exc = map(pickle.loads, self._saved) six.reraise(type, exc, self._tb) @contextlib.contextmanager def save_modules(): """ Context in which imported modules are saved. Translates exceptions internal to the context into the equivalent exception outside the context. """ saved = sys.modules.copy() with ExceptionSaver() as saved_exc: yield saved sys.modules.update(saved) # remove any modules imported since del_modules = ( mod_name for mod_name in sys.modules if mod_name not in saved # exclude any encodings modules. 
See #285 and not mod_name.startswith('encodings.') ) _clear_modules(del_modules) saved_exc.resume() def _clear_modules(module_names): for mod_name in list(module_names): del sys.modules[mod_name] @contextlib.contextmanager def save_pkg_resources_state(): saved = pkg_resources.__getstate__() try: yield saved finally: pkg_resources.__setstate__(saved) @contextlib.contextmanager def setup_context(setup_dir): temp_dir = os.path.join(setup_dir, 'temp') with save_pkg_resources_state(): with save_modules(): hide_setuptools() with save_path(): with save_argv(): with override_temp(temp_dir): with pushd(setup_dir): # ensure setuptools commands are available __import__('setuptools') yield def _needs_hiding(mod_name): """ >>> _needs_hiding('setuptools') True >>> _needs_hiding('pkg_resources') True >>> _needs_hiding('setuptools_plugin') False >>> _needs_hiding('setuptools.__init__') True >>> _needs_hiding('distutils') True >>> _needs_hiding('os') False >>> _needs_hiding('Cython') True """ pattern = re.compile(r'(setuptools|pkg_resources|distutils|Cython)(\.|$)') return bool(pattern.match(mod_name)) def hide_setuptools(): """ Remove references to setuptools' modules from sys.modules to allow the invocation to import the most appropriate setuptools. This technique is necessary to avoid issues such as #315 where setuptools upgrading itself would fail to find a function declared in the metadata. 
""" modules = filter(_needs_hiding, sys.modules) _clear_modules(modules) def run_setup(setup_script, args): """Run a distutils setup script, sandboxed in its directory""" setup_dir = os.path.abspath(os.path.dirname(setup_script)) with setup_context(setup_dir): try: sys.argv[:] = [setup_script] + list(args) sys.path.insert(0, setup_dir) # reset to include setup dir, w/clean callback list working_set.__init__() working_set.callbacks.append(lambda dist: dist.activate()) # __file__ should be a byte string on Python 2 (#712) dunder_file = ( setup_script if isinstance(setup_script, str) else setup_script.encode(sys.getfilesystemencoding()) ) def runner(): ns = dict(__file__=dunder_file, __name__='__main__') _execfile(setup_script, ns) DirectorySandbox(setup_dir).run(runner) except SystemExit as v: if v.args and v.args[0]: raise # Normal exit, just return class AbstractSandbox: """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" _active = False def __init__(self): self._attrs = [ name for name in dir(_os) if not name.startswith('_') and hasattr(self, name) ] def _copy(self, source): for name in self._attrs: setattr(os, name, getattr(source, name)) def run(self, func): """Run 'func' under os sandboxing""" try: self._copy(self) if _file: builtins.file = self._file builtins.open = self._open self._active = True return func() finally: self._active = False if _file: builtins.file = _file builtins.open = _open self._copy(_os) def _mk_dual_path_wrapper(name): original = getattr(_os, name) def wrap(self, src, dst, *args, **kw): if self._active: src, dst = self._remap_pair(name, src, dst, *args, **kw) return original(src, dst, *args, **kw) return wrap for name in ["rename", "link", "symlink"]: if hasattr(_os, name): locals()[name] = _mk_dual_path_wrapper(name) def _mk_single_path_wrapper(name, original=None): original = original or getattr(_os, name) def wrap(self, path, *args, **kw): if self._active: path = self._remap_input(name, path, *args, **kw) return 
original(path, *args, **kw) return wrap if _file: _file = _mk_single_path_wrapper('file', _file) _open = _mk_single_path_wrapper('open', _open) for name in [ "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat", "startfile", "mkfifo", "mknod", "pathconf", "access" ]: if hasattr(_os, name): locals()[name] = _mk_single_path_wrapper(name) def _mk_single_with_return(name): original = getattr(_os, name) def wrap(self, path, *args, **kw): if self._active: path = self._remap_input(name, path, *args, **kw) return self._remap_output(name, original(path, *args, **kw)) return original(path, *args, **kw) return wrap for name in ['readlink', 'tempnam']: if hasattr(_os, name): locals()[name] = _mk_single_with_return(name) def _mk_query(name): original = getattr(_os, name) def wrap(self, *args, **kw): retval = original(*args, **kw) if self._active: return self._remap_output(name, retval) return retval return wrap for name in ['getcwd', 'tmpnam']: if hasattr(_os, name): locals()[name] = _mk_query(name) def _validate_path(self, path): """Called to remap or validate any path, whether input or output""" return path def _remap_input(self, operation, path, *args, **kw): """Called for path inputs""" return self._validate_path(path) def _remap_output(self, operation, path): """Called for path outputs""" return self._validate_path(path) def _remap_pair(self, operation, src, dst, *args, **kw): """Called for path pairs like rename, link, and symlink operations""" return ( self._remap_input(operation + '-from', src, *args, **kw), self._remap_input(operation + '-to', dst, *args, **kw) ) if hasattr(os, 'devnull'): _EXCEPTIONS = [os.devnull,] else: _EXCEPTIONS = [] class DirectorySandbox(AbstractSandbox): """Restrict operations to a single subdirectory - pseudo-chroot""" write_ops = dict.fromkeys([ "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam", 
]) _exception_patterns = [ # Allow lib2to3 to attempt to save a pickled grammar object (#121) r'.*lib2to3.*\.pickle$', ] "exempt writing to paths that match the pattern" def __init__(self, sandbox, exceptions=_EXCEPTIONS): self._sandbox = os.path.normcase(os.path.realpath(sandbox)) self._prefix = os.path.join(self._sandbox, '') self._exceptions = [ os.path.normcase(os.path.realpath(path)) for path in exceptions ] AbstractSandbox.__init__(self) def _violation(self, operation, *args, **kw): from setuptools.sandbox import SandboxViolation raise SandboxViolation(operation, args, kw) if _file: def _file(self, path, mode='r', *args, **kw): if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): self._violation("file", path, mode, *args, **kw) return _file(path, mode, *args, **kw) def _open(self, path, mode='r', *args, **kw): if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): self._violation("open", path, mode, *args, **kw) return _open(path, mode, *args, **kw) def tmpnam(self): self._violation("tmpnam") def _ok(self, path): active = self._active try: self._active = False realpath = os.path.normcase(os.path.realpath(path)) return ( self._exempted(realpath) or realpath == self._sandbox or realpath.startswith(self._prefix) ) finally: self._active = active def _exempted(self, filepath): start_matches = ( filepath.startswith(exception) for exception in self._exceptions ) pattern_matches = ( re.match(pattern, filepath) for pattern in self._exception_patterns ) candidates = itertools.chain(start_matches, pattern_matches) return any(candidates) def _remap_input(self, operation, path, *args, **kw): """Called for path inputs""" if operation in self.write_ops and not self._ok(path): self._violation(operation, os.path.realpath(path), *args, **kw) return path def _remap_pair(self, operation, src, dst, *args, **kw): """Called for path pairs like rename, link, and symlink operations""" if not self._ok(src) or not self._ok(dst): self._violation(operation, 
src, dst, *args, **kw) return (src, dst) def open(self, file, flags, mode=0o777, *args, **kw): """Called for low-level os.open()""" if flags & WRITE_FLAGS and not self._ok(file): self._violation("os.open", file, flags, mode, *args, **kw) return _os.open(file, flags, mode, *args, **kw) WRITE_FLAGS = functools.reduce( operator.or_, [getattr(_os, a, 0) for a in "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] ) class SandboxViolation(DistutilsError): """A setup script attempted to modify the filesystem outside the sandbox""" def __str__(self): return """SandboxViolation: %s%r %s The package setup script has attempted to modify files on your system that are not within the EasyInstall build area, and has been aborted. This package cannot be safely installed by EasyInstall, and may not support alternate installation locations even if you run its setup script by hand. Please inform the package's author and the EasyInstall maintainers to find out if a fix or workaround is available.""" % self.args #
mit
303750856/linux-3.1
tools/perf/scripts/python/netdev-times.py
11271
15048
# Display a process of packets and processed time. # It helps us to investigate networking or network device. # # options # tx: show only tx chart # rx: show only rx chart # dev=: show only thing related to specified device # debug: work with debug mode. It shows buffer status. import os import sys sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from Core import * from Util import * all_event_list = []; # insert all tracepoint event related with this script irq_dic = {}; # key is cpu and value is a list which stacks irqs # which raise NET_RX softirq net_rx_dic = {}; # key is cpu and value include time of NET_RX softirq-entry # and a list which stacks receive receive_hunk_list = []; # a list which include a sequence of receive events rx_skb_list = []; # received packet list for matching # skb_copy_datagram_iovec buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and # tx_xmit_list of_count_rx_skb_list = 0; # overflow count tx_queue_list = []; # list of packets which pass through dev_queue_xmit of_count_tx_queue_list = 0; # overflow count tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit of_count_tx_xmit_list = 0; # overflow count tx_free_list = []; # list of packets which is freed # options show_tx = 0; show_rx = 0; dev = 0; # store a name of device specified by option "dev=" debug = 0; # indices of event_info tuple EINFO_IDX_NAME= 0 EINFO_IDX_CONTEXT=1 EINFO_IDX_CPU= 2 EINFO_IDX_TIME= 3 EINFO_IDX_PID= 4 EINFO_IDX_COMM= 5 # Calculate a time interval(msec) from src(nsec) to dst(nsec) def diff_msec(src, dst): return (dst - src) / 1000000.0 # Display a process of transmitting a packet def print_transmit(hunk): if dev != 0 and hunk['dev'].find(dev) < 0: return print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \ (hunk['dev'], hunk['len'], nsecs_secs(hunk['queue_t']), nsecs_nsecs(hunk['queue_t'])/1000, diff_msec(hunk['queue_t'], hunk['xmit_t']), 
diff_msec(hunk['xmit_t'], hunk['free_t'])) # Format for displaying rx packet processing PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)" PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)" PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)" PF_JOINT= " |" PF_WJOINT= " | |" PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)" PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)" PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)" PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)" PF_CONS_SKB= " | consume_skb(+%.3fmsec)" # Display a process of received packets and interrputs associated with # a NET_RX softirq def print_receive(hunk): show_hunk = 0 irq_list = hunk['irq_list'] cpu = irq_list[0]['cpu'] base_t = irq_list[0]['irq_ent_t'] # check if this hunk should be showed if dev != 0: for i in range(len(irq_list)): if irq_list[i]['name'].find(dev) >= 0: show_hunk = 1 break else: show_hunk = 1 if show_hunk == 0: return print "%d.%06dsec cpu=%d" % \ (nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu) for i in range(len(irq_list)): print PF_IRQ_ENTRY % \ (diff_msec(base_t, irq_list[i]['irq_ent_t']), irq_list[i]['irq'], irq_list[i]['name']) print PF_JOINT irq_event_list = irq_list[i]['event_list'] for j in range(len(irq_event_list)): irq_event = irq_event_list[j] if irq_event['event'] == 'netif_rx': print PF_NET_RX % \ (diff_msec(base_t, irq_event['time']), irq_event['skbaddr']) print PF_JOINT print PF_SOFT_ENTRY % \ diff_msec(base_t, hunk['sirq_ent_t']) print PF_JOINT event_list = hunk['event_list'] for i in range(len(event_list)): event = event_list[i] if event['event_name'] == 'napi_poll': print PF_NAPI_POLL % \ (diff_msec(base_t, event['event_t']), event['dev']) if i == len(event_list) - 1: print "" else: print PF_JOINT else: print PF_NET_RECV % \ (diff_msec(base_t, event['event_t']), event['skbaddr'], event['len']) if 'comm' in event.keys(): print PF_WJOINT print PF_CPY_DGRAM % \ (diff_msec(base_t, event['comm_t']), event['pid'], event['comm']) elif 'handle' in event.keys(): 
print PF_WJOINT if event['handle'] == "kfree_skb": print PF_KFREE_SKB % \ (diff_msec(base_t, event['comm_t']), event['location']) elif event['handle'] == "consume_skb": print PF_CONS_SKB % \ diff_msec(base_t, event['comm_t']) print PF_JOINT def trace_begin(): global show_tx global show_rx global dev global debug for i in range(len(sys.argv)): if i == 0: continue arg = sys.argv[i] if arg == 'tx': show_tx = 1 elif arg =='rx': show_rx = 1 elif arg.find('dev=',0, 4) >= 0: dev = arg[4:] elif arg == 'debug': debug = 1 if show_tx == 0 and show_rx == 0: show_tx = 1 show_rx = 1 def trace_end(): # order all events in time all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME], b[EINFO_IDX_TIME])) # process all events for i in range(len(all_event_list)): event_info = all_event_list[i] name = event_info[EINFO_IDX_NAME] if name == 'irq__softirq_exit': handle_irq_softirq_exit(event_info) elif name == 'irq__softirq_entry': handle_irq_softirq_entry(event_info) elif name == 'irq__softirq_raise': handle_irq_softirq_raise(event_info) elif name == 'irq__irq_handler_entry': handle_irq_handler_entry(event_info) elif name == 'irq__irq_handler_exit': handle_irq_handler_exit(event_info) elif name == 'napi__napi_poll': handle_napi_poll(event_info) elif name == 'net__netif_receive_skb': handle_netif_receive_skb(event_info) elif name == 'net__netif_rx': handle_netif_rx(event_info) elif name == 'skb__skb_copy_datagram_iovec': handle_skb_copy_datagram_iovec(event_info) elif name == 'net__net_dev_queue': handle_net_dev_queue(event_info) elif name == 'net__net_dev_xmit': handle_net_dev_xmit(event_info) elif name == 'skb__kfree_skb': handle_kfree_skb(event_info) elif name == 'skb__consume_skb': handle_consume_skb(event_info) # display receive hunks if show_rx: for i in range(len(receive_hunk_list)): print_receive(receive_hunk_list[i]) # display transmit hunks if show_tx: print " dev len Qdisc " \ " netdevice free" for i in range(len(tx_free_list)): print_transmit(tx_free_list[i]) if debug: print 
"debug buffer status" print "----------------------------" print "xmit Qdisc:remain:%d overflow:%d" % \ (len(tx_queue_list), of_count_tx_queue_list) print "xmit netdevice:remain:%d overflow:%d" % \ (len(tx_xmit_list), of_count_tx_xmit_list) print "receive:remain:%d overflow:%d" % \ (len(rx_skb_list), of_count_rx_skb_list) # called from perf, when it finds a correspoinding event def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec): if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX": return event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec) all_event_list.append(event_info) def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec): if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX": return event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec) all_event_list.append(event_info) def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec): if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX": return event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec) all_event_list.append(event_info) def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm, irq, irq_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, irq_name) all_event_list.append(event_info) def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret) all_event_list.append(event_info) def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, napi, dev_name) all_event_list.append(event_info) def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen, dev_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, skblen, dev_name) all_event_list.append(event_info) def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen, dev_name): event_info = 
(name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, skblen, dev_name) all_event_list.append(event_info) def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen, dev_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, skblen, dev_name) all_event_list.append(event_info) def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen, rc, dev_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, skblen, rc ,dev_name) all_event_list.append(event_info) def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr, protocol, location): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, protocol, location) all_event_list.append(event_info) def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr) all_event_list.append(event_info) def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, skblen) all_event_list.append(event_info) def handle_irq_handler_entry(event_info): (name, context, cpu, time, pid, comm, irq, irq_name) = event_info if cpu not in irq_dic.keys(): irq_dic[cpu] = [] irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time} irq_dic[cpu].append(irq_record) def handle_irq_handler_exit(event_info): (name, context, cpu, time, pid, comm, irq, ret) = event_info if cpu not in irq_dic.keys(): return irq_record = irq_dic[cpu].pop() if irq != irq_record['irq']: return irq_record.update({'irq_ext_t':time}) # if an irq doesn't include NET_RX softirq, drop. 
if 'event_list' in irq_record.keys(): irq_dic[cpu].append(irq_record) def handle_irq_softirq_raise(event_info): (name, context, cpu, time, pid, comm, vec) = event_info if cpu not in irq_dic.keys() \ or len(irq_dic[cpu]) == 0: return irq_record = irq_dic[cpu].pop() if 'event_list' in irq_record.keys(): irq_event_list = irq_record['event_list'] else: irq_event_list = [] irq_event_list.append({'time':time, 'event':'sirq_raise'}) irq_record.update({'event_list':irq_event_list}) irq_dic[cpu].append(irq_record) def handle_irq_softirq_entry(event_info): (name, context, cpu, time, pid, comm, vec) = event_info net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]} def handle_irq_softirq_exit(event_info): (name, context, cpu, time, pid, comm, vec) = event_info irq_list = [] event_list = 0 if cpu in irq_dic.keys(): irq_list = irq_dic[cpu] del irq_dic[cpu] if cpu in net_rx_dic.keys(): sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t'] event_list = net_rx_dic[cpu]['event_list'] del net_rx_dic[cpu] if irq_list == [] or event_list == 0: return rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time, 'irq_list':irq_list, 'event_list':event_list} # merge information realted to a NET_RX softirq receive_hunk_list.append(rec_data) def handle_napi_poll(event_info): (name, context, cpu, time, pid, comm, napi, dev_name) = event_info if cpu in net_rx_dic.keys(): event_list = net_rx_dic[cpu]['event_list'] rec_data = {'event_name':'napi_poll', 'dev':dev_name, 'event_t':time} event_list.append(rec_data) def handle_netif_rx(event_info): (name, context, cpu, time, pid, comm, skbaddr, skblen, dev_name) = event_info if cpu not in irq_dic.keys() \ or len(irq_dic[cpu]) == 0: return irq_record = irq_dic[cpu].pop() if 'event_list' in irq_record.keys(): irq_event_list = irq_record['event_list'] else: irq_event_list = [] irq_event_list.append({'time':time, 'event':'netif_rx', 'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name}) irq_record.update({'event_list':irq_event_list}) 
irq_dic[cpu].append(irq_record) def handle_netif_receive_skb(event_info): global of_count_rx_skb_list (name, context, cpu, time, pid, comm, skbaddr, skblen, dev_name) = event_info if cpu in net_rx_dic.keys(): rec_data = {'event_name':'netif_receive_skb', 'event_t':time, 'skbaddr':skbaddr, 'len':skblen} event_list = net_rx_dic[cpu]['event_list'] event_list.append(rec_data) rx_skb_list.insert(0, rec_data) if len(rx_skb_list) > buffer_budget: rx_skb_list.pop() of_count_rx_skb_list += 1 def handle_net_dev_queue(event_info): global of_count_tx_queue_list (name, context, cpu, time, pid, comm, skbaddr, skblen, dev_name) = event_info skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time} tx_queue_list.insert(0, skb) if len(tx_queue_list) > buffer_budget: tx_queue_list.pop() of_count_tx_queue_list += 1 def handle_net_dev_xmit(event_info): global of_count_tx_xmit_list (name, context, cpu, time, pid, comm, skbaddr, skblen, rc, dev_name) = event_info if rc == 0: # NETDEV_TX_OK for i in range(len(tx_queue_list)): skb = tx_queue_list[i] if skb['skbaddr'] == skbaddr: skb['xmit_t'] = time tx_xmit_list.insert(0, skb) del tx_queue_list[i] if len(tx_xmit_list) > buffer_budget: tx_xmit_list.pop() of_count_tx_xmit_list += 1 return def handle_kfree_skb(event_info): (name, context, cpu, time, pid, comm, skbaddr, protocol, location) = event_info for i in range(len(tx_queue_list)): skb = tx_queue_list[i] if skb['skbaddr'] == skbaddr: del tx_queue_list[i] return for i in range(len(tx_xmit_list)): skb = tx_xmit_list[i] if skb['skbaddr'] == skbaddr: skb['free_t'] = time tx_free_list.append(skb) del tx_xmit_list[i] return for i in range(len(rx_skb_list)): rec_data = rx_skb_list[i] if rec_data['skbaddr'] == skbaddr: rec_data.update({'handle':"kfree_skb", 'comm':comm, 'pid':pid, 'comm_t':time}) del rx_skb_list[i] return def handle_consume_skb(event_info): (name, context, cpu, time, pid, comm, skbaddr) = event_info for i in range(len(tx_xmit_list)): skb = tx_xmit_list[i] if 
skb['skbaddr'] == skbaddr: skb['free_t'] = time tx_free_list.append(skb) del tx_xmit_list[i] return def handle_skb_copy_datagram_iovec(event_info): (name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info for i in range(len(rx_skb_list)): rec_data = rx_skb_list[i] if skbaddr == rec_data['skbaddr']: rec_data.update({'handle':"skb_copy_datagram_iovec", 'comm':comm, 'pid':pid, 'comm_t':time}) del rx_skb_list[i] return
gpl-2.0
edevil/django
django/contrib/gis/gdal/prototypes/ds.py
10
4282
""" This module houses the ctypes function prototypes for OGR DataSource related data structures. OGR_Dr_*, OGR_DS_*, OGR_L_*, OGR_F_*, OGR_Fld_* routines are relevant here. """ from ctypes import c_char_p, c_double, c_int, c_long, c_void_p, POINTER from django.contrib.gis.gdal.envelope import OGREnvelope from django.contrib.gis.gdal.libgdal import lgdal from django.contrib.gis.gdal.prototypes.generation import (const_string_output, double_output, geom_output, int_output, srs_output, void_output, voidptr_output) c_int_p = POINTER(c_int) # shortcut type ### Driver Routines ### register_all = void_output(lgdal.OGRRegisterAll, [], errcheck=False) cleanup_all = void_output(lgdal.OGRCleanupAll, [], errcheck=False) get_driver = voidptr_output(lgdal.OGRGetDriver, [c_int]) get_driver_by_name = voidptr_output(lgdal.OGRGetDriverByName, [c_char_p], errcheck=False) get_driver_count = int_output(lgdal.OGRGetDriverCount, []) get_driver_name = const_string_output(lgdal.OGR_Dr_GetName, [c_void_p], decoding='ascii') ### DataSource ### open_ds = voidptr_output(lgdal.OGROpen, [c_char_p, c_int, POINTER(c_void_p)]) destroy_ds = void_output(lgdal.OGR_DS_Destroy, [c_void_p], errcheck=False) release_ds = void_output(lgdal.OGRReleaseDataSource, [c_void_p]) get_ds_name = const_string_output(lgdal.OGR_DS_GetName, [c_void_p]) get_layer = voidptr_output(lgdal.OGR_DS_GetLayer, [c_void_p, c_int]) get_layer_by_name = voidptr_output(lgdal.OGR_DS_GetLayerByName, [c_void_p, c_char_p]) get_layer_count = int_output(lgdal.OGR_DS_GetLayerCount, [c_void_p]) ### Layer Routines ### get_extent = void_output(lgdal.OGR_L_GetExtent, [c_void_p, POINTER(OGREnvelope), c_int]) get_feature = voidptr_output(lgdal.OGR_L_GetFeature, [c_void_p, c_long]) get_feature_count = int_output(lgdal.OGR_L_GetFeatureCount, [c_void_p, c_int]) get_layer_defn = voidptr_output(lgdal.OGR_L_GetLayerDefn, [c_void_p]) get_layer_srs = srs_output(lgdal.OGR_L_GetSpatialRef, [c_void_p]) get_next_feature = 
voidptr_output(lgdal.OGR_L_GetNextFeature, [c_void_p]) reset_reading = void_output(lgdal.OGR_L_ResetReading, [c_void_p], errcheck=False) test_capability = int_output(lgdal.OGR_L_TestCapability, [c_void_p, c_char_p]) get_spatial_filter = geom_output(lgdal.OGR_L_GetSpatialFilter, [c_void_p]) set_spatial_filter = void_output(lgdal.OGR_L_SetSpatialFilter, [c_void_p, c_void_p], errcheck=False) set_spatial_filter_rect = void_output(lgdal.OGR_L_SetSpatialFilterRect, [c_void_p, c_double, c_double, c_double, c_double], errcheck=False ) ### Feature Definition Routines ### get_fd_geom_type = int_output(lgdal.OGR_FD_GetGeomType, [c_void_p]) get_fd_name = const_string_output(lgdal.OGR_FD_GetName, [c_void_p]) get_feat_name = const_string_output(lgdal.OGR_FD_GetName, [c_void_p]) get_field_count = int_output(lgdal.OGR_FD_GetFieldCount, [c_void_p]) get_field_defn = voidptr_output(lgdal.OGR_FD_GetFieldDefn, [c_void_p, c_int]) ### Feature Routines ### clone_feature = voidptr_output(lgdal.OGR_F_Clone, [c_void_p]) destroy_feature = void_output(lgdal.OGR_F_Destroy, [c_void_p], errcheck=False) feature_equal = int_output(lgdal.OGR_F_Equal, [c_void_p, c_void_p]) get_feat_geom_ref = geom_output(lgdal.OGR_F_GetGeometryRef, [c_void_p]) get_feat_field_count = int_output(lgdal.OGR_F_GetFieldCount, [c_void_p]) get_feat_field_defn = voidptr_output(lgdal.OGR_F_GetFieldDefnRef, [c_void_p, c_int]) get_fid = int_output(lgdal.OGR_F_GetFID, [c_void_p]) get_field_as_datetime = int_output(lgdal.OGR_F_GetFieldAsDateTime, [c_void_p, c_int, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p] ) get_field_as_double = double_output(lgdal.OGR_F_GetFieldAsDouble, [c_void_p, c_int]) get_field_as_integer = int_output(lgdal.OGR_F_GetFieldAsInteger, [c_void_p, c_int]) get_field_as_string = const_string_output(lgdal.OGR_F_GetFieldAsString, [c_void_p, c_int]) get_field_index = int_output(lgdal.OGR_F_GetFieldIndex, [c_void_p, c_char_p]) ### Field Routines ### get_field_name = 
const_string_output(lgdal.OGR_Fld_GetNameRef, [c_void_p]) get_field_precision = int_output(lgdal.OGR_Fld_GetPrecision, [c_void_p]) get_field_type = int_output(lgdal.OGR_Fld_GetType, [c_void_p]) get_field_type_name = const_string_output(lgdal.OGR_GetFieldTypeName, [c_int]) get_field_width = int_output(lgdal.OGR_Fld_GetWidth, [c_void_p])
bsd-3-clause