repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
breathe/ansible | lib/ansible/plugins/action/raw.py | 106 | 1665 | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
import re
class ActionModule(ActionBase):
    """Action handler for the ``raw`` module.

    Executes a raw (unprocessed) command string directly on the remote
    host, bypassing the normal module subsystem.
    """

    # Nothing is copied to the remote host; the command runs as-is.
    TRANSFERS_FILES = False

    def run(self, tmp=None, task_vars=None):
        """Execute the raw command on the remote host.

        Parameters
        ----------
        tmp : str, optional
            Remote temporary directory, passed through to the low-level
            command execution.
        task_vars : dict, optional
            Task variables (not read by this handler). The default was
            changed from the mutable ``dict()`` to ``None`` so a single
            shared dict is not reused across calls.

        Returns
        -------
        dict
            The result of the low-level command execution, or
            ``dict(skipped=True)`` in check mode.
        """
        if self._play_context.check_mode:
            # in --check mode, always skip this module execution
            return dict(skipped=True)

        executable = self._task.args.get('executable')
        result = self._low_level_execute_command(self._task.args.get('_raw_params'), tmp=tmp, executable=executable)

        # for some modules (script, raw), the sudo success key
        # may leak into the stdout due to the way the sudo/su
        # command is constructed, so we filter that out here
        if result.get('stdout', '').strip().startswith('BECOME-SUCCESS-'):
            result['stdout'] = re.sub(r'^((\r)?\n)?BECOME-SUCCESS.*(\r)?\n', '', result['stdout'])

        return result
| gpl-3.0 |
marscher/mdtraj | MDTraj/core/trajectory.py | 1 | 51903 | ##############################################################################
# MDTraj: A Python Library for Loading, Saving, and Manipulating
# Molecular Dynamics Trajectories.
# Copyright 2012-2014 Stanford University and the Authors
#
# Authors: Robert McGibbon
# Contributors: Kyle A. Beauchamp, TJ Lane, Joshua Adelman, Lee-Ping Wang
#
# MDTraj is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
##############################################################################
# Imports
##############################################################################
from __future__ import print_function, division
import os
import warnings
import functools
from copy import deepcopy
import numpy as np
from mdtraj.formats import DCDTrajectoryFile
from mdtraj.formats import BINPOSTrajectoryFile
from mdtraj.formats import XTCTrajectoryFile
from mdtraj.formats import TRRTrajectoryFile
from mdtraj.formats import HDF5TrajectoryFile
from mdtraj.formats import NetCDFTrajectoryFile
from mdtraj.formats import LH5TrajectoryFile
from mdtraj.formats import PDBTrajectoryFile
from mdtraj.formats import MDCRDTrajectoryFile
from mdtraj.formats import ArcTrajectoryFile
from mdtraj.formats.prmtop import load_prmtop
from mdtraj.core.topology import Topology
from mdtraj.utils import (ensure_type, in_units_of, lengths_and_angles_to_box_vectors,
box_vectors_to_lengths_and_angles, cast_indices)
from mdtraj.utils.six.moves import xrange
from mdtraj.utils.six import PY3, string_types
from mdtraj import _rmsd
from mdtraj import _FormatRegistry
##############################################################################
# Globals
##############################################################################
__all__ = ['open', 'load', 'iterload', 'load_frame', 'Trajectory']
##############################################################################
# Utilities
##############################################################################
def _assert_files_exist(filenames):
    """Raise an IOError unless every named file exists on disk.

    Parameters
    ----------
    filenames : {str, [str]}
        A single path, or a list of paths, to check.
    """
    # normalize a single path into a one-element list
    paths = [filenames] if isinstance(filenames, string_types) else filenames
    for path in paths:
        if not (os.path.exists(path) and os.path.isfile(path)):
            raise IOError('No such file: %s' % path)
def _parse_topology(top):
    """Get the topology from an argument of indeterminate type.

    If `top` is a string, we try loading a pdb; if it's a trajectory
    we extract its topology.

    Parameters
    ----------
    top : {str, Trajectory, Topology}
        A path to a topology-bearing file, or an object that carries one.

    Returns
    -------
    topology : md.Topology

    Raises
    ------
    TypeError
        If `top` cannot be interpreted as a topology.
    """
    try:
        ext = os.path.splitext(top)[1]
    except (TypeError, AttributeError):
        # `top` is not path-like (e.g. a Trajectory or Topology object);
        # previously a bare `except:` which would also have hidden
        # unrelated errors such as KeyboardInterrupt
        ext = None

    if isinstance(top, string_types) and (ext in ['.pdb', '.h5', '.lh5']):
        # these formats carry their own topology; read one frame to get it
        _traj = load_frame(top, 0)
        topology = _traj.topology
    elif isinstance(top, string_types) and (ext == '.prmtop'):
        topology = load_prmtop(top)
    elif isinstance(top, Trajectory):
        topology = top.topology
    elif isinstance(top, Topology):
        topology = top
    else:
        raise TypeError('A topology is required. You supplied top=%s' % str(top))

    return topology
##############################################################################
# Utilities
##############################################################################
def open(filename, mode='r', force_overwrite=True, **kwargs):
    """Open a trajectory file-like object.

    This factory function returns an instance of an open file-like
    object capable of reading/writing the trajectory (depending on
    'mode'). It does not actually load the trajectory from disk or
    write anything.

    Parameters
    ----------
    filename : str
        Path to the trajectory file on disk
    mode : {'r', 'w'}
        The mode in which to open the file, either 'r' for read or 'w' for
        write.
    force_overwrite : bool
        If opened in write mode, and a file by the name of `filename` already
        exists on disk, should we overwrite it?

    Other Parameters
    ----------------
    kwargs : dict
        Other keyword parameters are passed directly to the file object

    Returns
    -------
    fileobject : object
        Open trajectory file, whose type is determined by the filename
        extension

    See Also
    --------
    load, ArcTrajectoryFile, BINPOSTrajectoryFile, DCDTrajectoryFile,
    HDF5TrajectoryFile, LH5TrajectoryFile, MDCRDTrajectoryFile,
    NetCDFTrajectoryFile, PDBTrajectoryFile, TRRTrajectoryFile,
    XTCTrajectoryFile
    """
    _, extension = os.path.splitext(filename)
    # dispatch on the file extension via the global format registry
    if extension not in _FormatRegistry.fileobjects:
        raise IOError('Sorry, no loader for filename=%s (extension=%s) '
                      'was found. I can only load files with extensions in %s'
                      % (filename, extension, _FormatRegistry.fileobjects.keys()))
    fileobject_class = _FormatRegistry.fileobjects[extension]
    return fileobject_class(filename, mode=mode, force_overwrite=force_overwrite, **kwargs)
def load_frame(filename, index, top=None, atom_indices=None):
    """Load a single frame from a trajectory file.

    Parameters
    ----------
    filename : str
        Path to the trajectory file on disk
    index : int
        Load the `index`-th frame from the specified file
    top : {str, Trajectory, Topology}
        Most trajectory formats do not contain topology information. Pass in
        either the path to a RCSB PDB file, a trajectory, or a topology to
        supply this information.
    atom_indices : array_like, optional
        If not none, then read only a subset of the atoms coordinates from the
        file. These indices are zero-based (not 1 based, as used by the PDB
        format).

    Examples
    --------
    >>> import mdtraj as md
    >>> first_frame = md.load_frame('traj.h5', 0)
    >>> print first_frame
    <mdtraj.Trajectory with 1 frames, 22 atoms>

    See Also
    --------
    load, iterload

    Returns
    -------
    trajectory : md.Trajectory
        The resulting conformation, as an md.Trajectory object containing
        a single frame.
    """
    _assert_files_exist(filename)

    _, extension = os.path.splitext(filename)
    if extension not in _FormatRegistry.loaders:
        raise IOError('Sorry, no loader for filename=%s (extension=%s) '
                      'was found. I can only load files with extensions in %s'
                      % (filename, extension, _FormatRegistry.loaders.keys()))
    loader = _FormatRegistry.loaders[extension]

    # the hdf5 and pdb loaders carry their own topology, so they do not
    # accept a `top` keyword argument
    kwargs = {'atom_indices': atom_indices}
    if loader.__name__ not in ['load_hdf5', 'load_pdb']:
        kwargs['top'] = top
    return loader(filename, frame=index, **kwargs)
def load(filename_or_filenames, discard_overlapping_frames=False, **kwargs):
    """Load a trajectory from one or more files on disk.

    This function dispatches to one of the specialized trajectory loaders based
    on the extension on the filename. Because different trajectory formats save
    different information on disk, the specific keyword argument options supported
    depend on the specific loader.

    Parameters
    ----------
    filename_or_filenames : {str, list of strings}
        Filename or list of filenames containing trajectory files of a single format.
    discard_overlapping_frames : bool, default=False
        Look for overlapping frames between the last frame of one filename and
        the first frame of a subsequent filename and discard them

    Other Parameters
    ----------------
    top : {str, Trajectory, Topology}
        Most trajectory formats do not contain topology information. Pass in
        either the path to a RCSB PDB file, a trajectory, or a topology to
        supply this information. This option is not required for the .h5, .lh5,
        and .pdb formats, which already contain topology information.
    stride : int, default=None
        Only read every stride-th frame
    atom_indices : array_like, optional
        If not none, then read only a subset of the atoms coordinates from the
        file. This may be slightly slower than the standard read because it
        requires an extra copy, but will save memory.

    See Also
    --------
    load_frame, iterload

    Examples
    --------
    >>> import mdtraj as md
    >>> traj = md.load('output.xtc', top='topology.pdb')
    >>> print traj
    <mdtraj.Trajectory with 500 frames, 423 atoms at 0x110740a90>

    >>> traj2 = md.load('output.xtc', stride=2, top='topology.pdb')
    >>> print traj2
    <mdtraj.Trajectory with 250 frames, 423 atoms at 0x11136e410>

    >>> traj3 = md.load('output.xtc', atom_indices=[0, 1], top='topology.pdb')
    >>> print traj3
    <mdtraj.Trajectory with 500 frames, 2 atoms at 0x18236e4a0>

    Returns
    -------
    trajectory : md.Trajectory
        The resulting trajectory, as an md.Trajectory object.
    """
    _assert_files_exist(filename_or_filenames)

    if "top" in kwargs:
        # pre-parse the topology once up front -- a major performance boost
        # when loading many files that share one topology
        kwargs["top"] = _parse_topology(kwargs["top"])

    if not isinstance(filename_or_filenames, string_types):
        # multiple files: they must all share one format; load each one
        # recursively and join them along the time axis
        extensions = set(os.path.splitext(filename_i)[1]
                         for filename_i in filename_or_filenames)
        if len(extensions) != 1:
            raise TypeError("All filenames must have same extension!")
        t = [load(f, **kwargs) for f in filename_or_filenames]
        # we know the topology is equal because we sent the same topology
        # kwarg in, so there's no reason to spend extra time checking
        return t[0].join(t[1:], discard_overlapping_frames=discard_overlapping_frames,
                         check_topology=False)

    filename = filename_or_filenames
    extension = os.path.splitext(filename)[1]
    try:
        loader = _FormatRegistry.loaders[extension]
    except KeyError:
        raise IOError('Sorry, no loader for filename=%s (extension=%s) '
                      'was found. I can only load files '
                      'with extensions in %s' % (filename, extension, _FormatRegistry.loaders.keys()))

    if loader.__name__ in ['load_hdf5', 'load_pdb', 'load_lh5']:
        # these formats save the topology inside the file, so the `top`
        # kwarg must be dropped to avoid a signature-binding error
        if 'top' in kwargs:
            warnings.warn('top= kwarg ignored since file contains topology information')
        kwargs.pop('top', None)

    return loader(filename, **kwargs)
def iterload(filename, chunk=100, **kwargs):
    """An iterator over a trajectory from one or more files on disk, in fragments

    This may be more memory efficient than loading an entire trajectory at
    once

    Parameters
    ----------
    filename : str
        Path to the trajectory file on disk
    chunk : int
        Number of frames to load at once from disk per iteration.

    Other Parameters
    ----------------
    top : {str, Trajectory, Topology}
        Most trajectory formats do not contain topology information. Pass in
        either the path to a RCSB PDB file, a trajectory, or a topology to
        supply this information. This option is not required for the .h5, .lh5,
        and .pdb formats, which already contain topology information.
    stride : int, default=None
        Only read every stride-th frame.
    atom_indices : array_like, optional
        If not none, then read only a subset of the atoms coordinates from the
        file. This may be slightly slower than the standard read because it
        requires an extra copy, but will save memory.

    See Also
    --------
    load, load_frame

    Examples
    --------
    >>> import mdtraj as md
    >>> for chunk in md.iterload('output.xtc', top='topology.pdb'):
    ...     print chunk
    <mdtraj.Trajectory with 100 frames, 423 atoms at 0x110740a90>
    <mdtraj.Trajectory with 100 frames, 423 atoms at 0x110740a90>
    <mdtraj.Trajectory with 100 frames, 423 atoms at 0x110740a90>
    <mdtraj.Trajectory with 100 frames, 423 atoms at 0x110740a90>
    <mdtraj.Trajectory with 100 frames, 423 atoms at 0x110740a90>
    """
    stride = kwargs.get('stride', 1)
    atom_indices = cast_indices(kwargs.get('atom_indices', None))
    # each yielded chunk must hold a whole number of strided frames
    if chunk % stride != 0:
        raise ValueError('Stride must be a divisor of chunk. stride=%d does not go '
                         'evenly into chunk=%d' % (stride, chunk))
    # NOTE(review): the loops below terminate by raising StopIteration inside
    # a generator; under PEP 479 (Python 3.7+) this becomes RuntimeError --
    # confirm the supported Python versions.
    if filename.endswith('.h5'):
        # h5 carries its own topology; an explicit top= is ignored
        if 'top' in kwargs:
            warnings.warn('top= kwarg ignored since file contains topology information')
        with HDF5TrajectoryFile(filename) as f:
            if atom_indices is None:
                topology = f.topology
            else:
                # restrict the topology to the atoms actually being read
                topology = f.topology.subset(atom_indices)
            while True:
                data = f.read(chunk*stride, stride=stride, atom_indices=atom_indices)
                if data == []:
                    raise StopIteration()
                # convert file-native units to MDTraj's nanometers, in place
                in_units_of(data.coordinates, f.distance_unit, Trajectory._distance_unit, inplace=True)
                in_units_of(data.cell_lengths, f.distance_unit, Trajectory._distance_unit, inplace=True)
                yield Trajectory(xyz=data.coordinates, topology=topology,
                                 time=data.time, unitcell_lengths=data.cell_lengths,
                                 unitcell_angles=data.cell_angles)
    if filename.endswith('.lh5'):
        # lh5 also carries its own topology
        if 'top' in kwargs:
            warnings.warn('top= kwarg ignored since file contains topology information')
        with LH5TrajectoryFile(filename) as f:
            if atom_indices is None:
                topology = f.topology
            else:
                topology = f.topology.subset(atom_indices)
            # lh5 stores no time information, so synthesize frame times
            # from a running frame counter
            ptr = 0
            while True:
                xyz = f.read(chunk*stride, stride=stride, atom_indices=atom_indices)
                if len(xyz) == 0:
                    raise StopIteration()
                in_units_of(xyz, f.distance_unit, Trajectory._distance_unit, inplace=True)
                time = np.arange(ptr, ptr+len(xyz)*stride, stride)
                ptr += len(xyz)*stride
                yield Trajectory(xyz=xyz, topology=topology, time=time)
    elif filename.endswith('.xtc'):
        # xtc needs an external topology
        topology = _parse_topology(kwargs.get('top', None))
        with XTCTrajectoryFile(filename) as f:
            while True:
                xyz, time, step, box = f.read(chunk*stride, stride=stride, atom_indices=atom_indices)
                if len(xyz) == 0:
                    raise StopIteration()
                in_units_of(xyz, f.distance_unit, Trajectory._distance_unit, inplace=True)
                in_units_of(box, f.distance_unit, Trajectory._distance_unit, inplace=True)
                trajectory = Trajectory(xyz=xyz, topology=topology, time=time)
                trajectory.unitcell_vectors = box
                yield trajectory
    elif filename.endswith('.dcd'):
        topology = _parse_topology(kwargs.get('top', None))
        with DCDTrajectoryFile(filename) as f:
            ptr = 0
            while True:
                # for reasons that I have not investigated, dcdtrajectory file chunk and stride
                # together work like this method, but HDF5/XTC do not.
                xyz, box_length, box_angle = f.read(chunk, stride=stride, atom_indices=atom_indices)
                if len(xyz) == 0:
                    raise StopIteration()
                in_units_of(xyz, f.distance_unit, Trajectory._distance_unit, inplace=True)
                in_units_of(box_length, f.distance_unit, Trajectory._distance_unit, inplace=True)
                # dcd stores no time information either; synthesize it
                time = np.arange(ptr, ptr+len(xyz)*stride, stride)
                ptr += len(xyz)*stride
                yield Trajectory(xyz=xyz, topology=topology, time=time, unitcell_lengths=box_length,
                                 unitcell_angles=box_angle)
    else:
        # fallback for formats without a streaming reader: load the whole
        # trajectory into memory, then slice it into chunks
        t = load(filename, **kwargs)
        for i in range(0, len(t), chunk):
            yield t[i:i+chunk]
class Trajectory(object):
"""Container object for a molecular dynamics trajectory
A Trajectory represents a collection of one or more molecular structures,
generally (but not necessarily) from a molecular dynamics trajectory. The
Trajectory stores a number of fields describing the system through time,
including the cartesian coordinates of each atoms (``xyz``), the topology
of the molecular system (``topology``), and information about the
unitcell if appropriate (``unitcell_vectors``, ``unitcell_length``,
``unitcell_angles``).
A Trajectory should generally be constructed by loading a file from disk.
Trajectories can be loaded from (and saved to) the PDB, XTC, TRR, DCD,
binpos, NetCDF or MDTraj HDF5 formats.
Trajectory supports fancy indexing, so you can extract one or more frames
from a Trajectory as a separate trajectory. For example, to form a
trajectory with every other frame, you can slice with ``traj[::2]``.
Trajectory uses the nanometer, degree & picosecond unit system.
Examples
--------
>>> # loading a trajectory
>>> import mdtraj as md
>>> md.load('trajectory.xtc', top='native.pdb')
<mdtraj.Trajectory with 1000 frames, 22 atoms at 0x1058a73d0>
>>> # slicing a trajectory
>>> t = md.load('trajectory.h5')
>>> print(t)
<mdtraj.Trajectory with 100 frames, 22 atoms>
>>> print(t[::2])
<mdtraj.Trajectory with 50 frames, 22 atoms>
>>> # calculating the average distance between two atoms
>>> import mdtraj as md
>>> import numpy as np
>>> t = md.load('trajectory.h5')
>>> np.mean(np.sqrt(np.sum((t.xyz[:, 0, :] - t.xyz[:, 21, :])**2, axis=1)))
See Also
--------
mdtraj.load : High-level function that loads files and returns an ``md.Trajectory``
Attributes
----------
n_frames : int
n_atoms : int
n_residues : int
time : np.ndarray, shape=(n_frames,)
timestep : float
topology : md.Topology
top : md.Topology
xyz : np.ndarray, shape=(n_frames, n_atoms, 3)
unitcell_vectors : {np.ndarray, shape=(n_frames, 3, 3), None}
unitcell_lengths : {np.ndarray, shape=(n_frames, 3), None}
unitcell_angles : {np.ndarray, shape=(n_frames, 3), None}
"""
# this is NOT configurable. if it's set to something else, things will break
# (thus why I make it private)
_distance_unit = 'nanometers'
@property
def topology(self):
    """The system's md.Topology: how atoms are organized into residues, bonds, etc.

    Returns
    -------
    topology : md.Topology
        The topology object attached to this trajectory.
    """
    return self._topology
@topology.setter
def topology(self, value):
    """Replace the system's topology (organization of atoms into residues, bonds, etc)."""
    # todo: more typechecking
    self._topology = value
@property
def n_frames(self):
    """Number of frames in the trajectory.

    Returns
    -------
    n_frames : int
        The size of the first (frame) axis of the coordinate array.
    """
    return self._xyz.shape[0]
@property
def n_atoms(self):
    """Number of atoms in the trajectory.

    Returns
    -------
    n_atoms : int
        The size of the second (atom) axis of the coordinate array.
    """
    return self._xyz.shape[1]
@property
def n_residues(self):
    """Number of residues (amino acids) in the trajectory.

    Returns
    -------
    n_residues : int
        The number of residues in the trajectory's topology, or 0 when
        no topology is attached.
    """
    if self.top is None:
        return 0
    # count by iterating; topology exposes residues as a generator
    return sum(1 for _ in self.top.residues)
@property
def top(self):
    """Shorthand alias for `topology`.

    Returns
    -------
    topology : md.Topology
        The topology object attached to this trajectory, describing the
        organization of atoms into residues, bonds, etc.
    """
    return self._topology
@top.setter
def top(self, value):
    """Replace the system's topology (alias for the `topology` setter)."""
    # todo: more typechecking
    self._topology = value
@property
def timestep(self):
    """Timestep between frames, in picoseconds.

    Returns
    -------
    timestep : float
        The time elapsed between the first two frames.

    Raises
    ------
    ValueError
        If the trajectory has fewer than two frames.
    """
    if self.n_frames <= 1:
        raise ValueError("Cannot calculate timestep if trajectory has one frame.")
    return self._time[1] - self._time[0]
@property
def time(self):
    """Per-frame simulation times, in picoseconds.

    Returns
    -------
    time : np.ndarray, shape=(n_frames,)
        The simulation time of each frame.
    """
    return self._time
@time.setter
def time(self, value):
    """Set the simulation time corresponding to each frame, in picoseconds.

    Parameters
    ----------
    value : array_like, shape=(n_frames,)
        The new per-frame times. A bare scalar is accepted when the
        trajectory has exactly one frame.

    Raises
    ------
    ValueError
        If `value` does not have shape ``(n_frames,)``.
    """
    if isinstance(value, list):
        value = np.array(value)

    if np.isscalar(value) and self.n_frames == 1:
        value = np.array([value])
    elif not value.shape == (self.n_frames,):
        # bugfix: report the expected shape as the tuple (n_frames,);
        # previously `(self.n_frames)` was a bare int, so the message
        # claimed the shape "should be 5" instead of "(5,)"
        raise ValueError('Wrong shape. Got %s, should be %s' % (value.shape,
                         (self.n_frames,)))

    self._time = value
@property
def unitcell_vectors(self):
    """The vectors that define the shape of the unit cell in each frame.

    Returns
    -------
    vectors : np.ndarray, shape=(n_frames, 3, 3), or None
        ``vectors[i, 0, :]``, ``vectors[i, 1, :]`` and ``vectors[i, 2, :]``
        are the three box vectors of frame ``i``. None when the trajectory
        carries no unitcell information.
    """
    if self._unitcell_lengths is None or self._unitcell_angles is None:
        return None

    lengths, angles = self._unitcell_lengths, self._unitcell_angles
    v1, v2, v3 = lengths_and_angles_to_box_vectors(
        lengths[:, 0],  # a
        lengths[:, 1],  # b
        lengths[:, 2],  # c
        angles[:, 0],   # alpha
        angles[:, 1],   # beta
        angles[:, 2],   # gamma
    )
    # arrange so that axis 1 indexes the three vectors of each frame
    return np.swapaxes(np.dstack((v1, v2, v3)), 1, 2)
@unitcell_vectors.setter
def unitcell_vectors(self, vectors):
    """Set the unit-cell shape from explicit box vectors.

    Parameters
    ----------
    vectors : np.ndarray, shape=(n_frames, 3, 3), or None
        ``vectors[i, j, :]`` is the ``j``-th box vector of frame ``i``.
        Pass None to clear all unitcell information.
    """
    if vectors is None:
        # clearing the vectors clears both internal representations
        self._unitcell_lengths = None
        self._unitcell_angles = None
        return

    if not len(vectors) == len(self):
        raise TypeError('unitcell_vectors must be the same length as '
                        'the trajectory. you provided %s' % str(vectors))

    # convert to the (lengths, angles) representation stored internally
    a, b, c, alpha, beta, gamma = box_vectors_to_lengths_and_angles(
        vectors[:, 0, :], vectors[:, 1, :], vectors[:, 2, :])
    self._unitcell_lengths = np.vstack((a, b, c)).T
    self._unitcell_angles = np.vstack((alpha, beta, gamma)).T
@property
def unitcell_lengths(self):
    """Lengths that define the shape of the unit cell in each frame.

    Returns
    -------
    lengths : {np.ndarray, shape=(n_frames, 3), None}
        Per-frame unit cell lengths in nanometers, or None when the
        trajectory carries no unitcell information.
    """
    return self._unitcell_lengths
@property
def unitcell_angles(self):
    """Angles that define the shape of the unit cell in each frame.

    Returns
    -------
    angles : {np.ndarray, shape=(n_frames, 3), None}
        Per-frame angles ``alpha``, ``beta``, ``gamma`` in degrees:
        ``alpha`` is the angle between vectors ``b`` and ``c``, ``beta``
        between ``c`` and ``a``, and ``gamma`` between ``a`` and ``b``.
        None when the trajectory carries no unitcell information.
    """
    return self._unitcell_angles
@unitcell_lengths.setter
def unitcell_lengths(self, value):
    """Set the lengths ``a``, ``b``, ``c`` of the unit cell in each frame.

    Parameters
    ----------
    value : np.ndarray, shape=(n_frames, 3), or None
        Per-frame unit cell lengths, or None to clear them.
    """
    # validate dtype/shape; a single (3,) row is broadcast to one frame
    self._unitcell_lengths = ensure_type(
        value, np.float32, 2, 'unitcell_lengths', can_be_none=True,
        shape=(len(self), 3), warn_on_cast=False,
        add_newaxis_on_deficient_ndim=True)
@unitcell_angles.setter
def unitcell_angles(self, value):
    """Set the angles that define the shape of the unit cell in each frame.

    Parameters
    ----------
    value : np.ndarray, shape=(n_frames, 3)
        The angles ``alpha``, ``beta`` and ``gamma`` that define the
        shape of the unit cell in each frame. The angles should be in
        degrees.
    """
    # (docstring previously said "Set the lengths" -- a copy/paste error
    # from the unitcell_lengths setter; this property holds the angles)
    self._unitcell_angles = ensure_type(value, np.float32, 2,
        'unitcell_angles', can_be_none=True, shape=(len(self), 3),
        warn_on_cast=False, add_newaxis_on_deficient_ndim=True)
@property
def xyz(self):
    """Cartesian coordinates of each atom in each simulation frame.

    Returns
    -------
    xyz : np.ndarray, shape=(n_frames, n_atoms, 3)
        Three-dimensional array holding the cartesian coordinates of
        every atom in every frame.
    """
    return self._xyz
@xyz.setter
def xyz(self, value):
    """Set the cartesian coordinates of each atom in each simulation frame."""
    # when a topology is attached, enforce agreement on the atom count
    if self.top is not None:
        expected_shape = (None, self.topology._numAtoms, 3)
    else:
        expected_shape = (None, None, 3)

    coords = ensure_type(value, np.float32, 3, 'xyz', shape=expected_shape,
                         warn_on_cast=False, add_newaxis_on_deficient_ndim=True)
    self._xyz = coords
    # coordinates changed, so any precomputed RMSD traces are stale
    self._rmsd_traces = None
def _string_summary_basic(self):
    """Return a one-line summary of the trajectory (frames, atoms, residues, unitcell)."""
    unitcell_str = 'and unitcells' if self._have_unitcell else 'without unitcells'
    return "mdtraj.Trajectory with %d frames, %d atoms, %d residues, %s" % (
        self.n_frames, self.n_atoms, self.n_residues, unitcell_str)
def __len__(self):
    """A trajectory's length is its number of frames."""
    return self.n_frames
def __add__(self, other):
    """Concatenate two trajectories along the frame axis (delegates to `join`)."""
    return self.join(other)
def __str__(self):
    """Human-readable summary, e.g. '<mdtraj.Trajectory with ...>'."""
    return "<%s>" % self._string_summary_basic()
def __repr__(self):
    """Summary string plus the object's id, for interactive sessions."""
    summary = self._string_summary_basic()
    return "<%s at 0x%02x>" % (summary, id(self))
# def describe(self):
# """Diagnostic summary statistics on the trajectory"""
# # What information do we want to display?
# # Goals: easy to figure out if a trajectory is blowing up or contains
# # bad data, easy to diagonose other problems. Generally give a
# # high-level description of the data in the trajectory.
# # Possibly show std. dev. of differnt coordinates in the trajectory
# # or maybe its RMSD drift or something?
# # Also, check for any NaNs or Infs in the data. Or other common issues
# # like that?
# # Note that pandas.DataFrame has a describe() method, which gives
# # min/max/mean/std.dev./percentiles of each column in a DataFrame.
# raise NotImplementedError()
def superpose(self, reference, frame=0, atom_indices=None, parallel=True):
    """Superpose each conformation in this trajectory upon a reference

    Parameters
    ----------
    reference : md.Trajectory
        For each conformation in this trajectory, aligned to a particular
        reference conformation in another trajectory object.
    frame : int
        The index of the conformation in `reference` to align to.
    atom_indices : array_like, or None
        The indices of the atoms to superpose. If not
        supplied, all atoms will be used.
    parallel : bool
        Use OpenMP to run the superposition in parallel over multiple cores

    Returns
    -------
    self
    """
    if atom_indices is None:
        # slice(None) selects all atoms; also makes the "align" view below
        # alias the full coordinate array (see the aliasing check further down)
        atom_indices = slice(None)

    n_frames = self.xyz.shape[0]
    # atoms used to compute the optimal rotation (possibly a subset) ...
    self_align_xyz = np.asarray(self.xyz[:, atom_indices, :], order='c')
    # ... versus the full coordinate set that actually gets transformed
    self_displace_xyz = np.asarray(self.xyz, order='c')
    ref_align_xyz = np.array(reference.xyz[frame, atom_indices, :], copy=True, order='c').reshape(1, -1, 3)

    # center each frame's alignment atoms at the origin (float64 mean for accuracy)
    offset = np.mean(self_align_xyz, axis=1, dtype=np.float64).reshape(n_frames, 1, 3)
    self_align_xyz -= offset
    if self_align_xyz.ctypes.data != self_displace_xyz.ctypes.data:
        # when atom_indices is None, these two arrays alias the same memory
        # so we only need to do the centering once
        self_displace_xyz -= offset

    # center the reference frame the same way, remembering its offset so the
    # final coordinates can be translated back into the reference's position
    ref_offset = ref_align_xyz[0].astype('float64').mean(0)
    ref_align_xyz[0] -= ref_offset

    # per-frame sums of squared centered coordinates, required by the
    # C superposition routine
    self_g = np.einsum('ijk,ijk->i', self_align_xyz, self_align_xyz)
    ref_g = np.einsum('ijk,ijk->i', ref_align_xyz , ref_align_xyz)

    # rotates self_displace_xyz in place using the rotation computed from
    # the (centered) alignment atoms
    _rmsd.superpose_atom_major(
        ref_align_xyz, self_align_xyz, ref_g, self_g, self_displace_xyz,
        0, parallel=parallel)

    # translate back so the aligned frames sit at the reference's position
    self.xyz = self_displace_xyz + ref_offset
    return self
def join(self, other, check_topology=True, discard_overlapping_frames=False):
    """Join two trajectories together along the time/frame axis.

    This method joins trajectories along the time axis, giving a new trajectory
    of length equal to the sum of the lengths of `self` and `other`.
    It can also be called by using `self + other`

    Parameters
    ----------
    other : Trajectory or list of Trajectory
        One or more trajectories to join with this one. These trajectories
        are *appended* to the end of this trajectory.
    check_topology : bool
        Ensure that the topology of `self` and `other` are identical before
        joining them. If false, the resulting trajectory will have the
        topology of `self`.
    discard_overlapping_frames : bool, optional
        If True, compare coordinates at trajectory edges to discard overlapping
        frames. Default: False.

    Raises
    ------
    TypeError
        If `other` is not a Trajectory or a list of Trajectory.
    ValueError
        If the atom counts, topologies (when checked), or unitcell
        presence do not agree.

    See Also
    --------
    stack : join two trajectories along the atom axis
    """
    if isinstance(other, Trajectory):
        other = [other]

    if isinstance(other, list):
        if not all(isinstance(o, Trajectory) for o in other):
            raise TypeError('You can only join Trajectory instances')
        if not all(self.n_atoms == o.n_atoms for o in other):
            raise ValueError('Number of atoms in self (%d) is not equal '
                             'to number of atoms in other' % (self.n_atoms))
        if check_topology and not all(self.topology == o.topology for o in other):
            raise ValueError('The topologies of the Trajectories are not the same')
        if not all(self._have_unitcell == o._have_unitcell for o in other):
            raise ValueError('Mixing trajectories with and without unitcell')
    else:
        # bugfix: this message previously used %d, which itself raises a
        # TypeError when formatting a type object and so masked the
        # intended error; %s renders the type correctly
        raise TypeError('`other` must be a list of Trajectory. You supplied %s' % type(other))

    # list containing all of the trajs to merge, including self
    trajectories = [self] + other

    if discard_overlapping_frames:
        for i in range(len(trajectories) - 1):
            # last frame of trajectory i
            x0 = trajectories[i].xyz[-1]
            # first frame of trajectory i+1
            x1 = trajectories[i + 1].xyz[0]
            # check that all atoms are within 2e-3 nm
            # (this is kind of arbitrary)
            if np.all(np.abs(x1 - x0) < 2e-3):
                trajectories[i] = trajectories[i][:-1]

    xyz = np.concatenate([t.xyz for t in trajectories])
    time = np.concatenate([t.time for t in trajectories])
    angles = lengths = None
    if self._have_unitcell:
        angles = np.concatenate([t.unitcell_angles for t in trajectories])
        lengths = np.concatenate([t.unitcell_lengths for t in trajectories])

    # use this syntax so that if you subclass Trajectory,
    # the subclass's join() will return an instance of the subclass
    return self.__class__(xyz, deepcopy(self._topology), time=time,
                          unitcell_lengths=lengths, unitcell_angles=angles)
def stack(self, other):
    """Stack two trajectories along the atom axis.

    This method joins trajectories along the atom axis, producing a new
    trajectory whose atom count is the sum of the atom counts of `self`
    and `other`.

    Notes
    -----
    The resulting trajectory will have the unitcell and time information
    of the left operand.

    Examples
    --------
    >>> t1 = md.load('traj1.h5')
    >>> t2 = md.load('traj2.h5')
    >>> # even when t2 contains no unitcell information
    >>> t2.unitcell_vectors = None
    >>> stacked = t1.stack(t2)
    >>> # the stacked trajectory inherits the unitcell information
    >>> # from the first trajectory
    >>> np.all(stacked.unitcell_vectors == t1.unitcell_vectors)
    True

    Parameters
    ----------
    other : Trajectory
        The other trajectory to join

    See Also
    --------
    join : join two trajectories along the time/frame axis.
    """
    if not isinstance(other, Trajectory):
        raise TypeError('You can only stack two Trajectory instances')
    if self.n_frames != other.n_frames:
        raise ValueError('Number of frames in self (%d) is not equal '
                         'to number of frames in other (%d)' % (self.n_frames, other.n_frames))

    # merge the topologies when one is attached; otherwise propagate None
    topology = self.topology.join(other.topology) if self.topology is not None else None

    combined_xyz = np.hstack((self.xyz, other.xyz))
    return self.__class__(xyz=combined_xyz, topology=topology,
                          unitcell_angles=self.unitcell_angles,
                          unitcell_lengths=self.unitcell_lengths, time=self.time)
def __getitem__(self, key):
"Get a slice of this trajectory"
return self.slice(key)
def slice(self, key, copy=True):
"""Slice trajectory, by extracting one or more frames into a separate object
This method can also be called using index bracket notation, i.e
`traj[1] == traj.slice(1)`
Parameters
----------
key : {int, np.ndarray, slice}
The slice to take. Can be either an int, a list of ints, or a slice
object.
copy : bool, default=True
Copy the arrays after slicing. If you set this to false, then if
you modify a slice, you'll modify the original array since they
point to the same data.
"""
xyz = self.xyz[key]
time = self.time[key]
unitcell_lengths, unitcell_angles = None, None
if self.unitcell_angles is not None:
unitcell_angles = self.unitcell_angles[key]
if self.unitcell_lengths is not None:
unitcell_lengths = self.unitcell_lengths[key]
if copy:
xyz = xyz.copy()
time = time.copy()
topology = deepcopy(self._topology)
if self.unitcell_angles is not None:
unitcell_angles = unitcell_angles.copy()
if self.unitcell_lengths is not None:
unitcell_lengths = unitcell_lengths.copy()
newtraj = self.__class__(xyz, topology, time, unitcell_lengths=unitcell_lengths,
unitcell_angles=unitcell_angles)
return newtraj
    def __init__(self, xyz, topology, time=None, unitcell_lengths=None, unitcell_angles=None):
        """Construct a Trajectory from coordinates and (optionally) a topology.

        Parameters
        ----------
        xyz : np.ndarray
            Cartesian coordinates of every frame; validated by the ``xyz``
            property setter.
        topology : Topology or None
            System topology. When given, its atom count must match ``xyz``.
        time : np.ndarray, optional
            Per-frame times. Defaults to ``0 .. n_frames-1``.
        unitcell_lengths, unitcell_angles : np.ndarray, optional
            Periodic box description, one entry per frame; both default to
            None (no unitcell).
        """
        # install the topology into the object first, so that when setting
        # the xyz, we can check that it lines up (e.g. n_atoms), with the topology
        self.topology = topology
        self.xyz = xyz
        # _rmsd_traces are the inner product of each centered conformation,
        # which are required for computing RMSD. Normally these values are
        # calculated on the fly in the cython code (rmsd/_rmsd.pyx), but
        # optionally, we enable the use precomputed values which can speed
        # up the calculation (useful for clustering), but potentially be unsafe
        # if self._xyz is modified without a corresponding change to
        # self._rmsd_traces. This array is populated computed by
        # center_conformations, and no other methods should really touch it.
        self._rmsd_traces = None
        # box has no default, it'll just be none normally
        self.unitcell_lengths = unitcell_lengths
        self.unitcell_angles = unitcell_angles
        # time will take the default 1..N
        if time is None:
            time = np.arange(len(self.xyz))
        self.time = time
        # Validate atom-count agreement last, after the setters have
        # normalized the arrays.
        if (topology is not None) and (topology._numAtoms != self.n_atoms):
            raise ValueError("Number of atoms in xyz (%s) and "
                             "in topology (%s) don't match" % (self.n_atoms, topology._numAtoms))
def openmm_positions(self, frame):
"""OpenMM-compatable positions of a single frame.
Examples
--------
>>> t = md.load('trajectory.h5')
>>> context.setPositions(t.openmm_positions(0))
Parameters
----------
frame : int
The index of frame of the trajectory that you wish to extract
Returns
-------
positions : list
The cartesian coordinates of specific trajectory frame, formatted
for input to OpenMM
"""
from simtk.openmm import Vec3
from simtk.unit import nanometer
Pos = []
for xyzi in self.xyz[frame]:
Pos.append(Vec3(xyzi[0], xyzi[1], xyzi[2]))
return Pos * nanometer
def openmm_boxes(self, frame):
"""OpenMM-compatable box vectors of a single frame.
Examples
--------
>>> t = md.load('trajectory.h5')
>>> context.setPeriodicBoxVectors(t.openmm_positions(0))
Parameters
----------
frame : int
Return box for this single frame.
Returns
-------
box : tuple
The periodic box vectors for this frame, formatted for input to
OpenMM.
"""
from simtk.openmm import Vec3
from simtk.unit import nanometer
vectors = self[frame].unitcell_vectors
if vectors is None:
raise ValueError("this trajectory does not contain box size information")
v1, v2, v3 = vectors
return (Vec3(*v1), Vec3(*v2), Vec3(*v3)) * nanometer
@staticmethod
# im not really sure if the load function should be just a function or a method on the class
# so effectively, lets make it both?
def load(filenames, **kwargs):
"""Load a trajectory from disk
Parameters
----------
filenames : {str, [str]}
Either a string or list of strings
Other Parameters
----------------
As requested by the various load functions -- it depends on the extension
"""
return load(filenames, **kwargs)
def save(self, filename, **kwargs):
"""Save trajectory to disk, in a format determined by the filename extension
Parameters
----------
filename : str
filesystem path in which to save the trajectory. The extension will
be parsed and will control the format.
Other Parameters
----------------
lossy : bool
For .h5 or .lh5, whether or not to use compression.
no_models: bool
For .pdb. TODO: Document this?
force_overwrite : bool
For .binpos, .xtc, .dcd. If `filename` already exists, overwrite it.
"""
# grab the extension of the filename
extension = os.path.splitext(filename)[1]
savers = {'.xtc': self.save_xtc,
'.trr': self.save_trr,
'.pdb': self.save_pdb,
'.dcd': self.save_dcd,
'.h5': self.save_hdf5,
'.binpos': self.save_binpos,
'.nc': self.save_netcdf,
'.netcdf': self.save_netcdf,
'.crd': self.save_mdcrd,
'.mdcrd': self.save_mdcrd,
'.ncdf': self.save_netcdf,
'.lh5': self.save_lh5,
}
try:
saver = savers[extension]
except KeyError:
raise IOError('Sorry, no saver for filename=%s (extension=%s) '
'was found. I can only save files '
'with extensions in %s' % (filename, extension, savers.keys()))
# run the saver, and return whatever output it gives
return saver(filename, **kwargs)
def save_hdf5(self, filename, force_overwrite=True):
"""Save trajectory to MDTraj HDF5 format
Parameters
----------
filename : str
filesystem path in which to save the trajectory
force_overwrite : bool, default=True
Overwrite anything that exists at filename, if its already there
"""
with HDF5TrajectoryFile(filename, 'w', force_overwrite=True) as f:
f.write(coordinates=self.xyz, time=self.time,
cell_angles=self.unitcell_angles,
cell_lengths=self.unitcell_lengths)
f.topology = self.topology
def save_pdb(self, filename, force_overwrite=True):
"""Save trajectory to RCSB PDB format
Parameters
----------
filename : str
filesystem path in which to save the trajectory
force_overwrite : bool, default=True
Overwrite anything that exists at filename, if its already there
"""
self._check_valid_unitcell()
with PDBTrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
for i in xrange(self.n_frames):
if self._have_unitcell:
f.write(in_units_of(self._xyz[i], Trajectory._distance_unit, f.distance_unit),
self.topology,
modelIndex=i,
unitcell_lengths=in_units_of(self.unitcell_lengths[i], Trajectory._distance_unit, f.distance_unit),
unitcell_angles=self.unitcell_angles[i])
else:
f.write(in_units_of(self._xyz[i], Trajectory._distance_unit, f.distance_unit),
self.topology,
modelIndex=i)
def save_xtc(self, filename, force_overwrite=True):
"""Save trajectory to Gromacs XTC format
Parameters
----------
filename : str
filesystem path in which to save the trajectory
force_overwrite : bool, default=True
Overwrite anything that exists at filename, if its already there
"""
with XTCTrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
f.write(xyz=self.xyz, time=self.time, box=self.unitcell_vectors)
def save_trr(self, filename, force_overwrite=True):
"""Save trajectory to Gromacs TRR format
Notes
-----
Only the xyz coordinates and the time are saved, the velocities
and forces in the trr will be zeros
Parameters
----------
filename : str
filesystem path in which to save the trajectory
force_overwrite : bool, default=True
Overwrite anything that exists at filename, if its already there
"""
with TRRTrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
f.write(xyz=self.xyz, time=self.time, box=self.unitcell_vectors)
def save_dcd(self, filename, force_overwrite=True):
"""Save trajectory to CHARMM/NAMD DCD format
Parameters
----------
filename : str
filesystem path in which to save the trajectory
force_overwrite : bool, default=True
Overwrite anything that exists at filenames, if its already there
"""
self._check_valid_unitcell()
with DCDTrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
f.write(in_units_of(self.xyz, Trajectory._distance_unit, f.distance_unit),
cell_lengths=in_units_of(self.unitcell_lengths, Trajectory._distance_unit, f.distance_unit),
cell_angles=self.unitcell_angles)
def save_binpos(self, filename, force_overwrite=True):
"""Save trajectory to AMBER BINPOS format
Parameters
----------
filename : str
filesystem path in which to save the trajectory
force_overwrite : bool, default=True
Overwrite anything that exists at filename, if its already there
"""
with BINPOSTrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
f.write(in_units_of(self.xyz, Trajectory._distance_unit, f.distance_unit))
def save_mdcrd(self, filename, force_overwrite=True):
"""Save trajectory to AMBER mdcrd format
Parameters
----------
filename : str
filesystem path in which to save the trajectory
force_overwrite : bool, default=True
Overwrite anything that exists at filename, if its already there
"""
self._check_valid_unitcell()
if self._have_unitcell:
if not np.all(self.unitcell_angles == 90):
raise ValueError('Only rectilinear boxes can be saved to mdcrd files')
with MDCRDTrajectoryFile(filename, mode='w', force_overwrite=force_overwrite) as f:
f.write(in_units_of(self.xyz, Trajectory._distance_unit, f.distance_unit),
in_units_of(self.unitcell_lengths, Trajectory._distance_unit, f.distance_unit))
def save_netcdf(self, filename, force_overwrite=True):
"""Save trajectory in AMBER NetCDF format
Parameters
----------
filename : str
filesystem path in which to save the trajectory
force_overwrite : bool, default=True
Overwrite anything that exists at filename, if its already there
"""
self._check_valid_unitcell()
with NetCDFTrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
f.write(coordinates=in_units_of(self._xyz, Trajectory._distance_unit, NetCDFTrajectoryFile.distance_unit),
time=self.time,
cell_lengths=in_units_of(self.unitcell_lengths, Trajectory._distance_unit, f.distance_unit),
cell_angles=self.unitcell_angles)
def save_lh5(self, filename):
"""Save trajectory in deprecated MSMBuilder2 LH5 (lossy HDF5) format.
Parameters
----------
filename : str
filesystem path in which to save the trajectory
"""
with LH5TrajectoryFile(filename, 'w', force_overwrite=True) as f:
f.write(coordinates=self.xyz)
f.topology = self.topology
    def center_coordinates(self, mass_weighted=False):
        """Center each trajectory frame at the origin (0,0,0).
        This method acts inplace on the trajectory. The centering can
        be either uniformly weighted (mass_weighted=False) or weighted by
        the mass of each atom (mass_weighted=True).
        Parameters
        ----------
        mass_weighted : bool, optional (default = False)
            If True, weight atoms by mass when removing COM.
        Returns
        -------
        self
        """
        if mass_weighted and self.top is not None:
            # Normalized mass weights, so the dot product below is the
            # center of mass directly.
            masses = np.array([a.element.mass for a in self.top.atoms])
            masses /= masses.sum()
            # Subtract the COM from every frame in place; the dot product is
            # accumulated in float64 for accuracy.
            # NOTE(review): this branch leaves self._rmsd_traces untouched,
            # unlike the branch below -- confirm that is intended.
            for x in self._xyz:
                x -= (x.astype('float64').T.dot(masses))
        else:
            # The C helper centers in place and returns the per-frame inner
            # products ("traces") that the RMSD code can reuse.
            self._rmsd_traces = _rmsd._center_inplace_atom_major(self._xyz)
        return self
def restrict_atoms(self, atom_indices):
"""Retain only a subset of the atoms in a trajectory (inplace)
Deletes atoms not in `atom_indices`, and re-indexes those that remain
Parameters
----------
atom_indices : list([int])
List of atom indices to keep.
Returns
-------
self
"""
if self._topology is not None:
self._topology = self._topology.subset(atom_indices)
self._xyz = np.array(self.xyz[:,atom_indices], order='C')
return self
def _check_valid_unitcell(self):
"""Do some sanity checking on self.unitcell_lengths and self.unitcell_angles
"""
if self.unitcell_lengths is not None and self.unitcell_angles is None:
raise AttributeError('unitcell length data exists, but no angles')
if self.unitcell_lengths is None and self.unitcell_angles is not None:
raise AttributeError('unitcell angles data exists, but no lengths')
if self.unitcell_lengths is not None and np.any(self.unitcell_lengths < 0):
raise ValueError('unitcell length < 0')
if self.unitcell_angles is not None and np.any(self.unitcell_angles < 0):
raise ValueError('unitcell angle < 0')
@property
def _have_unitcell(self):
return self._unitcell_lengths is not None and self._unitcell_angles is not None
| lgpl-2.1 |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.5.0/Lib/uuid.py | 3 | 23887 | r"""UUID objects (universally unique identifiers) according to RFC 4122.
This module provides immutable UUID objects (class UUID) and the functions
uuid1(), uuid3(), uuid4(), uuid5() for generating version 1, 3, 4, and 5
UUIDs as specified in RFC 4122.
If all you want is a unique ID, you should probably call uuid1() or uuid4().
Note that uuid1() may compromise privacy since it creates a UUID containing
the computer's network address. uuid4() creates a random UUID.
Typical usage:
>>> import uuid
# make a UUID based on the host ID and current time
>>> uuid.uuid1() # doctest: +SKIP
UUID('a8098c1a-f86e-11da-bd1a-00112444be1e')
# make a UUID using an MD5 hash of a namespace UUID and a name
>>> uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org')
UUID('6fa459ea-ee8a-3ca4-894e-db77e160355e')
# make a random UUID
>>> uuid.uuid4() # doctest: +SKIP
UUID('16fd2706-8baf-433b-82eb-8c7fada847da')
# make a UUID using a SHA-1 hash of a namespace UUID and a name
>>> uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org')
UUID('886313e1-3b8a-5372-9b90-0c9aee199e5d')
# make a UUID from a string of hex digits (braces and hyphens ignored)
>>> x = uuid.UUID('{00010203-0405-0607-0809-0a0b0c0d0e0f}')
# convert a UUID to a string of hex digits in standard form
>>> str(x)
'00010203-0405-0607-0809-0a0b0c0d0e0f'
# get the raw 16 bytes of the UUID
>>> x.bytes
b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'
# make a UUID from a 16-byte string
>>> uuid.UUID(bytes=x.bytes)
UUID('00010203-0405-0607-0809-0a0b0c0d0e0f')
"""
__author__ = 'Ka-Ping Yee <ping@zesty.ca>'
# Human-readable descriptions of the four UUID variants; returned by the
# UUID.variant property.
RESERVED_NCS, RFC_4122, RESERVED_MICROSOFT, RESERVED_FUTURE = [
    'reserved for NCS compatibility', 'specified in RFC 4122',
    'reserved for Microsoft compatibility', 'reserved for future definition']
# Aliases kept because UUID.__init__ shadows 'int' and 'bytes' with its
# parameter names.
int_ = int      # The built-in int type
bytes_ = bytes  # The built-in bytes type
class UUID(object):
    """Instances of the UUID class represent UUIDs as specified in RFC 4122.
    UUID objects are immutable, hashable, and usable as dictionary keys.
    Converting a UUID to a string with str() yields something in the form
    '12345678-1234-1234-1234-123456789abc'. The UUID constructor accepts
    five possible forms: a similar string of hexadecimal digits, or a tuple
    of six integer fields (with 32-bit, 16-bit, 16-bit, 8-bit, 8-bit, and
    48-bit values respectively) as an argument named 'fields', or a string
    of 16 bytes (with all the integer fields in big-endian order) as an
    argument named 'bytes', or a string of 16 bytes (with the first three
    fields in little-endian order) as an argument named 'bytes_le', or a
    single 128-bit integer as an argument named 'int'.
    UUIDs have these read-only attributes:
        bytes   the UUID as a 16-byte string (containing the six
                integer fields in big-endian byte order)
        bytes_le    the UUID as a 16-byte string (with time_low, time_mid,
                    and time_hi_version in little-endian byte order)
        fields  a tuple of the six integer fields of the UUID,
                which are also available as six individual attributes
                and two derived attributes:
            time_low    the first 32 bits of the UUID
            time_mid    the next 16 bits of the UUID
            time_hi_version     the next 16 bits of the UUID
            clock_seq_hi_variant    the next 8 bits of the UUID
            clock_seq_low   the next 8 bits of the UUID
            node    the last 48 bits of the UUID
            time    the 60-bit timestamp
            clock_seq   the 14-bit sequence number
        hex     the UUID as a 32-character hexadecimal string
        int     the UUID as a 128-bit integer
        urn     the UUID as a URN as specified in RFC 4122
        variant the UUID variant (one of the constants RESERVED_NCS,
                RFC_4122, RESERVED_MICROSOFT, or RESERVED_FUTURE)
        version the UUID version number (1 through 5, meaningful only
                when the variant is RFC_4122)
    """
    def __init__(self, hex=None, bytes=None, bytes_le=None, fields=None,
                       int=None, version=None):
        r"""Create a UUID from either a string of 32 hexadecimal digits,
        a string of 16 bytes as the 'bytes' argument, a string of 16 bytes
        in little-endian order as the 'bytes_le' argument, a tuple of six
        integers (32-bit time_low, 16-bit time_mid, 16-bit time_hi_version,
        8-bit clock_seq_hi_variant, 8-bit clock_seq_low, 48-bit node) as
        the 'fields' argument, or a single 128-bit integer as the 'int'
        argument. When a string of hex digits is given, curly braces,
        hyphens, and a URN prefix are all optional. For example, these
        expressions all yield the same UUID:
        UUID('{12345678-1234-5678-1234-567812345678}')
        UUID('12345678123456781234567812345678')
        UUID('urn:uuid:12345678-1234-5678-1234-567812345678')
        UUID(bytes='\x12\x34\x56\x78'*4)
        UUID(bytes_le='\x78\x56\x34\x12\x34\x12\x78\x56' +
                      '\x12\x34\x56\x78\x12\x34\x56\x78')
        UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678))
        UUID(int=0x12345678123456781234567812345678)
        Exactly one of 'hex', 'bytes', 'bytes_le', 'fields', or 'int' must
        be given. The 'version' argument is optional; if given, the resulting
        UUID will have its variant and version set according to RFC 4122,
        overriding the given 'hex', 'bytes', 'bytes_le', 'fields', or 'int'.
        """
        # Exactly one of the five input forms may be supplied.
        if [hex, bytes, bytes_le, fields, int].count(None) != 4:
            raise TypeError('need one of hex, bytes, bytes_le, fields, or int')
        if hex is not None:
            # Normalize away optional URN prefix, braces and hyphens.
            hex = hex.replace('urn:', '').replace('uuid:', '')
            hex = hex.strip('{}').replace('-', '')
            if len(hex) != 32:
                raise ValueError('badly formed hexadecimal UUID string')
            int = int_(hex, 16)
        if bytes_le is not None:
            if len(bytes_le) != 16:
                raise ValueError('bytes_le is not a 16-char string')
            # Byte-swap the first three fields into big-endian order.
            bytes = (bytes_le[4-1::-1] + bytes_le[6-1:4-1:-1] +
                     bytes_le[8-1:6-1:-1] + bytes_le[8:])
        if bytes is not None:
            if len(bytes) != 16:
                raise ValueError('bytes is not a 16-char string')
            assert isinstance(bytes, bytes_), repr(bytes)
            int = int_.from_bytes(bytes, byteorder='big')
        if fields is not None:
            if len(fields) != 6:
                raise ValueError('fields is not a 6-tuple')
            (time_low, time_mid, time_hi_version,
             clock_seq_hi_variant, clock_seq_low, node) = fields
            if not 0 <= time_low < 1<<32:
                raise ValueError('field 1 out of range (need a 32-bit value)')
            if not 0 <= time_mid < 1<<16:
                raise ValueError('field 2 out of range (need a 16-bit value)')
            if not 0 <= time_hi_version < 1<<16:
                raise ValueError('field 3 out of range (need a 16-bit value)')
            if not 0 <= clock_seq_hi_variant < 1<<8:
                raise ValueError('field 4 out of range (need an 8-bit value)')
            if not 0 <= clock_seq_low < 1<<8:
                raise ValueError('field 5 out of range (need an 8-bit value)')
            if not 0 <= node < 1<<48:
                raise ValueError('field 6 out of range (need a 48-bit value)')
            # Pack the six fields into the single 128-bit integer form.
            clock_seq = (clock_seq_hi_variant << 8) | clock_seq_low
            int = ((time_low << 96) | (time_mid << 80) |
                   (time_hi_version << 64) | (clock_seq << 48) | node)
        if int is not None:
            if not 0 <= int < 1<<128:
                raise ValueError('int is out of range (need a 128-bit value)')
        if version is not None:
            if not 1 <= version <= 5:
                raise ValueError('illegal version number')
            # Set the variant to RFC 4122.
            int &= ~(0xc000 << 48)
            int |= 0x8000 << 48
            # Set the version number.
            int &= ~(0xf000 << 64)
            int |= version << 76
        # Store via __dict__ because __setattr__ is overridden below to make
        # UUID instances immutable.
        self.__dict__['int'] = int
    def __eq__(self, other):
        if isinstance(other, UUID):
            return self.int == other.int
        return NotImplemented
    # Q. What's the value of being able to sort UUIDs?
    # A. Use them as keys in a B-Tree or similar mapping.
    def __lt__(self, other):
        if isinstance(other, UUID):
            return self.int < other.int
        return NotImplemented
    def __gt__(self, other):
        if isinstance(other, UUID):
            return self.int > other.int
        return NotImplemented
    def __le__(self, other):
        if isinstance(other, UUID):
            return self.int <= other.int
        return NotImplemented
    def __ge__(self, other):
        if isinstance(other, UUID):
            return self.int >= other.int
        return NotImplemented
    def __hash__(self):
        return hash(self.int)
    def __int__(self):
        return self.int
    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, str(self))
    def __setattr__(self, name, value):
        # UUIDs are immutable; any attribute assignment is rejected.
        raise TypeError('UUID objects are immutable')
    def __str__(self):
        hex = '%032x' % self.int
        return '%s-%s-%s-%s-%s' % (
            hex[:8], hex[8:12], hex[12:16], hex[16:20], hex[20:])
    @property
    def bytes(self):
        return self.int.to_bytes(16, 'big')
    @property
    def bytes_le(self):
        # First three fields little-endian, the rest unchanged.
        bytes = self.bytes
        return (bytes[4-1::-1] + bytes[6-1:4-1:-1] + bytes[8-1:6-1:-1] +
                bytes[8:])
    @property
    def fields(self):
        return (self.time_low, self.time_mid, self.time_hi_version,
                self.clock_seq_hi_variant, self.clock_seq_low, self.node)
    @property
    def time_low(self):
        return self.int >> 96
    @property
    def time_mid(self):
        return (self.int >> 80) & 0xffff
    @property
    def time_hi_version(self):
        return (self.int >> 64) & 0xffff
    @property
    def clock_seq_hi_variant(self):
        return (self.int >> 56) & 0xff
    @property
    def clock_seq_low(self):
        return (self.int >> 48) & 0xff
    @property
    def time(self):
        # Reassemble the 60-bit timestamp from its three fields.
        return (((self.time_hi_version & 0x0fff) << 48) |
                (self.time_mid << 32) | self.time_low)
    @property
    def clock_seq(self):
        return (((self.clock_seq_hi_variant & 0x3f) << 8) |
                self.clock_seq_low)
    @property
    def node(self):
        return self.int & 0xffffffffffff
    @property
    def hex(self):
        return '%032x' % self.int
    @property
    def urn(self):
        return 'urn:uuid:' + str(self)
    @property
    def variant(self):
        # The variant is encoded in the top bits of octet 8 (RFC 4122 s4.1.1).
        if not self.int & (0x8000 << 48):
            return RESERVED_NCS
        elif not self.int & (0x4000 << 48):
            return RFC_4122
        elif not self.int & (0x2000 << 48):
            return RESERVED_MICROSOFT
        else:
            return RESERVED_FUTURE
    @property
    def version(self):
        # The version bits are only meaningful for RFC 4122 UUIDs.
        if self.variant == RFC_4122:
            return int((self.int >> 76) & 0xf)
def _popen(command, *args):
import os, shutil, subprocess
executable = shutil.which(command)
if executable is None:
path = os.pathsep.join(('/sbin', '/usr/sbin'))
executable = shutil.which(command, path=path)
if executable is None:
return None
# LC_ALL=C to ensure English output, stderr=DEVNULL to prevent output
# on stderr (Note: we don't have an example where the words we search
# for are actually localized, but in theory some system could do so.)
env = dict(os.environ)
env['LC_ALL'] = 'C'
proc = subprocess.Popen((executable,) + args,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL,
env=env)
return proc
def _find_mac(command, args, hw_identifiers, get_index):
    """Run *command* and scan its output for a hardware address.

    *hw_identifiers* is a collection of lower-cased byte tokens that label a
    MAC address in the tool's output; *get_index* maps the position of such a
    token to the position of the address itself. Returns the address as an
    int, or None when nothing usable is found.
    """
    try:
        proc = _popen(command, *args.split())
        if not proc:
            return
        with proc:
            for line in proc.stdout:
                tokens = line.lower().rstrip().split()
                for pos in range(len(tokens)):
                    if tokens[pos] not in hw_identifiers:
                        continue
                    try:
                        candidate = tokens[get_index(pos)]
                        mac = int(candidate.replace(b':', b''), 16)
                        if mac:
                            return mac
                    except (ValueError, IndexError):
                        # Virtual interfaces, such as those provided by
                        # VPNs, do not have a colon-delimited MAC address
                        # as expected, but a 16-byte HWAddr separated by
                        # dashes. These should be ignored in favor of a
                        # real MAC address
                        pass
    except OSError:
        pass
def _ifconfig_getnode():
    """Get the hardware address on Unix by running ifconfig."""
    # Different flavors need different flags: '' or '-a' work on Linux,
    # '-av' on Tru64; try each until one yields an address.
    for flags in ('', '-a', '-av'):
        mac = _find_mac('ifconfig', flags, [b'hwaddr', b'ether'], lambda i: i + 1)
        if mac:
            return mac
def _ip_getnode():
    """Get the hardware address on Unix by running ip (iproute2 on Linux)."""
    mac = _find_mac('ip', 'link list', [b'link/ether'], lambda i: i + 1)
    if mac:
        return mac
    return None
def _arp_getnode():
    """Get the hardware address on Unix by running arp."""
    import os, socket
    try:
        host_ip = socket.gethostbyname(socket.gethostname())
    except OSError:
        return None

    # Look up our own IP address in the ARP table (works on Solaris).
    return _find_mac('arp', '-an', [os.fsencode(host_ip)], lambda i: -1)
def _lanscan_getnode():
    """Get the hardware address on Unix by running lanscan (HP-UX)."""
    # The address precedes the 'lan0' token, hence index 0.
    return _find_mac('lanscan', '-ai', [b'lan0'], lambda pos: 0)
def _netstat_getnode():
    """Get the hardware address on Unix by running netstat."""
    # This might work on AIX, Tru64 UNIX and presumably on IRIX.
    try:
        proc = _popen('netstat', '-ia')
        if not proc:
            return
        with proc:
            # Locate the 'Address' column from the header line.
            header = proc.stdout.readline().rstrip().split()
            try:
                addr_col = header.index(b'Address')
            except ValueError:
                return
            for line in proc.stdout:
                try:
                    fields = line.rstrip().split()
                    candidate = fields[addr_col]
                    # A MAC looks like xx:xx:xx:xx:xx:xx (17 chars, 5 colons).
                    if len(candidate) == 17 and candidate.count(b':') == 5:
                        mac = int(candidate.replace(b':', b''), 16)
                        if mac:
                            return mac
                except (ValueError, IndexError):
                    pass
    except OSError:
        pass
def _ipconfig_getnode():
"""Get the hardware address on Windows by running ipconfig.exe."""
import os, re
dirs = ['', r'c:\windows\system32', r'c:\winnt\system32']
try:
import ctypes
buffer = ctypes.create_string_buffer(300)
ctypes.windll.kernel32.GetSystemDirectoryA(buffer, 300)
dirs.insert(0, buffer.value.decode('mbcs'))
except:
pass
for dir in dirs:
try:
pipe = os.popen(os.path.join(dir, 'ipconfig') + ' /all')
except OSError:
continue
with pipe:
for line in pipe:
value = line.split(':')[-1].strip().lower()
if re.match('([0-9a-f][0-9a-f]-){5}[0-9a-f][0-9a-f]', value):
return int(value.replace('-', ''), 16)
def _netbios_getnode():
    """Get the hardware address on Windows using NetBIOS calls.
    See http://support.microsoft.com/kb/118623 for details."""
    import win32wnet, netbios
    # Enumerate the LAN adapter numbers first.
    ncb = netbios.NCB()
    ncb.Command = netbios.NCBENUM
    ncb.Buffer = adapters = netbios.LANA_ENUM()
    adapters._pack()
    if win32wnet.Netbios(ncb) != 0:
        return
    adapters._unpack()
    for i in range(adapters.length):
        # Reset the adapter before querying it, as the KB article requires.
        ncb.Reset()
        ncb.Command = netbios.NCBRESET
        ncb.Lana_num = ord(adapters.lana[i])
        if win32wnet.Netbios(ncb) != 0:
            continue
        # Query adapter status; '*' means "this machine".
        ncb.Reset()
        ncb.Command = netbios.NCBASTAT
        ncb.Lana_num = ord(adapters.lana[i])
        ncb.Callname = '*'.ljust(16)
        ncb.Buffer = status = netbios.ADAPTER_STATUS()
        if win32wnet.Netbios(ncb) != 0:
            continue
        status._unpack()
        # The first six bytes of the status block are the MAC address.
        bytes = status.adapter_address[:6]
        if len(bytes) != 6:
            continue
        return int.from_bytes(bytes, 'big')
# Thanks to Thomas Heller for ctypes and for his help with its use here.
# If ctypes is available, use it to find system routines for UUID generation.
# XXX This makes the module non-thread-safe!
_uuid_generate_random = _uuid_generate_time = _UuidCreate = None
try:
    import ctypes, ctypes.util
    import sys
    # The uuid_generate_* routines are provided by libuuid on at least
    # Linux and FreeBSD, and provided by libc on Mac OS X.
    _libnames = ['uuid']
    if not sys.platform.startswith('win'):
        _libnames.append('c')
    for libname in _libnames:
        try:
            lib = ctypes.CDLL(ctypes.util.find_library(libname))
        except Exception:
            continue
        if hasattr(lib, 'uuid_generate_random'):
            _uuid_generate_random = lib.uuid_generate_random
        if hasattr(lib, 'uuid_generate_time'):
            _uuid_generate_time = lib.uuid_generate_time
            if _uuid_generate_random is not None:
                break # found everything we were looking for
    del _libnames
    # The uuid_generate_* functions are broken on MacOS X 10.5, as noted
    # in issue #8621 the function generates the same sequence of values
    # in the parent process and all children created using fork (unless
    # those children use exec as well).
    #
    # Assume that the uuid_generate functions are broken from 10.5 onward,
    # the test can be adjusted when a later version is fixed.
    if sys.platform == 'darwin':
        import os
        if int(os.uname().release.split('.')[0]) >= 9:
            _uuid_generate_random = _uuid_generate_time = None
    # On Windows prior to 2000, UuidCreate gives a UUID containing the
    # hardware address. On Windows 2000 and later, UuidCreate makes a
    # random UUID and UuidCreateSequential gives a UUID containing the
    # hardware address. These routines are provided by the RPC runtime.
    # NOTE: at least on Tim's WinXP Pro SP2 desktop box, while the last
    # 6 bytes returned by UuidCreateSequential are fixed, they don't appear
    # to bear any relationship to the MAC address of any network device
    # on the box.
    try:
        lib = ctypes.windll.rpcrt4
    except:
        lib = None
    _UuidCreate = getattr(lib, 'UuidCreateSequential',
                          getattr(lib, 'UuidCreate', None))
except:
    # Deliberately best-effort: any failure here just leaves the three
    # hooks as None and the pure-Python fallbacks are used instead.
    pass
def _unixdll_getnode():
    """Get the hardware address on Unix using the libuuid/libc time-based
    generator located at import time."""
    raw = ctypes.create_string_buffer(16)
    _uuid_generate_time(raw)
    # A version-1 UUID embeds the MAC address in its node field.
    return UUID(bytes=bytes_(raw.raw)).node
def _windll_getnode():
    """Get the hardware address on Windows using ctypes (RPC runtime)."""
    raw = ctypes.create_string_buffer(16)
    # UuidCreateSequential/UuidCreate return 0 (RPC_S_OK) on success.
    if _UuidCreate(raw) == 0:
        return UUID(bytes=bytes_(raw.raw)).node
def _random_getnode():
"""Get a random node ID, with eighth bit set as suggested by RFC 4122."""
import random
return random.getrandbits(48) | 0x010000000000
_node = None
def getnode():
    """Get the hardware address as a 48-bit positive integer.
    The first time this runs, it may launch a separate program, which could
    be quite slow. If all attempts to obtain the hardware address fail, we
    choose a random 48-bit number with its eighth bit set to 1 as recommended
    in RFC 4122.
    """
    global _node
    if _node is not None:
        # Cached from a previous call.
        return _node
    import sys
    if sys.platform == 'win32':
        candidates = [_windll_getnode, _netbios_getnode, _ipconfig_getnode]
    else:
        candidates = [_unixdll_getnode, _ifconfig_getnode, _ip_getnode,
                      _arp_getnode, _lanscan_getnode, _netstat_getnode]
    # _random_getnode always produces a value, so the loop is guaranteed
    # to cache something and return.
    for candidate in candidates + [_random_getnode]:
        try:
            _node = candidate()
        except:
            continue
        if _node is not None:
            return _node
_last_timestamp = None
def uuid1(node=None, clock_seq=None):
    """Generate a UUID from a host ID, sequence number, and the current time.
    If 'node' is not given, getnode() is used to obtain the hardware
    address. If 'clock_seq' is given, it is used as the sequence number;
    otherwise a random 14-bit sequence number is chosen."""
    # When the system provides a version-1 UUID generator, use it (but don't
    # use UuidCreate here because its UUIDs don't conform to RFC 4122).
    if _uuid_generate_time and node is clock_seq is None:
        _buffer = ctypes.create_string_buffer(16)
        _uuid_generate_time(_buffer)
        return UUID(bytes=bytes_(_buffer.raw))
    global _last_timestamp
    import time
    nanoseconds = int(time.time() * 1e9)
    # 0x01b21dd213814000 is the number of 100-ns intervals between the
    # UUID epoch 1582-10-15 00:00:00 and the Unix epoch 1970-01-01 00:00:00.
    timestamp = int(nanoseconds/100) + 0x01b21dd213814000
    # Keep timestamps strictly increasing within this process even when the
    # system clock resolution is coarser than 100 ns.
    if _last_timestamp is not None and timestamp <= _last_timestamp:
        timestamp = _last_timestamp + 1
    _last_timestamp = timestamp
    if clock_seq is None:
        import random
        clock_seq = random.getrandbits(14) # instead of stable storage
    # Split the 60-bit timestamp and 14-bit clock sequence into the
    # RFC 4122 field layout.
    time_low = timestamp & 0xffffffff
    time_mid = (timestamp >> 32) & 0xffff
    time_hi_version = (timestamp >> 48) & 0x0fff
    clock_seq_low = clock_seq & 0xff
    clock_seq_hi_variant = (clock_seq >> 8) & 0x3f
    if node is None:
        node = getnode()
    return UUID(fields=(time_low, time_mid, time_hi_version,
                        clock_seq_hi_variant, clock_seq_low, node), version=1)
def uuid3(namespace, name):
    """Generate a UUID from the MD5 hash of a namespace UUID and a name."""
    from hashlib import md5
    digest = md5(namespace.bytes + bytes(name, "utf-8")).digest()
    # The version/variant bits are stamped over the hash by the constructor.
    return UUID(bytes=digest[:16], version=3)
def uuid4():
    """Generate a random UUID."""
    # Prefer the platform's native version-4 generator when it was found
    # at import time.
    if _uuid_generate_random:
        raw = ctypes.create_string_buffer(16)
        _uuid_generate_random(raw)
        return UUID(bytes=bytes_(raw.raw))

    try:
        import os
        return UUID(bytes=os.urandom(16), version=4)
    except Exception:
        # os.urandom can be unavailable in exotic environments; fall back
        # to the (non-cryptographic) random module.
        import random
        return UUID(int=random.getrandbits(128), version=4)
def uuid5(namespace, name):
    """Generate a UUID from the SHA-1 hash of a namespace UUID and a name."""
    from hashlib import sha1
    digest = sha1(namespace.bytes + bytes(name, "utf-8")).digest()
    # Only the first 16 of SHA-1's 20 digest bytes are used.
    return UUID(bytes=digest[:16], version=5)
# The following standard UUIDs are for use with uuid3() or uuid5().
# They are the well-known namespace IDs from RFC 4122, Appendix C.
NAMESPACE_DNS = UUID('6ba7b810-9dad-11d1-80b4-00c04fd430c8')   # fully-qualified domain names
NAMESPACE_URL = UUID('6ba7b811-9dad-11d1-80b4-00c04fd430c8')   # URLs
NAMESPACE_OID = UUID('6ba7b812-9dad-11d1-80b4-00c04fd430c8')   # ISO OIDs
NAMESPACE_X500 = UUID('6ba7b814-9dad-11d1-80b4-00c04fd430c8')  # X.500 distinguished names
| mit |
tmerrick1/spack | var/spack/repos/builtin/packages/minismac2d/package.py | 5 | 3131 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Minismac2d(MakefilePackage):
    """Proxy Application. Solves the finite-differenced 2D incompressible
    Navier-Stokes equations with Spalart-Allmaras one-equation
    turbulence model on a structured body conforming grid.
    """

    homepage = "http://mantevo.org"
    url = "http://mantevo.org/downloads/releaseTarballs/miniapps/MiniSMAC2D/miniSMAC2D-2.0.tgz"

    tags = ['proxy-app']

    version('2.0', '1bb1a52cea21bc9162bf7a71a6ddf37d')

    depends_on('mpi')

    # The makefile is not safe for parallel make.
    parallel = False

    @property
    def build_targets(self):
        """Make variables wiring the build to the MPI toolchain."""
        mpi = self.spec['mpi']
        return [
            'CPP=cpp',
            'FC={0}'.format(mpi.mpifc),
            'LD={0}'.format(mpi.mpifc),
            'MPIDIR=-I{0}'.format(mpi.headers.directories[0]),
            'CPPFLAGS=-P -traditional -DD_PRECISION',
            'FFLAGS=-O3 -c -g -DD_PRECISION',
            'LDFLAGS=-O3',
            '--file=Makefile_mpi_only'
        ]

    def edit(self, spec, prefix):
        # Point the input deck at the data files that install() copies
        # next to the binary.
        param_file = FileFilter('smac2d.in')
        param_file.filter('bcmain_directory=.*', "bcmain_directory='.'")
        param_file.filter('bcmain_filename=.*',
                          "bcmain_filename='bcmain.dat_original_119x31'")
        param_file.filter('xygrid_directory=.*', "xygrid_directory='.'")
        param_file.filter('xygrid_filename=.*',
                          "xygrid_filename='xy.dat_original_119x31'")

    def install(self, spec, prefix):
        # Manual installation: binary plus its input data into bin/,
        # documentation into doc/.
        mkdirp(prefix.bin)
        mkdirp(prefix.doc)
        for fname in ('smac2d_mpi_only', 'bcmain.dat_original_119x31',
                      'xy.dat_original_119x31', 'smac2d.in'):
            install(fname, prefix.bin)
        install('README.txt', prefix.doc)
| lgpl-2.1 |
albertjan/pypyjs | website/js/pypy.js-0.2.0/lib/modules/hashlib.py | 8 | 7418 | # $Id$
#
# Copyright (C) 2005 Gregory P. Smith (greg@krypto.org)
# Licensed to PSF under a Contributor Agreement.
#
__doc__ = """hashlib module - A common interface to many hash functions.
new(name, string='') - returns a new hash object implementing the
given hash function; initializing the hash
using the given string data.
Named constructor functions are also available, these are much faster
than using new():
md5(), sha1(), sha224(), sha256(), sha384(), and sha512()
More algorithms may be available on your platform but the above are
guaranteed to exist.
NOTE: If you want the adler32 or crc32 hash functions they are available in
the zlib module.
Choose your hash function wisely. Some have known collision weaknesses.
sha384 and sha512 will be slow on 32 bit platforms.
Hash objects have these methods:
- update(arg): Update the hash object with the string arg. Repeated calls
are equivalent to a single call with the concatenation of all
the arguments.
- digest(): Return the digest of the strings passed to the update() method
so far. This may contain non-ASCII characters, including
NUL bytes.
- hexdigest(): Like digest() except the digest is returned as a string of
double length, containing only hexadecimal digits.
- copy(): Return a copy (clone) of the hash object. This can be used to
efficiently compute the digests of strings that share a common
initial substring.
For example, to obtain the digest of the string 'Nobody inspects the
spammish repetition':
>>> import hashlib
>>> m = hashlib.md5()
>>> m.update("Nobody inspects")
>>> m.update(" the spammish repetition")
>>> m.digest()
'\\xbbd\\x9c\\x83\\xdd\\x1e\\xa5\\xc9\\xd9\\xde\\xc9\\xa1\\x8d\\xf0\\xff\\xe9'
More condensed:
>>> hashlib.sha224("Nobody inspects the spammish repetition").hexdigest()
'a4337bc45a8fc544c03f52dc550cd6e1e87021bc896588bd79e901e2'
"""
# This tuple and __get_builtin_constructor() must be modified if a new
# always available algorithm is added.
__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')

# Public alias naming the guaranteed-available algorithms.
algorithms = __always_supported

__all__ = __always_supported + ('new', 'algorithms', 'pbkdf2_hmac')
def __get_builtin_constructor(name):
    """Return the builtin (non-OpenSSL) constructor for the named hash.

    Raises ValueError if no extension module provides the algorithm.
    """
    try:
        if name in ('SHA1', 'sha1'):
            import _sha
            return _sha.new
        if name in ('MD5', 'md5'):
            import _md5
            return _md5.new
        if name in ('SHA256', 'sha256', 'SHA224', 'sha224'):
            import _sha256
            # Both digests live in _sha256; pick by bit length.
            return _sha256.sha256 if name.endswith('256') else _sha256.sha224
        if name in ('SHA512', 'sha512', 'SHA384', 'sha384'):
            import _sha512
            return _sha512.sha512 if name.endswith('512') else _sha512.sha384
    except ImportError:
        pass  # no extension module, this hash is unsupported.

    raise ValueError('unsupported hash type ' + name)
def __get_openssl_constructor(name):
    """Return the fastest available constructor for the named hash.

    Prefers the OpenSSL-backed function, falling back to the builtin
    implementation when OpenSSL lacks (or refuses to enable) it.
    """
    try:
        constructor = getattr(_hashlib, 'openssl_' + name)
        # Allow the C module to raise ValueError: the function may exist
        # while the hash itself is unavailable in this OpenSSL build.
        constructor()
        # Use the C function directly (very fast).
        return constructor
    except (AttributeError, ValueError):
        return __get_builtin_constructor(name)
def __py_new(name, string=''):
    """new(name, string='') - Return a new hashing object using the named
    algorithm; optionally initialized with a string.
    """
    constructor = __get_builtin_constructor(name)
    return constructor(string)
def __hash_new(name, string=''):
    """new(name, string='') - Return a new hashing object using the named
    algorithm; optionally initialized with a string.
    """
    try:
        return _hashlib.new(name, string)
    except ValueError:
        # If the _hashlib module (OpenSSL) doesn't support the named hash,
        # try our builtin implementations instead. This keeps SHA224/256
        # and SHA384/512 working even on OpenSSL older than 0.9.8.
        return __get_builtin_constructor(name)(string)
try:
    # Prefer the OpenSSL-backed constructors when the _hashlib extension
    # module is importable.
    import _hashlib
    new = __hash_new
    __get_hash = __get_openssl_constructor
except ImportError:
    # No OpenSSL: fall back to the builtin extension modules only.
    new = __py_new
    __get_hash = __get_builtin_constructor

# Publish one module-level constructor per always-supported algorithm
# (hashlib.md5, hashlib.sha1, ...).
for __func_name in __always_supported:
    # try them all, some may not work due to the OpenSSL
    # version not supporting that algorithm.
    try:
        globals()[__func_name] = __get_hash(__func_name)
    except ValueError:
        import logging
        logging.exception('code for hash %s was not found.', __func_name)
try:
    # OpenSSL's PKCS5_PBKDF2_HMAC requires OpenSSL 1.0+ with HMAC and SHA
    from _hashlib import pbkdf2_hmac
except ImportError:
    # Pure-Python fallback. NOTE: this branch uses Python-2-only builtins
    # (buffer, xrange) and str-based translate tables.
    import binascii
    import struct

    # XOR translation tables for the HMAC outer (0x5C) and inner (0x36) pads.
    _trans_5C = b"".join(chr(x ^ 0x5C) for x in range(256))
    _trans_36 = b"".join(chr(x ^ 0x36) for x in range(256))

    def pbkdf2_hmac(hash_name, password, salt, iterations, dklen=None):
        """Password based key derivation function 2 (PKCS #5 v2.0)

        This Python implementation, based on the hmac module, is about as
        fast as OpenSSL's PKCS5_PBKDF2_HMAC for short passwords and much
        faster for long passwords.
        """
        if not isinstance(hash_name, str):
            raise TypeError(hash_name)
        if not isinstance(password, (bytes, bytearray)):
            password = bytes(buffer(password))
        if not isinstance(salt, (bytes, bytearray)):
            salt = bytes(buffer(salt))

        # Fast inline HMAC implementation: pre-key one inner and one outer
        # digest with the padded password, then clone per message.
        inner = new(hash_name)
        outer = new(hash_name)
        blocksize = getattr(inner, 'block_size', 64)
        if len(password) > blocksize:
            # HMAC rule: keys longer than the block size are hashed first.
            password = new(hash_name, password).digest()
        password = password + b'\x00' * (blocksize - len(password))
        inner.update(password.translate(_trans_36))
        outer.update(password.translate(_trans_5C))

        def prf(msg, inner=inner, outer=outer):
            # PBKDF2_HMAC uses the password as key. We can re-use the same
            # digest objects and just update copies to skip initialization.
            icpy = inner.copy()
            ocpy = outer.copy()
            icpy.update(msg)
            ocpy.update(icpy.digest())
            return ocpy.digest()

        if iterations < 1:
            raise ValueError(iterations)
        if dklen is None:
            dklen = outer.digest_size
        if dklen < 1:
            raise ValueError(dklen)

        # Format string widening each block digest to full hex width.
        hex_format_string = "%%0%ix" % (new(hash_name).digest_size * 2)

        dkey = b''
        loop = 1
        while len(dkey) < dklen:
            # Block U1 = PRF(salt || big-endian block index).
            prev = prf(salt + struct.pack(b'>I', loop))
            # rkey accumulates U1 ^ U2 ^ ... as an integer.
            rkey = int(binascii.hexlify(prev), 16)
            for i in xrange(iterations - 1):
                prev = prf(prev)
                rkey ^= int(binascii.hexlify(prev), 16)
            loop += 1
            dkey += binascii.unhexlify(hex_format_string % rkey)

        return dkey[:dklen]
| mit |
thusser/rtml-parse | rtmlparse/examples/telescope.py | 1 | 1224 | from rtmlparse import RTML
from rtmlparse.elements import *
from rtmlparse.misc import units, unitvalues
def main():
# create rtml
rtml = RTML('rtml://monet.uni-goettingen.de/resource', RTML.Mode.resource)
# telescope
telescope = Telescope(rtml, name='MONET/S')
telescope.Aperture = unitvalues.ApertureValue(1.2)
telescope.FocalLength = 8.4
telescope.FocalRatio = 'f/7'
telescope.PlateScale = 24.56
# add mirrors
Mirrors(telescope, number=3, coating=units.CoatingTypes.silver)
# location
location = Location(telescope)
location.EastLongitude = 20.810694
location.Latitude = -32.379444
location.Height = 1798.
location.TimeZone = 2
# camera
camera = Camera(telescope, name='SI-1100')
#camera.Description = 'Scientific Instruments 1100'
camera.SpectralRegion = SpectralRegionTypes.optical
# detector
detector = Detector(camera)
detector.NumColumns = 2048
detector.NumRows = 2048
detector.PixelSize = 15.
# dump it
print rtml.dumps(pretty_print=True)
with open("rtml.xml", "wb") as f:
rtml.dump(f)
# validate it
print 'Validating...'
rtml.valid()
if __name__ == '__main__':
main() | mit |
switchboardOp/ansible | test/units/modules/network/nxos/test_nxos_pim.py | 47 | 2116 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.compat.tests.mock import patch
from ansible.modules.network.nxos import nxos_pim
from .nxos_module import TestNxosModule, load_fixture, set_module_args
class TestNxosPimModule(TestNxosModule):
    """Unit tests for nxos_pim's ssm_range argument handling."""

    # Module under test; consumed by the TestNxosModule harness.
    module = nxos_pim

    def setUp(self):
        # Patch the module's config read/write entry points so no device
        # connection is needed. The bare mocks (get_config/load_config)
        # are re-primed per test in load_fixtures below.
        self.mock_get_config = patch('ansible.modules.network.nxos.nxos_pim.get_config')
        self.get_config = self.mock_get_config.start()

        self.mock_load_config = patch('ansible.modules.network.nxos.nxos_pim.load_config')
        self.load_config = self.mock_load_config.start()

    def tearDown(self):
        # Undo the patches started in setUp.
        self.mock_get_config.stop()
        self.mock_load_config.stop()

    def load_fixtures(self, commands=None, device=''):
        # Serve the canned running-config; config writes become no-ops.
        self.get_config.return_value = load_fixture('nxos_pim', 'config.cfg')
        self.load_config.return_value = None

    def test_nxos_pim(self):
        # A new SSM range should emit the matching CLI command.
        set_module_args(dict(ssm_range='232.0.0.0/8'))
        self.execute_module(changed=True, commands=['ip pim ssm range 232.0.0.0/8'])

    def test_nxos_pim_none(self):
        # 'none' is a valid range value and also produces a command.
        set_module_args(dict(ssm_range='none'))
        self.execute_module(changed=True, commands=['ip pim ssm range none'])

    def test_nxos_pim_no_change(self):
        # No commands expected -- presumably the fixture config already
        # contains this range; verify against config.cfg if it changes.
        set_module_args(dict(ssm_range='127.0.0.0/31'))
        self.execute_module(changed=False, commands=[])
| gpl-3.0 |
qwefi/nova | nova/tests/virt/powervm/test_powervm.py | 2 | 42457 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test suite for PowerVMDriver.
"""
import contextlib
import os
import paramiko
from nova import context
from nova import db
from nova import test
from nova.compute import flavors
from nova.compute import power_state
from nova.compute import task_states
from nova.network import model as network_model
from nova.openstack.common import processutils
from nova.tests import fake_network_cache_model
from nova.tests.image import fake
from nova.virt import images
from nova.virt.powervm import blockdev as powervm_blockdev
from nova.virt.powervm import common
from nova.virt.powervm import driver as powervm_driver
from nova.virt.powervm import exception
from nova.virt.powervm import lpar
from nova.virt.powervm import operator as powervm_operator
def fake_lpar(instance_name):
    """Return a canned running LPAR object for tests."""
    attrs = dict(name=instance_name,
                 lpar_id=1, desired_mem=1024,
                 max_mem=2048, max_procs=2,
                 uptime=939395, state='Running')
    return lpar.LPAR(**attrs)
def fake_ssh_connect(connection):
    """Returns a new paramiko.SSHClient object."""
    # The 'connection' argument is ignored; no real SSH session is opened.
    return paramiko.SSHClient()
def raise_(ex):
    """Raise *ex*; lets an exception be raised from a lambda expression."""
    raise ex
class FakePowerVMOperator(powervm_operator.PowerVMOperator):
    """PowerVM operator stub: canned LPAR lookups, no-op VIOS commands."""

    def get_lpar(self, instance_name, resource_type='lpar'):
        # Always report the fixed running LPAR from fake_lpar().
        return fake_lpar(instance_name)

    def run_vios_command(self, cmd):
        # Swallow all VIOS commands.
        pass
class FakeIVMOperator(powervm_operator.IVMOperator):
    """IVM operator stub: fixed answers for queries, no-ops for mutations."""

    def get_lpar(self, instance_name, resource_type='lpar'):
        return fake_lpar(instance_name)

    def list_lpar_instances(self):
        return ['instance-00000001', 'instance-00000002']

    def create_lpar(self, lpar):
        pass

    def start_lpar(self, instance_name):
        pass

    def stop_lpar(self, instance_name, time_out=30):
        pass

    def remove_lpar(self, instance_name):
        pass

    def get_vhost_by_instance_id(self, instance_id):
        return 'vhostfake'

    def get_virtual_eth_adapter_id(self):
        return 1

    def get_disk_name_by_vhost(self, vhost):
        return 'lvfake01'

    def remove_disk(self, disk_name):
        pass

    def run_cfg_dev(self, device_name):
        pass

    def attach_disk_to_vhost(self, disk, vhost):
        pass

    def get_memory_info(self):
        return dict(total_mem=65536, avail_mem=46336)

    def get_cpu_info(self):
        return dict(total_procs=8.0, avail_procs=6.3)

    def get_disk_info(self):
        return dict(disk_total=10168,
                    disk_used=0,
                    disk_avail=10168)

    def get_hostname(self):
        return 'fake-powervm'

    def rename_lpar(self, old, new):
        pass

    def _remove_file(self, file_path):
        pass

    def set_lpar_mac_base_value(self, instance_name, mac):
        pass

    def get_logical_vol_size(self, diskname):
        pass

    def macs_for_instance(self, instance):
        return {'FA:98:64:2B:29:39'}

    def run_vios_command(self, cmd):
        pass
class FakeBlockAdapter(powervm_blockdev.PowerVMLocalVolumeAdapter):
    """Local-volume adapter stub with canned copy/volume behavior."""

    def __init__(self):
        # Deliberately skip the parent __init__; only fake connection data
        # is needed by the tests.
        self.connection_data = common.Connection(host='fake_compute_1',
                                                 username='fake_user',
                                                 password='fake_pass')

    def _create_logical_volume(self, size):
        return 'lvfake01'

    def _remove_logical_volume(self, lv_name):
        pass

    def _copy_file_to_device(self, sourcePath, device, decrompress=True):
        pass

    def _copy_image_file(self, sourcePath, remotePath, decompress=False):
        # Pretend the file landed at a checksum-suffixed path with a 4 GB size.
        finalPath = '/tmp/rhel62.raw.7e358754160433febd6f3318b7c9e335'
        size = 4294967296
        return finalPath, size

    def _copy_device_to_file(self, device_name, file_path):
        pass

    def _copy_image_file_from_host(self, remote_source_path, local_dest_dir,
                                   compress=False):
        # Create an empty local file to stand in for the fetched snapshot.
        snapshot_file = '/tmp/rhel62.raw.7e358754160433febd6f3318b7c9e335'
        open(snapshot_file, 'w+').close()
        return snapshot_file
def fake_get_powervm_operator():
    """Factory returning the stub IVM operator with fake credentials."""
    conn = common.Connection('fake_host', 'fake_user', 'fake_password')
    return FakeIVMOperator(conn)
def create_instance(testcase):
    """Create and return a DB-backed test instance (1024 MB, 2 VCPUs).

    Also stubs out the image service on *testcase* so image lookups made
    during spawn/snapshot resolve against the fake image service.
    """
    fake.stub_out_image_service(testcase.stubs)
    ctxt = context.get_admin_context()
    # Flavor id 1 is expected to exist in the test database.
    instance_type = db.flavor_get(ctxt, 1)
    sys_meta = flavors.save_flavor_info({}, instance_type)
    return db.instance_create(ctxt,
                    {'user_id': 'fake',
                     'project_id': 'fake',
                     'instance_type_id': 1,
                     'memory_mb': 1024,
                     'vcpus': 2,
                     'image_ref': '155d900f-4e14-4e4c-a73d-069cbf4541e6',
                     'system_metadata': sys_meta})
class PowerVMDriverTestCase(test.TestCase):
    """Unit tests for PowerVM connection calls."""

    # Sentinel passed around as network_info by several tests.
    fake_network_info = 'fake_network_info'
    # Flipped to True by fake_create_lpar_instance so tests can assert it ran.
    fake_create_lpar_instance_called = False
    def fake_create_lpar_instance(self, instance, network_info,
                                  host_stats=None):
        """Stub for the _create_lpar_instance method.

        This stub assumes that 'instance' is the one created in the test case
        setUp method and 'network_info' is equal to self.fake_network_info.
        @return: fake LPAR based on instance parameter where the name of the
                 LPAR is the uuid of the instance
        """
        self.fake_create_lpar_instance_called = True
        # Sanity-check that the driver forwarded our exact test objects.
        self.assertEquals(self.instance, instance)
        self.assertEquals(self.fake_network_info, network_info)
        return self.powervm_connection._powervm._operator.get_lpar(
            instance['uuid'])
    def setUp(self):
        """Wire the driver to the fake operator/adapter and make a DB instance."""
        super(PowerVMDriverTestCase, self).setUp()
        # Replace both factory functions before the driver is constructed so
        # it is built entirely on the stubs above.
        self.stubs.Set(powervm_operator, 'get_powervm_operator',
                       fake_get_powervm_operator)
        self.stubs.Set(powervm_operator, 'get_powervm_disk_adapter',
                       lambda: FakeBlockAdapter())
        self.powervm_connection = powervm_driver.PowerVMDriver(None)
        self.instance = create_instance(self)
def test_list_instances(self):
instances = self.powervm_connection.list_instances()
self.assertTrue('instance-00000001' in instances)
self.assertTrue('instance-00000002' in instances)
def test_instance_exists(self):
name = self.instance['name']
self.assertTrue(self.powervm_connection.instance_exists(name))
    def test_spawn(self):
        """Happy-path spawn: the instance ends up RUNNING."""
        def fake_image_fetch(context, image_id, file_path,
                             user_id, project_id):
            # Skip the real glance download.
            pass
        self.flags(powervm_img_local_path='/images/')
        self.stubs.Set(images, 'fetch', fake_image_fetch)
        image_meta = {}
        image_meta['id'] = '666'
        fake_net_info = network_model.NetworkInfo([
                                     fake_network_cache_model.new_vif()])
        self.powervm_connection.spawn(context.get_admin_context(),
                                      self.instance, image_meta, [], 's3cr3t',
                                      fake_net_info)
        state = self.powervm_connection.get_info(self.instance)['state']
        self.assertEqual(state, power_state.RUNNING)
    def test_spawn_create_lpar_fail(self):
        """A failure during LPAR creation surfaces as PowerVMLPARCreationFailed."""
        self.flags(powervm_img_local_path='/images/')
        self.stubs.Set(images, 'fetch', lambda *x, **y: None)
        # Make host-stats collection raise so the LPAR sizing step fails.
        self.stubs.Set(
            self.powervm_connection._powervm,
            'get_host_stats',
            lambda *x, **y: raise_(
                (processutils.ProcessExecutionError('instance_name'))))
        fake_net_info = network_model.NetworkInfo([
                                     fake_network_cache_model.new_vif()])
        self.assertRaises(exception.PowerVMLPARCreationFailed,
                          self.powervm_connection.spawn,
                          context.get_admin_context(),
                          self.instance,
                          {'id': 'ANY_ID'}, [], 's3cr3t', fake_net_info)
    def test_spawn_cleanup_on_fail(self):
        """The original spawn error wins even when cleanup itself raises."""
        self.flags(powervm_img_local_path='/images/')
        self.stubs.Set(images, 'fetch', lambda *x, **y: None)
        # First failure: image-to-volume conversion raises.
        self.stubs.Set(
            self.powervm_connection._powervm._disk_adapter,
            'create_volume_from_image',
            lambda *x, **y: raise_(exception.PowerVMImageCreationFailed()))
        # Second failure: cleanup raises too; it must be logged, not raised.
        self.stubs.Set(
            self.powervm_connection._powervm, '_cleanup',
            lambda *x, **y: raise_(Exception('This should be logged.')))
        fake_net_info = network_model.NetworkInfo([
                                     fake_network_cache_model.new_vif()])
        self.assertRaises(exception.PowerVMImageCreationFailed,
                          self.powervm_connection.spawn,
                          context.get_admin_context(),
                          self.instance,
                          {'id': 'ANY_ID'}, [], 's3cr3t', fake_net_info)
    def test_snapshot(self):
        """snapshot() uploads the image and records the expected metadata."""
        def update_task_state(task_state, expected_state=None):
            # Record the last task-state transition for the assertion below.
            self._loc_task_state = task_state
            self._loc_expected_task_state = expected_state

        loc_context = context.get_admin_context()
        arch = 'fake_arch'
        properties = {'instance_id': self.instance['id'],
                      'user_id': str(loc_context.user_id),
                      'architecture': arch}
        snapshot_name = 'fake_snap'
        sent_meta = {'name': snapshot_name, 'is_public': False,
                     'status': 'creating', 'properties': properties}
        # Pre-create the destination image record, as compute manager would.
        image_service = fake.FakeImageService()
        recv_meta = image_service.create(loc_context, sent_meta)

        self.powervm_connection.snapshot(loc_context,
                                         self.instance, recv_meta['id'],
                                         update_task_state)
        # The last transition must be PENDING_UPLOAD -> IMAGE_UPLOADING.
        self.assertTrue(self._loc_task_state == task_states.IMAGE_UPLOADING and
                        self._loc_expected_task_state ==
                        task_states.IMAGE_PENDING_UPLOAD)
        snapshot = image_service.show(context, recv_meta['id'])
        self.assertEquals(snapshot['properties']['image_state'], 'available')
        self.assertEquals(snapshot['properties']['architecture'], arch)
        self.assertEquals(snapshot['status'], 'active')
        self.assertEquals(snapshot['name'], snapshot_name)
    def _set_get_info_stub(self, state):
        """Stub _get_instance so get_info sees an LPAR in *state*."""
        def fake_get_instance(instance_name):
            # Fixed resource numbers; only 'state' varies per test.
            return {'state': state,
                    'max_mem': 512,
                    'desired_mem': 256,
                    'max_procs': 2,
                    'uptime': 2000}
        self.stubs.Set(self.powervm_connection._powervm, '_get_instance',
                       fake_get_instance)
def test_get_info_state_nostate(self):
self._set_get_info_stub('')
info_dict = self.powervm_connection.get_info(self.instance)
self.assertEqual(info_dict['state'], power_state.NOSTATE)
def test_get_info_state_running(self):
self._set_get_info_stub('Running')
info_dict = self.powervm_connection.get_info(self.instance)
self.assertEqual(info_dict['state'], power_state.RUNNING)
def test_get_info_state_starting(self):
self._set_get_info_stub('Starting')
info_dict = self.powervm_connection.get_info(self.instance)
self.assertEqual(info_dict['state'], power_state.RUNNING)
def test_get_info_state_shutdown(self):
self._set_get_info_stub('Not Activated')
info_dict = self.powervm_connection.get_info(self.instance)
self.assertEqual(info_dict['state'], power_state.SHUTDOWN)
def test_get_info_state_shutting_down(self):
self._set_get_info_stub('Shutting Down')
info_dict = self.powervm_connection.get_info(self.instance)
self.assertEqual(info_dict['state'], power_state.SHUTDOWN)
def test_get_info_state_error(self):
self._set_get_info_stub('Error')
info_dict = self.powervm_connection.get_info(self.instance)
self.assertEqual(info_dict['state'], power_state.CRASHED)
def test_get_info_state_not_available(self):
self._set_get_info_stub('Not Available')
info_dict = self.powervm_connection.get_info(self.instance)
self.assertEqual(info_dict['state'], power_state.CRASHED)
def test_get_info_state_open_firmware(self):
self._set_get_info_stub('Open Firmware')
info_dict = self.powervm_connection.get_info(self.instance)
self.assertEqual(info_dict['state'], power_state.CRASHED)
def test_get_info_state_unmapped(self):
self._set_get_info_stub('The Universe')
info_dict = self.powervm_connection.get_info(self.instance)
self.assertEqual(info_dict['state'], power_state.NOSTATE)
    def test_destroy(self):
        """After destroy, the instance should no longer be reported."""
        self.powervm_connection.destroy(self.instance, None)
        # The fake operator always returns an LPAR; patch get_lpar to None so
        # instance_exists can observe the removal.
        self.stubs.Set(FakeIVMOperator, 'get_lpar', lambda x, y: None)
        name = self.instance['name']
        self.assertFalse(self.powervm_connection.instance_exists(name))
def test_get_info(self):
info = self.powervm_connection.get_info(self.instance)
self.assertEqual(info['state'], power_state.RUNNING)
self.assertEqual(info['max_mem'], 2048)
self.assertEqual(info['mem'], 1024)
self.assertEqual(info['num_cpu'], 2)
self.assertEqual(info['cpu_time'], 939395)
def test_remote_utility_1(self):
path_one = '/some/file/'
path_two = '/path/filename'
joined_path = common.aix_path_join(path_one, path_two)
expected_path = '/some/file/path/filename'
self.assertEqual(joined_path, expected_path)
def test_remote_utility_2(self):
path_one = '/some/file/'
path_two = 'path/filename'
joined_path = common.aix_path_join(path_one, path_two)
expected_path = '/some/file/path/filename'
self.assertEqual(joined_path, expected_path)
def test_remote_utility_3(self):
path_one = '/some/file'
path_two = '/path/filename'
joined_path = common.aix_path_join(path_one, path_two)
expected_path = '/some/file/path/filename'
self.assertEqual(joined_path, expected_path)
def test_remote_utility_4(self):
path_one = '/some/file'
path_two = 'path/filename'
joined_path = common.aix_path_join(path_one, path_two)
expected_path = '/some/file/path/filename'
self.assertEqual(joined_path, expected_path)
    def _test_finish_revert_migration_after_crash(self, backup_made,
                                                  new_made,
                                                  power_on):
        """Drive finish_revert_migration for one crash scenario.

        backup_made: the 'rsz_<name>' backup LPAR exists.
        new_made:    the crash happened after the new LPAR was created.
        power_on:    whether the reverted instance should be powered on.
        NOTE: mox record/replay -- the call order below is part of the test.
        """
        inst = {'name': 'foo'}
        network_info = []
        network_info.append({'address': 'fa:89:f0:8b:9b:39'})

        self.mox.StubOutWithMock(self.powervm_connection, 'instance_exists')
        self.mox.StubOutWithMock(self.powervm_connection._powervm, 'destroy')
        self.mox.StubOutWithMock(self.powervm_connection._powervm._operator,
                                 'rename_lpar')
        self.mox.StubOutWithMock(self.powervm_connection._powervm, 'power_on')
        self.mox.StubOutWithMock(self.powervm_connection._powervm._operator,
                                 'set_lpar_mac_base_value')

        # Record phase: the driver first probes for the backup LPAR.
        self.powervm_connection.instance_exists('rsz_foo').AndReturn(
            backup_made)

        if backup_made:
            self.powervm_connection._powervm._operator.set_lpar_mac_base_value(
                'rsz_foo', 'fa:89:f0:8b:9b:39')
            self.powervm_connection.instance_exists('foo').AndReturn(new_made)
            if new_made:
                # The half-created new LPAR must be destroyed first.
                self.powervm_connection._powervm.destroy('foo')
            self.powervm_connection._powervm._operator.rename_lpar('rsz_foo',
                                                                   'foo')
        if power_on:
            self.powervm_connection._powervm.power_on('foo')

        self.mox.ReplayAll()

        self.powervm_connection.finish_revert_migration(inst, network_info,
                                                block_device_info=None,
                                                power_on=power_on)

    def test_finish_revert_migration_after_crash(self):
        self._test_finish_revert_migration_after_crash(True, True, True)

    def test_finish_revert_migration_after_crash_before_new(self):
        self._test_finish_revert_migration_after_crash(True, False, True)

    def test_finish_revert_migration_after_crash_before_backup(self):
        # NOTE(mriedem): tests the power_on=False case also
        self._test_finish_revert_migration_after_crash(False, False, False)
def test_migrate_volume_use_instance_name(self):
inst_name = 'instance-00000000'
lv_name = 'logical-vol-name'
src_host = 'compute_host_1'
dest = 'compute_host_1'
image_path = 'some/image/path'
fake_noop = lambda *args, **kwargs: None
self.stubs.Set(self.powervm_connection._powervm._disk_adapter,
'_copy_device_to_file', fake_noop)
self.stubs.Set(self.powervm_connection._powervm._disk_adapter,
'run_vios_command_as_root', fake_noop)
blockdev_op = self.powervm_connection._powervm._disk_adapter
file_path = blockdev_op.migrate_volume(lv_name, src_host, dest,
image_path, inst_name)
expected_path = 'some/image/path/instance-00000000_rsz.gz'
self.assertEqual(file_path, expected_path)
def test_migrate_volume_use_lv_name(self):
lv_name = 'logical-vol-name'
src_host = 'compute_host_1'
dest = 'compute_host_1'
image_path = 'some/image/path'
fake_noop = lambda *args, **kwargs: None
self.stubs.Set(self.powervm_connection._powervm._disk_adapter,
'_copy_device_to_file', fake_noop)
self.stubs.Set(self.powervm_connection._powervm._disk_adapter,
'run_vios_command_as_root', fake_noop)
blockdev_op = self.powervm_connection._powervm._disk_adapter
file_path = blockdev_op.migrate_volume(lv_name, src_host, dest,
image_path)
expected_path = 'some/image/path/logical-vol-name_rsz.gz'
self.assertEqual(file_path, expected_path)
    def _test_deploy_from_migrated_file(self, power_on):
        """finish_migration deploys the migrated file with the resized size."""
        instance = self.instance
        context = 'fake_context'
        network_info = []
        network_info.append({'address': 'fa:89:f0:8b:9b:39'})
        dest = '10.8.46.20'
        disk_info = {}
        disk_info['root_disk_file'] = 'some/file/path.gz'
        disk_info['old_lv_size'] = 30
        self.flags(powervm_mgr=dest)
        fake_op = self.powervm_connection._powervm
        self.deploy_from_vios_file_called = False
        self.power_on = power_on

        def fake_deploy_from_vios_file(lpar, file_path, size,
                                       decompress, power_on):
            # Capture the call and verify every forwarded argument. The size
            # is expected to come from the instance flavor, not old_lv_size.
            exp_file_path = 'some/file/path.gz'
            exp_size = 40 * 1024 ** 3
            exp_decompress = True
            self.deploy_from_vios_file_called = True
            self.assertEqual(exp_file_path, file_path)
            self.assertEqual(exp_size, size)
            self.assertEqual(exp_decompress, decompress)
            self.assertEqual(self.power_on, power_on)

        self.stubs.Set(fake_op, '_deploy_from_vios_file',
                       fake_deploy_from_vios_file)
        self.powervm_connection.finish_migration(context, None,
                                instance, disk_info, network_info,
                                None, resize_instance=True,
                                block_device_info=None,
                                power_on=power_on)
        self.assertEqual(self.deploy_from_vios_file_called, True)

    def test_deploy_from_migrated_file_power_on(self):
        self._test_deploy_from_migrated_file(True)

    def test_deploy_from_migrated_file_power_off(self):
        self._test_deploy_from_migrated_file(False)
    def test_set_lpar_mac_base_value(self):
        """migrate_disk_and_power_off passes the fake operator's MAC through."""
        instance = self.instance
        # NOTE: shadows the imported 'context' module within this method.
        context = 'fake_context'
        dest = '10.8.46.20'  # Some fake dest IP
        instance_type = 'fake_instance_type'
        network_info = []
        network_info.append({'address': 'fa:89:f0:8b:9b:39'})
        block_device_info = None
        self.flags(powervm_mgr=dest)
        fake_noop = lambda *args, **kwargs: None
        fake_op = self.powervm_connection._powervm._operator
        # Neutralize everything except the MAC-setting path under test.
        self.stubs.Set(fake_op, 'get_vhost_by_instance_id', fake_noop)
        self.stubs.Set(fake_op, 'get_disk_name_by_vhost', fake_noop)
        self.stubs.Set(self.powervm_connection._powervm, 'power_off',
                       fake_noop)
        self.stubs.Set(fake_op, 'get_logical_vol_size',
                       lambda *args, **kwargs: '20')
        self.stubs.Set(self.powervm_connection, '_get_resize_name', fake_noop)
        self.stubs.Set(fake_op, 'rename_lpar', fake_noop)

        def fake_migrate_disk(*args, **kwargs):
            disk_info = {}
            disk_info['fake_dict'] = 'some/file/path.gz'
            return disk_info

        def fake_set_lpar_mac_base_value(inst_name, mac, *args, **kwargs):
            # get expected mac address from FakeIVM set
            fake_ivm = FakeIVMOperator(None)
            exp_mac = fake_ivm.macs_for_instance(inst_name).pop()
            self.assertEqual(exp_mac, mac)

        self.stubs.Set(self.powervm_connection._powervm, 'migrate_disk',
                       fake_migrate_disk)
        self.stubs.Set(fake_op, 'set_lpar_mac_base_value',
                       fake_set_lpar_mac_base_value)
        # Return value intentionally unused; the assertion happens inside
        # fake_set_lpar_mac_base_value above.
        disk_info = self.powervm_connection.migrate_disk_and_power_off(
            context, instance,
            dest, instance_type, network_info, block_device_info)
    def test_migrate_build_scp_command(self):
        """migrate_volume must build the expected scp command for a remote dest."""
        lv_name = 'logical-vol-name'
        src_host = 'compute_host_1'
        dest = 'compute_host_2'
        image_path = 'some/image/path'
        fake_noop = lambda *args, **kwargs: None

        # Provide a canned SSH key name instead of real key exchange.
        @contextlib.contextmanager
        def fake_vios_to_vios_auth(*args, **kwargs):
            key_name = 'some_key'
            yield key_name
        self.stubs.Set(common, 'vios_to_vios_auth',
                       fake_vios_to_vios_auth)

        self.stubs.Set(self.powervm_connection._powervm._disk_adapter,
                       'run_vios_command_as_root', fake_noop)

        def fake_run_vios_command(*args, **kwargs):
            # The assertion lives in the stub: verify the exact scp command.
            cmd = args[0]
            exp_cmd = ' '.join(['scp -o "StrictHostKeyChecking no" -i',
                                'some_key',
                                'some/image/path/logical-vol-name_rsz.gz',
                                'fake_user@compute_host_2:some/image/path'])
            self.assertEqual(exp_cmd, cmd)

        self.stubs.Set(self.powervm_connection._powervm._disk_adapter,
                       'run_vios_command',
                       fake_run_vios_command)

        blockdev_op = self.powervm_connection._powervm._disk_adapter
        file_path = blockdev_op.migrate_volume(lv_name, src_host, dest,
                                               image_path)
def test_get_resize_name(self):
inst_name = 'instance-00000001'
expected_name = 'rsz_instance-00000001'
result = self.powervm_connection._get_resize_name(inst_name)
self.assertEqual(expected_name, result)
def test_get_long_resize_name(self):
inst_name = 'some_really_long_instance_name_00000001'
expected_name = 'rsz__really_long_instance_name_00000001'
result = self.powervm_connection._get_resize_name(inst_name)
self.assertEqual(expected_name, result)
def test_finish_migration_raises_exception(self):
# Tests that the finish_migration method will raise an exception
# if the 'root_disk_file' key is not found in the disk_info parameter.
self.stubs.Set(self.powervm_connection._powervm,
'_create_lpar_instance', self.fake_create_lpar_instance)
self.assertRaises(exception.PowerVMUnrecognizedRootDevice,
self.powervm_connection.finish_migration,
context.get_admin_context(), None,
self.instance, {'old_lv_size': '20'},
self.fake_network_info, None, True)
self.assertTrue(self.fake_create_lpar_instance_called)
    def test_finish_migration_successful(self):
        # Tests a successful migration (resize) flow and asserts various
        # methods called along the way with expected argument values.
        fake_file_path = 'some/file/path.py'
        disk_info = {'root_disk_file': fake_file_path,
                     'old_lv_size': '10'}
        # the flavor asks for a 20 GB root disk (larger than the old 10)
        fake_flavor = {'root_gb': 20}
        fake_extract_flavor = lambda *args, **kwargs: fake_flavor
        self.fake_deploy_from_migrated_file_called = False
        # replacement for PowerVMOperator.deploy_from_migrated_file that
        # records the call and checks every argument it receives
        def fake_deploy_from_migrated_file(lpar, file_path, size,
                                           power_on=True):
            self.fake_deploy_from_migrated_file_called = True
            # assert the lpar is the one created for this test
            self.assertEquals(self.instance['uuid'], lpar['name'])
            self.assertEquals(fake_file_path, file_path)
            # this tests that the 20GB fake_flavor was used
            self.assertEqual(fake_flavor['root_gb'] * pow(1024, 3), size)
            self.assertTrue(power_on)
        self.stubs.Set(self.powervm_connection._powervm,
                       '_create_lpar_instance',
                       self.fake_create_lpar_instance)
        self.stubs.Set(flavors, 'extract_flavor', fake_extract_flavor)
        self.stubs.Set(self.powervm_connection._powervm,
                       'deploy_from_migrated_file',
                       fake_deploy_from_migrated_file)
        self.powervm_connection.finish_migration(context.get_admin_context(),
                                                 None, self.instance,
                                                 disk_info,
                                                 self.fake_network_info,
                                                 None, True)
        # both stubbed collaborators must have been invoked
        self.assertTrue(self.fake_create_lpar_instance_called)
        self.assertTrue(self.fake_deploy_from_migrated_file_called)
def test_check_host_resources_insufficient_memory(self):
# Tests that the _check_host_resources method will raise an exception
# when the host has insufficient memory for the request.
host_stats = {'host_memory_free': 512,
'vcpus': 12,
'vcpus_used': 1}
self.assertRaises(exception.PowerVMInsufficientFreeMemory,
self.powervm_connection._powervm._check_host_resources,
self.instance, vcpus=2, mem=4096, host_stats=host_stats)
def test_check_host_resources_insufficient_vcpus(self):
# Tests that the _check_host_resources method will raise an exception
# when the host has insufficient CPU for the request.
host_stats = {'host_memory_free': 4096,
'vcpus': 2,
'vcpus_used': 1}
self.assertRaises(exception.PowerVMInsufficientCPU,
self.powervm_connection._powervm._check_host_resources,
self.instance, vcpus=12, mem=512, host_stats=host_stats)
def test_create_lpar_instance_raise_insufficient_memory(self):
# This test will raise an exception because we use the instance
# created for this test case which requires 1024 MB of memory
# but the host only has 512 free.
host_stats = {'host_memory_free': 512,
'vcpus': 12,
'vcpus_used': 1}
self.assertRaises(exception.PowerVMInsufficientFreeMemory,
self.powervm_connection._powervm._create_lpar_instance,
self.instance, self.fake_network_info, host_stats)
def test_create_lpar_instance_raise_insufficient_vcpus(self):
# This test will raise an exception because we use the instance
# created for this test case which requires 2 CPUs but the host only
# has 1 CPU free.
host_stats = {'host_memory_free': 4096,
'vcpus': 1,
'vcpus_used': 1}
self.assertRaises(exception.PowerVMInsufficientCPU,
self.powervm_connection._powervm._create_lpar_instance,
self.instance, self.fake_network_info, host_stats)
def test_confirm_migration_old_instance_destroyed(self):
# Tests that the source instance is destroyed when a migration
# is confirmed.
resize_name = 'rsz_instance'
self.fake_destroy_called = False
def fake_get_resize_name(instance_name):
self.assertEquals(self.instance['name'], instance_name)
return resize_name
def fake_destroy(instance_name, destroy_disks=True):
self.fake_destroy_called = True
self.assertEquals(resize_name, instance_name)
self.assertTrue(destroy_disks)
self.stubs.Set(self.powervm_connection, '_get_resize_name',
fake_get_resize_name)
self.stubs.Set(self.powervm_connection._powervm, 'destroy',
fake_destroy)
self.powervm_connection.confirm_migration(True, self.instance,
self.fake_network_info)
self.assertTrue(self.fake_destroy_called)
def test_get_host_stats(self):
host_stats = self.powervm_connection.get_host_stats(True)
self.assertIsNotNone(host_stats)
self.assertEquals(host_stats['vcpus'], 8.0)
self.assertEquals(round(host_stats['vcpus_used'], 1), 1.7)
self.assertEquals(host_stats['host_memory_total'], 65536)
self.assertEquals(host_stats['host_memory_free'], 46336)
self.assertEquals(host_stats['disk_total'], 10168)
self.assertEquals(host_stats['disk_used'], 0)
self.assertEquals(host_stats['disk_available'], 10168)
self.assertEquals(host_stats['disk_total'],
host_stats['disk_used'] +
host_stats['disk_available'])
self.assertEquals(host_stats['cpu_info'], ('ppc64', 'powervm', '3940'))
self.assertEquals(host_stats['hypervisor_type'], 'powervm')
self.assertEquals(host_stats['hypervisor_version'], '7.1')
self.assertEquals(host_stats['hypervisor_hostname'], "fake-powervm")
self.assertEquals(host_stats['supported_instances'][0][0], "ppc64")
self.assertEquals(host_stats['supported_instances'][0][1], "powervm")
self.assertEquals(host_stats['supported_instances'][0][2], "hvm")
    def test_get_host_uptime(self):
        # Tests that the get_host_uptime method issues the proper sysstat
        # command and parses the output correctly.
        exp_cmd = "ioscli sysstat -short fake_user"
        output = [("02:54PM up 24 days, 5:41, 1 user, "
                   "load average: 0.06, 0.03, 0.02")]
        fake_op = self.powervm_connection._powervm
        self.mox.StubOutWithMock(fake_op._operator, 'run_vios_command')
        # record phase: exactly one sysstat call returning canned output
        fake_op._operator.run_vios_command(exp_cmd).AndReturn(output)
        self.mox.ReplayAll()
        # the host parameter isn't used so we just pass None
        uptime = self.powervm_connection.get_host_uptime(None)
        # the driver returns the first (only) line of command output
        self.assertEquals(output[0], uptime)
class PowerVMDriverLparTestCase(test.TestCase):
    """Unit tests for PowerVM connection calls."""
    def setUp(self):
        super(PowerVMDriverLparTestCase, self).setUp()
        # skip the host stats refresh the operator performs on creation
        self.stubs.Set(powervm_operator.PowerVMOperator, '_update_host_stats',
                       lambda self: None)
        self.powervm_connection = powervm_driver.PowerVMDriver(None)
    def test_set_lpar_mac_base_value_command(self):
        # Verifies the exact chsyscfg command built to set the virtual
        # ethernet MAC base value on an LPAR.
        inst_name = 'some_instance'
        mac = 'FA:98:64:2B:29:39'
        # base value = the MAC minus its last byte, with colons stripped
        exp_mac_str = mac[:-2].replace(':', '')
        exp_cmd = ('chsyscfg -r lpar -i "name=%(inst_name)s, '
                   'virtual_eth_mac_base_value=%(exp_mac_str)s"') % locals()
        fake_op = self.powervm_connection._powervm
        self.mox.StubOutWithMock(fake_op._operator, 'run_vios_command')
        # record phase: expect exactly this command, no return value needed
        fake_op._operator.run_vios_command(exp_cmd)
        self.mox.ReplayAll()
        fake_op._operator.set_lpar_mac_base_value(inst_name, mac)
class PowerVMDriverCommonTestCase(test.TestCase):
    """Unit tests for the nova.virt.powervm.common module."""
    def setUp(self):
        super(PowerVMDriverCommonTestCase, self).setUp()
        # our fake connection information never changes since we can't
        # actually connect to anything for these tests
        self.connection = common.Connection('fake_host', 'user', 'password')
    def test_check_connection_ssh_is_none(self):
        """
        Passes a null ssh object to the check_connection method.
        The method should create a new ssh connection using the
        Connection object and return it.
        """
        self.stubs.Set(common, 'ssh_connect', fake_ssh_connect)
        ssh = common.check_connection(None, self.connection)
        self.assertIsNotNone(ssh)
    def test_check_connection_transport_is_dead(self):
        """
        Passes an ssh object to the check_connection method which
        does not have a transport set.
        The method should create a new ssh connection using the
        Connection object and return it.
        """
        self.stubs.Set(common, 'ssh_connect', fake_ssh_connect)
        ssh1 = fake_ssh_connect(self.connection)
        ssh2 = common.check_connection(ssh1, self.connection)
        self.assertIsNotNone(ssh2)
        # a dead transport must force a brand new connection object
        self.assertNotEqual(ssh1, ssh2)
    def test_check_connection_raise_ssh_exception(self):
        """
        Passes an ssh object to the check_connection method which
        does not have a transport set.
        The method should raise an SSHException.
        """
        # make every reconnect attempt fail with an SSHException
        self.stubs.Set(common, 'ssh_connect',
                       lambda *x, **y: raise_(paramiko.SSHException(
                           'Error connecting to host.')))
        ssh = fake_ssh_connect(self.connection)
        self.assertRaises(paramiko.SSHException,
                          common.check_connection,
                          ssh, self.connection)
def fake_copy_image_file(source_path, remote_path):
    """Stub for _copy_image_file: a fixed remote file path and a size of 1.

    Both arguments are ignored; the tests only need predictable output.
    """
    return ('/tmp/fake_file', 1)
class PowerVMLocalVolumeAdapterTestCase(test.TestCase):
    """
    Unit tests for nova.virt.powervm.blockdev.PowerVMLocalVolumeAdapter.
    """
    def setUp(self):
        super(PowerVMLocalVolumeAdapterTestCase, self).setUp()
        self.context = context.get_admin_context()
        # connection details are fakes; no real host is ever contacted
        self.connection = common.Connection(host='fake_compute_1',
                                            username='fake_user',
                                            password='fake_pass')
        self.powervm_adapter = powervm_blockdev.PowerVMLocalVolumeAdapter(
            self.connection)
        self.instance = create_instance(self)
        self.image_id = self.instance['image_ref']
    def test_create_volume_from_image_fails_no_disk_name(self):
        """
        Tests that delete_volume is not called after create_logical_volume
        fails.
        """
        def fake_create_logical_volume(size):
            raise exception.PowerVMNoSpaceLeftOnVolumeGroup()
        def fake_delete_volume(volume_info):
            # no volume was created, so there must be nothing to clean up
            self.fail("Should not be called to do cleanup.")
        self.stubs.Set(self.powervm_adapter, '_copy_image_file',
                       fake_copy_image_file)
        self.stubs.Set(self.powervm_adapter, '_create_logical_volume',
                       fake_create_logical_volume)
        self.stubs.Set(self.powervm_adapter, 'delete_volume',
                       fake_delete_volume)
        self.assertRaises(exception.PowerVMNoSpaceLeftOnVolumeGroup,
                          self.powervm_adapter.create_volume_from_image,
                          self.context, self.instance, self.image_id)
    def test_create_volume_from_image_fails_with_disk_name(self):
        """
        Tests that delete_volume is called to cleanup the volume after
        create_logical_volume was successful but copy_file_to_device fails.
        """
        disk_name = 'lvm_disk_name'
        def fake_create_logical_volume(size):
            return disk_name
        def fake_copy_file_to_device(source_path, device):
            raise exception.PowerVMConnectionFailed()
        self.delete_volume_called = False
        def fake_delete_volume(volume_info):
            # cleanup must target exactly the volume that was created
            self.assertEquals(disk_name, volume_info)
            self.delete_volume_called = True
        self.stubs.Set(self.powervm_adapter, '_copy_image_file',
                       fake_copy_image_file)
        self.stubs.Set(self.powervm_adapter, '_create_logical_volume',
                       fake_create_logical_volume)
        self.stubs.Set(self.powervm_adapter, '_copy_file_to_device',
                       fake_copy_file_to_device)
        self.stubs.Set(self.powervm_adapter, 'delete_volume',
                       fake_delete_volume)
        self.assertRaises(exception.PowerVMConnectionFailed,
                          self.powervm_adapter.create_volume_from_image,
                          self.context, self.instance, self.image_id)
        self.assertTrue(self.delete_volume_called)
    def test_copy_image_file_ftp_failed(self):
        # NOTE(review): os.tempnam is a Python 2-era API (removed in
        # Python 3); only a unique path string is needed here.
        file_path = os.tempnam('/tmp', 'image')
        remote_path = '/mnt/openstack/images'
        exp_remote_path = os.path.join(remote_path,
                                       os.path.basename(file_path))
        exp_cmd = ' '.join(['/usr/bin/rm -f', exp_remote_path])
        fake_noop = lambda *args, **kwargs: None
        fake_op = self.powervm_adapter
        self.stubs.Set(fake_op, 'run_vios_command', fake_noop)
        self.stubs.Set(fake_op, '_checksum_local_file', fake_noop)
        self.mox.StubOutWithMock(common, 'ftp_put_command')
        self.mox.StubOutWithMock(self.powervm_adapter,
                                 'run_vios_command_as_root')
        msg_args = {'ftp_cmd': 'PUT',
                    'source_path': file_path,
                    'dest_path': remote_path}
        exp_exception = exception.PowerVMFTPTransferFailed(**msg_args)
        # record phase: the FTP put fails, then the remote file is removed
        common.ftp_put_command(self.connection, file_path,
                               remote_path).AndRaise(exp_exception)
        self.powervm_adapter.run_vios_command_as_root(exp_cmd).AndReturn([])
        self.mox.ReplayAll()
        self.assertRaises(exception.PowerVMFTPTransferFailed,
                          self.powervm_adapter._copy_image_file,
                          file_path, remote_path)
    def test_copy_image_file_wrong_checksum(self):
        file_path = os.tempnam('/tmp', 'image')
        remote_path = '/mnt/openstack/images'
        exp_remote_path = os.path.join(remote_path,
                                       os.path.basename(file_path))
        exp_cmd = ' '.join(['/usr/bin/rm -f', exp_remote_path])
        # local and remote checksums intentionally differ to force failure
        def fake_md5sum_remote_file(remote_path):
            return '3202937169'
        def fake_checksum_local_file(source_path):
            return '3229026618'
        fake_noop = lambda *args, **kwargs: None
        fake_op = self.powervm_adapter
        self.stubs.Set(fake_op, 'run_vios_command', fake_noop)
        self.stubs.Set(fake_op, '_md5sum_remote_file',
                       fake_md5sum_remote_file)
        self.stubs.Set(fake_op, '_checksum_local_file',
                       fake_checksum_local_file)
        self.stubs.Set(common, 'ftp_put_command', fake_noop)
        self.mox.StubOutWithMock(self.powervm_adapter,
                                 'run_vios_command_as_root')
        # record phase: the mismatched remote copy must be removed
        self.powervm_adapter.run_vios_command_as_root(exp_cmd).AndReturn([])
        self.mox.ReplayAll()
        self.assertRaises(exception.PowerVMFileTransferFailed,
                          self.powervm_adapter._copy_image_file,
                          file_path, remote_path)
    def test_checksum_local_file(self):
        file_path = os.tempnam('/tmp', 'image')
        # NOTE(review): file() is Python 2 only; open() is the portable form
        img_file = file(file_path, 'w')
        img_file.write('This is a test')
        img_file.close()
        # md5 hex digest of the literal string 'This is a test'
        exp_md5sum = 'ce114e4501d2f4e2dcea3e17b546f339'
        self.assertEqual(self.powervm_adapter._checksum_local_file(file_path),
                         exp_md5sum)
        os.remove(file_path)
    def test_copy_image_file_from_host_with_wrong_checksum(self):
        local_path = 'some/tmp'
        remote_path = os.tempnam('/mnt/openstack/images', 'image')
        # local and remote checksums intentionally differ to force failure
        def fake_md5sum_remote_file(remote_path):
            return '3202937169'
        def fake_checksum_local_file(source_path):
            return '3229026618'
        fake_noop = lambda *args, **kwargs: None
        fake_op = self.powervm_adapter
        self.stubs.Set(fake_op, 'run_vios_command_as_root', fake_noop)
        self.stubs.Set(fake_op, '_md5sum_remote_file',
                       fake_md5sum_remote_file)
        self.stubs.Set(fake_op, '_checksum_local_file',
                       fake_checksum_local_file)
        self.stubs.Set(common, 'ftp_get_command', fake_noop)
        self.assertRaises(exception.PowerVMFileTransferFailed,
                          self.powervm_adapter._copy_image_file_from_host,
                          remote_path, local_path)
| apache-2.0 |
nitzmahone/ansible | lib/ansible/modules/remote_management/manageiq/manageiq_provider.py | 53 | 35689 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Daniel Korn <korndaniel1@gmail.com>
# (c) 2017, Yaacov Zamir <yzamir@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
module: manageiq_provider
short_description: Management of provider in ManageIQ.
extends_documentation_fragment: manageiq
version_added: '2.4'
author: Daniel Korn (@dkorn)
description:
- The manageiq_provider module supports adding, updating, and deleting provider in ManageIQ.
options:
state:
description:
- absent - provider should not exist, present - provider should be present, refresh - provider will be refreshed
choices: ['absent', 'present', 'refresh']
default: 'present'
name:
description: The provider's name.
required: true
type:
description: The provider's type.
required: true
choices: ['Openshift', 'Amazon', 'oVirt', 'VMware', 'Azure', 'Director', 'OpenStack', 'GCE']
zone:
description: The ManageIQ zone name that will manage the provider.
default: 'default'
provider_region:
description: The provider region name to connect to (e.g. AWS region for Amazon).
host_default_vnc_port_start:
description: The first port in the host VNC range. defaults to None.
version_added: "2.5"
host_default_vnc_port_end:
description: The last port in the host VNC range. defaults to None.
version_added: "2.5"
subscription:
description: Microsoft Azure subscription ID. defaults to None.
version_added: "2.5"
project:
description: Google Compute Engine Project ID. defaults to None.
version_added: "2.5"
azure_tenant_id:
description: Tenant ID. defaults to None.
version_added: "2.5"
aliases: [ keystone_v3_domain_id ]
tenant_mapping_enabled:
type: bool
default: 'no'
description: Whether to enable mapping of existing tenants. defaults to False.
version_added: "2.5"
api_version:
description: The OpenStack Keystone API version. defaults to None.
choices: ['v2', 'v3']
version_added: "2.5"
provider:
description: Default endpoint connection information, required if state is true.
suboptions:
hostname:
description: The provider's api hostname.
required: true
port:
description: The provider's api port.
userid:
description: Provider's api endpoint authentication userid. defaults to None.
password:
description: Provider's api endpoint authentication password. defaults to None.
auth_key:
description: Provider's api endpoint authentication bearer token. defaults to None.
verify_ssl:
description: Whether SSL certificates should be verified for HTTPS requests (deprecated). defaults to True.
type: bool
default: 'yes'
security_protocol:
description: How SSL certificates should be used for HTTPS requests. defaults to None.
choices: ['ssl-with-validation','ssl-with-validation-custom-ca','ssl-without-validation','non-ssl']
certificate_authority:
description: The CA bundle string with custom certificates. defaults to None.
metrics:
description: Metrics endpoint connection information.
suboptions:
hostname:
description: The provider's api hostname.
required: true
port:
description: The provider's api port.
userid:
description: Provider's api endpoint authentication userid. defaults to None.
password:
description: Provider's api endpoint authentication password. defaults to None.
auth_key:
description: Provider's api endpoint authentication bearer token. defaults to None.
verify_ssl:
description: Whether SSL certificates should be verified for HTTPS requests (deprecated). defaults to True.
type: bool
default: 'yes'
security_protocol:
choices: ['ssl-with-validation','ssl-with-validation-custom-ca','ssl-without-validation','non-ssl']
description: How SSL certificates should be used for HTTPS requests. defaults to None.
certificate_authority:
description: The CA bundle string with custom certificates. defaults to None.
path:
description: Database name for oVirt metrics. Defaults to ovirt_engine_history.
default: ovirt_engine_history
alerts:
description: Alerts endpoint connection information.
suboptions:
hostname:
description: The provider's api hostname.
required: true
port:
description: The provider's api port.
userid:
description: Provider's api endpoint authentication userid. defaults to None.
password:
description: Provider's api endpoint authentication password. defaults to None.
auth_key:
description: Provider's api endpoint authentication bearer token. defaults to None.
verify_ssl:
description: Whether SSL certificates should be verified for HTTPS requests (deprecated). defaults to True.
default: true
security_protocol:
choices: ['ssl-with-validation','ssl-with-validation-custom-ca','ssl-without-validation']
description: How SSL certificates should be used for HTTPS requests. defaults to None.
certificate_authority:
description: The CA bundle string with custom certificates. defaults to None.
ssh_keypair:
description: SSH key pair used for SSH connections to all hosts in this provider.
version_added: "2.5"
suboptions:
hostname:
description: Director hostname.
required: true
userid:
description: SSH username.
auth_key:
description: SSH private key.
'''
EXAMPLES = '''
- name: Create a new provider in ManageIQ ('Hawkular' metrics)
manageiq_provider:
name: 'EngLab'
    type: 'Openshift'
state: 'present'
provider:
auth_key: 'topSecret'
hostname: 'example.com'
port: 8443
verify_ssl: true
security_protocol: 'ssl-with-validation-custom-ca'
certificate_authority: |
-----BEGIN CERTIFICATE-----
FAKECERTsdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu
c2hpZnQtc2lnbmVyQDE1MDMzMjAxMTkwHhcNMTcwODIxMTI1NTE5WhcNMjIwODIw
MTI1NTIwWjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE1MDMzMjAxMTkw
ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDUDnL2tQ2xf/zO7F7hmZ4S
ZuwKENdI4IYuWSxye4i3hPhKg6eKPzGzmDNWkIMDOrDAj1EgVSNPtPwsOL8OWvJm
AaTjr070D7ZGWWnrrDrWEClBx9Rx/6JAM38RT8Pu7c1hXBm0J81KufSLLYiZ/gOw
Znks5v5RUSGcAXvLkBJeATbsbh6fKX0RgQ3fFTvqQaE/r8LxcTN1uehPX1g5AaRa
z/SNDHaFtQlE3XcqAAukyMn4N5kdNcuwF3GlQ+tJnJv8SstPkfQcZbTMUQ7I2KpJ
ajXnMxmBhV5fCN4rb0QUNCrk2/B+EUMBY4MnxIakqNxnN1kvgI7FBbFgrHUe6QvJ
AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG
SIb3DQEBCwUAA4IBAQAYRV57LUsqznSLZHA77o9+0fQetIE115DYP7wea42PODJI
QJ+JETEfoCr0+YOMAbVmznP9GH5cMTKEWHExcIpbMBU7nMZp6A3htcJgF2fgPzOA
aTUtzkuVCSrV//mbbYVxoFOc6sR3Br0wBs5+5iz3dBSt7xmgpMzZvqsQl655i051
gGSTIY3z5EJmBZBjwuTjal9mMoPGA4eoTPqlITJDHQ2bdCV2oDbc7zqupGrUfZFA
qzgieEyGzdCSRwjr1/PibA3bpwHyhD9CGD0PRVVTLhw6h6L5kuN1jA20OfzWxf/o
XUsdmRaWiF+l4s6Dcd56SuRp5SGNa2+vP9Of/FX5
-----END CERTIFICATE-----
metrics:
auth_key: 'topSecret'
role: 'hawkular'
hostname: 'example.com'
port: 443
verify_ssl: true
security_protocol: 'ssl-with-validation-custom-ca'
certificate_authority: |
-----BEGIN CERTIFICATE-----
FAKECERTsdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu
c2hpZnQtc2lnbmVyQDE1MDMzMjAxMTkwHhcNMTcwODIxMTI1NTE5WhcNMjIwODIw
MTI1NTIwWjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE1MDMzMjAxMTkw
ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDUDnL2tQ2xf/zO7F7hmZ4S
ZuwKENdI4IYuWSxye4i3hPhKg6eKPzGzmDNWkIMDOrDAj1EgVSNPtPwsOL8OWvJm
AaTjr070D7ZGWWnrrDrWEClBx9Rx/6JAM38RT8Pu7c1hXBm0J81KufSLLYiZ/gOw
Znks5v5RUSGcAXvLkBJeATbsbh6fKX0RgQ3fFTvqQaE/r8LxcTN1uehPX1g5AaRa
z/SNDHaFtQlE3XcqAAukyMn4N5kdNcuwF3GlQ+tJnJv8SstPkfQcZbTMUQ7I2KpJ
ajXnMxmBhV5fCN4rb0QUNCrk2/B+EUMBY4MnxIakqNxnN1kvgI7FBbFgrHUe6QvJ
AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG
SIb3DQEBCwUAA4IBAQAYRV57LUsqznSLZHA77o9+0fQetIE115DYP7wea42PODJI
QJ+JETEfoCr0+YOMAbVmznP9GH5cMTKEWHExcIpbMBU7nMZp6A3htcJgF2fgPzOA
aTUtzkuVCSrV//mbbYVxoFOc6sR3Br0wBs5+5iz3dBSt7xmgpMzZvqsQl655i051
gGSTIY3z5EJmBZBjwuTjal9mMoPGA4eoTPqlITJDHQ2bdCV2oDbc7zqupGrUfZFA
qzgieEyGzdCSRwjr1/PibA3bpwHyhD9CGD0PRVVTLhw6h6L5kuN1jA20OfzWxf/o
XUsdmRaWiF+l4s6Dcd56SuRp5SGNa2+vP9Of/FX5
-----END CERTIFICATE-----
manageiq_connection:
url: 'https://127.0.0.1:80'
username: 'admin'
password: 'password'
verify_ssl: true
- name: Update an existing provider named 'EngLab' (defaults to 'Prometheus' metrics)
manageiq_provider:
name: 'EngLab'
type: 'Openshift'
state: 'present'
provider:
auth_key: 'topSecret'
hostname: 'next.example.com'
port: 8443
verify_ssl: true
security_protocol: 'ssl-with-validation-custom-ca'
certificate_authority: |
-----BEGIN CERTIFICATE-----
FAKECERTsdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu
c2hpZnQtc2lnbmVyQDE1MDMzMjAxMTkwHhcNMTcwODIxMTI1NTE5WhcNMjIwODIw
MTI1NTIwWjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE1MDMzMjAxMTkw
ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDUDnL2tQ2xf/zO7F7hmZ4S
ZuwKENdI4IYuWSxye4i3hPhKg6eKPzGzmDNWkIMDOrDAj1EgVSNPtPwsOL8OWvJm
AaTjr070D7ZGWWnrrDrWEClBx9Rx/6JAM38RT8Pu7c1hXBm0J81KufSLLYiZ/gOw
Znks5v5RUSGcAXvLkBJeATbsbh6fKX0RgQ3fFTvqQaE/r8LxcTN1uehPX1g5AaRa
z/SNDHaFtQlE3XcqAAukyMn4N5kdNcuwF3GlQ+tJnJv8SstPkfQcZbTMUQ7I2KpJ
ajXnMxmBhV5fCN4rb0QUNCrk2/B+EUMBY4MnxIakqNxnN1kvgI7FBbFgrHUe6QvJ
AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG
SIb3DQEBCwUAA4IBAQAYRV57LUsqznSLZHA77o9+0fQetIE115DYP7wea42PODJI
QJ+JETEfoCr0+YOMAbVmznP9GH5cMTKEWHExcIpbMBU7nMZp6A3htcJgF2fgPzOA
aTUtzkuVCSrV//mbbYVxoFOc6sR3Br0wBs5+5iz3dBSt7xmgpMzZvqsQl655i051
gGSTIY3z5EJmBZBjwuTjal9mMoPGA4eoTPqlITJDHQ2bdCV2oDbc7zqupGrUfZFA
qzgieEyGzdCSRwjr1/PibA3bpwHyhD9CGD0PRVVTLhw6h6L5kuN1jA20OfzWxf/o
XUsdmRaWiF+l4s6Dcd56SuRp5SGNa2+vP9Of/FX5
-----END CERTIFICATE-----
metrics:
auth_key: 'topSecret'
hostname: 'next.example.com'
port: 443
verify_ssl: true
security_protocol: 'ssl-with-validation-custom-ca'
certificate_authority: |
-----BEGIN CERTIFICATE-----
FAKECERTsdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu
c2hpZnQtc2lnbmVyQDE1MDMzMjAxMTkwHhcNMTcwODIxMTI1NTE5WhcNMjIwODIw
MTI1NTIwWjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE1MDMzMjAxMTkw
ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDUDnL2tQ2xf/zO7F7hmZ4S
ZuwKENdI4IYuWSxye4i3hPhKg6eKPzGzmDNWkIMDOrDAj1EgVSNPtPwsOL8OWvJm
AaTjr070D7ZGWWnrrDrWEClBx9Rx/6JAM38RT8Pu7c1hXBm0J81KufSLLYiZ/gOw
Znks5v5RUSGcAXvLkBJeATbsbh6fKX0RgQ3fFTvqQaE/r8LxcTN1uehPX1g5AaRa
z/SNDHaFtQlE3XcqAAukyMn4N5kdNcuwF3GlQ+tJnJv8SstPkfQcZbTMUQ7I2KpJ
ajXnMxmBhV5fCN4rb0QUNCrk2/B+EUMBY4MnxIakqNxnN1kvgI7FBbFgrHUe6QvJ
AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG
SIb3DQEBCwUAA4IBAQAYRV57LUsqznSLZHA77o9+0fQetIE115DYP7wea42PODJI
QJ+JETEfoCr0+YOMAbVmznP9GH5cMTKEWHExcIpbMBU7nMZp6A3htcJgF2fgPzOA
aTUtzkuVCSrV//mbbYVxoFOc6sR3Br0wBs5+5iz3dBSt7xmgpMzZvqsQl655i051
gGSTIY3z5EJmBZBjwuTjal9mMoPGA4eoTPqlITJDHQ2bdCV2oDbc7zqupGrUfZFA
qzgieEyGzdCSRwjr1/PibA3bpwHyhD9CGD0PRVVTLhw6h6L5kuN1jA20OfzWxf/o
XUsdmRaWiF+l4s6Dcd56SuRp5SGNa2+vP9Of/FX5
-----END CERTIFICATE-----
manageiq_connection:
url: 'https://127.0.0.1'
username: 'admin'
password: 'password'
verify_ssl: true
- name: Delete a provider in ManageIQ
manageiq_provider:
name: 'EngLab'
type: 'Openshift'
state: 'absent'
manageiq_connection:
url: 'https://127.0.0.1'
username: 'admin'
password: 'password'
verify_ssl: true
- name: Create a new Amazon provider in ManageIQ using token authentication
manageiq_provider:
name: 'EngAmazon'
type: 'Amazon'
state: 'present'
provider:
hostname: 'amazon.example.com'
userid: 'hello'
password: 'world'
manageiq_connection:
url: 'https://127.0.0.1'
token: 'VeryLongToken'
verify_ssl: true
- name: Create a new oVirt provider in ManageIQ
manageiq_provider:
name: 'RHEV'
type: 'oVirt'
state: 'present'
provider:
hostname: 'rhev01.example.com'
userid: 'admin@internal'
password: 'password'
verify_ssl: true
certificate_authority: |
-----BEGIN CERTIFICATE-----
FAKECERTsdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu
c2hpZnQtc2lnbmVyQDE1MDMzMjAxMTkwHhcNMTcwODIxMTI1NTE5WhcNMjIwODIw
MTI1NTIwWjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE1MDMzMjAxMTkw
ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDUDnL2tQ2xf/zO7F7hmZ4S
ZuwKENdI4IYuWSxye4i3hPhKg6eKPzGzmDNWkIMDOrDAj1EgVSNPtPwsOL8OWvJm
AaTjr070D7ZGWWnrrDrWEClBx9Rx/6JAM38RT8Pu7c1hXBm0J81KufSLLYiZ/gOw
Znks5v5RUSGcAXvLkBJeATbsbh6fKX0RgQ3fFTvqQaE/r8LxcTN1uehPX1g5AaRa
z/SNDHaFtQlE3XcqAAukyMn4N5kdNcuwF3GlQ+tJnJv8SstPkfQcZbTMUQ7I2KpJ
ajXnMxmBhV5fCN4rb0QUNCrk2/B+EUMBY4MnxIakqNxnN1kvgI7FBbFgrHUe6QvJ
AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG
SIb3DQEBCwUAA4IBAQAYRV57LUsqznSLZHA77o9+0fQetIE115DYP7wea42PODJI
QJ+JETEfoCr0+YOMAbVmznP9GH5cMTKEWHExcIpbMBU7nMZp6A3htcJgF2fgPzOA
aTUtzkuVCSrV//mbbYVxoFOc6sR3Br0wBs5+5iz3dBSt7xmgpMzZvqsQl655i051
gGSTIY3z5EJmBZBjwuTjal9mMoPGA4eoTPqlITJDHQ2bdCV2oDbc7zqupGrUfZFA
qzgieEyGzdCSRwjr1/PibA3bpwHyhD9CGD0PRVVTLhw6h6L5kuN1jA20OfzWxf/o
XUsdmRaWiF+l4s6Dcd56SuRp5SGNa2+vP9Of/FX5
-----END CERTIFICATE-----
metrics:
hostname: 'metrics.example.com'
path: 'ovirt_engine_history'
userid: 'user_id_metrics'
password: 'password_metrics'
verify_ssl: true
certificate_authority: |
-----BEGIN CERTIFICATE-----
FAKECERTsdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu
c2hpZnQtc2lnbmVyQDE1MDMzMjAxMTkwHhcNMTcwODIxMTI1NTE5WhcNMjIwODIw
MTI1NTIwWjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE1MDMzMjAxMTkw
ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDUDnL2tQ2xf/zO7F7hmZ4S
ZuwKENdI4IYuWSxye4i3hPhKg6eKPzGzmDNWkIMDOrDAj1EgVSNPtPwsOL8OWvJm
AaTjr070D7ZGWWnrrDrWEClBx9Rx/6JAM38RT8Pu7c1hXBm0J81KufSLLYiZ/gOw
Znks5v5RUSGcAXvLkBJeATbsbh6fKX0RgQ3fFTvqQaE/r8LxcTN1uehPX1g5AaRa
z/SNDHaFtQlE3XcqAAukyMn4N5kdNcuwF3GlQ+tJnJv8SstPkfQcZbTMUQ7I2KpJ
ajXnMxmBhV5fCN4rb0QUNCrk2/B+EUMBY4MnxIakqNxnN1kvgI7FBbFgrHUe6QvJ
AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG
SIb3DQEBCwUAA4IBAQAYRV57LUsqznSLZHA77o9+0fQetIE115DYP7wea42PODJI
QJ+JETEfoCr0+YOMAbVmznP9GH5cMTKEWHExcIpbMBU7nMZp6A3htcJgF2fgPzOA
aTUtzkuVCSrV//mbbYVxoFOc6sR3Br0wBs5+5iz3dBSt7xmgpMzZvqsQl655i051
gGSTIY3z5EJmBZBjwuTjal9mMoPGA4eoTPqlITJDHQ2bdCV2oDbc7zqupGrUfZFA
qzgieEyGzdCSRwjr1/PibA3bpwHyhD9CGD0PRVVTLhw6h6L5kuN1jA20OfzWxf/o
XUsdmRaWiF+l4s6Dcd56SuRp5SGNa2+vP9Of/FX5
-----END CERTIFICATE-----
manageiq_connection:
url: 'https://127.0.0.1'
username: 'admin'
password: 'password'
verify_ssl: true
- name: Create a new VMware provider in ManageIQ
manageiq_provider:
name: 'EngVMware'
type: 'VMware'
state: 'present'
provider:
hostname: 'vcenter.example.com'
host_default_vnc_port_start: 5800
host_default_vnc_port_end: 5801
userid: 'root'
password: 'password'
manageiq_connection:
url: 'https://127.0.0.1'
token: 'VeryLongToken'
verify_ssl: true
- name: Create a new Azure provider in ManageIQ
manageiq_provider:
name: 'EngAzure'
type: 'Azure'
provider_region: 'northeurope'
subscription: 'e272bd74-f661-484f-b223-88dd128a4049'
azure_tenant_id: 'e272bd74-f661-484f-b223-88dd128a4048'
state: 'present'
provider:
hostname: 'azure.example.com'
userid: 'e272bd74-f661-484f-b223-88dd128a4049'
password: 'password'
manageiq_connection:
url: 'https://cf-6af0.rhpds.opentlc.com'
username: 'admin'
password: 'password'
verify_ssl: false
- name: Create a new OpenStack Director provider in ManageIQ with rsa keypair
manageiq_provider:
name: 'EngDirector'
type: 'Director'
api_version: 'v3'
state: 'present'
provider:
hostname: 'director.example.com'
userid: 'admin'
password: 'password'
security_protocol: 'ssl-with-validation'
verify_ssl: 'true'
certificate_authority: |
-----BEGIN CERTIFICATE-----
FAKECERTsdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu
c2hpZnQtc2lnbmVyQDE1MDMzMjAxMTkwHhcNMTcwODIxMTI1NTE5WhcNMjIwODIw
MTI1NTIwWjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE1MDMzMjAxMTkw
ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDUDnL2tQ2xf/zO7F7hmZ4S
ZuwKENdI4IYuWSxye4i3hPhKg6eKPzGzmDNWkIMDOrDAj1EgVSNPtPwsOL8OWvJm
AaTjr070D7ZGWWnrrDrWEClBx9Rx/6JAM38RT8Pu7c1hXBm0J81KufSLLYiZ/gOw
Znks5v5RUSGcAXvLkBJeATbsbh6fKX0RgQ3fFTvqQaE/r8LxcTN1uehPX1g5AaRa
z/SNDHaFtQlE3XcqAAukyMn4N5kdNcuwF3GlQ+tJnJv8SstPkfQcZbTMUQ7I2KpJ
ajXnMxmBhV5fCN4rb0QUNCrk2/B+EUMBY4MnxIakqNxnN1kvgI7FBbFgrHUe6QvJ
AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG
SIb3DQEBCwUAA4IBAQAYRV57LUsqznSLZHA77o9+0fQetIE115DYP7wea42PODJI
QJ+JETEfoCr0+YOMAbVmznP9GH5cMTKEWHExcIpbMBU7nMZp6A3htcJgF2fgPzOA
aTUtzkuVCSrV//mbbYVxoFOc6sR3Br0wBs5+5iz3dBSt7xmgpMzZvqsQl655i051
gGSTIY3z5EJmBZBjwuTjal9mMoPGA4eoTPqlITJDHQ2bdCV2oDbc7zqupGrUfZFA
qzgieEyGzdCSRwjr1/PibA3bpwHyhD9CGD0PRVVTLhw6h6L5kuN1jA20OfzWxf/o
XUsdmRaWiF+l4s6Dcd56SuRp5SGNa2+vP9Of/FX5
-----END CERTIFICATE-----
ssh_keypair:
hostname: director.example.com
userid: heat-admin
auth_key: 'SecretSSHPrivateKey'
- name: Create a new OpenStack provider in ManageIQ with amqp metrics
manageiq_provider:
name: 'EngOpenStack'
type: 'OpenStack'
api_version: 'v3'
state: 'present'
provider_region: 'europe'
tenant_mapping_enabled: 'False'
keystone_v3_domain_id: 'mydomain'
provider:
hostname: 'openstack.example.com'
userid: 'admin'
password: 'password'
security_protocol: 'ssl-with-validation'
verify_ssl: 'true'
certificate_authority: |
-----BEGIN CERTIFICATE-----
FAKECERTsdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu
c2hpZnQtc2lnbmVyQDE1MDMzMjAxMTkwHhcNMTcwODIxMTI1NTE5WhcNMjIwODIw
MTI1NTIwWjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE1MDMzMjAxMTkw
ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDUDnL2tQ2xf/zO7F7hmZ4S
ZuwKENdI4IYuWSxye4i3hPhKg6eKPzGzmDNWkIMDOrDAj1EgVSNPtPwsOL8OWvJm
AaTjr070D7ZGWWnrrDrWEClBx9Rx/6JAM38RT8Pu7c1hXBm0J81KufSLLYiZ/gOw
Znks5v5RUSGcAXvLkBJeATbsbh6fKX0RgQ3fFTvqQaE/r8LxcTN1uehPX1g5AaRa
z/SNDHaFtQlE3XcqAAukyMn4N5kdNcuwF3GlQ+tJnJv8SstPkfQcZbTMUQ7I2KpJ
ajXnMxmBhV5fCN4rb0QUNCrk2/B+EUMBY4MnxIakqNxnN1kvgI7FBbFgrHUe6QvJ
AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG
SIb3DQEBCwUAA4IBAQAYRV57LUsqznSLZHA77o9+0fQetIE115DYP7wea42PODJI
QJ+JETEfoCr0+YOMAbVmznP9GH5cMTKEWHExcIpbMBU7nMZp6A3htcJgF2fgPzOA
aTUtzkuVCSrV//mbbYVxoFOc6sR3Br0wBs5+5iz3dBSt7xmgpMzZvqsQl655i051
gGSTIY3z5EJmBZBjwuTjal9mMoPGA4eoTPqlITJDHQ2bdCV2oDbc7zqupGrUfZFA
qzgieEyGzdCSRwjr1/PibA3bpwHyhD9CGD0PRVVTLhw6h6L5kuN1jA20OfzWxf/o
XUsdmRaWiF+l4s6Dcd56SuRp5SGNa2+vP9Of/FX5
-----END CERTIFICATE-----
metrics:
role: amqp
hostname: 'amqp.example.com'
security_protocol: 'non-ssl'
port: 5666
userid: admin
password: password
- name: Create a new GCE provider in ManageIQ
manageiq_provider:
name: 'EngGoogle'
type: 'GCE'
provider_region: 'europe-west1'
project: 'project1'
state: 'present'
provider:
hostname: 'gce.example.com'
auth_key: 'google_json_key'
verify_ssl: 'false'
'''
RETURN = '''
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.manageiq import ManageIQ, manageiq_argument_spec
def supported_providers():
    """Map each supported provider type name to its ManageIQ metadata.

    Every entry carries the ManageIQ manager ``class_name``; some entries
    additionally carry a default authentication type and per-endpoint role
    names used when building connection configurations.

    Returns:
        dict: provider type name -> metadata dict.
    """
    return {
        'Openshift': {
            'class_name': 'ManageIQ::Providers::Openshift::ContainerManager',
            'authtype': 'bearer',
            'default_role': 'default',
            'metrics_role': 'prometheus',
            'alerts_role': 'prometheus_alerts',
        },
        'Amazon': {
            'class_name': 'ManageIQ::Providers::Amazon::CloudManager',
        },
        'oVirt': {
            'class_name': 'ManageIQ::Providers::Redhat::InfraManager',
            'default_role': 'default',
            'metrics_role': 'metrics',
        },
        'VMware': {
            'class_name': 'ManageIQ::Providers::Vmware::InfraManager',
        },
        'Azure': {
            'class_name': 'ManageIQ::Providers::Azure::CloudManager',
        },
        'Director': {
            'class_name': 'ManageIQ::Providers::Openstack::InfraManager',
            'ssh_keypair_role': 'ssh_keypair',
        },
        'OpenStack': {
            'class_name': 'ManageIQ::Providers::Openstack::CloudManager',
        },
        'GCE': {
            'class_name': 'ManageIQ::Providers::Google::CloudManager',
        },
    }
def endpoint_list_spec():
    """Argument spec for the four optional endpoint dictionaries.

    Each endpoint (provider, metrics, alerts, ssh_keypair) is a ``dict``
    option whose sub-options come from endpoint_argument_spec().
    """
    spec = {}
    for endpoint_name in ('provider', 'metrics', 'alerts', 'ssh_keypair'):
        spec[endpoint_name] = dict(type='dict', options=endpoint_argument_spec())
    return spec
def endpoint_argument_spec():
    """Argument spec shared by every endpoint dictionary.

    Secrets (password, auth_key, subscription) are marked ``no_log`` so
    Ansible never prints them; ``hostname`` is the only required field.
    """
    return {
        'role': {},
        'hostname': {'required': True},
        'port': {'type': 'int'},
        'verify_ssl': {'default': True, 'type': 'bool'},
        'certificate_authority': {},
        'security_protocol': {
            'choices': [
                'ssl-with-validation',
                'ssl-with-validation-custom-ca',
                'ssl-without-validation',
                'non-ssl',
            ],
        },
        'userid': {},
        'password': {'no_log': True},
        'auth_key': {'no_log': True},
        'subscription': {'no_log': True},
        'project': {},
        'uid_ems': {},
        'path': {},
    }
def delete_nulls(h):
    """Recursively remove None-valued dict entries from a nested structure.

    Dicts lose any key mapped to None; lists are rebuilt with each element
    cleaned recursively (None *elements* inside lists are preserved, matching
    the original semantics). Scalars are returned unchanged.

    Args:
        h: an arbitrarily nested combination of dicts, lists and scalars.

    Returns:
        the same structure with None-valued dict entries removed.
    """
    if isinstance(h, list):
        # Use a list comprehension instead of map(): on Python 3, map()
        # returns a lazy iterator, so nested lists (e.g. the
        # connection_configurations payload) would be posted to the
        # ManageIQ API as non-serializable map objects.
        return [delete_nulls(item) for item in h]
    if isinstance(h, dict):
        return dict((k, delete_nulls(v)) for k, v in h.items() if v is not None)
    return h
class ManageIQProvider(object):
    """
    Object to execute provider management operations in manageiq.

    Wraps a ManageIQ connection object and issues REST calls against its
    /providers collection. All failures are reported through
    module.fail_json, which terminates the Ansible module run.
    """
    def __init__(self, manageiq):
        # Keep shortcuts to the pieces of the ManageIQ wrapper we use often.
        self.manageiq = manageiq
        self.module = self.manageiq.module
        self.api_url = self.manageiq.api_url
        self.client = self.manageiq.client
    def class_name_to_type(self, class_name):
        """ Convert a ManageIQ manager class_name to its provider type name.

        Args:
            class_name: e.g. 'ManageIQ::Providers::Amazon::CloudManager'.

        Returns:
            the matching type key from supported_providers(), or None when
            the class_name is unknown.
        """
        out = [k for k, v in supported_providers().items() if v['class_name'] == class_name]
        if len(out) == 1:
            return out[0]
        return None
    def zone_id(self, name):
        """ Search for zone id by zone name.

        Returns:
            the zone id, or send a module Fail signal if zone not found.
        """
        zone = self.manageiq.find_collection_resource_by('zones', name=name)
        if not zone:  # zone doesn't exist: abort the whole module run
            self.module.fail_json(
                msg="zone %s does not exist in manageiq" % (name))
        return zone['id']
    def provider(self, name):
        """ Search for provider object by name.

        Returns:
            the provider, or None if provider not found.
        """
        return self.manageiq.find_collection_resource_by('providers', name=name)
    def build_connection_configurations(self, provider_type, endpoints):
        """ Build "connection_configurations" objects from
        requested endpoints provided by user.

        Args:
            provider_type: key into supported_providers() used to look up
                per-provider default roles and authtype.
            endpoints: module.params; only the known endpoint keys
                (provider, metrics, alerts, ssh_keypair) are consulted.

        Returns:
            the user requested provider endpoints list
        """
        connection_configurations = []
        endpoint_keys = endpoint_list_spec().keys()
        provider_defaults = supported_providers().get(provider_type, {})
        # The 'provider' endpoint's auth_key acts as a fallback for every
        # other endpoint that does not specify its own.
        endpoint = endpoints.get('provider')
        default_auth_key = endpoint.get('auth_key')
        # build a connection_configuration object for each endpoint
        for endpoint_key in endpoint_keys:
            endpoint = endpoints.get(endpoint_key)
            if endpoint:
                # role: explicit > provider-specific default > 'default'
                role = endpoint.get('role') or provider_defaults.get(endpoint_key + '_role', 'default')
                if role == 'default':
                    authtype = provider_defaults.get('authtype') or role
                else:
                    authtype = role
                # set a connection_configuration
                connection_configurations.append({
                    'endpoint': {
                        'role': role,
                        'hostname': endpoint.get('hostname'),
                        'port': endpoint.get('port'),
                        # the API expects 0/1, not a Python bool
                        'verify_ssl': [0, 1][endpoint.get('verify_ssl', True)],
                        'security_protocol': endpoint.get('security_protocol'),
                        'certificate_authority': endpoint.get('certificate_authority'),
                        'path': endpoint.get('path'),
                    },
                    'authentication': {
                        'authtype': authtype,
                        'userid': endpoint.get('userid'),
                        'password': endpoint.get('password'),
                        'auth_key': endpoint.get('auth_key') or default_auth_key,
                    }
                })
        return connection_configurations
    def delete_provider(self, provider):
        """ Deletes a provider from manageiq.

        Returns:
            a short message describing the operation executed.
        """
        try:
            url = '%s/providers/%s' % (self.api_url, provider['id'])
            result = self.client.post(url, action='delete')
        except Exception as e:
            self.module.fail_json(msg="failed to delete provider %s: %s" % (provider['name'], str(e)))
        return dict(changed=True, msg=result['message'])
    def edit_provider(self, provider, name, provider_type, endpoints, zone_id, provider_region,
                      host_default_vnc_port_start, host_default_vnc_port_end,
                      subscription, project, uid_ems, tenant_mapping_enabled, api_version):
        """ Edit a provider from manageiq.

        Returns:
            a short message describing the operation executed.
        """
        url = '%s/providers/%s' % (self.api_url, provider['id'])
        resource = dict(
            name=name,
            zone={'id': zone_id},
            provider_region=provider_region,
            connection_configurations=endpoints,
            host_default_vnc_port_start=host_default_vnc_port_start,
            host_default_vnc_port_end=host_default_vnc_port_end,
            subscription=subscription,
            project=project,
            uid_ems=uid_ems,
            tenant_mapping_enabled=tenant_mapping_enabled,
            api_version=api_version,
        )
        # NOTE: we do not check for diff's between requested and current
        # provider, we always submit endpoints with password or auth_keys,
        # since we can not compare with current password or auth_key,
        # every edit request is sent to ManageIQ API without comparing
        # it to current state. This means 'changed' is always True here.
        # clean nulls, we do not send nulls to the api
        resource = delete_nulls(resource)
        # try to update provider
        try:
            result = self.client.post(url, action='edit', resource=resource)
        except Exception as e:
            self.module.fail_json(msg="failed to update provider %s: %s" % (provider['name'], str(e)))
        return dict(
            changed=True,
            msg="successfully updated the provider %s: %s" % (provider['name'], result))
    def create_provider(self, name, provider_type, endpoints, zone_id, provider_region,
                        host_default_vnc_port_start, host_default_vnc_port_end,
                        subscription, project, uid_ems, tenant_mapping_enabled, api_version):
        """ Creates the provider in manageiq.

        Returns:
            a short message describing the operation executed.
        """
        resource = dict(
            name=name,
            zone={'id': zone_id},
            provider_region=provider_region,
            host_default_vnc_port_start=host_default_vnc_port_start,
            host_default_vnc_port_end=host_default_vnc_port_end,
            subscription=subscription,
            project=project,
            uid_ems=uid_ems,
            tenant_mapping_enabled=tenant_mapping_enabled,
            api_version=api_version,
            connection_configurations=endpoints,
        )
        # clean nulls, we do not send nulls to the api
        resource = delete_nulls(resource)
        # try to create a new provider
        try:
            url = '%s/providers' % (self.api_url)
            # the ManageIQ class name is derived from the provider type
            result = self.client.post(url, type=supported_providers()[provider_type]['class_name'], **resource)
        except Exception as e:
            self.module.fail_json(msg="failed to create provider %s: %s" % (name, str(e)))
        return dict(
            changed=True,
            msg="successfully created the provider %s: %s" % (name, result['results']))
    def refresh(self, provider, name):
        """ Trigger provider refresh (an asynchronous inventory sync).

        Returns:
            a short message describing the operation executed.
        """
        try:
            url = '%s/providers/%s' % (self.api_url, provider['id'])
            result = self.client.post(url, action='refresh')
        except Exception as e:
            self.module.fail_json(msg="failed to refresh provider %s: %s" % (name, str(e)))
        return dict(
            changed=True,
            msg="refreshing provider %s" % name)
def main():
    """Entry point: parse module arguments and dispatch on requested state.

    state=absent deletes the provider, state=present creates or edits it,
    state=refresh triggers an inventory sync.
    """
    zone_id = None
    endpoints = []
    argument_spec = dict(
        state=dict(choices=['absent', 'present', 'refresh'], default='present'),
        name=dict(required=True),
        zone=dict(default='default'),
        provider_region=dict(),
        host_default_vnc_port_start=dict(),
        host_default_vnc_port_end=dict(),
        subscription=dict(),
        project=dict(),
        azure_tenant_id=dict(aliases=['keystone_v3_domain_id']),
        tenant_mapping_enabled=dict(default=False, type='bool'),
        api_version=dict(choices=['v2', 'v3']),
        type=dict(choices=supported_providers().keys()),
    )
    # add the manageiq connection arguments to the arguments
    argument_spec.update(manageiq_argument_spec())
    # add the endpoint arguments to the arguments
    argument_spec.update(endpoint_list_spec())
    module = AnsibleModule(
        argument_spec=argument_spec,
        required_if=[
            ('state', 'present', ['provider']),
            ('state', 'refresh', ['name'])],
        required_together=[
            ['host_default_vnc_port_start', 'host_default_vnc_port_end']
        ],
    )
    name = module.params['name']
    zone_name = module.params['zone']
    provider_type = module.params['type']
    # all params are passed along; build_connection_configurations picks
    # out only the endpoint keys it recognizes
    raw_endpoints = module.params
    provider_region = module.params['provider_region']
    host_default_vnc_port_start = module.params['host_default_vnc_port_start']
    host_default_vnc_port_end = module.params['host_default_vnc_port_end']
    subscription = module.params['subscription']
    uid_ems = module.params['azure_tenant_id']
    project = module.params['project']
    tenant_mapping_enabled = module.params['tenant_mapping_enabled']
    api_version = module.params['api_version']
    state = module.params['state']
    manageiq = ManageIQ(module)
    manageiq_provider = ManageIQProvider(manageiq)
    provider = manageiq_provider.provider(name)
    # provider should not exist
    if state == "absent":
        # if we have a provider, delete it
        if provider:
            res_args = manageiq_provider.delete_provider(provider)
        # if we do not have a provider, nothing to do
        else:
            res_args = dict(
                changed=False,
                msg="provider %s: does not exist in manageiq" % (name))
    # provider should exist
    if state == "present":
        # get data user did not explicitly give
        if zone_name:
            zone_id = manageiq_provider.zone_id(zone_name)
        # if we do not have a provider_type, use the current provider_type
        if provider and not provider_type:
            provider_type = manageiq_provider.class_name_to_type(provider['type'])
        # check supported_providers types
        if not provider_type:
            manageiq_provider.module.fail_json(
                msg="missing required argument: provider_type")
        # check supported_providers types
        if provider_type not in supported_providers().keys():
            manageiq_provider.module.fail_json(
                msg="provider_type %s is not supported" % (provider_type))
        # build "connection_configurations" objects from user requested endpoints
        # "provider" is a required endpoint, if we have it, we have endpoints
        if raw_endpoints.get("provider"):
            endpoints = manageiq_provider.build_connection_configurations(provider_type, raw_endpoints)
        # if we have a provider, edit it
        if provider:
            res_args = manageiq_provider.edit_provider(provider, name, provider_type, endpoints, zone_id, provider_region,
                                                       host_default_vnc_port_start, host_default_vnc_port_end,
                                                       subscription, project, uid_ems, tenant_mapping_enabled, api_version)
        # if we do not have a provider, create it
        else:
            res_args = manageiq_provider.create_provider(name, provider_type, endpoints, zone_id, provider_region,
                                                         host_default_vnc_port_start, host_default_vnc_port_end,
                                                         subscription, project, uid_ems, tenant_mapping_enabled, api_version)
    # refresh provider (trigger sync)
    if state == "refresh":
        if provider:
            res_args = manageiq_provider.refresh(provider, name)
        else:
            res_args = dict(
                changed=False,
                msg="provider %s: does not exist in manageiq" % (name))
    module.exit_json(**res_args)
if __name__ == "__main__":
    main()
| gpl-3.0 |
google/floq-client | floq/client/client/cirq.py | 1 | 1444 | # Copyright 2021 The Floq Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Floq service cirq client."""
from ..simulators import cirq
from . import abstract
class CirqClient(abstract.AbstractClient):  # pylint: disable=too-few-public-methods
    """Floq service client that exposes a cirq-compatible simulator.

    The only resource this client provides is :class:`CirqSimulator`, a
    cirq based simulator that runs quantum circuits on the cloud.
    """

    @property
    def simulator(self) -> cirq.CirqSimulator:
        """A CirqSimulator instance built from the client's container.

        The returned object is the client for the Floq service and
        implements the following cirq simulation interfaces:

        - cirq.sim.simulator.SimulatesSamples
        - cirq.sim.simulator.SimulatesExpectationValues
        """
        simulators = self._container.simulators
        return simulators.CirqSimulator()
coldmind/django | tests/template_tests/syntax_tests/test_template_tag.py | 521 | 2594 | from django.template import TemplateSyntaxError
from django.test import SimpleTestCase
from ..utils import setup
class TemplateTagTests(SimpleTestCase):
    """Tests for the {% templatetag %} tag, which outputs raw template syntax.

    Each @setup decorator registers a one-template engine for its test; the
    test renders that template and checks the literal output characters.
    """
    @setup({'templatetag01': '{% templatetag openblock %}'})
    def test_templatetag01(self):
        output = self.engine.render_to_string('templatetag01')
        self.assertEqual(output, '{%')
    @setup({'templatetag02': '{% templatetag closeblock %}'})
    def test_templatetag02(self):
        output = self.engine.render_to_string('templatetag02')
        self.assertEqual(output, '%}')
    @setup({'templatetag03': '{% templatetag openvariable %}'})
    def test_templatetag03(self):
        output = self.engine.render_to_string('templatetag03')
        self.assertEqual(output, '{{')
    @setup({'templatetag04': '{% templatetag closevariable %}'})
    def test_templatetag04(self):
        output = self.engine.render_to_string('templatetag04')
        self.assertEqual(output, '}}')
    # A bare {% templatetag %} with no argument must fail at parse time.
    @setup({'templatetag05': '{% templatetag %}'})
    def test_templatetag05(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('templatetag05')
    # An unknown argument must also fail at parse time.
    @setup({'templatetag06': '{% templatetag foo %}'})
    def test_templatetag06(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('templatetag06')
    @setup({'templatetag07': '{% templatetag openbrace %}'})
    def test_templatetag07(self):
        output = self.engine.render_to_string('templatetag07')
        self.assertEqual(output, '{')
    @setup({'templatetag08': '{% templatetag closebrace %}'})
    def test_templatetag08(self):
        output = self.engine.render_to_string('templatetag08')
        self.assertEqual(output, '}')
    # Two consecutive brace tags should concatenate without separators.
    @setup({'templatetag09': '{% templatetag openbrace %}{% templatetag openbrace %}'})
    def test_templatetag09(self):
        output = self.engine.render_to_string('templatetag09')
        self.assertEqual(output, '{{')
    @setup({'templatetag10': '{% templatetag closebrace %}{% templatetag closebrace %}'})
    def test_templatetag10(self):
        output = self.engine.render_to_string('templatetag10')
        self.assertEqual(output, '}}')
    @setup({'templatetag11': '{% templatetag opencomment %}'})
    def test_templatetag11(self):
        output = self.engine.render_to_string('templatetag11')
        self.assertEqual(output, '{#')
    @setup({'templatetag12': '{% templatetag closecomment %}'})
    def test_templatetag12(self):
        output = self.engine.render_to_string('templatetag12')
        self.assertEqual(output, '#}')
| bsd-3-clause |
spisneha25/django | tests/gis_tests/maps/tests.py | 322 | 2099 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import skipUnless
from django.contrib.gis.geos import HAS_GEOS
from django.test import SimpleTestCase
from django.test.utils import modify_settings, override_settings
from django.utils.encoding import force_text
GOOGLE_MAPS_API_KEY = 'XXXX'
@skipUnless(HAS_GEOS, 'Geos is required.')
@modify_settings(
    INSTALLED_APPS={'append': 'django.contrib.gis'},
)
class GoogleMapsTest(SimpleTestCase):
    """Tests for the deprecated django.contrib.gis.maps.google helpers.

    Imports of the gmap/overlays modules happen inside each test so that a
    missing GEOS library only skips this class instead of breaking
    collection.
    """
    @override_settings(GOOGLE_MAPS_API_KEY=GOOGLE_MAPS_API_KEY)
    def test_google_map_scripts(self):
        """
        Testing GoogleMap.scripts() output. See #20773.
        """
        from django.contrib.gis.maps.google.gmap import GoogleMap
        google_map = GoogleMap()
        scripts = google_map.scripts
        # The API key and the GMap2 constructor must both appear in the
        # generated script block.
        self.assertIn(GOOGLE_MAPS_API_KEY, scripts)
        self.assertIn("new GMap2", scripts)
    @override_settings(GOOGLE_MAPS_API_KEY=GOOGLE_MAPS_API_KEY)
    def test_unicode_in_google_maps(self):
        """
        Test that GoogleMap doesn't crash with non-ASCII content.
        """
        from django.contrib.gis.geos import Point
        from django.contrib.gis.maps.google.gmap import GoogleMap, GMarker
        center = Point(6.146805, 46.227574)
        marker = GMarker(center,
                         title='En français !')
        google_map = GoogleMap(center=center, zoom=18, markers=[marker])
        self.assertIn("En français", google_map.scripts)
    def test_gevent_html_safe(self):
        # GEvent must implement __html__ so template autoescaping treats
        # its output as safe markup.
        from django.contrib.gis.maps.google.overlays import GEvent
        event = GEvent('click', 'function() {location.href = "http://www.google.com"}')
        self.assertTrue(hasattr(GEvent, '__html__'))
        self.assertEqual(force_text(event), event.__html__())
    def test_goverlay_html_safe(self):
        # Same __html__ contract for the overlay base class.
        from django.contrib.gis.maps.google.overlays import GOverlayBase
        overlay = GOverlayBase()
        overlay.js_params = '"foo", "bar"'
        self.assertTrue(hasattr(GOverlayBase, '__html__'))
        self.assertEqual(force_text(overlay), overlay.__html__())
| bsd-3-clause |
alexmingoia/nixops | nixops/resources/gce_image.py | 5 | 3204 | # -*- coding: utf-8 -*-
# Automatic provisioning of GCE Images.
import os
import libcloud.common.google
from nixops.util import attr_property
from nixops.gce_common import ResourceDefinition, ResourceState
class GCEImageDefinition(ResourceDefinition):
    """Definition of a GCE Image, parsed from the deployment's nix expression."""
    @classmethod
    def get_type(cls):
        # resource type name as it appears in nix expressions
        return "gce-image"
    def __init__(self, xml):
        ResourceDefinition.__init__(self, xml)
        # 'name' and 'sourceUri' are required; 'description' is optional
        self.image_name = self.get_option_value(xml, 'name', str)
        self.copy_option(xml, 'sourceUri', str)
        self.copy_option(xml, 'description', str, optional = True)
    def show_type(self):
        return self.get_type()
class GCEImageState(ResourceState):
    """State of a GCE Image: creation, checking and destruction lifecycle."""
    # persisted state attributes, stored under the given keys
    image_name = attr_property("gce.name", None)
    source_uri = attr_property("gce.sourceUri", None)
    description = attr_property("gce.description", None)
    @classmethod
    def get_type(cls):
        return "gce-image"
    def __init__(self, depl, name, id):
        ResourceState.__init__(self, depl, name, id)
    def show_type(self):
        return super(GCEImageState, self).show_type()
    @property
    def resource_id(self):
        return self.image_name
    nix_name = "gceImages"
    @property
    def full_name(self):
        return "GCE image '{0}'".format(self.image_name)
    def image(self):
        """Fetch the live image object, or None if it does not exist.

        Aliases .destroy to .delete so confirm_destroy() can call a uniform
        interface on libcloud image objects.
        """
        img = self.connect().ex_get_image(self.image_name)
        if img:
            img.destroy = img.delete
        return img
    # properties copied verbatim from the definition after creation
    defn_properties = [ 'description', 'source_uri' ]
    def create(self, defn, check, allow_reboot, allow_recreate):
        # source_uri and description are immutable on an existing image
        self.no_property_change(defn, 'source_uri')
        self.no_property_change(defn, 'description')
        self.no_project_change(defn)
        self.copy_credentials(defn)
        self.image_name = defn.image_name
        if check:
            # reconcile recorded state with what actually exists in GCE
            image = self.image()
            if image:
                if self.state == self.UP:
                    self.handle_changed_property('description', image.extra['description'], can_fix = False)
                else:
                    # image exists but we never recorded creating it
                    self.warn_not_supposed_to_exist(valuable_data = True)
                    self.confirm_destroy(image, self.full_name)
            else:
                self.warn_missing_resource()
        if self.state != self.UP:
            self.log("creating {0}...".format(self.full_name))
            try:
                image = self.connect().ex_copy_image(defn.image_name, defn.source_uri,
                                                     description = defn.description)
            except libcloud.common.google.ResourceExistsError:
                raise Exception("tried creating an image that already exists; "
                                "please run 'deploy --check' to fix this")
            self.state = self.UP
            self.copy_properties(defn)
    def destroy(self, wipe=False):
        # returns False only if the user declines the destroy confirmation
        if self.state == self.UP:
            image = self.image()
            if image:
                return self.confirm_destroy(image, self.full_name, abort = False)
            else:
                self.warn("tried to destroy {0} which didn't exist".format(self.full_name))
        return True
| lgpl-3.0 |
SergioGonzalezSanz/conformal_predictors | tests/nc_measures/SVMTest.py | 1 | 1492 | import unittest
from conformal_predictors.nc_measures.SVM import SVCDistanceNCMeasure
from sklearn.svm import SVC
from numpy import array
class SVMTest(unittest.TestCase):
    """Tests for SVCDistanceNCMeasure against small fitted SVC classifiers.

    The expected values appear to be the signed decision-function distances
    produced by sklearn's SVC with its default (RBF) kernel on these tiny
    datasets — TODO confirm against the measure's definition; they will
    break if sklearn changes its defaults.
    """
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def test_1(self):
        # two classes, one sample each
        x = array([[1, 1], [2, 2]])
        y = array([0, 1])
        measure = SVCDistanceNCMeasure()
        clf = SVC(decision_function_shape='ovr')
        clf.fit(x, y)
        measures = measure.evaluate(clf, x)
        # matrix is symmetric: own-class entries negative, other-class positive
        self.assertAlmostEqual(measures[0, 0], -.63212056)
        self.assertAlmostEqual(measures[0, 1], .63212056)
        self.assertAlmostEqual(measures[1, 0], .63212056)
        self.assertAlmostEqual(measures[1, 1], -.63212056)
    def tests_2(self):
        # three classes, one sample each (NOTE: name 'tests_2' still matches
        # unittest's default 'test' prefix, so it is collected)
        x = array([[1, 1], [2, 2], [3, 3]])
        y = array([0, 1, 2])
        measure = SVCDistanceNCMeasure()
        clf = SVC(decision_function_shape='ovr')
        clf.fit(x, y)
        measures = measure.evaluate(clf, x)
        self.assertAlmostEqual(measures[0, 0], -1.5)
        self.assertAlmostEqual(measures[0, 1], 1.08754365)
        self.assertAlmostEqual(measures[0, 2], .41245635)
        self.assertAlmostEqual(measures[1, 0], 1.19584788)
        self.assertAlmostEqual(measures[1, 1], -1.60830423)
        self.assertAlmostEqual(measures[1, 2], .19584788)
        self.assertAlmostEqual(measures[2, 0], .41245635)
        self.assertAlmostEqual(measures[2, 1], 1.08754365)
        self.assertAlmostEqual(measures[2, 2], -1.5)
snnn/tensorflow | tensorflow/python/ops/gradient_checker.py | 20 | 15045 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Gradient checker for any ops, graphs.
The gradient checker verifies numerically that an op/graph properly
computes the gradients
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import tf_export
def _product(t):
if isinstance(t, int):
return t
else:
y = 1
for x in t:
y *= x
return y
def _extra_feeds(extra_feed_dict, new_feeds):
if not extra_feed_dict:
return new_feeds
r = {}
r.update(extra_feed_dict)
r.update(new_feeds)
return r
def _compute_theoretical_jacobian(x, x_shape, x_data, dy, dy_shape, dx,
                                  extra_feed_dict):
  """Computes the theoretical Jacobian for dy/dx.

  Computes the theoretical Jacobian using the ops generated by
  compute_gradient().

  Args:
    x: the tensor "x".
    x_shape: the dimensions of x as a tuple or an array of ints.
    x_data: a numpy array as the input data for x
    dy: the tensor "dy".
    dy_shape: the dimensions of dy as a tuple or an array of ints.
    dx: Tensor or IndexedSlices representing dx
    extra_feed_dict: dict that allows fixing specified tensor values
      during the jacobian calculation.

  Returns:
    A 2-d numpy array representing the Jacobian for dy/dx. It has "x_size" rows
    and "dy_size" columns where "x_size" is the number of elements in x and
    "dy_size" is the number of elements in dy.

  Raises:
    ValueError: If `dy` is empty but the gradient is nonzero.
  """
  # Complex vectors are treated as vectors of twice as many reals.
  if x.dtype.is_complex:
    x_shape = tuple(x_shape) + (2,)
  dy_factor = 2 if dy.dtype.is_complex else 1
  # To compute the jacobian, we treat x and y as one-dimensional vectors.
  x_size = _product(x_shape)
  x_val_size = _product(x_shape[1:])  # This is used for sparse gradients
  dy_size = _product(dy_shape) * dy_factor
  # Allocate 2-D Jacobian, with x dimensions smashed into the first
  # dimension and y dimensions smashed into the second.
  jacobian = np.zeros((x_size, dy_size),
                      dtype=x.dtype.real_dtype.as_numpy_dtype)
  # For each of the entry of dy, we set this to be 1 and
  # everything else to be 0 and compute the backprop -- this will give us
  # one column of the Jacobian matrix.
  dy_data = np.zeros(dy_shape, dtype=dy.dtype.as_numpy_dtype)
  # viewing through the real dtype lets us flip real/imag parts one at a time
  dy_data_flat = dy_data.ravel().view(dy.dtype.real_dtype.as_numpy_dtype)
  sess = ops.get_default_session()
  for col in range(dy_size):
    dy_data_flat[col] = 1
    if isinstance(dx, ops.IndexedSlices):
      # Sparse gradient: scatter each returned slice into its row range.
      backprop_indices, backprop_values = sess.run(
          [dx.indices, dx.values],
          feed_dict=_extra_feeds(extra_feed_dict, {x: x_data, dy: dy_data}))
      for i, v in zip(backprop_indices, backprop_values):
        r_begin = i * x_val_size
        r_end = r_begin + x_val_size
        jacobian[r_begin:r_end, col] += v.flat
    else:
      assert isinstance(dx, ops.Tensor), "dx = " + str(dx)
      backprop = sess.run(
          dx, feed_dict=_extra_feeds(extra_feed_dict, {x: x_data, dy: dy_data}))
      jacobian[:, col] = backprop.ravel().view(jacobian.dtype)
    # reset for the next one-hot column
    dy_data_flat[col] = 0
  # If the output is empty, run the gradients at least once and make sure
  # they produce zeros.
  if not dy_size:
    backprop = sess.run(
        dx, feed_dict=_extra_feeds(extra_feed_dict, {x: x_data, dy: dy_data}))
    if backprop.shape != x_data.shape:
      raise ValueError("Empty gradient has wrong shape: expected %s, got %s" %
                       (x_data.shape, backprop.shape))
    if np.any(backprop):
      raise ValueError("Empty tensor with nonzero gradients")
  logging.vlog(1, "Theoretical Jacobian =\n%s", jacobian)
  return jacobian
def _compute_numeric_jacobian(x, x_shape, x_data, y, y_shape, delta,
                              extra_feed_dict):
  """Computes the numeric Jacobian for dy/dx.

  Computes the numeric Jacobian by slightly perturbing the inputs and
  measuring the differences on the output (central finite differences).

  Args:
    x: the tensor "x".
    x_shape: the dimensions of x as a tuple or an array of ints.
    x_data: a numpy array as the input data for x
    y: the tensor "y".
    y_shape: the dimensions of y as a tuple or an array of ints.
    delta: the amount of perturbation we give to the input
    extra_feed_dict: dict that allows fixing specified tensor values
      during the jacobian calculation.

  Returns:
    A 2-d numpy array representing the Jacobian for dy/dx. It has "x_size" rows
    and "y_size" columns where "x_size" is the number of elements in x and
    "y_size" is the number of elements in y.
  """
  # bfloat16 doesn't have enough bits to represent high precision numbers such
  # as delta. Convert to float32 here. Since numeric_jacobian is expected to
  # be the groundtruth to compare against, it shouldn't lose any information.
  if x.dtype == dtypes.bfloat16:
    x = math_ops.cast(x, dtypes.float32)
  if y.dtype == dtypes.bfloat16:
    y = math_ops.cast(y, dtypes.float32)
  if x_data.dtype == dtypes.bfloat16.as_numpy_dtype:
    x_data = x_data.astype(np.float32)
  # To compute the jacobian, we treat x and y as one-dimensional vectors;
  # complex entries count as two reals each.
  x_size = _product(x_shape) * (2 if x.dtype.is_complex else 1)
  y_size = _product(y_shape) * (2 if y.dtype.is_complex else 1)
  x_dtype = x.dtype.real_dtype.as_numpy_dtype
  y_dtype = y.dtype.real_dtype.as_numpy_dtype
  # Make sure we have the right types
  x_data = np.asarray(x_data, dtype=x.dtype.as_numpy_dtype)
  # central-difference denominator, as a numpy scalar of y's real dtype
  scale = np.asarray(2 * delta, dtype=y_dtype)[()]
  jacobian = np.zeros((x_size, y_size), dtype=x_dtype)
  # For each entry of x, we slightly perturb it by adding and
  # subtracting a delta and then compute the difference between the outputs.
  # This will give us one row of the Jacobian matrix.
  for row in range(x_size):
    x_pos = x_data.copy()
    x_neg = x_data.copy()
    x_pos.ravel().view(x_dtype)[row] += delta
    y_pos = y.eval(feed_dict=_extra_feeds(extra_feed_dict, {x: x_pos}))
    x_neg.ravel().view(x_dtype)[row] -= delta
    y_neg = y.eval(feed_dict=_extra_feeds(extra_feed_dict, {x: x_neg}))
    diff = (y_pos - y_neg) / scale
    jacobian[row, :] = diff.ravel().view(y_dtype)
  logging.vlog(1, "Numeric Jacobian =\n%s", jacobian)
  return jacobian
def _compute_dx_and_dy(x, y, y_shape):
  """Returns a node to compute gradient of y wrt x.

  Args:
    x: input tensor to differentiate with respect to.
    y: output tensor.
    y_shape: dimensions of y, used to build the upstream-gradient feed.

  Returns:
    A (dx, dy_orig) pair: the gradient tensor of y wrt x, and the constant
    placeholder-like node whose value callers feed as the upstream gradient.
  """
  # We make up a dy so that we can compute the gradients. We don't really use
  # the value of dy -- we will always feed it. We need to add an identity node
  # so that we can always feed it properly. Otherwise, for the Add operation,
  # dx is the same as dy and we cannot fetch the tensor that we are feeding.
  with x.graph.as_default():
    dy_orig = constant_op.constant(1.0, shape=y_shape, dtype=y.dtype)
    dy = array_ops.identity(dy_orig)
  # We compute the gradients for y wrt. x
  grads = gradients.gradients(y, x, dy)
  assert len(grads) == 1
  return grads[0], dy_orig
def _compute_gradient(x,
                      x_shape,
                      dx,
                      y,
                      y_shape,
                      dy,
                      x_init_value=None,
                      delta=1e-3,
                      extra_feed_dict=None):
  """Computes the theoretical and numerical jacobian for a single x.

  Args:
    x: the input tensor.
    x_shape: dimensions of x.
    dx: gradient tensor of y wrt x (from _compute_dx_and_dy).
    y: the output tensor.
    y_shape: dimensions of y.
    dy: the feedable upstream-gradient node (from _compute_dx_and_dy).
    x_init_value: optional numpy array to use as x's value; random if None.
    delta: finite-difference step for the numeric Jacobian.
    extra_feed_dict: extra tensor values to fix during evaluation.

  Returns:
    A (jacob_t, jacob_n) pair of 2-d numpy arrays.
  """
  # Only floating and complex dtypes are differentiable here.
  t = dtypes.as_dtype(x.dtype)
  allowed_types = [dtypes.float16, dtypes.bfloat16, dtypes.float32,
                   dtypes.float64, dtypes.complex64, dtypes.complex128]
  assert t.base_dtype in allowed_types, "Don't support type %s for x" % t.name
  t2 = dtypes.as_dtype(y.dtype)
  assert t2.base_dtype in allowed_types, "Don't support type %s for y" % t2.name
  if x_init_value is not None:
    i_shape = list(x_init_value.shape)
    assert(list(x_shape) == i_shape), "x_shape = %s, init_data shape = %s" % (
        x_shape, i_shape)
    x_data = x_init_value
  else:
    # random init; complex inputs also get a random imaginary part
    x_data = np.random.random_sample(x_shape).astype(t.as_numpy_dtype)
    if t.is_complex:
      x_data.imag = np.random.random_sample(x_shape)
  jacob_t = _compute_theoretical_jacobian(
      x, x_shape, x_data, dy, y_shape, dx, extra_feed_dict=extra_feed_dict)
  jacob_n = _compute_numeric_jacobian(
      x, x_shape, x_data, y, y_shape, delta, extra_feed_dict=extra_feed_dict)
  return jacob_t, jacob_n
def _compute_gradient_list(x,
                           x_shape,
                           y,
                           y_shape,
                           x_init_value=None,
                           delta=1e-3,
                           init_targets=None,
                           extra_feed_dict=None):
  """Compute gradients for a list of x values.

  Parallels _compute_gradient but x, x_shape and x_init_value are lists;
  returns one (jacob_t, jacob_n) pair per input tensor.
  """
  assert isinstance(x, list)
  # one (dx, dy) pair per input tensor
  dx, dy = zip(*[_compute_dx_and_dy(xi, y, y_shape) for xi in x])
  if init_targets is not None:
    assert isinstance(init_targets, (list, tuple))
    for init in init_targets:
      init.run()
  if x_init_value is None:
    x_init_value = [None] * len(x)
  ret = [_compute_gradient(xi, x_shapei, dxi, y, y_shape, dyi, x_init_valuei,
                           delta, extra_feed_dict=extra_feed_dict)
         for xi, x_shapei, dxi, dyi, x_init_valuei in zip(x, x_shape, dx, dy,
                                                          x_init_value)]
  return ret
@tf_export("test.compute_gradient")
def compute_gradient(x,
                     x_shape,
                     y,
                     y_shape,
                     x_init_value=None,
                     delta=1e-3,
                     init_targets=None,
                     extra_feed_dict=None):
  """Computes and returns the theoretical and numerical Jacobian.

  If `x` or `y` is complex, the Jacobian will still be real but the
  corresponding Jacobian dimension(s) will be twice as large. This is required
  even if both input and output is complex since TensorFlow graphs are not
  necessarily holomorphic, and may have gradients not expressible as complex
  numbers. For example, if `x` is complex with shape `[m]` and `y` is complex
  with shape `[n]`, each Jacobian `J` will have shape `[m * 2, n * 2]` with

    J[::2, ::2] = d(Re y)/d(Re x)
    J[::2, 1::2] = d(Im y)/d(Re x)
    J[1::2, ::2] = d(Re y)/d(Im x)
    J[1::2, 1::2] = d(Im y)/d(Im x)

  Args:
    x: a tensor or list of tensors
    x_shape: the dimensions of x as a tuple or an array of ints. If x is a list,
    then this is the list of shapes.
    y: a tensor
    y_shape: the dimensions of y as a tuple or an array of ints.
    x_init_value: (optional) a numpy array of the same shape as "x"
      representing the initial value of x. If x is a list, this should be a list
      of numpy arrays. If this is none, the function will pick a random tensor
      as the initial value.
    delta: (optional) the amount of perturbation.
    init_targets: list of targets to run to initialize model params.
      TODO(mrry): remove this argument.
    extra_feed_dict: dict that allows fixing specified tensor values
      during the Jacobian calculation.

  Returns:
    Two 2-d numpy arrays representing the theoretical and numerical
    Jacobian for dy/dx. Each has "x_size" rows and "y_size" columns
    where "x_size" is the number of elements in x and "y_size" is the
    number of elements in y. If x is a list, returns a list of two numpy arrays.
  """
  if extra_feed_dict is None:
    extra_feed_dict = {}
  # Dispatch on whether x is a single tensor or a list of tensors.
  if isinstance(x, list):
    return _compute_gradient_list(x, x_shape, y, y_shape, x_init_value, delta,
                                  init_targets, extra_feed_dict=extra_feed_dict)
  else:
    if init_targets is not None:
      assert isinstance(init_targets, (list, tuple))
      for init in init_targets:
        init.run()
    dx, dy = _compute_dx_and_dy(x, y, y_shape)
    ret = _compute_gradient(x, x_shape, dx, y, y_shape, dy, x_init_value, delta,
                            extra_feed_dict=extra_feed_dict)
    return ret
@tf_export("test.compute_gradient_error")
def compute_gradient_error(x,
                           x_shape,
                           y,
                           y_shape,
                           x_init_value=None,
                           delta=1e-3,
                           init_targets=None,
                           extra_feed_dict=None):
  """Computes the gradient error.

  Computes the maximum error for dy/dx between the computed Jacobian and the
  numerically estimated Jacobian.

  This function will modify the tensors passed in as it adds more operations
  and hence changing the consumers of the operations of the input tensors.

  This function adds operations to the current session. To compute the error
  using a particular device, such as a GPU, use the standard methods for
  setting a device (e.g. using with sess.graph.device() or setting a device
  function in the session constructor).

  Args:
    x: a tensor or list of tensors
    x_shape: the dimensions of x as a tuple or an array of ints. If x is a list,
    then this is the list of shapes.
    y: a tensor
    y_shape: the dimensions of y as a tuple or an array of ints.
    x_init_value: (optional) a numpy array of the same shape as "x"
      representing the initial value of x. If x is a list, this should be a list
      of numpy arrays. If this is none, the function will pick a random tensor
      as the initial value.
    delta: (optional) the amount of perturbation.
    init_targets: list of targets to run to initialize model params.
    extra_feed_dict: dict that allows fixing specified tensor values
      during the Jacobian calculation.

  Returns:
    The maximum error in between the two Jacobians.
  """
  jacobians = compute_gradient(x, x_shape, y, y_shape, x_init_value, delta,
                               init_targets, extra_feed_dict=extra_feed_dict)
  # For a single input, compute_gradient returns one (theoretical, numerical)
  # pair; normalize to a list so one loop handles both shapes of result.
  if isinstance(jacobians, tuple):
    jacobians = [jacobians]
  worst = 0
  for theoretical, numerical in jacobians:
    # Zero-size Jacobians (empty tensors) contribute no error.
    if theoretical.size or numerical.size:
      worst = np.maximum(worst, np.fabs(theoretical - numerical).max())
  return worst
| apache-2.0 |
borjam/exabgp | src/exabgp/configuration/process/parser.py | 3 | 2367 | # encoding: utf-8
"""
parse_process.py
Created by Thomas Mangin on 2015-06-18.
Copyright (c) 2009-2017 Exa Networks. All rights reserved.
License: 3-clause BSD. (See the COPYRIGHT file)
"""
import os
import stat
def encoder(tokeniser):
    """Consume and validate the message encoding for an API process.

    Only 'text' and 'json' are supported; anything else raises ValueError.
    """
    token = tokeniser()
    if token in ('text', 'json'):
        return token
    raise ValueError('"%s" is an invalid option' % token)
def _make_path(prg):
parts = prg.split('/')
env = os.environ.get('EXABGP_ETC', '')
if env:
options = [os.path.join(env.rstrip('/'), os.path.join(*parts[2:])), '/etc/exabgp']
else:
options = []
options.append('/etc/exabgp')
pwd = os.environ.get('PWD', '').split('/')
if pwd:
# without abspath the path is not / prefixed !
if pwd[-1] in ('etc', 'sbin'):
options.append(os.path.abspath(os.path.join(os.path.join(*pwd[:-1]), os.path.join(*parts))))
if 'etc' not in pwd:
options.append(os.path.abspath(os.path.join(os.path.join(*pwd), os.path.join(*parts))))
return options
def run(tokeniser):
    """Parse and validate the program (plus arguments) an API process will run.

    Consumes the program name from the tokeniser, resolves a relative name to
    an absolute path (searching the exabgp configuration locations and $PATH),
    then sanity-checks the file: it must exist, not be a directory, not be
    setuid, and be executable by the current user/group.

    Returns the argument vector [program, arg1, arg2, ...] (the remaining
    tokens come from tokeniser.generator).

    Raises ValueError when the program can not be located or must not be run.

    Fix: the 'can not locate' error message contained a duplicated word
    ('the the program').
    """
    prg = tokeniser()

    if prg[0] != '/':
        if prg.startswith('etc/exabgp'):
            options = _make_path(prg)
        else:
            options = [
                os.path.abspath(os.path.join('/etc/exabgp', prg)),
                os.path.abspath(os.path.join(os.path.dirname(tokeniser.fname), prg)),
            ]
        options.extend((os.path.abspath(os.path.join(p, prg)) for p in os.getenv('PATH').split(':')))
        # No break: when several candidates exist, the LAST existing one wins.
        for option in options:
            if os.path.exists(option):
                prg = option

    if not os.path.exists(prg):
        raise ValueError('can not locate the program "%s"' % prg)

    # race conditions are possible, those are sanity checks not security ones ...
    s = os.stat(prg)

    if stat.S_ISDIR(s.st_mode):
        raise ValueError('can not execute directories "%s"' % prg)

    if s.st_mode & stat.S_ISUID:
        raise ValueError('refusing to run setuid programs "%s"' % prg)

    # Build the set of execute bits that apply to us: other-execute always,
    # plus user/group execute when we own the file or share its group.
    check = stat.S_IXOTH
    if s.st_uid == os.getuid():
        check |= stat.S_IXUSR
    if s.st_gid == os.getgid():
        check |= stat.S_IXGRP
    if not check & s.st_mode:
        raise ValueError('exabgp will not be able to run this program "%s"' % prg)

    return [prg] + [_ for _ in tokeniser.generator]
| bsd-3-clause |
jessrosenfield/pants | src/python/pants/base/workunit.py | 3 | 8719 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import re
import time
import uuid
from collections import namedtuple
from six.moves import range
from pants.rwbuf.read_write_buffer import FileBackedRWBuf
from pants.util.dirutil import safe_mkdir_for
from pants.util.memo import memoized_method
class WorkUnitLabel(object):
  """Enumerates labels that describe a WorkUnit.

  Reporting code can use these labels to decide how to display information
  about a workunit. A workunit may carry several labels at once where that
  makes sense, e.g., TOOL, COMPILER and NAILGUN.
  """

  SETUP = 'SETUP'          # Parsing build files etc.
  GOAL = 'GOAL'            # Executing a goal.
  TASK = 'TASK'            # Executing a task within a goal.
  GROUP = 'GROUP'          # Executing a group.

  BOOTSTRAP = 'BOOTSTRAP'  # Invocation of code to fetch a tool.
  TOOL = 'TOOL'            # Single invocations of a tool.
  MULTITOOL = 'MULTITOOL'  # Multiple consecutive invocations of the same tool.
  COMPILER = 'COMPILER'    # Invocation of a compiler.

  TEST = 'TEST'            # Running a test.
  JVM = 'JVM'              # Running a tool via the JVM.
  NAILGUN = 'NAILGUN'      # Running a tool via nailgun.
  RUN = 'RUN'              # Running a binary.
  REPL = 'REPL'            # Running a repl.
  PREP = 'PREP'            # Running a prep command

  @classmethod
  @memoized_method
  def keys(cls):
    """Returns the names of all labels declared on this class."""
    return [attr for attr in dir(cls) if attr.isupper() and not attr.startswith('_')]
class WorkUnit(object):
  """A hierarchical unit of work, for the purpose of timing and reporting.

  A WorkUnit can be subdivided into further WorkUnits. The WorkUnit concept is deliberately
  decoupled from the goal/task hierarchy. This allows some flexibility in having, say,
  sub-units inside a task. E.g., there might be one WorkUnit representing an entire pants run,
  and that can be subdivided into WorkUnits for each goal. Each of those can be subdivided into
  WorkUnits for each task, and a task can subdivide that into further work units, if finer-grained
  timing and reporting is needed.
  """

  # The outcome of a workunit.
  # It can only be set to a new value <= the old one.
  ABORTED = 0
  FAILURE = 1
  WARNING = 2
  SUCCESS = 3
  UNKNOWN = 4

  # Generic workunit log config.
  # log_level: Display log messages up to this level.
  # color: log color settings.
  LogConfig = namedtuple('LogConfig', ['level', 'colors'])

  @staticmethod
  def outcome_string(outcome):
    """Returns a human-readable string describing the outcome."""
    # Index must line up with the ABORTED..UNKNOWN integer constants above.
    return ['ABORTED', 'FAILURE', 'WARNING', 'SUCCESS', 'UNKNOWN'][outcome]

  def __init__(self, run_info_dir, parent, name, labels=None, cmd='', log_config=None):
    """
    - run_info_dir: The path of the run_info_dir from the RunTracker that tracks this WorkUnit.
    - parent: The containing workunit, if any. E.g., 'compile' might contain 'java', 'scala' etc.,
              'scala' might contain 'compile', 'split' etc.
    - name: A short name for this work. E.g., 'resolve', 'compile', 'scala', 'zinc'.
    - labels: An optional iterable of labels. The reporters can use this to decide how to
              display information about this work.
    - cmd: An optional longer string representing this work.
           E.g., the cmd line of a compiler invocation.
    - log_config: An optional tuple of registered options affecting reporting output.
    """
    self._outcome = WorkUnit.UNKNOWN

    self.run_info_dir = run_info_dir
    self.parent = parent
    self.children = []

    self.name = name
    self.labels = set(labels or ())
    self.cmd = cmd
    self.id = uuid.uuid4()
    self.log_config = log_config

    # In seconds since the epoch. Doubles, to account for fractional seconds.
    self.start_time = 0
    self.end_time = 0

    # A workunit may have multiple outputs, which we identify by a name.
    # E.g., a tool invocation may have 'stdout', 'stderr', 'debug_log' etc.
    self._outputs = {}  # name -> output buffer.
    self._output_paths = {}

    # Do this last, as the parent's _self_time() might get called before we're
    # done initializing ourselves.
    # TODO: Ensure that a parent can't be ended before all its children are.
    if self.parent:
      if not log_config:
        # Inherit the parent's reporting configuration when none was given.
        self.log_config = self.parent.log_config
      self.parent.children.append(self)

  def has_label(self, label):
    """Returns True if this workunit was created with the given label."""
    return label in self.labels

  def start(self):
    """Mark the time at which this workunit started."""
    self.start_time = time.time()

  def end(self):
    """Mark the time at which this workunit ended.

    Also closes all output buffers opened via output().
    """
    self.end_time = time.time()

    for output in self._outputs.values():
      output.close()

    return self.path(), self.duration(), self._self_time(), self.has_label(WorkUnitLabel.TOOL)

  def outcome(self):
    """Returns the outcome of this workunit."""
    return self._outcome

  def set_outcome(self, outcome):
    """Set the outcome of this work unit.

    We can set the outcome on a work unit directly, but that outcome will also be affected by
    those of its subunits. The right thing happens: The outcome of a work unit is the
    worst outcome of any of its subunits and any outcome set on it directly."""
    if outcome not in range(0, 5):
      raise Exception('Invalid outcome: {}'.format(outcome))

    if outcome < self._outcome:
      # Lower values are worse; only ever move the outcome downward, and
      # propagate the degradation up to the parent.
      self._outcome = outcome
      if self.parent: self.parent.set_outcome(self._outcome)

  _valid_name_re = re.compile(r'\w+')

  def output(self, name):
    """Returns the output buffer for the specified output name (e.g., 'stdout'), creating it if necessary."""
    m = WorkUnit._valid_name_re.match(name)
    if not m or m.group(0) != name:
      raise Exception('Invalid output name: {}'.format(name))
    if name not in self._outputs:
      # Sanitize the workunit name so it is safe to use in a file name.
      workunit_name = re.sub(r'\W', '_', self.name)
      path = os.path.join(self.run_info_dir,
                          'tool_outputs', '{workunit_name}-{id}.{output_name}'
                          .format(workunit_name=workunit_name,
                                  id=self.id,
                                  output_name=name))
      safe_mkdir_for(path)
      self._outputs[name] = FileBackedRWBuf(path)
      self._output_paths[name] = path

    return self._outputs[name]

  def outputs(self):
    """Returns the map of output name -> output buffer."""
    return self._outputs

  def output_paths(self):
    """Returns the map of output name -> path of the output file."""
    return self._output_paths

  def duration(self):
    """Returns the time (in fractional seconds) spent in this workunit and its children."""
    # A still-running workunit (end_time == 0) is measured against "now".
    return (self.end_time or time.time()) - self.start_time

  @property
  def start_time_string(self):
    """A convenient string representation of start_time."""
    return time.strftime('%H:%M:%S', time.localtime(self.start_time))

  @property
  def start_delta_string(self):
    """A convenient string representation of how long after the run started we started."""
    delta = int(self.start_time) - int(self.root().start_time)
    return '{:02}:{:02}'.format(int(delta / 60), delta % 60)

  def root(self):
    """Returns the topmost workunit enclosing this one (possibly itself)."""
    ret = self
    while ret.parent is not None:
      ret = ret.parent
    return ret

  def ancestors(self):
    """Returns a list consisting of this workunit and those enclosing it, up to the root."""
    ret = []
    workunit = self
    while workunit is not None:
      ret.append(workunit)
      workunit = workunit.parent
    return ret

  def path(self):
    """Returns a path string for this workunit, E.g., 'all:compile:jvm:scalac'."""
    return ':'.join(reversed([w.name for w in self.ancestors()]))

  def unaccounted_time(self):
    """Returns non-leaf time spent in this workunit.

    This assumes that all major work should be done in leaves.
    TODO: Is this assumption valid?
    """
    return 0 if len(self.children) == 0 else self._self_time()

  def to_dict(self):
    """Useful for providing arguments to templates."""
    ret = {}
    for key in ['name', 'cmd', 'id', 'start_time', 'end_time',
                'outcome', 'start_time_string', 'start_delta_string']:
      val = getattr(self, key)
      # Some of the listed attributes are methods/properties; call methods.
      ret[key] = val() if hasattr(val, '__call__') else val
    ret['parent'] = self.parent.to_dict() if self.parent else None
    return ret

  def _self_time(self):
    """Returns the time spent in this workunit outside of any children."""
    return self.duration() - sum([child.duration() for child in self.children])
| apache-2.0 |
bonitadecker77/python-for-android | python3-alpha/python3-src/Lib/test/test_hmac.py | 57 | 12958 | import hmac
import hashlib
import unittest
import warnings
from test import support
class TestVectorsTestCase(unittest.TestCase):
    """Checks HMAC digests against published test vectors.

    The MD5/SHA1 vectors come from the HMAC RFC test suite; the SHA-2
    vectors are the RFC 4231 test cases (section numbers are noted inline).
    """

    def test_md5_vectors(self):
        # Test the HMAC module against test vectors from the RFC.

        def md5test(key, data, digest):
            h = hmac.HMAC(key, data)
            self.assertEqual(h.hexdigest().upper(), digest.upper())

        md5test(b"\x0b" * 16,
                b"Hi There",
                "9294727A3638BB1C13F48EF8158BFC9D")

        md5test(b"Jefe",
                b"what do ya want for nothing?",
                "750c783e6ab0b503eaa86e310a5db738")

        md5test(b"\xaa" * 16,
                b"\xdd" * 50,
                "56be34521d144c88dbb8c733f0e8b3f6")

        md5test(bytes(range(1, 26)),
                b"\xcd" * 50,
                "697eaf0aca3a3aea3a75164746ffaa79")

        md5test(b"\x0C" * 16,
                b"Test With Truncation",
                "56461ef2342edc00f9bab995690efd4c")

        md5test(b"\xaa" * 80,
                b"Test Using Larger Than Block-Size Key - Hash Key First",
                "6b1ab7fe4bd7bf8f0b62e6ce61b9d0cd")

        md5test(b"\xaa" * 80,
                (b"Test Using Larger Than Block-Size Key "
                 b"and Larger Than One Block-Size Data"),
                "6f630fad67cda0ee1fb1f562db3aa53e")

    def test_sha_vectors(self):
        def shatest(key, data, digest):
            h = hmac.HMAC(key, data, digestmod=hashlib.sha1)
            self.assertEqual(h.hexdigest().upper(), digest.upper())

        shatest(b"\x0b" * 20,
                b"Hi There",
                "b617318655057264e28bc0b6fb378c8ef146be00")

        shatest(b"Jefe",
                b"what do ya want for nothing?",
                "effcdf6ae5eb2fa2d27416d5f184df9c259a7c79")

        shatest(b"\xAA" * 20,
                b"\xDD" * 50,
                "125d7342b9ac11cd91a39af48aa17b4f63f175d3")

        shatest(bytes(range(1, 26)),
                b"\xCD" * 50,
                "4c9007f4026250c6bc8414f9bf50c86c2d7235da")

        shatest(b"\x0C" * 20,
                b"Test With Truncation",
                "4c1a03424b55e07fe7f27be1d58bb9324a9a5a04")

        shatest(b"\xAA" * 80,
                b"Test Using Larger Than Block-Size Key - Hash Key First",
                "aa4ae5e15272d00e95705637ce8a3b55ed402112")

        shatest(b"\xAA" * 80,
                (b"Test Using Larger Than Block-Size Key "
                 b"and Larger Than One Block-Size Data"),
                "e8e99d0f45237d786d6bbaa7965c7808bbff1a91")

    def _rfc4231_test_cases(self, hashfunc):
        # Shared driver: runs every RFC 4231 test case for one SHA-2 hash,
        # looking up the expected digest for that hash in `hexdigests`.
        def hmactest(key, data, hexdigests):
            h = hmac.HMAC(key, data, digestmod=hashfunc)
            self.assertEqual(h.hexdigest().lower(), hexdigests[hashfunc])

        # 4.2.  Test Case 1
        hmactest(key = b'\x0b'*20,
                 data = b'Hi There',
                 hexdigests = {
                   hashlib.sha224: '896fb1128abbdf196832107cd49df33f'
                                   '47b4b1169912ba4f53684b22',
                   hashlib.sha256: 'b0344c61d8db38535ca8afceaf0bf12b'
                                   '881dc200c9833da726e9376c2e32cff7',
                   hashlib.sha384: 'afd03944d84895626b0825f4ab46907f'
                                   '15f9dadbe4101ec682aa034c7cebc59c'
                                   'faea9ea9076ede7f4af152e8b2fa9cb6',
                   hashlib.sha512: '87aa7cdea5ef619d4ff0b4241a1d6cb0'
                                   '2379f4e2ce4ec2787ad0b30545e17cde'
                                   'daa833b7d6b8a702038b274eaea3f4e4'
                                   'be9d914eeb61f1702e696c203a126854',
                 })

        # 4.3.  Test Case 2
        hmactest(key = b'Jefe',
                 data = b'what do ya want for nothing?',
                 hexdigests = {
                   hashlib.sha224: 'a30e01098bc6dbbf45690f3a7e9e6d0f'
                                   '8bbea2a39e6148008fd05e44',
                   hashlib.sha256: '5bdcc146bf60754e6a042426089575c7'
                                   '5a003f089d2739839dec58b964ec3843',
                   hashlib.sha384: 'af45d2e376484031617f78d2b58a6b1b'
                                   '9c7ef464f5a01b47e42ec3736322445e'
                                   '8e2240ca5e69e2c78b3239ecfab21649',
                   hashlib.sha512: '164b7a7bfcf819e2e395fbe73b56e0a3'
                                   '87bd64222e831fd610270cd7ea250554'
                                   '9758bf75c05a994a6d034f65f8f0e6fd'
                                   'caeab1a34d4a6b4b636e070a38bce737',
                 })

        # 4.4.  Test Case 3
        hmactest(key = b'\xaa'*20,
                 data = b'\xdd'*50,
                 hexdigests = {
                   hashlib.sha224: '7fb3cb3588c6c1f6ffa9694d7d6ad264'
                                   '9365b0c1f65d69d1ec8333ea',
                   hashlib.sha256: '773ea91e36800e46854db8ebd09181a7'
                                   '2959098b3ef8c122d9635514ced565fe',
                   hashlib.sha384: '88062608d3e6ad8a0aa2ace014c8a86f'
                                   '0aa635d947ac9febe83ef4e55966144b'
                                   '2a5ab39dc13814b94e3ab6e101a34f27',
                   hashlib.sha512: 'fa73b0089d56a284efb0f0756c890be9'
                                   'b1b5dbdd8ee81a3655f83e33b2279d39'
                                   'bf3e848279a722c806b485a47e67c807'
                                   'b946a337bee8942674278859e13292fb',
                 })

        # 4.5.  Test Case 4
        hmactest(key = bytes(x for x in range(0x01, 0x19+1)),
                 data = b'\xcd'*50,
                 hexdigests = {
                   hashlib.sha224: '6c11506874013cac6a2abc1bb382627c'
                                   'ec6a90d86efc012de7afec5a',
                   hashlib.sha256: '82558a389a443c0ea4cc819899f2083a'
                                   '85f0faa3e578f8077a2e3ff46729665b',
                   hashlib.sha384: '3e8a69b7783c25851933ab6290af6ca7'
                                   '7a9981480850009cc5577c6e1f573b4e'
                                   '6801dd23c4a7d679ccf8a386c674cffb',
                   hashlib.sha512: 'b0ba465637458c6990e5a8c5f61d4af7'
                                   'e576d97ff94b872de76f8050361ee3db'
                                   'a91ca5c11aa25eb4d679275cc5788063'
                                   'a5f19741120c4f2de2adebeb10a298dd',
                 })

        # 4.7.  Test Case 6
        hmactest(key = b'\xaa'*131,
                 data = b'Test Using Larger Than Block-Siz'
                        b'e Key - Hash Key First',
                 hexdigests = {
                   hashlib.sha224: '95e9a0db962095adaebe9b2d6f0dbce2'
                                   'd499f112f2d2b7273fa6870e',
                   hashlib.sha256: '60e431591ee0b67f0d8a26aacbf5b77f'
                                   '8e0bc6213728c5140546040f0ee37f54',
                   hashlib.sha384: '4ece084485813e9088d2c63a041bc5b4'
                                   '4f9ef1012a2b588f3cd11f05033ac4c6'
                                   '0c2ef6ab4030fe8296248df163f44952',
                   hashlib.sha512: '80b24263c7c1a3ebb71493c1dd7be8b4'
                                   '9b46d1f41b4aeec1121b013783f8f352'
                                   '6b56d037e05f2598bd0fd2215d6a1e52'
                                   '95e64f73f63f0aec8b915a985d786598',
                 })

        # 4.8.  Test Case 7
        hmactest(key = b'\xaa'*131,
                 data = b'This is a test using a larger th'
                        b'an block-size key and a larger t'
                        b'han block-size data. The key nee'
                        b'ds to be hashed before being use'
                        b'd by the HMAC algorithm.',
                 hexdigests = {
                   hashlib.sha224: '3a854166ac5d9f023f54d517d0b39dbd'
                                   '946770db9c2b95c9f6f565d1',
                   hashlib.sha256: '9b09ffa71b942fcb27635fbcd5b0e944'
                                   'bfdc63644f0713938a7f51535c3a35e2',
                   hashlib.sha384: '6617178e941f020d351e2f254e8fd32c'
                                   '602420feb0b8fb9adccebb82461e99c5'
                                   'a678cc31e799176d3860e6110c46523e',
                   hashlib.sha512: 'e37b6a775dc87dbaa4dfa9f96e5e3ffd'
                                   'debd71f8867289865df5a32d20cdc944'
                                   'b6022cac3c4982b10d5eeb55c3e4de15'
                                   '134676fb6de0446065c97440fa8c6a58',
                 })

    def test_sha224_rfc4231(self):
        self._rfc4231_test_cases(hashlib.sha224)

    def test_sha256_rfc4231(self):
        self._rfc4231_test_cases(hashlib.sha256)

    def test_sha384_rfc4231(self):
        self._rfc4231_test_cases(hashlib.sha384)

    def test_sha512_rfc4231(self):
        self._rfc4231_test_cases(hashlib.sha512)

    def test_legacy_block_size_warnings(self):
        # A digestmod without block_size (or with a tiny one) must trigger
        # a RuntimeWarning; escalate warnings to errors to assert on them.
        class MockCrazyHash(object):
            """Ain't no block_size attribute here."""
            def __init__(self, *args):
                self._x = hashlib.sha1(*args)
                self.digest_size = self._x.digest_size
            def update(self, v):
                self._x.update(v)
            def digest(self):
                return self._x.digest()

        with warnings.catch_warnings():
            warnings.simplefilter('error', RuntimeWarning)
            with self.assertRaises(RuntimeWarning):
                hmac.HMAC(b'a', b'b', digestmod=MockCrazyHash)
                self.fail('Expected warning about missing block_size')

            MockCrazyHash.block_size = 1
            with self.assertRaises(RuntimeWarning):
                hmac.HMAC(b'a', b'b', digestmod=MockCrazyHash)
                self.fail('Expected warning about small block_size')
class ConstructorTestCase(unittest.TestCase):
    """Sanity checks for the documented HMAC constructor call signatures.

    Fixes: removed the dead local `failed = 0` and the unused `h = ...`
    bindings, and narrowed the bare `except:` clauses (which would also
    swallow KeyboardInterrupt/SystemExit) to `except Exception:`.
    """

    def test_normal(self):
        # Standard constructor call.
        try:
            hmac.HMAC(b"key")
        except Exception:
            self.fail("Standard constructor call raised exception.")

    def test_withtext(self):
        # Constructor call with text.
        try:
            hmac.HMAC(b"key", b"hash this!")
        except Exception:
            self.fail("Constructor call with text argument raised exception.")

    def test_withmodule(self):
        # Constructor call with text and digest module.
        try:
            hmac.HMAC(b"key", b"", hashlib.sha1)
        except Exception:
            self.fail("Constructor call with hashlib.sha1 raised exception.")
class SanityTestCase(unittest.TestCase):
    """Smoke tests for HMAC defaults and the public API surface.

    Fixes: narrowed the bare `except:` (which would also swallow
    KeyboardInterrupt/SystemExit) to `except Exception:` and dropped the
    redundant rebinding of the unused `dig`/`h2` results.
    """

    def test_default_is_md5(self):
        # Testing if HMAC defaults to MD5 algorithm.
        # NOTE: this whitebox test depends on the hmac class internals
        h = hmac.HMAC(b"key")
        self.assertEqual(h.digest_cons, hashlib.md5)

    def test_exercise_all_methods(self):
        # Exercising all methods once.
        # This must not raise any exceptions
        try:
            h = hmac.HMAC(b"my secret key")
            h.update(b"compute the hash of this text!")
            h.digest()
            h.hexdigest()
            h.copy()
        except Exception:
            self.fail("Exception raised during normal usage of HMAC class.")
class CopyTestCase(unittest.TestCase):
    """Tests that HMAC.copy() produces an independent but equivalent object.

    Fixes: replaced `assertTrue(x == y, ...)` with `assertEqual` and the
    `assertTrue(id(a) != id(b), ...)` identity checks with `assertIsNot`,
    which give informative failure messages and are equally immune to
    overridden __eq__/__ne__.
    """

    def test_attributes(self):
        # Testing if attributes are of same type.
        h1 = hmac.HMAC(b"key")
        h2 = h1.copy()
        self.assertEqual(h1.digest_cons, h2.digest_cons,
                         "digest constructors don't match.")
        self.assertEqual(type(h1.inner), type(h2.inner),
                         "Types of inner don't match.")
        self.assertEqual(type(h1.outer), type(h2.outer),
                         "Types of outer don't match.")

    def test_realcopy(self):
        # Testing if the copy method created a real copy.
        h1 = hmac.HMAC(b"key")
        h2 = h1.copy()
        self.assertIsNot(h1, h2, "No real copy of the HMAC instance.")
        self.assertIsNot(h1.inner, h2.inner,
                         "No real copy of the attribute 'inner'.")
        self.assertIsNot(h1.outer, h2.outer,
                         "No real copy of the attribute 'outer'.")

    def test_equality(self):
        # Testing if the copy has the same digests.
        h1 = hmac.HMAC(b"key")
        h1.update(b"some random text")
        h2 = h1.copy()
        self.assertEqual(h1.digest(), h2.digest(),
                         "Digest of copy doesn't match original digest.")
        self.assertEqual(h1.hexdigest(), h2.hexdigest(),
                         "Hexdigest of copy doesn't match original hexdigest.")
def test_main():
    """Run all test case classes in this module (CPython regrtest entry point)."""
    support.run_unittest(
        TestVectorsTestCase,
        ConstructorTestCase,
        SanityTestCase,
        CopyTestCase
    )

if __name__ == "__main__":
    test_main()
| apache-2.0 |
CirrusLogic/rpi-linux | tools/perf/scripts/python/sched-migration.py | 1910 | 11965 | #!/usr/bin/python
#
# Cpu task migration overview toy
#
# Copyright (C) 2010 Frederic Weisbecker <fweisbec@gmail.com>
#
# perf script event handlers have been generated by perf script -g python
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import os
import sys
from collections import defaultdict
from UserList import UserList
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
sys.path.append('scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from SchedGui import *
# pid -> last comm seen for that pid; pid 0 is the idle task.
threads = {0: "idle"}


def thread_name(pid):
	"""Render a task as 'comm:pid' using the comm recorded in `threads`."""
	return "%s:%d" % (threads[pid], pid)
class RunqueueEventUnknown:
	"""Placeholder event used when nothing notable happened on a runqueue."""

	@staticmethod
	def color():
		# No dedicated highlight color for unknown events.
		return None

	def __repr__(self):
		return "unknown"
class RunqueueEventSleep:
	"""Runqueue event: a task left the runqueue to sleep."""

	def __init__(self, sleeper):
		# pid of the task that went to sleep.
		self.sleeper = sleeper

	@staticmethod
	def color():
		# Blue highlight for sleep events.
		return (0, 0, 0xff)

	def __repr__(self):
		return "%s gone to sleep" % thread_name(self.sleeper)
class RunqueueEventWakeup:
	"""Runqueue event: a sleeping task was woken up."""

	def __init__(self, wakee):
		# pid of the task that was woken.
		self.wakee = wakee

	@staticmethod
	def color():
		# Yellow highlight for wakeups.
		return (0xff, 0xff, 0)

	def __repr__(self):
		return "%s woke up" % thread_name(self.wakee)
class RunqueueEventFork:
	"""Runqueue event: a newly forked task appeared on the runqueue."""

	def __init__(self, child):
		# pid of the freshly forked child.
		self.child = child

	@staticmethod
	def color():
		# Green highlight for forks.
		return (0, 0xff, 0)

	def __repr__(self):
		return "new forked task %s" % thread_name(self.child)
class RunqueueMigrateIn:
	"""Runqueue event: a task migrated onto this CPU."""

	def __init__(self, new):
		# pid of the task that arrived.
		self.new = new

	@staticmethod
	def color():
		# Cyan-ish highlight for inbound migrations.
		return (0, 0xf0, 0xff)

	def __repr__(self):
		return "task migrated in %s" % thread_name(self.new)
class RunqueueMigrateOut:
	"""Runqueue event: a task migrated away from this CPU."""

	def __init__(self, old):
		# pid of the task that left.
		self.old = old

	@staticmethod
	def color():
		# Magenta highlight for outbound migrations.
		return (0xff, 0, 0xff)

	def __repr__(self):
		return "task migrated out %s" % thread_name(self.old)
class RunqueueSnapshot:
	"""Snapshot of one CPU runqueue: the runnable tasks plus the event
	that produced this state. Mutation methods return a NEW snapshot
	(or self when nothing changed)."""

	def __init__(self, tasks = [0], event = RunqueueEventUnknown()):
		# NOTE(review): the mutable/shared defaults are tolerable here only
		# because they are never mutated: tasks is immediately frozen into a
		# tuple and the default event object carries no per-instance state.
		self.tasks = tuple(tasks)
		self.event = event

	def sched_switch(self, prev, prev_state, next):
		# Returns the snapshot after a context switch from `prev` to `next`.
		event = RunqueueEventUnknown()

		# Fast path: prev stayed runnable and both tasks are already tracked.
		if taskState(prev_state) == "R" and next in self.tasks \
			and prev in self.tasks:
			return self

		if taskState(prev_state) != "R":
			event = RunqueueEventSleep(prev)

		next_tasks = list(self.tasks[:])
		if prev in self.tasks:
			if taskState(prev_state) != "R":
				next_tasks.remove(prev)
		elif taskState(prev_state) == "R":
			next_tasks.append(prev)

		if next not in next_tasks:
			next_tasks.append(next)

		return RunqueueSnapshot(next_tasks, event)

	def migrate_out(self, old):
		# Returns a snapshot with task `old` removed (self if it was absent).
		if old not in self.tasks:
			return self
		next_tasks = [task for task in self.tasks if task != old]

		return RunqueueSnapshot(next_tasks, RunqueueMigrateOut(old))

	def __migrate_in(self, new, event):
		# Shared helper for migrate_in/wake_up/wake_up_new: add `new` with
		# the given event. If `new` is already tracked, only the event is
		# updated (in place, on this snapshot).
		if new in self.tasks:
			self.event = event
			return self
		next_tasks = self.tasks[:] + tuple([new])

		return RunqueueSnapshot(next_tasks, event)

	def migrate_in(self, new):
		return self.__migrate_in(new, RunqueueMigrateIn(new))

	def wake_up(self, new):
		return self.__migrate_in(new, RunqueueEventWakeup(new))

	def wake_up_new(self, new):
		return self.__migrate_in(new, RunqueueEventFork(new))

	def load(self):
		""" Provide the number of tasks on the runqueue.
		    Don't count idle"""
		return len(self.tasks) - 1

	def __repr__(self):
		ret = self.tasks.__repr__()
		# NOTE(review): origin_tostring() is not defined anywhere in this
		# file, so calling repr() on a snapshot raises AttributeError.
		ret += self.origin_tostring()

		return ret
class TimeSlice:
	"""State of all runqueues over one [start, end) interval. Each event
	closes the previous slice and opens a new one chained via `prev`."""

	def __init__(self, start, prev):
		self.start = start
		self.prev = prev
		self.end = start
		# cpus that triggered the event
		self.event_cpus = []
		if prev is not None:
			# Carry over the aggregate load and per-cpu runqueues.
			self.total_load = prev.total_load
			self.rqs = prev.rqs.copy()
		else:
			self.rqs = defaultdict(RunqueueSnapshot)
			self.total_load = 0

	def __update_total_load(self, old_rq, new_rq):
		# Keep the across-all-cpus task count in sync with a runqueue change.
		diff = new_rq.load() - old_rq.load()
		self.total_load += diff

	def sched_switch(self, ts_list, prev, prev_state, next, cpu):
		"""Apply a context switch on `cpu`; records this slice in ts_list
		only when the runqueue actually changed."""
		old_rq = self.prev.rqs[cpu]
		new_rq = old_rq.sched_switch(prev, prev_state, next)

		if old_rq is new_rq:
			return

		self.rqs[cpu] = new_rq
		self.__update_total_load(old_rq, new_rq)
		ts_list.append(self)
		self.event_cpus = [cpu]

	def migrate(self, ts_list, new, old_cpu, new_cpu):
		"""Apply a task migration from old_cpu to new_cpu."""
		if old_cpu == new_cpu:
			return
		old_rq = self.prev.rqs[old_cpu]
		out_rq = old_rq.migrate_out(new)
		self.rqs[old_cpu] = out_rq
		self.__update_total_load(old_rq, out_rq)

		new_rq = self.prev.rqs[new_cpu]
		in_rq = new_rq.migrate_in(new)
		self.rqs[new_cpu] = in_rq
		self.__update_total_load(new_rq, in_rq)

		ts_list.append(self)

		# Only flag the source cpu when the task was actually removed there.
		if old_rq is not out_rq:
			self.event_cpus.append(old_cpu)
		self.event_cpus.append(new_cpu)

	def wake_up(self, ts_list, pid, cpu, fork):
		"""Apply a wakeup (or, when fork is true, a new-task wakeup) on `cpu`."""
		old_rq = self.prev.rqs[cpu]
		if fork:
			new_rq = old_rq.wake_up_new(pid)
		else:
			new_rq = old_rq.wake_up(pid)

		if new_rq is old_rq:
			return

		self.rqs[cpu] = new_rq
		self.__update_total_load(old_rq, new_rq)
		ts_list.append(self)
		self.event_cpus = [cpu]

	def next(self, t):
		"""Close this slice at time t and return the following slice."""
		self.end = t
		return TimeSlice(t, self)
class TimeSliceList(UserList):
	"""Ordered list of TimeSlice objects plus drawing helpers for the GUI.

	NOTE(review): this script targets Python 2 (it uses xrange below and the
	py2-only UserList module import): `(end + start) / 2` relies on integer
	division and would produce a float index under Python 3.
	"""

	def __init__(self, arg = []):
		self.data = arg

	def get_time_slice(self, ts):
		# Open a new slice ending at ts (bootstrapping an initial slice
		# when the list is still empty).
		if len(self.data) == 0:
			slice = TimeSlice(ts, TimeSlice(-1, None))
		else:
			slice = self.data[-1].next(ts)
		return slice

	def find_time_slice(self, ts):
		"""Binary-search the slice containing timestamp ts; -1 if none."""
		start = 0
		end = len(self.data)
		found = -1
		searching = True
		while searching:
			if start == end or start == end - 1:
				searching = False

			i = (end + start) / 2
			if self.data[i].start <= ts and self.data[i].end >= ts:
				found = i
				end = i
				continue

			if self.data[i].end < ts:
				start = i

			elif self.data[i].start > ts:
				end = i

		return found

	def set_root_win(self, win):
		self.root_win = win

	def mouse_down(self, cpu, t):
		# Show a textual summary of the runqueue under the mouse click.
		idx = self.find_time_slice(t)
		if idx == -1:
			return

		ts = self[idx]
		rq = ts.rqs[cpu]
		raw = "CPU: %d\n" % cpu
		raw += "Last event : %s\n" % rq.event.__repr__()
		raw += "Timestamp : %d.%06d\n" % (ts.start / (10 ** 9), (ts.start % (10 ** 9)) / 1000)
		raw += "Duration : %6d us\n" % ((ts.end - ts.start) / (10 ** 6))
		raw += "Load = %d\n" % rq.load()
		for t in rq.tasks:
			raw += "%s \n" % thread_name(t)

		self.root_win.update_summary(raw)

	def update_rectangle_cpu(self, slice, cpu):
		# Paint one cpu/slice cell: redder means a larger share of the
		# total load; the top strip gets the triggering event's color.
		rq = slice.rqs[cpu]

		if slice.total_load != 0:
			load_rate = rq.load() / float(slice.total_load)
		else:
			load_rate = 0

		red_power = int(0xff - (0xff * load_rate))
		color = (0xff, red_power, red_power)

		top_color = None

		if cpu in slice.event_cpus:
			top_color = rq.event.color()

		self.root_win.paint_rectangle_zone(cpu, color, top_color, slice.start, slice.end)

	def fill_zone(self, start, end):
		"""Repaint every cpu cell for the slices overlapping [start, end]."""
		i = self.find_time_slice(start)
		if i == -1:
			return

		for i in xrange(i, len(self.data)):
			timeslice = self.data[i]
			if timeslice.start > end:
				return

			for cpu in timeslice.rqs:
				self.update_rectangle_cpu(timeslice, cpu)

	def interval(self):
		"""Return the (first start, last end) timestamp pair."""
		if len(self.data) == 0:
			return (0, 0)

		return (self.data[0].start, self.data[-1].end)

	def nr_rectangles(self):
		# Highest cpu index seen in the final slice (row count for the GUI).
		last_ts = self.data[-1]
		max_cpu = 0
		for cpu in last_ts.rqs:
			if cpu > max_cpu:
				max_cpu = cpu
		return max_cpu
class SchedEventProxy:
	"""Translates raw perf sched tracepoints into TimeSlice updates.

	Tracks which task is currently on each cpu so that inconsistent
	(lost-event) switch records can be detected and reported.
	NOTE(review): Python 2 only — the rejection message below uses the
	print statement.
	"""

	def __init__(self):
		# cpu -> pid currently running there; -1 means "not yet known".
		self.current_tsk = defaultdict(lambda : -1)
		self.timeslices = TimeSliceList()

	def sched_switch(self, headers, prev_comm, prev_pid, prev_prio, prev_state,
			 next_comm, next_pid, next_prio):
		""" Ensure the task we sched out this cpu is really the one
		    we logged. Otherwise we may have missed traces """
		on_cpu_task = self.current_tsk[headers.cpu]

		if on_cpu_task != -1 and on_cpu_task != prev_pid:
			print "Sched switch event rejected ts: %s cpu: %d prev: %s(%d) next: %s(%d)" % \
				(headers.ts_format(), headers.cpu, prev_comm, prev_pid, next_comm, next_pid)

		threads[prev_pid] = prev_comm
		threads[next_pid] = next_comm
		self.current_tsk[headers.cpu] = next_pid

		ts = self.timeslices.get_time_slice(headers.ts())
		ts.sched_switch(self.timeslices, prev_pid, prev_state, next_pid, headers.cpu)

	def migrate(self, headers, pid, prio, orig_cpu, dest_cpu):
		ts = self.timeslices.get_time_slice(headers.ts())
		ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu)

	def wake_up(self, headers, comm, pid, success, target_cpu, fork):
		# Failed wakeups carry no runqueue change.
		if success == 0:
			return
		ts = self.timeslices.get_time_slice(headers.ts())
		ts.wake_up(self.timeslices, pid, target_cpu, fork)
def trace_begin():
	"""perf-script hook: called once before any event; sets up the proxy."""
	global parser
	parser = SchedEventProxy()

def trace_end():
	"""perf-script hook: called after all events; shows the wx GUI."""
	app = wx.App(False)
	timeslices = parser.timeslices
	frame = RootFrame(timeslices, "Migration")
	app.MainLoop()
# The sched__* functions below are callbacks invoked by perf's scripting
# engine, one per tracepoint. This tool only visualizes switches, wakeups
# and migrations, so the following handlers are deliberate no-ops.

def sched__sched_stat_runtime(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, comm, pid, runtime, vruntime):
	pass

def sched__sched_stat_iowait(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, comm, pid, delay):
	pass

def sched__sched_stat_sleep(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, comm, pid, delay):
	pass

def sched__sched_stat_wait(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, comm, pid, delay):
	pass

def sched__sched_process_fork(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, parent_comm, parent_pid, child_comm, child_pid):
	pass

def sched__sched_process_wait(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, comm, pid, prio):
	pass

def sched__sched_process_exit(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, comm, pid, prio):
	pass

def sched__sched_process_free(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, comm, pid, prio):
	pass
# Handlers for the events this tool actually visualizes: each packs the
# common fields into an EventHeaders and forwards to the global proxy.

def sched__sched_migrate_task(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, comm, pid, prio, orig_cpu,
	dest_cpu):
	headers = EventHeaders(common_cpu, common_secs, common_nsecs,
				common_pid, common_comm, common_callchain)
	parser.migrate(headers, pid, prio, orig_cpu, dest_cpu)

def sched__sched_switch(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm, common_callchain,
	prev_comm, prev_pid, prev_prio, prev_state,
	next_comm, next_pid, next_prio):

	headers = EventHeaders(common_cpu, common_secs, common_nsecs,
				common_pid, common_comm, common_callchain)
	parser.sched_switch(headers, prev_comm, prev_pid, prev_prio, prev_state,
			 next_comm, next_pid, next_prio)

def sched__sched_wakeup_new(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, comm, pid, prio, success,
	target_cpu):
	headers = EventHeaders(common_cpu, common_secs, common_nsecs,
				common_pid, common_comm, common_callchain)
	# fork=1: the woken task was just created.
	parser.wake_up(headers, comm, pid, success, target_cpu, 1)

def sched__sched_wakeup(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, comm, pid, prio, success,
	target_cpu):
	headers = EventHeaders(common_cpu, common_secs, common_nsecs,
				common_pid, common_comm, common_callchain)
	# fork=0: ordinary wakeup of an existing task.
	parser.wake_up(headers, comm, pid, success, target_cpu, 0)
# Remaining tracepoint callbacks this tool does not need: no-ops.

def sched__sched_wait_task(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, comm, pid, prio):
	pass

def sched__sched_kthread_stop_ret(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, ret):
	pass

def sched__sched_kthread_stop(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, comm, pid):
	pass

def trace_unhandled(event_name, context, event_fields_dict):
	# Catch-all for events without a dedicated handler above.
	pass
| gpl-2.0 |
jbassen/edx-platform | lms/djangoapps/django_comment_client/management/commands/rename_user.py | 3 | 1646 | """
Change the username of an existing user
"""
import logging
from os.path import basename
from pymongo.errors import PyMongoError
from django.core.management.base import BaseCommand, CommandError
from django.db import IntegrityError
from django.contrib.auth.models import User
from django_comment_client import management_utils
log = logging.getLogger(__name__)
class Command(BaseCommand):
    """
    invoke this manage.py command from the console as follows:
    python manage.py lms rename_user <username1> <username2>
    """
    args = '<old_username> <new_username>'
    help = 'Modify the username of an existing user'
    def handle(self, *args, **options):
        """
        utilizes the rename_user function in the management_utils module
        :param args: <old_username> <new_username>
        :param options: no options supported
        """
        # Exactly two positional arguments (old and new username) are required.
        if len(args) != 2:
            # Derive the command name from this file's basename (extension
            # stripped) so the usage text stays correct if the file is renamed.
            command_name = '.'.join(basename(__file__).split('.')[:-1])
            raise CommandError(
                "Usage is {command_name} {command_args}".format(
                    command_name=command_name,
                    command_args=self.args,
                )
            )
        try:
            management_utils.rename_user(*args)
        except (User.DoesNotExist, IntegrityError, PyMongoError):
            # The rename can fail because the old user does not exist, the
            # new name collides (IntegrityError), or the forum/Mongo side of
            # the rename fails; log with traceback, do not re-raise.
            log.exception('FAILED TO MODIFY USERNAME FOR USER: {old_username}'.format(
                old_username=args[0]
            ))
        else:
            # Python 2 print statement: report success to the console.
            print "Changed username of user: {old_username} to {new_username}".format(
                old_username=args[0],
                new_username=args[1],
            )
| agpl-3.0 |
thedep2/CouchPotatoServer | couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/syfy.py | 159 | 1827 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
class SyfyIE(InfoExtractor):
    # Two URL shapes are accepted: a direct video page carrying a numeric
    # "vid:<id>", or a show landing page whose slug is captured as video_name.
    _VALID_URL = r'https?://www\.syfy\.com/(?:videos/.+?vid:(?P<id>[0-9]+)|(?!videos)(?P<video_name>[^/]+)(?:$|[?#]))'
    _TESTS = [{
        'url': 'http://www.syfy.com/videos/Robot%20Combat%20League/Behind%20the%20Scenes/vid:2631458',
        'info_dict': {
            'id': 'NmqMrGnXvmO1',
            'ext': 'flv',
            'title': 'George Lucas has Advice for his Daughter',
            'description': 'Listen to what insights George Lucas give his daughter Amanda.',
        },
        'add_ie': ['ThePlatform'],
    }, {
        'url': 'http://www.syfy.com/wilwheaton',
        'md5': '94dfa54ee3ccb63295b276da08c415f6',
        'info_dict': {
            'id': '4yoffOOXC767',
            'ext': 'flv',
            'title': 'The Wil Wheaton Project - Premiering May 27th at 10/9c.',
            'description': 'The Wil Wheaton Project premieres May 27th at 10/9c. Don\'t miss it.',
        },
        'add_ie': ['ThePlatform'],
        'skip': 'Blocked outside the US',
    }]
    def _real_extract(self, url):
        """Resolve a Syfy URL to the underlying platform video."""
        mobj = re.match(self._VALID_URL, url)
        video_name = mobj.group('video_name')
        if video_name:
            # Landing-page URL: fetch the page, scrape the numeric id from
            # the embedded video-controller iframe, and rebuild the
            # canonical /videos/ URL before extracting.
            generic_webpage = self._download_webpage(url, video_name)
            video_id = self._search_regex(
                r'<iframe.*?class="video_iframe_page"\s+src="/_utils/video/thP_video_controller.php.*?_vid([0-9]+)">',
                generic_webpage, 'video ID')
            url = 'http://www.syfy.com/videos/%s/%s/vid:%s' % (
                video_name, video_name, video_id)
        else:
            video_id = mobj.group('id')
        webpage = self._download_webpage(url, video_id)
        # Hand off to another extractor via the og:video URL on the page.
        return self.url_result(self._og_search_video_url(webpage))
| gpl-3.0 |
ygol/dotfiles | bin/.venv-ansible-venv/lib/python2.6/site-packages/ansible/modules/core/cloud/rackspace/rax_cdb_user.py | 60 | 6244 | #!/usr/bin/python -tt
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
DOCUMENTATION = '''
---
module: rax_cdb_user
short_description: create / delete a Rackspace Cloud Database
description:
- create / delete a database in the Cloud Databases.
version_added: "1.8"
options:
cdb_id:
description:
- The databases server UUID
default: null
db_username:
description:
- Name of the database user
default: null
db_password:
description:
- Database user password
default: null
databases:
description:
- Name of the databases that the user can access
default: []
host:
description:
- Specifies the host from which a user is allowed to connect to
the database. Possible values are a string containing an IPv4 address
or "%" to allow connecting from any host
default: '%'
state:
description:
- Indicate desired state of the resource
choices: ['present', 'absent']
default: present
author: Simon JAILLET
extends_documentation_fragment: rackspace
'''
EXAMPLES = '''
- name: Build a user in Cloud Databases
tasks:
- name: User build request
local_action:
module: rax_cdb_user
credentials: ~/.raxpub
region: IAD
cdb_id: 323e7ce0-9cb0-11e3-a5e2-0800200c9a66
db_username: user1
db_password: user1
databases: ['db1']
state: present
register: rax_db_user
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
def find_user(instance, name):
    """Look up the database user *name* on the Cloud Databases *instance*.

    Returns the user object on success, or ``False`` when the lookup
    raises (pyrax signals a missing user via an exception).
    """
    try:
        return instance.get_user(name)
    except Exception:
        return False
def save_user(module, cdb_id, name, password, databases, host):
    # Create the user if absent, otherwise reconcile its host pattern and
    # database grants.  Terminates the module run via exit_json/fail_json;
    # never returns normally.  (Python 2 code: iteritems, `except X, e`.)
    for arg, value in dict(cdb_id=cdb_id, name=name).iteritems():
        if not value:
            module.fail_json(msg='%s is required for the "rax_cdb_user" '
                                 'module' % arg)
    cdb = pyrax.cloud_databases
    try:
        instance = cdb.get(cdb_id)
    except Exception, e:
        module.fail_json(msg='%s' % e.message)
    changed = False
    user = find_user(instance, name)
    if not user:
        action = 'create'
        try:
            user = instance.create_user(name=name,
                                        password=password,
                                        database_names=databases,
                                        host=host)
        except Exception, e:
            module.fail_json(msg='%s' % e.message)
        else:
            changed = True
    else:
        action = 'update'
        # Only a host change is reported as 'changed'; the password is
        # re-applied unconditionally because it cannot be read back.
        if user.host != host:
            changed = True
        user.update(password=password, host=host)
        former_dbs = set([item.name for item in user.list_user_access()])
        databases = set(databases)
        if databases != former_dbs:
            # Reconcile grants: revoke what is no longer wanted, then grant
            # what is newly requested.
            try:
                revoke_dbs = [db for db in former_dbs if db not in databases]
                user.revoke_user_access(db_names=revoke_dbs)
                new_dbs = [db for db in databases if db not in former_dbs]
                user.grant_user_access(db_names=new_dbs)
            except Exception, e:
                module.fail_json(msg='%s' % e.message)
            else:
                changed = True
    module.exit_json(changed=changed, action=action, user=rax_to_dict(user))
def delete_user(module, cdb_id, name):
    # Delete the named user if it exists; absent users are treated as
    # already-deleted (changed=False).  Terminates via exit_json/fail_json.
    for arg, value in dict(cdb_id=cdb_id, name=name).iteritems():
        if not value:
            module.fail_json(msg='%s is required for the "rax_cdb_user"'
                                 ' module' % arg)
    cdb = pyrax.cloud_databases
    try:
        instance = cdb.get(cdb_id)
    except Exception, e:
        module.fail_json(msg='%s' % e.message)
    changed = False
    user = find_user(instance, name)
    if user:
        try:
            user.delete()
        except Exception, e:
            module.fail_json(msg='%s' % e.message)
        else:
            changed = True
    module.exit_json(changed=changed, action='delete')
def rax_cdb_user(module, state, cdb_id, name, password, databases, host):
    # Route the requested state to the matching operation; both helpers
    # terminate the module themselves via exit_json/fail_json.
    if state == 'absent':
        delete_user(module, cdb_id, name)
    elif state == 'present':
        save_user(module, cdb_id, name, password, databases, host)
def main():
    """Ansible module entry point: parse arguments and apply the state."""
    argument_spec = rax_argument_spec()
    argument_spec.update(
        dict(
            cdb_id=dict(type='str', required=True),
            db_username=dict(type='str', required=True),
            db_password=dict(type='str', required=True, no_log=True),
            databases=dict(type='list', default=[]),
            host=dict(type='str', default='%'),
            state=dict(default='present', choices=['present', 'absent'])
        )
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=rax_required_together(),
    )
    # Fail fast when the pyrax SDK is missing (detected at import time).
    if not HAS_PYRAX:
        module.fail_json(msg='pyrax is required for this module')
    cdb_id = module.params.get('cdb_id')
    name = module.params.get('db_username')
    password = module.params.get('db_password')
    databases = module.params.get('databases')
    # Python 2 unicode(): the MySQL host pattern (e.g. '%') must be text.
    host = unicode(module.params.get('host'))
    state = module.params.get('state')
    setup_rax_module(module, pyrax)
    rax_cdb_user(module, state, cdb_id, name, password, databases, host)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
# invoke the module
main()
| mit |
philippeback/volatility | contrib/plugins/example.py | 58 | 2769 | # Volatility
#
# Authors:
# Mike Auty <mike.auty@gmail.com>
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
import volatility.timefmt as timefmt
import volatility.obj as obj
import volatility.utils as utils
import volatility.commands as commands
#pylint: disable-msg=C0111
class DateTime(commands.Command):
    """A simple example plugin that gets the date/time information from a Windows image"""
    def calculate(self):
        """Calculate and carry out any processing that may take time upon the image"""
        # Load the address space
        addr_space = utils.load_as(self._config)
        # Call a subfunction so that it can be used by other plugins
        return self.get_image_time(addr_space)
    def get_image_time(self, addr_space):
        """Extracts the time and date from the KUSER_SHARED_DATA area

        Returns a dict with keys 'ImageDatetime' (the _KUSER_SHARED_DATA
        SystemTime member) and 'ImageTz' (an OffsetTzInfo built from the
        image's TimeZoneBias).
        """
        # Get the Image Datetime
        result = {}
        # Create a VOLATILITY_MAGIC object to look up the location of certain constants
        # Get the KUSER_SHARED_DATA location
        KUSER_SHARED_DATA = obj.VolMagic(addr_space).KUSER_SHARED_DATA.v()
        # Create the _KUSER_SHARED_DATA object at the appropriate offset
        k = obj.Object("_KUSER_SHARED_DATA",
                       offset = KUSER_SHARED_DATA,
                       vm = addr_space)
        # Start reading members from it
        result['ImageDatetime'] = k.SystemTime
        # TimeZoneBias is stored in 100ns units; convert to seconds and
        # negate, since the bias is subtracted from UTC to get local time.
        result['ImageTz'] = timefmt.OffsetTzInfo(-k.TimeZoneBias.as_windows_timestamp() / 10000000)
        # Return any results we got
        return result
    def render_text(self, outfd, data):
        """Renders the calculated data as text to outfd"""
        # Convert the result into a datetime object for display in local and non local format
        dt = data['ImageDatetime'].as_datetime()
        # Display the datetime in UTC as taken from the image
        outfd.write("Image date and time : {0}\n".format(data['ImageDatetime']))
        # Display the datetime taking into account the timezone of the image itself
        outfd.write("Image local date and time : {0}\n".format(timefmt.display_datetime(dt, data['ImageTz'])))
| gpl-2.0 |
b-carter/numpy | numpy/distutils/system_info.py | 4 | 85315 | #!/usr/bin/env python
"""
This file defines a set of system_info classes for getting
information about various resources (libraries, library directories,
include directories, etc.) in the system. Currently, the following
classes are available:
atlas_info
atlas_threads_info
atlas_blas_info
atlas_blas_threads_info
lapack_atlas_info
lapack_atlas_threads_info
atlas_3_10_info
atlas_3_10_threads_info
atlas_3_10_blas_info,
atlas_3_10_blas_threads_info,
lapack_atlas_3_10_info
lapack_atlas_3_10_threads_info
blas_info
lapack_info
openblas_info
blis_info
blas_opt_info # usage recommended
lapack_opt_info # usage recommended
fftw_info,dfftw_info,sfftw_info
fftw_threads_info,dfftw_threads_info,sfftw_threads_info
djbfft_info
x11_info
lapack_src_info
blas_src_info
numpy_info
numarray_info
numpy_info
boost_python_info
agg2_info
wx_info
gdk_pixbuf_xlib_2_info
gdk_pixbuf_2_info
gdk_x11_2_info
gtkp_x11_2_info
gtkp_2_info
xft_info
freetype2_info
umfpack_info
Usage:
info_dict = get_info(<name>)
where <name> is a string 'atlas','x11','fftw','lapack','blas',
'lapack_src', 'blas_src', etc. For a complete list of allowed names,
see the definition of get_info() function below.
Returned info_dict is a dictionary which is compatible with
distutils.setup keyword arguments. If info_dict == {}, then the
asked resource is not available (system_info could not find it).
Several *_info classes specify an environment variable to specify
the locations of software. When setting the corresponding environment
variable to 'None' then the software will be ignored, even when it
is available in system.
Global parameters:
system_info.search_static_first - search static libraries (.a)
in precedence to shared ones (.so, .sl) if enabled.
system_info.verbosity - output the results to stdout if enabled.
The file 'site.cfg' is looked for in
1) Directory of main setup.py file being run.
2) Home directory of user running the setup.py file as ~/.numpy-site.cfg
3) System wide directory (location of this file...)
The first one found is used to get system configuration options The
format is that used by ConfigParser (i.e., Windows .INI style). The
section ALL has options that are the default for each section. The
available sections are fftw, atlas, and x11. Appropriate defaults are
used if nothing is specified.
The order of finding the locations of resources is the following:
1. environment variable
2. section in site.cfg
3. ALL section in site.cfg
Only the first complete match is returned.
Example:
----------
[ALL]
library_dirs = /usr/lib:/usr/local/lib:/opt/lib
include_dirs = /usr/include:/usr/local/include:/opt/include
src_dirs = /usr/local/src:/opt/src
# search static libraries (.a) in preference to shared ones (.so)
search_static_first = 0
[fftw]
fftw_libs = rfftw, fftw
fftw_opt_libs = rfftw_threaded, fftw_threaded
# if the above aren't found, look for {s,d}fftw_libs and {s,d}fftw_opt_libs
[atlas]
library_dirs = /usr/lib/3dnow:/usr/lib/3dnow/atlas
# for overriding the names of the atlas libraries
atlas_libs = lapack, f77blas, cblas, atlas
[x11]
library_dirs = /usr/X11R6/lib
include_dirs = /usr/X11R6/include
----------
Authors:
Pearu Peterson <pearu@cens.ioc.ee>, February 2002
David M. Cooke <cookedm@physics.mcmaster.ca>, April 2002
Copyright 2002 Pearu Peterson all rights reserved,
Pearu Peterson <pearu@cens.ioc.ee>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy (BSD style) license. See LICENSE.txt that came with
this distribution for specifics.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
"""
from __future__ import division, absolute_import, print_function
import sys
import os
import re
import copy
import warnings
from glob import glob
from functools import reduce
if sys.version_info[0] < 3:
from ConfigParser import NoOptionError
from ConfigParser import RawConfigParser as ConfigParser
else:
from configparser import NoOptionError
from configparser import RawConfigParser as ConfigParser
# It seems that some people are importing ConfigParser from here so is
# good to keep its class name. Use of RawConfigParser is needed in
# order to be able to load path names with percent in them, like
# `feature%2Fcool` which is common on git flow branch names.
from distutils.errors import DistutilsError
from distutils.dist import Distribution
import distutils.sysconfig
from distutils import log
from distutils.util import get_platform
from numpy.distutils.exec_command import (
find_executable, exec_command, get_pythonexe)
from numpy.distutils.misc_util import (is_sequence, is_string,
get_shared_lib_extension)
from numpy.distutils.command.config import config as cmd_config
from numpy.distutils.compat import get_exception
import distutils.ccompiler
import tempfile
import shutil
# Determine number of bits
import platform
_bits = {'32bit': 32, '64bit': 64}
platform_bits = _bits[platform.architecture()[0]]
def libpaths(paths, bits):
    """Return a list of library paths valid on 32 or 64 bit systems.

    Parameters
    ----------
    paths : sequence of str
        Candidate library directories.
    bits : int
        Platform word size; must be 32 or 64, otherwise ValueError is raised.

    Returns
    -------
    list of str
        On 32-bit platforms, *paths* unchanged.  On 64-bit platforms, each
        entry is preceded by its '64'-suffixed variant (e.g. '/usr/lib64'
        before '/usr/lib').
    """
    # 32-bit platforms use the paths as given.
    if bits == 32:
        return paths
    if bits != 64:
        raise ValueError("Invalid bit size in libpaths: 32 or 64 only")
    # 64-bit: try the '64'-suffixed directory before each plain one.
    return [candidate for base in paths for candidate in (base + '64', base)]
# Module-level configuration of the default search directories.  These lists
# are filtered down to directories that actually exist before any
# system_info subclass uses them.
if sys.platform == 'win32':
    default_lib_dirs = ['C:\\',
                        os.path.join(distutils.sysconfig.EXEC_PREFIX,
                                     'libs')]
    default_runtime_dirs = []
    default_include_dirs = []
    default_src_dirs = ['.']
    default_x11_lib_dirs = []
    default_x11_include_dirs = []
else:
    default_lib_dirs = libpaths(['/usr/local/lib', '/opt/lib', '/usr/lib',
                                 '/opt/local/lib', '/sw/lib'], platform_bits)
    default_runtime_dirs = []
    default_include_dirs = ['/usr/local/include',
                            '/opt/include', '/usr/include',
                            # path of umfpack under macports
                            '/opt/local/include/ufsparse',
                            '/opt/local/include', '/sw/include',
                            '/usr/include/suitesparse']
    default_src_dirs = ['.', '/usr/local/src', '/opt/src', '/sw/src']
    default_x11_lib_dirs = libpaths(['/usr/X11R6/lib', '/usr/X11/lib',
                                     '/usr/lib'], platform_bits)
    default_x11_include_dirs = ['/usr/X11R6/include', '/usr/X11/include',
                                '/usr/include']
    # Debian-style multiarch: locate libX11 under /usr/lib/<triplet>/.
    if os.path.exists('/usr/lib/X11'):
        globbed_x11_dir = glob('/usr/lib/*/libX11.so')
        if globbed_x11_dir:
            x11_so_dir = os.path.split(globbed_x11_dir[0])[0]
            default_x11_lib_dirs.extend([x11_so_dir, '/usr/lib/X11'])
            default_x11_include_dirs.extend(['/usr/lib/X11/include',
                                             '/usr/include/X11'])
    import subprocess as sp
    tmp = None
    try:
        # Explicitly open/close file to avoid ResourceWarning when
        # tests are run in debug mode Python 3.
        tmp = open(os.devnull, 'w')
        p = sp.Popen(["gcc", "-print-multiarch"], stdout=sp.PIPE,
                     stderr=tmp)
    except (OSError, DistutilsError):
        # OSError if gcc is not installed, or SandboxViolation (DistutilsError
        # subclass) if an old setuptools bug is triggered (see gh-3160).
        pass
    else:
        triplet = str(p.communicate()[0].decode().strip())
        if p.returncode == 0:
            # gcc supports the "-print-multiarch" option
            default_x11_lib_dirs += [os.path.join("/usr/lib/", triplet)]
            default_lib_dirs += [os.path.join("/usr/lib/", triplet)]
    finally:
        if tmp is not None:
            tmp.close()
# Give the interpreter's own prefix first priority for libraries.
if os.path.join(sys.prefix, 'lib') not in default_lib_dirs:
    default_lib_dirs.insert(0, os.path.join(sys.prefix, 'lib'))
default_include_dirs.append(os.path.join(sys.prefix, 'include'))
default_src_dirs.append(os.path.join(sys.prefix, 'src'))
# Keep only directories that exist on this machine.
default_lib_dirs = [_m for _m in default_lib_dirs if os.path.isdir(_m)]
default_runtime_dirs = [_m for _m in default_runtime_dirs if os.path.isdir(_m)]
default_include_dirs = [_m for _m in default_include_dirs if os.path.isdir(_m)]
default_src_dirs = [_m for _m in default_src_dirs if os.path.isdir(_m)]
so_ext = get_shared_lib_extension()
def get_standard_file(fname):
    """Return existing files named *fname*, searched in this order:

    1) the directory containing this module (system-wide),
    2) the user's home directory,
    3) the current working directory.
    """
    found = []
    # 1) Next to this module.  When __file__ is undefined (frozen or
    #    exec'd code) fall back to argv[0], performing no lookup -- this
    #    mirrors the historical behaviour.
    try:
        here = __file__
    except NameError:
        here = sys.argv[0]
    else:
        candidate = os.path.join(os.path.split(os.path.abspath(here))[0],
                                 fname)
        if os.path.isfile(candidate):
            found.append(candidate)
    # 2) The user's home directory, when it can be resolved.
    try:
        home = os.path.expanduser('~')
    except KeyError:
        pass
    else:
        candidate = os.path.join(home, fname)
        if os.path.isfile(candidate):
            found.append(candidate)
    # 3) The current working directory.
    if os.path.isfile(fname):
        found.append(os.path.abspath(fname))
    return found
def get_info(name, notfound_action=0):
    """Return the info dictionary for resource *name*.

    notfound_action:
      0 - do nothing
      1 - display warning message
      2 - raise error
    """
    # Map the public resource name to its system_info subclass; unknown
    # names fall back to the base class (which reports NOT AVAILABLE).
    cl = {'atlas': atlas_info,  # use lapack_opt or blas_opt instead
          'atlas_threads': atlas_threads_info,                # ditto
          'atlas_blas': atlas_blas_info,
          'atlas_blas_threads': atlas_blas_threads_info,
          'lapack_atlas': lapack_atlas_info,  # use lapack_opt instead
          'lapack_atlas_threads': lapack_atlas_threads_info,  # ditto
          'atlas_3_10': atlas_3_10_info,  # use lapack_opt or blas_opt instead
          'atlas_3_10_threads': atlas_3_10_threads_info,                # ditto
          'atlas_3_10_blas': atlas_3_10_blas_info,
          'atlas_3_10_blas_threads': atlas_3_10_blas_threads_info,
          'lapack_atlas_3_10': lapack_atlas_3_10_info,  # use lapack_opt instead
          'lapack_atlas_3_10_threads': lapack_atlas_3_10_threads_info,  # ditto
          'mkl': mkl_info,
          # openblas which may or may not have embedded lapack
          'openblas': openblas_info,          # use blas_opt instead
          # openblas with embedded lapack
          'openblas_lapack': openblas_lapack_info, # use blas_opt instead
          'blis': blis_info,                  # use blas_opt instead
          'lapack_mkl': lapack_mkl_info,      # use lapack_opt instead
          'blas_mkl': blas_mkl_info,          # use blas_opt instead
          'x11': x11_info,
          'fft_opt': fft_opt_info,
          'fftw': fftw_info,
          'fftw2': fftw2_info,
          'fftw3': fftw3_info,
          'dfftw': dfftw_info,
          'sfftw': sfftw_info,
          'fftw_threads': fftw_threads_info,
          'dfftw_threads': dfftw_threads_info,
          'sfftw_threads': sfftw_threads_info,
          'djbfft': djbfft_info,
          'blas': blas_info,                  # use blas_opt instead
          'lapack': lapack_info,              # use lapack_opt instead
          'lapack_src': lapack_src_info,
          'blas_src': blas_src_info,
          'numpy': numpy_info,
          'f2py': f2py_info,
          'Numeric': Numeric_info,
          'numeric': Numeric_info,
          'numarray': numarray_info,
          'numerix': numerix_info,
          'lapack_opt': lapack_opt_info,
          'blas_opt': blas_opt_info,
          'boost_python': boost_python_info,
          'agg2': agg2_info,
          'wx': wx_info,
          'gdk_pixbuf_xlib_2': gdk_pixbuf_xlib_2_info,
          'gdk-pixbuf-xlib-2.0': gdk_pixbuf_xlib_2_info,
          'gdk_pixbuf_2': gdk_pixbuf_2_info,
          'gdk-pixbuf-2.0': gdk_pixbuf_2_info,
          'gdk': gdk_info,
          'gdk_2': gdk_2_info,
          'gdk-2.0': gdk_2_info,
          'gdk_x11_2': gdk_x11_2_info,
          'gdk-x11-2.0': gdk_x11_2_info,
          'gtkp_x11_2': gtkp_x11_2_info,
          'gtk+-x11-2.0': gtkp_x11_2_info,
          'gtkp_2': gtkp_2_info,
          'gtk+-2.0': gtkp_2_info,
          'xft': xft_info,
          'freetype2': freetype2_info,
          'umfpack': umfpack_info,
          'amd': amd_info,
          }.get(name.lower(), system_info)
    return cl().get_info(notfound_action)
# NOTE: the docstrings below double as user-facing messages --
# system_info.get_info() warns with / raises notfounderror.__doc__ verbatim,
# so the docstring text is runtime behavior and must not be reworded.
class NotFoundError(DistutilsError):
    """Some third-party program or library is not found."""
class AtlasNotFoundError(NotFoundError):
    """
    Atlas (http://math-atlas.sourceforge.net/) libraries not found.
    Directories to search for the libraries can be specified in the
    numpy/distutils/site.cfg file (section [atlas]) or by setting
    the ATLAS environment variable."""
class LapackNotFoundError(NotFoundError):
    """
    Lapack (http://www.netlib.org/lapack/) libraries not found.
    Directories to search for the libraries can be specified in the
    numpy/distutils/site.cfg file (section [lapack]) or by setting
    the LAPACK environment variable."""
class LapackSrcNotFoundError(LapackNotFoundError):
    """
    Lapack (http://www.netlib.org/lapack/) sources not found.
    Directories to search for the sources can be specified in the
    numpy/distutils/site.cfg file (section [lapack_src]) or by setting
    the LAPACK_SRC environment variable."""
class BlasNotFoundError(NotFoundError):
    """
    Blas (http://www.netlib.org/blas/) libraries not found.
    Directories to search for the libraries can be specified in the
    numpy/distutils/site.cfg file (section [blas]) or by setting
    the BLAS environment variable."""
class BlasSrcNotFoundError(BlasNotFoundError):
    """
    Blas (http://www.netlib.org/blas/) sources not found.
    Directories to search for the sources can be specified in the
    numpy/distutils/site.cfg file (section [blas_src]) or by setting
    the BLAS_SRC environment variable."""
class FFTWNotFoundError(NotFoundError):
    """
    FFTW (http://www.fftw.org/) libraries not found.
    Directories to search for the libraries can be specified in the
    numpy/distutils/site.cfg file (section [fftw]) or by setting
    the FFTW environment variable."""
class DJBFFTNotFoundError(NotFoundError):
    """
    DJBFFT (http://cr.yp.to/djbfft.html) libraries not found.
    Directories to search for the libraries can be specified in the
    numpy/distutils/site.cfg file (section [djbfft]) or by setting
    the DJBFFT environment variable."""
class NumericNotFoundError(NotFoundError):
    """
    Numeric (http://www.numpy.org/) module not found.
    Get it from above location, install it, and retry setup.py."""
class X11NotFoundError(NotFoundError):
    """X11 libraries not found."""
class UmfpackNotFoundError(NotFoundError):
    """
    UMFPACK sparse solver (http://www.cise.ufl.edu/research/sparse/umfpack/)
    not found. Directories to search for the libraries can be specified in the
    numpy/distutils/site.cfg file (section [umfpack]) or by setting
    the UMFPACK environment variable."""
class system_info(object):
""" get_info() is the only public method. Don't use others.
"""
section = 'ALL'
dir_env_var = None
search_static_first = 0 # XXX: disabled by default, may disappear in
# future unless it is proved to be useful.
verbosity = 1
saved_results = {}
notfounderror = NotFoundError
def __init__(self,
default_lib_dirs=default_lib_dirs,
default_include_dirs=default_include_dirs,
verbosity=1,
):
self.__class__.info = {}
self.local_prefixes = []
defaults = {'library_dirs': os.pathsep.join(default_lib_dirs),
'include_dirs': os.pathsep.join(default_include_dirs),
'runtime_library_dirs': os.pathsep.join(default_runtime_dirs),
'rpath': '',
'src_dirs': os.pathsep.join(default_src_dirs),
'search_static_first': str(self.search_static_first),
'extra_compile_args': '', 'extra_link_args': ''}
self.cp = ConfigParser(defaults)
self.files = []
self.files.extend(get_standard_file('.numpy-site.cfg'))
self.files.extend(get_standard_file('site.cfg'))
self.parse_config_files()
if self.section is not None:
self.search_static_first = self.cp.getboolean(
self.section, 'search_static_first')
assert isinstance(self.search_static_first, int)
def parse_config_files(self):
self.cp.read(self.files)
if not self.cp.has_section(self.section):
if self.section is not None:
self.cp.add_section(self.section)
def calc_libraries_info(self):
libs = self.get_libraries()
dirs = self.get_lib_dirs()
# The extensions use runtime_library_dirs
r_dirs = self.get_runtime_lib_dirs()
# Intrinsic distutils use rpath, we simply append both entries
# as though they were one entry
r_dirs.extend(self.get_runtime_lib_dirs(key='rpath'))
info = {}
for lib in libs:
i = self.check_libs(dirs, [lib])
if i is not None:
dict_append(info, **i)
else:
log.info('Library %s was not found. Ignoring' % (lib))
if r_dirs:
i = self.check_libs(r_dirs, [lib])
if i is not None:
# Swap library keywords found to runtime_library_dirs
# the libraries are insisting on the user having defined
# them using the library_dirs, and not necessarily by
# runtime_library_dirs
del i['libraries']
i['runtime_library_dirs'] = i.pop('library_dirs')
dict_append(info, **i)
else:
log.info('Runtime library %s was not found. Ignoring' % (lib))
return info
def set_info(self, **info):
if info:
lib_info = self.calc_libraries_info()
dict_append(info, **lib_info)
# Update extra information
extra_info = self.calc_extra_info()
dict_append(info, **extra_info)
self.saved_results[self.__class__.__name__] = info
def has_info(self):
return self.__class__.__name__ in self.saved_results
def calc_extra_info(self):
""" Updates the information in the current information with
respect to these flags:
extra_compile_args
extra_link_args
"""
info = {}
for key in ['extra_compile_args', 'extra_link_args']:
# Get values
opt = self.cp.get(self.section, key)
if opt:
tmp = {key : [opt]}
dict_append(info, **tmp)
return info
def get_info(self, notfound_action=0):
""" Return a dictonary with items that are compatible
with numpy.distutils.setup keyword arguments.
"""
flag = 0
if not self.has_info():
flag = 1
log.info(self.__class__.__name__ + ':')
if hasattr(self, 'calc_info'):
self.calc_info()
if notfound_action:
if not self.has_info():
if notfound_action == 1:
warnings.warn(self.notfounderror.__doc__, stacklevel=2)
elif notfound_action == 2:
raise self.notfounderror(self.notfounderror.__doc__)
else:
raise ValueError(repr(notfound_action))
if not self.has_info():
log.info(' NOT AVAILABLE')
self.set_info()
else:
log.info(' FOUND:')
res = self.saved_results.get(self.__class__.__name__)
if self.verbosity > 0 and flag:
for k, v in res.items():
v = str(v)
if k in ['sources', 'libraries'] and len(v) > 270:
v = v[:120] + '...\n...\n...' + v[-120:]
log.info(' %s = %s', k, v)
log.info('')
return copy.deepcopy(res)
def get_paths(self, section, key):
dirs = self.cp.get(section, key).split(os.pathsep)
env_var = self.dir_env_var
if env_var:
if is_sequence(env_var):
e0 = env_var[-1]
for e in env_var:
if e in os.environ:
e0 = e
break
if not env_var[0] == e0:
log.info('Setting %s=%s' % (env_var[0], e0))
env_var = e0
if env_var and env_var in os.environ:
d = os.environ[env_var]
if d == 'None':
log.info('Disabled %s: %s',
self.__class__.__name__, '(%s is None)'
% (env_var,))
return []
if os.path.isfile(d):
dirs = [os.path.dirname(d)] + dirs
l = getattr(self, '_lib_names', [])
if len(l) == 1:
b = os.path.basename(d)
b = os.path.splitext(b)[0]
if b[:3] == 'lib':
log.info('Replacing _lib_names[0]==%r with %r' \
% (self._lib_names[0], b[3:]))
self._lib_names[0] = b[3:]
else:
ds = d.split(os.pathsep)
ds2 = []
for d in ds:
if os.path.isdir(d):
ds2.append(d)
for dd in ['include', 'lib']:
d1 = os.path.join(d, dd)
if os.path.isdir(d1):
ds2.append(d1)
dirs = ds2 + dirs
default_dirs = self.cp.get(self.section, key).split(os.pathsep)
dirs.extend(default_dirs)
ret = []
for d in dirs:
if len(d) > 0 and not os.path.isdir(d):
warnings.warn('Specified path %s is invalid.' % d, stacklevel=2)
continue
if d not in ret:
ret.append(d)
log.debug('( %s = %s )', key, ':'.join(ret))
return ret
def get_lib_dirs(self, key='library_dirs'):
return self.get_paths(self.section, key)
def get_runtime_lib_dirs(self, key='runtime_library_dirs'):
path = self.get_paths(self.section, key)
if path == ['']:
path = []
return path
def get_include_dirs(self, key='include_dirs'):
return self.get_paths(self.section, key)
def get_src_dirs(self, key='src_dirs'):
return self.get_paths(self.section, key)
def get_libs(self, key, default):
try:
libs = self.cp.get(self.section, key)
except NoOptionError:
if not default:
return []
if is_string(default):
return [default]
return default
return [b for b in [a.strip() for a in libs.split(',')] if b]
def get_libraries(self, key='libraries'):
if hasattr(self, '_lib_names'):
return self.get_libs(key, default=self._lib_names)
else:
return self.get_libs(key, '')
def library_extensions(self):
static_exts = ['.a']
if sys.platform == 'win32':
static_exts.append('.lib') # .lib is used by MSVC
if self.search_static_first:
exts = static_exts + [so_ext]
else:
exts = [so_ext] + static_exts
if sys.platform == 'cygwin':
exts.append('.dll.a')
if sys.platform == 'darwin':
exts.append('.dylib')
return exts
def check_libs(self, lib_dirs, libs, opt_libs=[]):
"""If static or shared libraries are available then return
their info dictionary.
Checks for all libraries as shared libraries first, then
static (or vice versa if self.search_static_first is True).
"""
exts = self.library_extensions()
info = None
for ext in exts:
info = self._check_libs(lib_dirs, libs, opt_libs, [ext])
if info is not None:
break
if not info:
log.info(' libraries %s not found in %s', ','.join(libs),
lib_dirs)
return info
def check_libs2(self, lib_dirs, libs, opt_libs=[]):
"""If static or shared libraries are available then return
their info dictionary.
Checks each library for shared or static.
"""
exts = self.library_extensions()
info = self._check_libs(lib_dirs, libs, opt_libs, exts)
if not info:
log.info(' libraries %s not found in %s', ','.join(libs),
lib_dirs)
return info
def _find_lib(self, lib_dir, lib, exts):
    """Look for *lib* in *lib_dir* with each extension in *exts*.

    Returns the (possibly adjusted) library name on success, False
    otherwise.
    """
    assert is_string(lib_dir)
    # under windows first try without 'lib' prefix
    prefixes = ['', 'lib'] if sys.platform == 'win32' else ['lib']
    for ext in exts:
        for prefix in prefixes:
            hit = self.combine_paths(lib_dir, prefix + lib + ext)
            if hit:
                assert len(hit) == 1
                # ??? splitext on the hit would do this for cygwin
                # doesn't seem correct
                if ext == '.dll.a':
                    return lib + '.dll'
                return lib
    return False
def _find_libs(self, lib_dirs, libs, exts):
# make sure we preserve the order of libs, as it can be important
found_dirs, found_libs = [], []
for lib in libs:
for lib_dir in lib_dirs:
found_lib = self._find_lib(lib_dir, lib, exts)
if found_lib:
found_libs.append(found_lib)
if lib_dir not in found_dirs:
found_dirs.append(lib_dir)
break
return found_dirs, found_libs
def _check_libs(self, lib_dirs, libs, opt_libs, exts):
    """Find mandatory and optional libs in expected paths.

    Missing optional libraries are silently forgotten.
    """
    if not is_sequence(lib_dirs):
        lib_dirs = [lib_dirs]
    found_dirs, found_libs = self._find_libs(lib_dirs, libs, exts)
    # Every mandatory library must have been found.
    if not found_libs or len(found_libs) != len(libs):
        return None
    # Optional libraries: take whatever is present.
    opt_dirs, opt_names = self._find_libs(lib_dirs, opt_libs, exts)
    found_libs.extend(opt_names)
    for d in opt_dirs:
        if d not in found_dirs:
            found_dirs.append(d)
    return {'libraries': found_libs, 'library_dirs': found_dirs}
def combine_paths(self, *args):
    """Return a list of existing paths composed by all combinations
    of items from the arguments.
    """
    # Delegate to the module-level helper, threading the verbosity through.
    return combine_paths(*args, verbosity=self.verbosity)
class fft_opt_info(system_info):
    """Aggregate FFT info: the best available FFTW plus optional djbfft."""

    def calc_info(self):
        info = {}
        fftw = get_info('fftw3') or get_info('fftw2') or get_info('dfftw')
        djbfft = get_info('djbfft')
        for extra in (fftw, djbfft):
            if extra:
                dict_append(info, **extra)
        self.set_info(**info)
        return
class fftw_info(system_info):
    """Detect FFTW; tries each entry of ``ver_info`` in order (fftw3 first)."""
    #variables to override
    section = 'fftw'
    dir_env_var = 'FFTW'
    notfounderror = FFTWNotFoundError
    # Candidate versions, probed in order by calc_info().
    ver_info = [{'name':'fftw3',
                 'libs':['fftw3'],
                 'includes':['fftw3.h'],
                 'macros':[('SCIPY_FFTW3_H', None)]},
                {'name':'fftw2',
                 'libs':['rfftw', 'fftw'],
                 'includes':['fftw.h', 'rfftw.h'],
                 'macros':[('SCIPY_FFTW_H', None)]}]

    def calc_ver_info(self, ver_param):
        """Returns True on successful version detection, else False"""
        lib_dirs = self.get_lib_dirs()
        incl_dirs = self.get_include_dirs()
        libs = self.get_libs(self.section + '_libs', ver_param['libs'])
        info = self.check_libs(lib_dirs, libs)
        if info is not None:
            # All headers of this version must live in one include dir.
            flag = 0
            for d in incl_dirs:
                if len(self.combine_paths(d, ver_param['includes'])) \
                   == len(ver_param['includes']):
                    dict_append(info, include_dirs=[d])
                    flag = 1
                    incl_dirs = [d]
                    break
            if flag:
                dict_append(info, define_macros=ver_param['macros'])
            else:
                info = None
        if info is not None:
            self.set_info(**info)
            return True
        else:
            log.info(' %s not found' % (ver_param['name']))
            return False

    def calc_info(self):
        # Stop at the first version that is successfully detected.
        for i in self.ver_info:
            if self.calc_ver_info(i):
                break
class fftw2_info(fftw_info):
    """Detect FFTW version 2 only."""
    #variables to override
    section = 'fftw'
    dir_env_var = 'FFTW'
    notfounderror = FFTWNotFoundError
    ver_info = [{'name':'fftw2',
                 'libs':['rfftw', 'fftw'],
                 'includes':['fftw.h', 'rfftw.h'],
                 'macros':[('SCIPY_FFTW_H', None)]}
                ]
class fftw3_info(fftw_info):
    """Detect FFTW version 3 only."""
    #variables to override
    section = 'fftw3'
    dir_env_var = 'FFTW3'
    notfounderror = FFTWNotFoundError
    ver_info = [{'name':'fftw3',
                 'libs':['fftw3'],
                 'includes':['fftw3.h'],
                 'macros':[('SCIPY_FFTW3_H', None)]},
                ]
class dfftw_info(fftw_info):
    """Detect the double-precision FFTW2 variant (dfftw)."""
    section = 'fftw'
    dir_env_var = 'FFTW'
    ver_info = [{'name':'dfftw',
                 'libs':['drfftw', 'dfftw'],
                 'includes':['dfftw.h', 'drfftw.h'],
                 'macros':[('SCIPY_DFFTW_H', None)]}]
class sfftw_info(fftw_info):
    """Detect the single-precision FFTW2 variant (sfftw)."""
    section = 'fftw'
    dir_env_var = 'FFTW'
    ver_info = [{'name':'sfftw',
                 'libs':['srfftw', 'sfftw'],
                 'includes':['sfftw.h', 'srfftw.h'],
                 'macros':[('SCIPY_SFFTW_H', None)]}]
class fftw_threads_info(fftw_info):
    """Detect the threaded FFTW2 libraries."""
    section = 'fftw'
    dir_env_var = 'FFTW'
    ver_info = [{'name':'fftw threads',
                 'libs':['rfftw_threads', 'fftw_threads'],
                 'includes':['fftw_threads.h', 'rfftw_threads.h'],
                 'macros':[('SCIPY_FFTW_THREADS_H', None)]}]
class dfftw_threads_info(fftw_info):
    """Detect the threaded double-precision FFTW2 libraries."""
    section = 'fftw'
    dir_env_var = 'FFTW'
    ver_info = [{'name':'dfftw threads',
                 'libs':['drfftw_threads', 'dfftw_threads'],
                 'includes':['dfftw_threads.h', 'drfftw_threads.h'],
                 'macros':[('SCIPY_DFFTW_THREADS_H', None)]}]
class sfftw_threads_info(fftw_info):
    """Detect the threaded single-precision FFTW2 libraries."""
    section = 'fftw'
    dir_env_var = 'FFTW'
    ver_info = [{'name':'sfftw threads',
                 'libs':['srfftw_threads', 'sfftw_threads'],
                 'includes':['sfftw_threads.h', 'srfftw_threads.h'],
                 'macros':[('SCIPY_SFFTW_THREADS_H', None)]}]
class djbfft_info(system_info):
    """Detect a djbfft installation (object archive or library + headers)."""
    section = 'djbfft'
    dir_env_var = 'DJBFFT'
    notfounderror = DJBFFTNotFoundError

    def get_paths(self, section, key):
        """Extend the base search paths with 'djbfft' subdirectories."""
        pre_dirs = system_info.get_paths(self, section, key)
        dirs = []
        for d in pre_dirs:
            dirs.extend(self.combine_paths(d, ['djbfft']) + [d])
        return [d for d in dirs if os.path.isdir(d)]

    def calc_info(self):
        """Record djbfft as an extra object or a library plus include dir.

        Prefers the bare ``djbfft.a`` archive (linked as an extra object);
        falls back to a conventional ``libdjbfft`` library.  Does nothing
        when neither is found or the headers are missing.
        """
        lib_dirs = self.get_lib_dirs()
        incl_dirs = self.get_include_dirs()
        info = None
        for d in lib_dirs:
            p = self.combine_paths(d, ['djbfft.a'])
            if p:
                info = {'extra_objects': p}
                break
            p = self.combine_paths(d, ['libdjbfft.a', 'libdjbfft' + so_ext])
            if p:
                info = {'libraries': ['djbfft'], 'library_dirs': [d]}
                break
        if info is None:
            return
        for d in incl_dirs:
            # Both headers must be present in the same directory.
            if len(self.combine_paths(d, ['fftc8.h', 'fftfreq.h'])) == 2:
                dict_append(info, include_dirs=[d],
                            define_macros=[('SCIPY_DJBFFT_H', None)])
                self.set_info(**info)
                return
        # Fix: removed an unreachable duplicate ``return`` statement that
        # followed this point (dead code in the original).
        return
class mkl_info(system_info):
    """Detect Intel MKL via $MKLROOT, $LD_LIBRARY_PATH or /etc/ld.so.conf."""
    section = 'mkl'
    dir_env_var = 'MKLROOT'
    _lib_mkl = ['mkl_rt']  # single dynamic runtime library

    def get_mkl_rootdir(self):
        """Return the MKL installation root directory, or None."""
        mklroot = os.environ.get('MKLROOT', None)
        if mklroot is not None:
            return mklroot
        # Otherwise scan the dynamic-linker search paths for 'mkl*' dirs.
        paths = os.environ.get('LD_LIBRARY_PATH', '').split(os.pathsep)
        ld_so_conf = '/etc/ld.so.conf'
        if os.path.isfile(ld_so_conf):
            with open(ld_so_conf, 'r') as f:
                for d in f:
                    d = d.strip()
                    if d:
                        paths.append(d)
        # NOTE(review): intel_mkl_dirs is collected but never used below.
        intel_mkl_dirs = []
        for path in paths:
            path_atoms = path.split(os.sep)
            for m in path_atoms:
                if m.startswith('mkl'):
                    d = os.sep.join(path_atoms[:path_atoms.index(m) + 2])
                    intel_mkl_dirs.append(d)
                    break
        for d in paths:
            # Pick the first 'mkl*' candidate that has a 'lib' subdirectory.
            dirs = glob(os.path.join(d, 'mkl', '*'))
            dirs += glob(os.path.join(d, 'mkl*'))
            for d in dirs:
                if os.path.isdir(os.path.join(d, 'lib')):
                    return d
        return None

    def __init__(self):
        mklroot = self.get_mkl_rootdir()
        if mklroot is None:
            system_info.__init__(self)
        else:
            from .cpuinfo import cpu
            # Choose the platform-specific 'lib' subdirectory name.
            if cpu.is_Itanium():
                plt = '64'
            elif cpu.is_Intel() and cpu.is_64bit():
                plt = 'intel64'
            else:
                plt = '32'
            system_info.__init__(
                self,
                default_lib_dirs=[os.path.join(mklroot, 'lib', plt)],
                default_include_dirs=[os.path.join(mklroot, 'include')])

    def calc_info(self):
        """Publish MKL libraries, macros and include dirs when found."""
        lib_dirs = self.get_lib_dirs()
        incl_dirs = self.get_include_dirs()
        mkl_libs = self.get_libs('mkl_libs', self._lib_mkl)
        info = self.check_libs2(lib_dirs, mkl_libs)
        if info is None:
            return
        dict_append(info,
                    define_macros=[('SCIPY_MKL_H', None),
                                   ('HAVE_CBLAS', None)],
                    include_dirs=incl_dirs)
        if sys.platform == 'win32':
            pass  # win32 has no pthread library
        else:
            dict_append(info, libraries=['pthread'])
        self.set_info(**info)
class lapack_mkl_info(mkl_info):
    """MKL ships LAPACK in the same libraries; the mkl_info lookup suffices."""
    pass
class blas_mkl_info(mkl_info):
    """MKL ships BLAS in the same libraries; the mkl_info lookup suffices."""
    pass
class atlas_info(system_info):
    """Detect an ATLAS installation (optimized BLAS plus partial LAPACK).

    Searches configured directories for the ATLAS libraries and for a
    LAPACK library shipped alongside them; warns when the LAPACK part is
    missing or appears incomplete.
    """
    section = 'atlas'
    dir_env_var = 'ATLAS'
    _lib_names = ['f77blas', 'cblas']
    # FreeBSD ships re-entrant variants under different names.
    if sys.platform[:7] == 'freebsd':
        _lib_atlas = ['atlas_r']
        _lib_lapack = ['alapack_r']
    else:
        _lib_atlas = ['atlas']
        _lib_lapack = ['lapack']
    notfounderror = AtlasNotFoundError

    def get_paths(self, section, key):
        # Also look inside typical ATLAS subdirectories of each base dir.
        pre_dirs = system_info.get_paths(self, section, key)
        dirs = []
        for d in pre_dirs:
            dirs.extend(self.combine_paths(d, ['atlas*', 'ATLAS*',
                                               'sse', '3dnow', 'sse2']) + [d])
        return [d for d in dirs if os.path.isdir(d)]

    def calc_info(self):
        lib_dirs = self.get_lib_dirs()
        info = {}
        atlas_libs = self.get_libs('atlas_libs',
                                   self._lib_names + self._lib_atlas)
        lapack_libs = self.get_libs('lapack_libs', self._lib_lapack)
        atlas = None
        lapack = None
        atlas_1 = None  # remembers the first ATLAS hit that lacked LAPACK
        for d in lib_dirs:
            atlas = self.check_libs2(d, atlas_libs, [])
            # NOTE(review): this probe result is never used below.
            lapack_atlas = self.check_libs2(d, ['lapack_atlas'], [])
            if atlas is not None:
                lib_dirs2 = [d] + self.combine_paths(d, ['atlas*', 'ATLAS*'])
                lapack = self.check_libs2(lib_dirs2, lapack_libs, [])
                if lapack is not None:
                    break
            if atlas:
                atlas_1 = atlas
        log.info(self.__class__)
        if atlas is None:
            atlas = atlas_1
        if atlas is None:
            return
        include_dirs = self.get_include_dirs()
        # Locate cblas.h next to the libraries or in the include dirs.
        h = (self.combine_paths(lib_dirs + include_dirs, 'cblas.h') or [None])
        h = h[0]
        if h:
            h = os.path.dirname(h)
            dict_append(info, include_dirs=[h])
        info['language'] = 'c'
        if lapack is not None:
            dict_append(info, **lapack)
            dict_append(info, **atlas)
        elif 'lapack_atlas' in atlas['libraries']:
            dict_append(info, **atlas)
            dict_append(info,
                        define_macros=[('ATLAS_WITH_LAPACK_ATLAS', None)])
            self.set_info(**info)
            return
        else:
            dict_append(info, **atlas)
            dict_append(info, define_macros=[('ATLAS_WITHOUT_LAPACK', None)])
            message = """
*********************************************************************
Could not find lapack library within the ATLAS installation.
*********************************************************************
"""
            warnings.warn(message, stacklevel=2)
            self.set_info(**info)
            return

        # Check if lapack library is complete, only warn if it is not.
        lapack_dir = lapack['library_dirs'][0]
        lapack_name = lapack['libraries'][0]
        lapack_lib = None
        lib_prefixes = ['lib']
        if sys.platform == 'win32':
            lib_prefixes.append('')
        for e in self.library_extensions():
            for prefix in lib_prefixes:
                fn = os.path.join(lapack_dir, prefix + lapack_name + e)
                if os.path.exists(fn):
                    lapack_lib = fn
                    break
            if lapack_lib:
                break
        if lapack_lib is not None:
            sz = os.stat(lapack_lib)[6]  # st_size
            if sz <= 4000 * 1024:  # a complete ATLAS lapack is > 4 MB
                message = """
*********************************************************************
Lapack library (from ATLAS) is probably incomplete:
size of %s is %sk (expected >4000k)
Follow the instructions in the KNOWN PROBLEMS section of the file
numpy/INSTALL.txt.
*********************************************************************
""" % (lapack_lib, sz / 1024)
                warnings.warn(message, stacklevel=2)
            else:
                info['language'] = 'f77'

        atlas_version, atlas_extra_info = get_atlas_version(**atlas)
        dict_append(info, **atlas_extra_info)
        self.set_info(**info)
class atlas_blas_info(atlas_info):
    """ATLAS lookup restricted to its BLAS (cblas/f77blas) part."""
    _lib_names = ['f77blas', 'cblas']

    def calc_info(self):
        lib_dirs = self.get_lib_dirs()
        info = {}
        atlas_libs = self.get_libs('atlas_libs',
                                   self._lib_names + self._lib_atlas)
        atlas = self.check_libs2(lib_dirs, atlas_libs, [])
        if atlas is None:
            return
        include_dirs = self.get_include_dirs()
        # Locate cblas.h next to the libraries or in the include dirs.
        h = (self.combine_paths(lib_dirs + include_dirs, 'cblas.h') or [None])
        h = h[0]
        if h:
            h = os.path.dirname(h)
            dict_append(info, include_dirs=[h])
        info['language'] = 'c'
        info['define_macros'] = [('HAVE_CBLAS', None)]

        atlas_version, atlas_extra_info = get_atlas_version(**atlas)
        dict_append(atlas, **atlas_extra_info)

        dict_append(info, **atlas)

        self.set_info(**info)
        return
class atlas_threads_info(atlas_info):
    """Threaded ATLAS variant (pt-prefixed libraries)."""
    dir_env_var = ['PTATLAS', 'ATLAS']
    _lib_names = ['ptf77blas', 'ptcblas']
class atlas_blas_threads_info(atlas_blas_info):
    """Threaded ATLAS BLAS variant (pt-prefixed libraries)."""
    dir_env_var = ['PTATLAS', 'ATLAS']
    _lib_names = ['ptf77blas', 'ptcblas']
class lapack_atlas_info(atlas_info):
    """ATLAS lookup that additionally requires the 'lapack_atlas' library."""
    _lib_names = ['lapack_atlas'] + atlas_info._lib_names
class lapack_atlas_threads_info(atlas_threads_info):
    """Threaded ATLAS lookup that also requires 'lapack_atlas'."""
    _lib_names = ['lapack_atlas'] + atlas_threads_info._lib_names
class atlas_3_10_info(atlas_info):
    """ATLAS >= 3.10, which ships a single combined library 'satlas'."""
    _lib_names = ['satlas']
    _lib_atlas = _lib_names
    _lib_lapack = _lib_names
class atlas_3_10_blas_info(atlas_3_10_info):
    """BLAS-only lookup for ATLAS >= 3.10 (mirrors atlas_blas_info)."""
    _lib_names = ['satlas']

    def calc_info(self):
        lib_dirs = self.get_lib_dirs()
        info = {}
        atlas_libs = self.get_libs('atlas_libs',
                                   self._lib_names)
        atlas = self.check_libs2(lib_dirs, atlas_libs, [])
        if atlas is None:
            return
        include_dirs = self.get_include_dirs()
        # Locate cblas.h next to the libraries or in the include dirs.
        h = (self.combine_paths(lib_dirs + include_dirs, 'cblas.h') or [None])
        h = h[0]
        if h:
            h = os.path.dirname(h)
            dict_append(info, include_dirs=[h])
        info['language'] = 'c'
        info['define_macros'] = [('HAVE_CBLAS', None)]

        atlas_version, atlas_extra_info = get_atlas_version(**atlas)
        dict_append(atlas, **atlas_extra_info)

        dict_append(info, **atlas)

        self.set_info(**info)
        return
class atlas_3_10_threads_info(atlas_3_10_info):
    """Threaded ATLAS >= 3.10 variant ('tatlas')."""
    dir_env_var = ['PTATLAS', 'ATLAS']
    _lib_names = ['tatlas']
    _lib_atlas = _lib_names
    _lib_lapack = _lib_names
class atlas_3_10_blas_threads_info(atlas_3_10_blas_info):
    """Threaded BLAS-only ATLAS >= 3.10 variant ('tatlas')."""
    dir_env_var = ['PTATLAS', 'ATLAS']
    _lib_names = ['tatlas']
class lapack_atlas_3_10_info(atlas_3_10_info):
    """LAPACK via ATLAS >= 3.10; the combined library already covers it."""
    pass
class lapack_atlas_3_10_threads_info(atlas_3_10_threads_info):
    """Threaded LAPACK via ATLAS >= 3.10."""
    pass
class lapack_info(system_info):
    """Plain (reference, Fortran 77) LAPACK library lookup."""
    section = 'lapack'
    dir_env_var = 'LAPACK'
    _lib_names = ['lapack']
    notfounderror = LapackNotFoundError

    def calc_info(self):
        lapack_libs = self.get_libs('lapack_libs', self._lib_names)
        info = self.check_libs(self.get_lib_dirs(), lapack_libs, [])
        if info is None:
            return
        info['language'] = 'f77'
        self.set_info(**info)
class lapack_src_info(system_info):
    """Locate LAPACK *Fortran sources* for building LAPACK from scratch
    when no binary LAPACK library is available."""
    section = 'lapack_src'
    dir_env_var = 'LAPACK_SRC'
    notfounderror = LapackSrcNotFoundError

    def get_paths(self, section, key):
        # Also look inside typical LAPACK source subdirectories.
        pre_dirs = system_info.get_paths(self, section, key)
        dirs = []
        for d in pre_dirs:
            dirs.extend([d] + self.combine_paths(d, ['LAPACK*/SRC', 'SRC']))
        return [d for d in dirs if os.path.isdir(d)]

    def calc_info(self):
        """Collect the list of existing LAPACK source files to compile."""
        src_dirs = self.get_src_dirs()
        src_dir = ''
        for d in src_dirs:
            # dgesv.f marks a genuine LAPACK SRC directory.
            if os.path.isfile(os.path.join(d, 'dgesv.f')):
                src_dir = d
                break
        if not src_dir:
            #XXX: Get sources from netlib. May be ask first.
            return
        # The following is extracted from LAPACK-3.0/SRC/Makefile.
        # Added missing names from lapack-lite-3.1.1/SRC/Makefile
        # while keeping removed names for Lapack-3.0 compatibility.
        allaux = '''
        ilaenv ieeeck lsame lsamen xerbla
        iparmq
        '''  # *.f
        laux = '''
        bdsdc bdsqr disna labad lacpy ladiv lae2 laebz laed0 laed1
        laed2 laed3 laed4 laed5 laed6 laed7 laed8 laed9 laeda laev2
        lagtf lagts lamch lamrg lanst lapy2 lapy3 larnv larrb larre
        larrf lartg laruv las2 lascl lasd0 lasd1 lasd2 lasd3 lasd4
        lasd5 lasd6 lasd7 lasd8 lasd9 lasda lasdq lasdt laset lasq1
        lasq2 lasq3 lasq4 lasq5 lasq6 lasr lasrt lassq lasv2 pttrf
        stebz stedc steqr sterf
        larra larrc larrd larr larrk larrj larrr laneg laisnan isnan
        lazq3 lazq4
        '''  # [s|d]*.f
        lasrc = '''
        gbbrd gbcon gbequ gbrfs gbsv gbsvx gbtf2 gbtrf gbtrs gebak
        gebal gebd2 gebrd gecon geequ gees geesx geev geevx gegs gegv
        gehd2 gehrd gelq2 gelqf gels gelsd gelss gelsx gelsy geql2
        geqlf geqp3 geqpf geqr2 geqrf gerfs gerq2 gerqf gesc2 gesdd
        gesv gesvd gesvx getc2 getf2 getrf getri getrs ggbak ggbal
        gges ggesx ggev ggevx ggglm gghrd gglse ggqrf ggrqf ggsvd
        ggsvp gtcon gtrfs gtsv gtsvx gttrf gttrs gtts2 hgeqz hsein
        hseqr labrd lacon laein lags2 lagtm lahqr lahrd laic1 lals0
        lalsa lalsd langb lange langt lanhs lansb lansp lansy lantb
        lantp lantr lapll lapmt laqgb laqge laqp2 laqps laqsb laqsp
        laqsy lar1v lar2v larf larfb larfg larft larfx largv larrv
        lartv larz larzb larzt laswp lasyf latbs latdf latps latrd
        latrs latrz latzm lauu2 lauum pbcon pbequ pbrfs pbstf pbsv
        pbsvx pbtf2 pbtrf pbtrs pocon poequ porfs posv posvx potf2
        potrf potri potrs ppcon ppequ pprfs ppsv ppsvx pptrf pptri
        pptrs ptcon pteqr ptrfs ptsv ptsvx pttrs ptts2 spcon sprfs
        spsv spsvx sptrf sptri sptrs stegr stein sycon syrfs sysv
        sysvx sytf2 sytrf sytri sytrs tbcon tbrfs tbtrs tgevc tgex2
        tgexc tgsen tgsja tgsna tgsy2 tgsyl tpcon tprfs tptri tptrs
        trcon trevc trexc trrfs trsen trsna trsyl trti2 trtri trtrs
        tzrqf tzrzf
        lacn2 lahr2 stemr laqr0 laqr1 laqr2 laqr3 laqr4 laqr5
        '''  # [s|c|d|z]*.f
        sd_lasrc = '''
        laexc lag2 lagv2 laln2 lanv2 laqtr lasy2 opgtr opmtr org2l
        org2r orgbr orghr orgl2 orglq orgql orgqr orgr2 orgrq orgtr
        orm2l orm2r ormbr ormhr orml2 ormlq ormql ormqr ormr2 ormr3
        ormrq ormrz ormtr rscl sbev sbevd sbevx sbgst sbgv sbgvd sbgvx
        sbtrd spev spevd spevx spgst spgv spgvd spgvx sptrd stev stevd
        stevr stevx syev syevd syevr syevx sygs2 sygst sygv sygvd
        sygvx sytd2 sytrd
        '''  # [s|d]*.f
        cz_lasrc = '''
        bdsqr hbev hbevd hbevx hbgst hbgv hbgvd hbgvx hbtrd hecon heev
        heevd heevr heevx hegs2 hegst hegv hegvd hegvx herfs hesv
        hesvx hetd2 hetf2 hetrd hetrf hetri hetrs hpcon hpev hpevd
        hpevx hpgst hpgv hpgvd hpgvx hprfs hpsv hpsvx hptrd hptrf
        hptri hptrs lacgv lacp2 lacpy lacrm lacrt ladiv laed0 laed7
        laed8 laesy laev2 lahef lanhb lanhe lanhp lanht laqhb laqhe
        laqhp larcm larnv lartg lascl laset lasr lassq pttrf rot spmv
        spr stedc steqr symv syr ung2l ung2r ungbr unghr ungl2 unglq
        ungql ungqr ungr2 ungrq ungtr unm2l unm2r unmbr unmhr unml2
        unmlq unmql unmqr unmr2 unmr3 unmrq unmrz unmtr upgtr upmtr
        '''  # [c|z]*.f
        #######
        # Combine the name lists into per-precision source name sets.
        sclaux = laux + ' econd '  # s*.f
        dzlaux = laux + ' secnd '  # d*.f
        slasrc = lasrc + sd_lasrc  # s*.f
        dlasrc = lasrc + sd_lasrc  # d*.f
        clasrc = lasrc + cz_lasrc + ' srot srscl '  # c*.f
        zlasrc = lasrc + cz_lasrc + ' drot drscl '  # z*.f
        oclasrc = ' icmax1 scsum1 '  # *.f
        ozlasrc = ' izmax1 dzsum1 '  # *.f
        sources = ['s%s.f' % f for f in (sclaux + slasrc).split()] \
                  + ['d%s.f' % f for f in (dzlaux + dlasrc).split()] \
                  + ['c%s.f' % f for f in (clasrc).split()] \
                  + ['z%s.f' % f for f in (zlasrc).split()] \
                  + ['%s.f' % f for f in (allaux + oclasrc + ozlasrc).split()]
        sources = [os.path.join(src_dir, f) for f in sources]
        # Lapack 3.1:
        src_dir2 = os.path.join(src_dir, '..', 'INSTALL')
        sources += [os.path.join(src_dir2, p + 'lamch.f') for p in 'sdcz']
        # Lapack 3.2.1:
        sources += [os.path.join(src_dir, p + 'larfp.f') for p in 'sdcz']
        sources += [os.path.join(src_dir, 'ila' + p + 'lr.f') for p in 'sdcz']
        sources += [os.path.join(src_dir, 'ila' + p + 'lc.f') for p in 'sdcz']
        # Should we check here actual existence of source files?
        # Yes, the file listing is different between 3.0 and 3.1
        # versions.
        sources = [f for f in sources if os.path.isfile(f)]
        info = {'sources': sources, 'language': 'f77'}
        self.set_info(**info)
atlas_version_c_text = r'''
/* This file is generated from numpy/distutils/system_info.py */
void ATL_buildinfo(void);
int main(void) {
ATL_buildinfo();
return 0;
}
'''
_cached_atlas_version = {}
def get_atlas_version(**config):
    """Return ``(version_string, info_dict)`` for an ATLAS installation.

    Compiles and runs a tiny program calling ``ATL_buildinfo()`` and parses
    its output; retries with ``-lgfortran`` when the link hints at a
    gfortran-built ATLAS.  On any failure, falls back to parsing the
    library directory names and the ATLAS_VERSION environment variable.
    Results are cached per (libraries, library_dirs) pair.
    """
    libraries = config.get('libraries', [])
    library_dirs = config.get('library_dirs', [])
    key = (tuple(libraries), tuple(library_dirs))
    if key in _cached_atlas_version:
        return _cached_atlas_version[key]
    c = cmd_config(Distribution())
    atlas_version = None
    info = {}
    try:
        s, o = c.get_output(atlas_version_c_text,
                            libraries=libraries, library_dirs=library_dirs,
                            use_tee=(system_info.verbosity > 0))
        if s and re.search(r'undefined reference to `_gfortran', o, re.M):
            # ATLAS was built with gfortran: retry linking with -lgfortran.
            s, o = c.get_output(atlas_version_c_text,
                                libraries=libraries + ['gfortran'],
                                library_dirs=library_dirs,
                                use_tee=(system_info.verbosity > 0))
            if not s:
                warnings.warn("""
*****************************************************
Linkage with ATLAS requires gfortran. Use
python setup.py config_fc --fcompiler=gnu95 ...
when building extension libraries that use ATLAS.
Make sure that -lgfortran is used for C++ extensions.
*****************************************************
""", stacklevel=2)
                dict_append(info, language='f90',
                            define_macros=[('ATLAS_REQUIRES_GFORTRAN', None)])
    except Exception:  # failed to get version from file -- maybe on Windows
        # look at directory name
        for o in library_dirs:
            m = re.search(r'ATLAS_(?P<version>\d+[.]\d+[.]\d+)_', o)
            if m:
                atlas_version = m.group('version')
            if atlas_version is not None:
                break
        # final choice --- look at ATLAS_VERSION environment
        # variable
        if atlas_version is None:
            atlas_version = os.environ.get('ATLAS_VERSION', None)
        if atlas_version:
            dict_append(info, define_macros=[(
                'ATLAS_INFO', '"\\"%s\\""' % atlas_version)
            ])
        else:
            dict_append(info, define_macros=[('NO_ATLAS_INFO', -1)])
        # NOTE: the fallback result is intentionally NOT cached.
        return atlas_version or '?.?.?', info
    if not s:
        # Successful run: parse the version out of the program's output.
        m = re.search(r'ATLAS version (?P<version>\d+[.]\d+[.]\d+)', o)
        if m:
            atlas_version = m.group('version')
    if atlas_version is None:
        # Very old ATLAS builds lack ATL_buildinfo entirely.
        if re.search(r'undefined symbol: ATL_buildinfo', o, re.M):
            atlas_version = '3.2.1_pre3.3.6'
        else:
            log.info('Status: %d', s)
            log.info('Output: %s', o)
    if atlas_version == '3.2.1_pre3.3.6':
        dict_append(info, define_macros=[('NO_ATLAS_INFO', -2)])
    else:
        dict_append(info, define_macros=[(
            'ATLAS_INFO', '"\\"%s\\""' % atlas_version)
        ])
    result = _cached_atlas_version[key] = atlas_version, info
    return result
class lapack_opt_info(system_info):
    """Find the best available optimized LAPACK.

    Preference order: MKL, OpenBLAS (with LAPACK), ATLAS flavours,
    Accelerate/vecLib on OSX, then plain LAPACK/BLAS libraries, falling
    back to Fortran sources as a last resort.
    """
    notfounderror = LapackNotFoundError

    def calc_info(self):
        lapack_mkl_info = get_info('lapack_mkl')
        if lapack_mkl_info:
            self.set_info(**lapack_mkl_info)
            return
        openblas_info = get_info('openblas_lapack')
        if openblas_info:
            self.set_info(**openblas_info)
            return
        # Try ATLAS flavours from the newest naming scheme to the oldest.
        atlas_info = get_info('atlas_3_10_threads')
        if not atlas_info:
            atlas_info = get_info('atlas_3_10')
        if not atlas_info:
            atlas_info = get_info('atlas_threads')
        if not atlas_info:
            atlas_info = get_info('atlas')
        if sys.platform == 'darwin' and not (atlas_info or openblas_info or
                                             lapack_mkl_info):
            # Use the system lapack from Accelerate or vecLib under OSX
            args = []
            link_args = []
            if get_platform()[-4:] == 'i386' or 'intel' in get_platform() or \
               'x86_64' in get_platform() or \
               'i386' in platform.platform():
                intel = 1
            else:
                intel = 0
            if os.path.exists('/System/Library/Frameworks'
                              '/Accelerate.framework/'):
                if intel:
                    args.extend(['-msse3'])
                else:
                    args.extend(['-faltivec'])
                link_args.extend(['-Wl,-framework', '-Wl,Accelerate'])
            elif os.path.exists('/System/Library/Frameworks'
                                '/vecLib.framework/'):
                if intel:
                    args.extend(['-msse3'])
                else:
                    args.extend(['-faltivec'])
                link_args.extend(['-Wl,-framework', '-Wl,vecLib'])
            if args:
                self.set_info(extra_compile_args=args,
                              extra_link_args=link_args,
                              define_macros=[('NO_ATLAS_INFO', 3),
                                             ('HAVE_CBLAS', None)])
                return
        need_lapack = 0
        need_blas = 0
        info = {}
        if atlas_info:
            l = atlas_info.get('define_macros', [])
            # An ATLAS without a complete LAPACK needs a standalone LAPACK.
            if ('ATLAS_WITH_LAPACK_ATLAS', None) in l \
               or ('ATLAS_WITHOUT_LAPACK', None) in l:
                need_lapack = 1
            info = atlas_info
        else:
            warnings.warn(AtlasNotFoundError.__doc__, stacklevel=2)
            need_blas = 1
            need_lapack = 1
            dict_append(info, define_macros=[('NO_ATLAS_INFO', 1)])
        if need_lapack:
            lapack_info = get_info('lapack')
            #lapack_info = {} ## uncomment for testing
            if lapack_info:
                dict_append(info, **lapack_info)
            else:
                warnings.warn(LapackNotFoundError.__doc__, stacklevel=2)
                lapack_src_info = get_info('lapack_src')
                if not lapack_src_info:
                    warnings.warn(LapackSrcNotFoundError.__doc__, stacklevel=2)
                    return
                dict_append(info, libraries=[('flapack_src', lapack_src_info)])
        if need_blas:
            blas_info = get_info('blas')
            if blas_info:
                dict_append(info, **blas_info)
            else:
                warnings.warn(BlasNotFoundError.__doc__, stacklevel=2)
                blas_src_info = get_info('blas_src')
                if not blas_src_info:
                    warnings.warn(BlasSrcNotFoundError.__doc__, stacklevel=2)
                    return
                dict_append(info, libraries=[('fblas_src', blas_src_info)])
        self.set_info(**info)
        return
class blas_opt_info(system_info):
    """Find the best available optimized BLAS.

    Preference order: MKL, BLIS, OpenBLAS, ATLAS flavours,
    Accelerate/vecLib on OSX, then a plain BLAS library, falling back to
    Fortran sources as a last resort.
    """
    notfounderror = BlasNotFoundError

    def calc_info(self):
        blas_mkl_info = get_info('blas_mkl')
        if blas_mkl_info:
            self.set_info(**blas_mkl_info)
            return
        blis_info = get_info('blis')
        if blis_info:
            self.set_info(**blis_info)
            return
        openblas_info = get_info('openblas')
        if openblas_info:
            self.set_info(**openblas_info)
            return
        # Try ATLAS flavours from the newest naming scheme to the oldest.
        atlas_info = get_info('atlas_3_10_blas_threads')
        if not atlas_info:
            atlas_info = get_info('atlas_3_10_blas')
        if not atlas_info:
            atlas_info = get_info('atlas_blas_threads')
        if not atlas_info:
            atlas_info = get_info('atlas_blas')
        if sys.platform == 'darwin' and not (atlas_info or openblas_info or
                                             blas_mkl_info or blis_info):
            # Use the system BLAS from Accelerate or vecLib under OSX
            args = []
            link_args = []
            if get_platform()[-4:] == 'i386' or 'intel' in get_platform() or \
               'x86_64' in get_platform() or \
               'i386' in platform.platform():
                intel = 1
            else:
                intel = 0
            if os.path.exists('/System/Library/Frameworks'
                              '/Accelerate.framework/'):
                if intel:
                    args.extend(['-msse3'])
                else:
                    args.extend(['-faltivec'])
                args.extend([
                    '-I/System/Library/Frameworks/vecLib.framework/Headers'])
                link_args.extend(['-Wl,-framework', '-Wl,Accelerate'])
            elif os.path.exists('/System/Library/Frameworks'
                                '/vecLib.framework/'):
                if intel:
                    args.extend(['-msse3'])
                else:
                    args.extend(['-faltivec'])
                args.extend([
                    '-I/System/Library/Frameworks/vecLib.framework/Headers'])
                link_args.extend(['-Wl,-framework', '-Wl,vecLib'])
            if args:
                self.set_info(extra_compile_args=args,
                              extra_link_args=link_args,
                              define_macros=[('NO_ATLAS_INFO', 3),
                                             ('HAVE_CBLAS', None)])
                return
        need_blas = 0
        info = {}
        if atlas_info:
            info = atlas_info
        else:
            warnings.warn(AtlasNotFoundError.__doc__, stacklevel=2)
            need_blas = 1
            dict_append(info, define_macros=[('NO_ATLAS_INFO', 1)])
        if need_blas:
            blas_info = get_info('blas')
            if blas_info:
                dict_append(info, **blas_info)
            else:
                warnings.warn(BlasNotFoundError.__doc__, stacklevel=2)
                blas_src_info = get_info('blas_src')
                if not blas_src_info:
                    warnings.warn(BlasSrcNotFoundError.__doc__, stacklevel=2)
                    return
                dict_append(info, libraries=[('fblas_src', blas_src_info)])
        self.set_info(**info)
        return
class blas_info(system_info):
    """Generic (reference) BLAS lookup; upgrades to the CBLAS interface
    when a trivial cblas program compiles and links."""
    section = 'blas'
    dir_env_var = 'BLAS'
    _lib_names = ['blas']
    notfounderror = BlasNotFoundError

    def calc_info(self):
        lib_dirs = self.get_lib_dirs()
        blas_libs = self.get_libs('blas_libs', self._lib_names)
        info = self.check_libs(lib_dirs, blas_libs, [])
        if info is None:
            return
        else:
            info['include_dirs'] = self.get_include_dirs()
        if platform.system() == 'Windows':
            # The check for windows is needed because has_cblas uses the
            # same compiler that was used to compile Python and msvc is
            # often not installed when mingw is being used. This rough
            # treatment is not desirable, but windows is tricky.
            info['language'] = 'f77'  # XXX: is it generally true?
        else:
            lib = self.has_cblas(info)
            if lib is not None:
                info['language'] = 'c'
                info['libraries'] = [lib]
                info['define_macros'] = [('HAVE_CBLAS', None)]
        self.set_info(**info)

    def has_cblas(self, info):
        """Return 'cblas' or 'blas' if a tiny cblas program links, else
        None (no header / compile failure) or False (never attempted)."""
        # primitive cblas check by looking for the header and trying to link
        # cblas or blas
        res = False
        c = distutils.ccompiler.new_compiler()
        c.customize('')
        tmpdir = tempfile.mkdtemp()
        s = """#include <cblas.h>
int main(int argc, const char *argv[])
{
    double a[4] = {1,2,3,4};
    double b[4] = {5,6,7,8};
    return cblas_ddot(4, a, 1, b, 1) > 10;
}"""
        src = os.path.join(tmpdir, 'source.c')
        try:
            with open(src, 'wt') as f:
                f.write(s)
            try:
                # check we can compile (find headers)
                obj = c.compile([src], output_dir=tmpdir,
                                include_dirs=self.get_include_dirs())
                # check we can link (find library)
                # some systems have separate cblas and blas libs. First
                # check for cblas lib, and if not present check for blas lib.
                try:
                    c.link_executable(obj, os.path.join(tmpdir, "a.out"),
                                      libraries=["cblas"],
                                      library_dirs=info['library_dirs'],
                                      extra_postargs=info.get('extra_link_args', []))
                    res = "cblas"
                except distutils.ccompiler.LinkError:
                    c.link_executable(obj, os.path.join(tmpdir, "a.out"),
                                      libraries=["blas"],
                                      library_dirs=info['library_dirs'],
                                      extra_postargs=info.get('extra_link_args', []))
                    res = "blas"
            except distutils.ccompiler.CompileError:
                res = None
        finally:
            shutil.rmtree(tmpdir)
        return res
class openblas_info(blas_info):
    """Detect OpenBLAS (BLAS with a built-in CBLAS interface)."""
    section = 'openblas'
    dir_env_var = 'OPENBLAS'
    _lib_names = ['openblas']
    notfounderror = BlasNotFoundError

    def check_embedded_lapack(self, info):
        """The plain OpenBLAS lookup does not require embedded LAPACK."""
        return True

    def calc_info(self):
        lib_dirs = self.get_lib_dirs()
        libs = self.get_libs('libraries', self._lib_names)
        # backward compat with 1.8.0: fall back to the old option name
        if libs == self._lib_names:
            libs = self.get_libs('openblas_libs', self._lib_names)
        info = self.check_libs(lib_dirs, libs, [])
        if info is None:
            return
        # Add extra info for OpenBLAS
        dict_append(info, **self.calc_extra_info())
        if not self.check_embedded_lapack(info):
            return
        info['language'] = 'c'
        info['define_macros'] = [('HAVE_CBLAS', None)]
        self.set_info(**info)
class openblas_lapack_info(openblas_info):
    """OpenBLAS detection that additionally verifies LAPACK symbols are
    embedded, by test-linking a program referencing ``zungqr_``."""
    section = 'openblas'
    dir_env_var = 'OPENBLAS'
    _lib_names = ['openblas']
    notfounderror = BlasNotFoundError

    def check_embedded_lapack(self, info):
        """Return True when the detected OpenBLAS also exports LAPACK.

        Compiles and links a tiny C program that calls ``zungqr_``; a link
        failure means the library was built without LAPACK support.
        """
        res = False
        c = distutils.ccompiler.new_compiler()
        c.customize('')
        tmpdir = tempfile.mkdtemp()
        # Bug fix: declare the symbol with the trailing underscore that the
        # call actually uses.  The old ``void zungqr();`` declaration made
        # the ``zungqr_()`` call rely on C implicit function declarations,
        # which modern compilers reject (removed from the C standard), so
        # this probe could fail spuriously.
        s = """void zungqr_();
int main(int argc, const char *argv[])
{
    zungqr_();
    return 0;
}"""
        src = os.path.join(tmpdir, 'source.c')
        out = os.path.join(tmpdir, 'a.out')
        # Add the additional "extra" arguments
        try:
            extra_args = info['extra_link_args']
        except Exception:
            extra_args = []
        try:
            with open(src, 'wt') as f:
                f.write(s)
            obj = c.compile([src], output_dir=tmpdir)
            try:
                c.link_executable(obj, out, libraries=info['libraries'],
                                  library_dirs=info['library_dirs'],
                                  extra_postargs=extra_args)
                res = True
            except distutils.ccompiler.LinkError:
                res = False
        finally:
            shutil.rmtree(tmpdir)
        return res
class blis_info(blas_info):
    """Detect BLIS (BLAS-like Library Instantiation Software)."""
    section = 'blis'
    dir_env_var = 'BLIS'
    _lib_names = ['blis']
    notfounderror = BlasNotFoundError

    def calc_info(self):
        lib_dirs = self.get_lib_dirs()
        libs = self.get_libs('libraries', self._lib_names)
        # fall back to the section-specific option name
        if libs == self._lib_names:
            libs = self.get_libs('blis_libs', self._lib_names)
        info = self.check_libs2(lib_dirs, libs, [])
        if info is None:
            return
        # Add include dirs
        dict_append(info,
                    language='c',
                    define_macros=[('HAVE_CBLAS', None)],
                    include_dirs=self.get_include_dirs())
        self.set_info(**info)
class blas_src_info(system_info):
    """Locate reference BLAS *Fortran sources* for building BLAS from
    scratch when no binary BLAS library is available."""
    section = 'blas_src'
    dir_env_var = 'BLAS_SRC'
    notfounderror = BlasSrcNotFoundError

    def get_paths(self, section, key):
        # Also look inside 'blas' subdirectories of each base dir.
        pre_dirs = system_info.get_paths(self, section, key)
        dirs = []
        for d in pre_dirs:
            dirs.extend([d] + self.combine_paths(d, ['blas']))
        return [d for d in dirs if os.path.isdir(d)]

    def calc_info(self):
        """Collect the list of existing BLAS source files to compile."""
        src_dirs = self.get_src_dirs()
        src_dir = ''
        for d in src_dirs:
            # daxpy.f marks a genuine BLAS source directory.
            if os.path.isfile(os.path.join(d, 'daxpy.f')):
                src_dir = d
                break
        if not src_dir:
            #XXX: Get sources from netlib. May be ask first.
            return
        # Level 1, 2 and 3 BLAS routine base names.
        blas1 = '''
        caxpy csscal dnrm2 dzasum saxpy srotg zdotc ccopy cswap drot
        dznrm2 scasum srotm zdotu cdotc dasum drotg icamax scnrm2
        srotmg zdrot cdotu daxpy drotm idamax scopy sscal zdscal crotg
        dcabs1 drotmg isamax sdot sswap zrotg cscal dcopy dscal izamax
        snrm2 zaxpy zscal csrot ddot dswap sasum srot zcopy zswap
        scabs1
        '''
        blas2 = '''
        cgbmv chpmv ctrsv dsymv dtrsv sspr2 strmv zhemv ztpmv cgemv
        chpr dgbmv dsyr lsame ssymv strsv zher ztpsv cgerc chpr2 dgemv
        dsyr2 sgbmv ssyr xerbla zher2 ztrmv cgeru ctbmv dger dtbmv
        sgemv ssyr2 zgbmv zhpmv ztrsv chbmv ctbsv dsbmv dtbsv sger
        stbmv zgemv zhpr chemv ctpmv dspmv dtpmv ssbmv stbsv zgerc
        zhpr2 cher ctpsv dspr dtpsv sspmv stpmv zgeru ztbmv cher2
        ctrmv dspr2 dtrmv sspr stpsv zhbmv ztbsv
        '''
        blas3 = '''
        cgemm csymm ctrsm dsyrk sgemm strmm zhemm zsyr2k chemm csyr2k
        dgemm dtrmm ssymm strsm zher2k zsyrk cher2k csyrk dsymm dtrsm
        ssyr2k zherk ztrmm cherk ctrmm dsyr2k ssyrk zgemm zsymm ztrsm
        '''
        sources = [os.path.join(src_dir, f + '.f') \
                   for f in (blas1 + blas2 + blas3).split()]
        #XXX: should we check here actual existence of source files?
        sources = [f for f in sources if os.path.isfile(f)]
        info = {'sources': sources, 'language': 'f77'}
        self.set_info(**info)
class x11_info(system_info):
    """Detect the X11 client library and its X.h header directory."""
    section = 'x11'
    notfounderror = X11NotFoundError

    def __init__(self):
        system_info.__init__(self,
                             default_lib_dirs=default_x11_lib_dirs,
                             default_include_dirs=default_x11_include_dirs)

    def calc_info(self):
        # X11 is not available on native Windows builds.
        if sys.platform in ['win32']:
            return
        lib_dirs = self.get_lib_dirs()
        include_dirs = self.get_include_dirs()
        x11_libs = self.get_libs('x11_libs', ['X11'])
        info = self.check_libs(lib_dirs, x11_libs, [])
        if info is None:
            return
        # First include dir actually containing X11/X.h wins.
        inc_dir = next((d for d in include_dirs
                        if self.combine_paths(d, 'X11/X.h')), None)
        if inc_dir is not None:
            dict_append(info, include_dirs=[inc_dir])
        self.set_info(**info)
class _numpy_info(system_info):
    """Locate a Numeric-style array package (Numeric/numarray/numpy) and
    expose its include directories and version macros."""
    section = 'Numeric'
    modulename = 'Numeric'
    notfounderror = NumericNotFoundError

    def __init__(self):
        include_dirs = []
        try:
            module = __import__(self.modulename)
            prefix = []
            # Guess the installation prefix from the module's file path.
            for name in module.__file__.split(os.sep):
                if name == 'lib':
                    break
                prefix.append(name)
            # Ask numpy for its own include path before attempting
            # anything else
            try:
                include_dirs.append(getattr(module, 'get_include')())
            except AttributeError:
                pass
            include_dirs.append(distutils.sysconfig.get_python_inc(
                prefix=os.sep.join(prefix)))
        except ImportError:
            pass
        py_incl_dir = distutils.sysconfig.get_python_inc()
        include_dirs.append(py_incl_dir)
        py_pincl_dir = distutils.sysconfig.get_python_inc(plat_specific=True)
        if py_pincl_dir not in include_dirs:
            include_dirs.append(py_pincl_dir)
        for d in default_include_dirs:
            d = os.path.join(d, os.path.basename(py_incl_dir))
            if d not in include_dirs:
                include_dirs.append(d)
        system_info.__init__(self,
                             default_lib_dirs=[],
                             default_include_dirs=include_dirs)

    def calc_info(self):
        """Set version macros and the include dir holding arrayobject.h."""
        try:
            module = __import__(self.modulename)
        except ImportError:
            return
        info = {}
        macros = []
        for v in ['__version__', 'version']:
            vrs = getattr(module, v, None)
            if vrs is None:
                continue
            macros = [(self.modulename.upper() + '_VERSION',
                       '"\\"%s\\""' % (vrs)),
                      (self.modulename.upper(), None)]
            break
        dict_append(info, define_macros=macros)
        include_dirs = self.get_include_dirs()
        inc_dir = None
        for d in include_dirs:
            if self.combine_paths(d,
                                  os.path.join(self.modulename,
                                               'arrayobject.h')):
                inc_dir = d
                break
        if inc_dir is not None:
            dict_append(info, include_dirs=[inc_dir])
        if info:
            self.set_info(**info)
        return
class numarray_info(_numpy_info):
    """Locate headers for the legacy ``numarray`` package."""
    section = 'numarray'
    modulename = 'numarray'
class Numeric_info(_numpy_info):
    """Locate headers for the legacy ``Numeric`` package."""
    section = 'Numeric'
    modulename = 'Numeric'
class numpy_info(_numpy_info):
    """Locate headers for ``numpy``."""
    section = 'numpy'
    modulename = 'numpy'
class numerix_info(system_info):
    """Resolve which array backend (numpy/Numeric/numarray) to build
    against, honouring the NUMERIX environment variable."""
    section = 'numerix'
    def calc_info(self):
        # `which` is a (backend-name, how-it-was-chosen) pair.
        which = None, None
        if os.getenv("NUMERIX"):
            which = os.getenv("NUMERIX"), "environment var"
        # If all the above fail, default to numpy.
        if which[0] is None:
            which = "numpy", "defaulted"
            try:
                # Probe the backends in preference order; the first one
                # that imports wins.
                import numpy
                which = "numpy", "defaulted"
            except ImportError:
                msg1 = str(get_exception())
                try:
                    import Numeric
                    which = "numeric", "defaulted"
                except ImportError:
                    msg2 = str(get_exception())
                    try:
                        import numarray
                        which = "numarray", "defaulted"
                    except ImportError:
                        msg3 = str(get_exception())
                        # Nothing importable: log every failure message.
                        log.info(msg1)
                        log.info(msg2)
                        log.info(msg3)
        which = which[0].strip().lower(), which[1]
        if which[0] not in ["numeric", "numarray", "numpy"]:
            raise ValueError("numerix selector must be either 'Numeric' "
                             "or 'numarray' or 'numpy' but the value obtained"
                             " from the %s was '%s'." % (which[1], which[0]))
        # Record the decision for any later consumers of NUMERIX.
        os.environ['NUMERIX'] = which[0]
        self.set_info(**get_info(which[0]))
class f2py_info(system_info):
    """Build info for f2py-generated extensions: the ``fortranobject.c``
    support source and the directory containing it."""
    def calc_info(self):
        try:
            import numpy.f2py as f2py
        except ImportError:
            return
        # fortranobject.c ships in the 'src' directory next to the f2py
        # package itself.
        f2py_dir = os.path.join(os.path.dirname(f2py.__file__), 'src')
        self.set_info(sources=[os.path.join(f2py_dir, 'fortranobject.c')],
                      include_dirs=[f2py_dir])
        return
class boost_python_info(system_info):
    """Locate an unpacked Boost.Python source tree and expose it as a
    buildable source library ('boost_python_src')."""
    section = 'boost_python'
    dir_env_var = 'BOOST'
    def get_paths(self, section, key):
        # Also search any 'boost*' subdirectories of each configured path,
        # keeping only directories that actually exist.
        pre_dirs = system_info.get_paths(self, section, key)
        dirs = []
        for d in pre_dirs:
            dirs.extend([d] + self.combine_paths(d, ['boost*']))
        return [d for d in dirs if os.path.isdir(d)]
    def calc_info(self):
        # A directory counts as a Boost source tree if it contains
        # libs/python/src/module.cpp.
        src_dirs = self.get_src_dirs()
        src_dir = ''
        for d in src_dirs:
            if os.path.isfile(os.path.join(d, 'libs', 'python', 'src',
                                           'module.cpp')):
                src_dir = d
                break
        if not src_dir:
            return
        # Boost.Python must be compiled against the Python C API headers.
        py_incl_dirs = [distutils.sysconfig.get_python_inc()]
        py_pincl_dir = distutils.sysconfig.get_python_inc(plat_specific=True)
        if py_pincl_dir not in py_incl_dirs:
            py_incl_dirs.append(py_pincl_dir)
        srcs_dir = os.path.join(src_dir, 'libs', 'python', 'src')
        bpl_srcs = glob(os.path.join(srcs_dir, '*.cpp'))
        bpl_srcs += glob(os.path.join(srcs_dir, '*', '*.cpp'))
        info = {'libraries': [('boost_python_src',
                               {'include_dirs': [src_dir] + py_incl_dirs,
                                'sources':bpl_srcs}
                              )],
                'include_dirs': [src_dir],
                }
        if info:
            self.set_info(**info)
        return
class agg2_info(system_info):
    """Locate an Anti-Grain Geometry (AGG) 2.x source tree and expose it
    as a buildable source library ('agg2_src')."""
    section = 'agg2'
    dir_env_var = 'AGG2'
    def get_paths(self, section, key):
        # Also search any 'agg2*' subdirectories of each configured path.
        pre_dirs = system_info.get_paths(self, section, key)
        dirs = []
        for d in pre_dirs:
            dirs.extend([d] + self.combine_paths(d, ['agg2*']))
        return [d for d in dirs if os.path.isdir(d)]
    def calc_info(self):
        # A directory counts as an AGG2 tree if src/agg_affine_matrix.cpp
        # exists in it.
        src_dirs = self.get_src_dirs()
        src_dir = ''
        for d in src_dirs:
            if os.path.isfile(os.path.join(d, 'src', 'agg_affine_matrix.cpp')):
                src_dir = d
                break
        if not src_dir:
            return
        if sys.platform == 'win32':
            # On Windows only the win32 platform backend is compiled.
            agg2_srcs = glob(os.path.join(src_dir, 'src', 'platform',
                                          'win32', 'agg_win32_bmp.cpp'))
        else:
            # Elsewhere compile the core sources plus the X11 backend.
            agg2_srcs = glob(os.path.join(src_dir, 'src', '*.cpp'))
            agg2_srcs += [os.path.join(src_dir, 'src', 'platform',
                                       'X11',
                                       'agg_platform_support.cpp')]
        info = {'libraries':
                [('agg2_src',
                  {'sources': agg2_srcs,
                   'include_dirs': [os.path.join(src_dir, 'include')],
                  }
                 )],
                'include_dirs': [os.path.join(src_dir, 'include')],
                }
        if info:
            self.set_info(**info)
        return
class _pkg_config_info(system_info):
    """Base class for libraries whose build flags are obtained by querying
    ``pkg-config`` (or a pkg-config-style tool such as ``wx-config``).

    Subclasses set C{section} and C{append_config_exe}, and optionally the
    version/release macro names and the version/cflags query flags.
    """
    section = None
    config_env_var = 'PKG_CONFIG'
    default_config_exe = 'pkg-config'
    append_config_exe = ''
    version_macro_name = None
    release_macro_name = None
    version_flag = '--modversion'
    cflags_flag = '--cflags'
    def get_config_exe(self):
        """Return the config tool name, letting the environment variable
        (e.g. PKG_CONFIG) override the default executable."""
        if self.config_env_var in os.environ:
            return os.environ[self.config_env_var]
        return self.default_config_exe
    def get_config_output(self, config_exe, option):
        """Run ``<config_exe> <append_config_exe> <option>`` and return its
        output, or None if the command exited with a non-zero status."""
        cmd = config_exe + ' ' + self.append_config_exe + ' ' + option
        s, o = exec_command(cmd, use_tee=0)
        if not s:
            return o
    def calc_info(self):
        # Fix: the warning used to interpolate the result of
        # find_executable(), which is None exactly when this branch is
        # taken; report the executable name that was searched for instead.
        exe_name = self.get_config_exe()
        config_exe = find_executable(exe_name)
        if not config_exe:
            log.warn('File not found: %s. Cannot determine %s info.' \
                  % (exe_name, self.section))
            return
        info = {}
        macros = []
        libraries = []
        library_dirs = []
        include_dirs = []
        extra_link_args = []
        extra_compile_args = []
        version = self.get_config_output(config_exe, self.version_flag)
        if version:
            # Define e.g. XFT_INFO="\"2.1\"" and XFT_VERSION_2_1.
            macros.append((self.__class__.__name__.split('.')[-1].upper(),
                           '"\\"%s\\""' % (version)))
            if self.version_macro_name:
                macros.append((self.version_macro_name + '_%s'
                               % (version.replace('.', '_')), None))
        if self.release_macro_name:
            release = self.get_config_output(config_exe, '--release')
            if release:
                macros.append((self.release_macro_name + '_%s'
                               % (release.replace('.', '_')), None))
        opts = self.get_config_output(config_exe, '--libs')
        if opts:
            # Split linker flags into libraries (-l), search dirs (-L) and
            # anything else (kept as raw extra link args).
            for opt in opts.split():
                if opt[:2] == '-l':
                    libraries.append(opt[2:])
                elif opt[:2] == '-L':
                    library_dirs.append(opt[2:])
                else:
                    extra_link_args.append(opt)
        opts = self.get_config_output(config_exe, self.cflags_flag)
        if opts:
            # Split compile flags into include dirs (-I), macros (-D) and
            # anything else (kept as raw extra compile args).
            for opt in opts.split():
                if opt[:2] == '-I':
                    include_dirs.append(opt[2:])
                elif opt[:2] == '-D':
                    if '=' in opt:
                        n, v = opt[2:].split('=')
                        macros.append((n, v))
                    else:
                        macros.append((opt[2:], None))
                else:
                    extra_compile_args.append(opt)
        if macros:
            dict_append(info, define_macros=macros)
        if libraries:
            dict_append(info, libraries=libraries)
        if library_dirs:
            dict_append(info, library_dirs=library_dirs)
        if include_dirs:
            dict_append(info, include_dirs=include_dirs)
        if extra_link_args:
            dict_append(info, extra_link_args=extra_link_args)
        if extra_compile_args:
            dict_append(info, extra_compile_args=extra_compile_args)
        if info:
            self.set_info(**info)
        return
class wx_info(_pkg_config_info):
    """Build info for wxWidgets, queried via ``wx-config``."""
    section = 'wx'
    config_env_var = 'WX_CONFIG'
    default_config_exe = 'wx-config'
    append_config_exe = ''
    version_macro_name = 'WX_VERSION'
    release_macro_name = 'WX_RELEASE'
    version_flag = '--version'
    cflags_flag = '--cxxflags'
class gdk_pixbuf_xlib_2_info(_pkg_config_info):
    """Build info for gdk-pixbuf-xlib 2.0, via pkg-config."""
    section = 'gdk_pixbuf_xlib_2'
    append_config_exe = 'gdk-pixbuf-xlib-2.0'
    version_macro_name = 'GDK_PIXBUF_XLIB_VERSION'
class gdk_pixbuf_2_info(_pkg_config_info):
    """Build info for gdk-pixbuf 2.0, via pkg-config."""
    section = 'gdk_pixbuf_2'
    append_config_exe = 'gdk-pixbuf-2.0'
    version_macro_name = 'GDK_PIXBUF_VERSION'
class gdk_x11_2_info(_pkg_config_info):
    """Build info for GDK/X11 2.0, via pkg-config."""
    section = 'gdk_x11_2'
    append_config_exe = 'gdk-x11-2.0'
    version_macro_name = 'GDK_X11_VERSION'
class gdk_2_info(_pkg_config_info):
    """Build info for GDK 2.0, via pkg-config."""
    section = 'gdk_2'
    append_config_exe = 'gdk-2.0'
    version_macro_name = 'GDK_VERSION'
class gdk_info(_pkg_config_info):
    """Build info for GDK 1.x, via pkg-config."""
    section = 'gdk'
    append_config_exe = 'gdk'
    version_macro_name = 'GDK_VERSION'
class gtkp_x11_2_info(_pkg_config_info):
    """Build info for GTK+/X11 2.0, via pkg-config."""
    section = 'gtkp_x11_2'
    append_config_exe = 'gtk+-x11-2.0'
    version_macro_name = 'GTK_X11_VERSION'
class gtkp_2_info(_pkg_config_info):
    """Build info for GTK+ 2.0, via pkg-config."""
    section = 'gtkp_2'
    append_config_exe = 'gtk+-2.0'
    version_macro_name = 'GTK_VERSION'
class xft_info(_pkg_config_info):
    """Build info for the Xft font library, via pkg-config."""
    section = 'xft'
    append_config_exe = 'xft'
    version_macro_name = 'XFT_VERSION'
class freetype2_info(_pkg_config_info):
    """Build info for FreeType 2, via pkg-config."""
    section = 'freetype2'
    append_config_exe = 'freetype2'
    version_macro_name = 'FREETYPE2_VERSION'
class amd_info(system_info):
    """Locate the AMD (Approximate Minimum Degree ordering) library."""
    section = 'amd'
    dir_env_var = 'AMD'
    _lib_names = ['amd']
    def calc_info(self):
        lib_dirs = self.get_lib_dirs()
        amd_libs = self.get_libs('amd_libs', self._lib_names)
        info = self.check_libs(lib_dirs, amd_libs, [])
        if info is None:
            return
        include_dirs = self.get_include_dirs()
        inc_dir = None
        for d in include_dirs:
            # Use the directory of the first amd.h found.
            p = self.combine_paths(d, 'amd.h')
            if p:
                inc_dir = os.path.dirname(p[0])
                break
        if inc_dir is not None:
            # SCIPY_AMD_H signals to consumers that amd.h is available; the
            # swig option makes the header visible to SWIG as well.
            dict_append(info, include_dirs=[inc_dir],
                        define_macros=[('SCIPY_AMD_H', None)],
                        swig_opts=['-I' + inc_dir])
        self.set_info(**info)
        return
class umfpack_info(system_info):
    """Locate the UMFPACK sparse LU factorization library.

    The resulting info also merges in AMD's build info, since UMFPACK
    depends on AMD.
    """
    section = 'umfpack'
    dir_env_var = 'UMFPACK'
    notfounderror = UmfpackNotFoundError
    _lib_names = ['umfpack']
    def calc_info(self):
        lib_dirs = self.get_lib_dirs()
        umfpack_libs = self.get_libs('umfpack_libs', self._lib_names)
        info = self.check_libs(lib_dirs, umfpack_libs, [])
        if info is None:
            return
        include_dirs = self.get_include_dirs()
        inc_dir = None
        for d in include_dirs:
            # umfpack.h may live either directly in the include directory
            # or in an 'umfpack' subdirectory.
            p = self.combine_paths(d, ['', 'umfpack'], 'umfpack.h')
            if p:
                inc_dir = os.path.dirname(p[0])
                break
        if inc_dir is not None:
            dict_append(info, include_dirs=[inc_dir],
                        define_macros=[('SCIPY_UMFPACK_H', None)],
                        swig_opts=['-I' + inc_dir])
        # Fix: get_info('amd') used to be called twice, once bound to an
        # unused local; query it once and merge the result.
        dict_append(info, **get_info('amd'))
        self.set_info(**info)
        return
def combine_paths(*args, **kws):
    """ Return a list of existing paths composed by all combinations of
        items from arguments.
    """
    # Normalize: drop empty arguments and wrap bare strings in lists.
    groups = []
    for arg in args:
        if not arg:
            continue
        if is_string(arg):
            groups.append([arg])
        else:
            groups.append(arg)
    if not groups:
        return []
    if len(groups) == 1:
        # Single group: glob every pattern and concatenate the matches.
        result = []
        for pattern in groups[0]:
            result += glob(pattern)
    elif len(groups) == 2:
        # Two groups: glob the cross product of joined paths.
        result = []
        for first in groups[0]:
            for second in groups[1]:
                result.extend(glob(os.path.join(first, second)))
    else:
        # More than two: fold the first two together, then recurse.
        result = combine_paths(*(combine_paths(groups[0], groups[1])
                                 + groups[2:]))
    log.debug('(paths: %s)', ','.join(result))
    return result
# Ranking of implementation languages; dict_append keeps the "highest"
# language it sees when merging info dicts: c < c++ < f77 < f90.
language_map = {'c': 0, 'c++': 1, 'f77': 2, 'f90': 3}
# Inverse of language_map, used to translate the winning rank back to a name.
inv_language_map = {0: 'c', 1: 'c++', 2: 'f77', 3: 'f90'}
def dict_append(d, **kws):
    """Merge keyword arguments into the build-info dict *d*, in place.

    Path/flag-like keys (library_dirs, include_dirs, ...) are merged with
    duplicate entries dropped; other existing keys are extended; missing
    keys are simply set.  The special 'language' key is resolved to the
    most demanding language seen (c < c++ < f77 < f90).
    """
    languages = []
    for k, v in kws.items():
        if k == 'language':
            # Collect all language hints; resolved after the loop.
            languages.append(v)
            continue
        if k in d:
            if k in ['library_dirs', 'include_dirs',
                     'extra_compile_args', 'extra_link_args',
                     'runtime_library_dirs', 'define_macros']:
                # Fixed idiom: use a plain loop for the side effect instead
                # of a throwaway list comprehension.
                for vv in v:
                    if vv not in d[k]:
                        d[k].append(vv)
            else:
                d[k].extend(v)
        else:
            d[k] = v
    if languages:
        # Keep the "highest" language; unknown names rank as plain C (0).
        d['language'] = inv_language_map[
            max(language_map.get(lang, 0) for lang in languages)]
    return
def parseCmdLine(argv=(None,)):
    """Parse command-line options for the ``show_all`` entry point.

    *argv* is a full argv-style sequence; element 0 (the program name) is
    ignored.  Returns the ``(options, args)`` pair produced by optparse.
    """
    import optparse
    option_parser = optparse.OptionParser("usage: %prog [-v] [info objs]")
    option_parser.add_option(
        '-v', '--verbose',
        action='store_true', dest='verbose', default=False,
        help='be verbose and print more messages')
    return option_parser.parse_args(args=argv[1:])
def show_all(argv=None):
    """Print the information detected by every *_info class defined in this
    module, or only those named on the command line (with or without the
    '_info' suffix).  Passing '-v' raises the log threshold to DEBUG."""
    import inspect
    if argv is None:
        argv = sys.argv
    opts, args = parseCmdLine(argv)
    if opts.verbose:
        log.set_threshold(log.DEBUG)
    else:
        log.set_threshold(log.INFO)
    show_only = []
    for n in args:
        # Allow e.g. 'lapack' as shorthand for 'lapack_info'.
        if n[-5:] != '_info':
            n = n + '_info'
        show_only.append(n)
    # NOTE(review): this local deliberately(?) shadows the function name.
    show_all = not show_only
    _gdict_ = globals().copy()
    for name, c in _gdict_.items():
        # Only instantiate concrete system_info subclasses.
        if not inspect.isclass(c):
            continue
        if not issubclass(c, system_info) or c is system_info:
            continue
        if not show_all:
            if name not in show_only:
                continue
            # Remove so leftovers can be reported as undefined below.
            del show_only[show_only.index(name)]
        conf = c()
        conf.verbosity = 2
        r = conf.get_info()
    if show_only:
        log.info('Info classes not defined: %s', ','.join(show_only))
# Running this module directly prints everything detectable on this system.
if __name__ == "__main__":
    show_all()
| bsd-3-clause |
evensonbryan/yocto-autobuilder | lib/python2.7/site-packages/Twisted-12.2.0-py2.7-linux-x86_64.egg/twisted/trial/runner.py | 5 | 28076 | # -*- test-case-name: twisted.trial.test.test_runner -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
A miscellany of code used to run Trial tests.
Maintainer: Jonathan Lange
"""
__all__ = [
'suiteVisit', 'TestSuite',
'DestructiveTestSuite', 'DryRunVisitor',
'ErrorHolder', 'LoggedSuite', 'PyUnitTestCase',
'TestHolder', 'TestLoader', 'TrialRunner', 'TrialSuite',
'filenameToModule', 'isPackage', 'isPackageDirectory', 'isTestCase',
'name', 'samefile', 'NOT_IN_TEST',
]
import pdb
import os, types, warnings, sys, inspect, imp
import doctest, time
from twisted.python import reflect, log, failure, modules, filepath
from twisted.python.compat import set
from twisted.internet import defer
from twisted.trial import util, unittest
from twisted.trial.itrial import ITestCase
from twisted.trial.reporter import UncleanWarningsReporterWrapper
# These are imported so that they remain in the public API for t.trial.runner
from twisted.trial.unittest import suiteVisit, TestSuite
from zope.interface import implements
pyunit = __import__('unittest')
def isPackage(module):
    """
    Return C{True} when C{module} is a module object whose file is a
    package's C{__init__} file (any extension), C{False} for anything else.
    """
    if isinstance(module, types.ModuleType):
        fileName = os.path.basename(module.__file__)
        stem = os.path.splitext(fileName)[0]
        return stem == '__init__'
    return False
def isPackageDirectory(dirname):
    """Is the directory at path 'dirname' a Python package directory?
    Returns the name of the __init__ file (it may have a weird extension)
    if dirname is a package directory. Otherwise, returns False"""
    # Try every import suffix the interpreter knows about (.py, .pyc, ...).
    # NOTE(review): zip(...)[0] relies on Python 2 semantics where zip
    # returns a list; under Python 3 this would need list(zip(...)).
    for ext in zip(*imp.get_suffixes())[0]:
        initFile = '__init__' + ext
        if os.path.exists(os.path.join(dirname, initFile)):
            return initFile
    return False
def samefile(filename1, filename2):
    """
    Fallback for C{os.path.samefile} on platforms that lack it: treat two
    paths as the same file when their absolute forms compare equal.  Used
    by L{filenameToModule}.  Do not use this.
    """
    absolute1 = os.path.abspath(filename1)
    absolute2 = os.path.abspath(filename2)
    return absolute1 == absolute2
def filenameToModule(fn):
    """
    Given a filename, do whatever possible to return a module object matching
    that file.
    If the file in question is a module in Python path, properly import and
    return that module. Otherwise, load the source manually.
    @param fn: A filename.
    @return: A module object.
    @raise ValueError: If C{fn} does not exist.
    """
    if not os.path.exists(fn):
        raise ValueError("%r doesn't exist" % (fn,))
    try:
        ret = reflect.namedAny(reflect.filenameToModuleName(fn))
    except (ValueError, AttributeError):
        # Couldn't find module.  The file 'fn' is not in PYTHONPATH; fall
        # back to loading the source by hand.
        return _importFromFile(fn)
    # ensure that the loaded module matches the file
    retFile = os.path.splitext(ret.__file__)[0] + '.py'
    # not all platforms (e.g. win32) have os.path.samefile
    same = getattr(os.path, 'samefile', samefile)
    if os.path.isfile(fn) and not same(fn, retFile):
        # A same-named module from elsewhere shadowed the file; evict it
        # from sys.modules and load the requested file directly.
        del sys.modules[ret.__name__]
        ret = _importFromFile(fn)
    return ret
def _importFromFile(fn, moduleName=None):
    """
    Load the Python source file C{fn} as a module.

    @param moduleName: the name under which the module is registered in
        C{sys.modules}; defaults to the file's base name.  If a module of
        that name is already imported, it is returned as-is.
    """
    fn = _resolveDirectory(fn)
    if not moduleName:
        moduleName = os.path.splitext(os.path.split(fn)[-1])[0]
    if moduleName in sys.modules:
        return sys.modules[moduleName]
    fd = open(fn, 'r')
    try:
        module = imp.load_source(moduleName, fn, fd)
    finally:
        fd.close()
    return module
def _resolveDirectory(fn):
if os.path.isdir(fn):
initFile = isPackageDirectory(fn)
if initFile:
fn = os.path.join(fn, initFile)
else:
raise ValueError('%r is not a package directory' % (fn,))
return fn
def _getMethodNameInClass(method):
    """
    Find the attribute name on the method's class which refers to the method.
    For some methods, notably decorators which have not had __name__ set correctly:
    getattr(method.im_class, method.__name__) != method
    """
    # NOTE: im_class is the Python 2 bound-method API.
    if getattr(method.im_class, method.__name__, object()) != method:
        # __name__ is stale; scan the class for the attribute that actually
        # refers to this method.
        for alias in dir(method.im_class):
            if getattr(method.im_class, alias, object()) == method:
                return alias
    return method.__name__
class DestructiveTestSuite(TestSuite):
    """
    A test suite which remove the tests once run, to minimize memory usage.
    """
    def run(self, result):
        """
        Like L{TestSuite.run}, except each test is discarded from
        C{self._tests} as soon as it has been run, leaving the suite empty
        afterwards.
        """
        while self._tests and not result.shouldStop:
            nextTest = self._tests.pop(0)
            nextTest(result)
        return result
# When an error occurs outside of any test, the user will see this string
# in place of a test's name.  (LoggedSuite.run attributes stray logged
# errors to a TestHolder with this description.)
NOT_IN_TEST = "<not in test>"
class LoggedSuite(TestSuite):
    """
    Any errors logged in this suite will be reported to the L{TestResult}
    object.
    """
    def run(self, result):
        """
        Run the suite, storing all errors in C{result}. If an error is logged
        while no tests are running, then it will be added as an error to
        C{result}.
        @param result: A L{TestResult} object.
        """
        observer = unittest._logObserver
        observer._add()
        super(LoggedSuite, self).run(result)
        observer._remove()
        # Errors logged outside any test are attributed to the NOT_IN_TEST
        # placeholder so they still show up in the results.
        for error in observer.getErrors():
            result.addError(TestHolder(NOT_IN_TEST), error)
        observer.flushErrors()
class PyUnitTestCase(object):
    """
    DEPRECATED in Twisted 8.0.
    This class decorates the pyunit.TestCase class, mainly to work around the
    differences between unittest in Python 2.3, 2.4, and 2.5. These
    differences are::
        - The way doctest unittests describe themselves
        - Where the implementation of TestCase.run is (used to be in __call__)
        - Where the test method name is kept (mangled-private or non-mangled
          private variable)
    It also implements visit, which we like.
    """
    def __init__(self, test):
        warnings.warn("Deprecated in Twisted 8.0.",
                      category=DeprecationWarning)
        self._test = test
        # Make the wrapped test report our richer id() instead of its own.
        test.id = self.id
    def id(self):
        """
        Return 'module.Class.testMethod' for the wrapped test, coping with
        both the pre-2.5 mangled and the 2.5+ attribute name.
        """
        cls = self._test.__class__
        tmn = getattr(self._test, '_TestCase__testMethodName', None)
        if tmn is None:
            # python2.5's 'unittest' module is more sensible; but different.
            tmn = self._test._testMethodName
        return (cls.__module__ + '.' + cls.__name__ + '.' +
                tmn)
    def __repr__(self):
        return 'PyUnitTestCase<%r>'%(self.id(),)
    def __call__(self, results):
        return self._test(results)
    def visit(self, visitor):
        """
        Call the given visitor with the original, standard library, test case
        that C{self} wraps. See L{unittest.TestCase.visit}.
        Deprecated in Twisted 8.0.
        """
        warnings.warn("Test visitors deprecated in Twisted 8.0",
                      category=DeprecationWarning)
        visitor(self._test)
    def __getattr__(self, name):
        # Delegate everything else to the wrapped test case.
        return getattr(self._test, name)
class TrialSuite(TestSuite):
    """
    Suite to wrap around every single test in a C{trial} run. Used internally
    by Trial to set up things necessary for Trial tests to work, regardless of
    what context they are run in.
    """
    def __init__(self, tests=()):
        # Wrap everything in a LoggedSuite so stray logged errors are
        # reported against the run.
        suite = LoggedSuite(tests)
        super(TrialSuite, self).__init__([suite])
    def _bail(self):
        """
        Fire the reactor 'shutdown' event and block until it completes.
        """
        from twisted.internet import reactor
        d = defer.Deferred()
        reactor.addSystemEventTrigger('after', 'shutdown',
                                      lambda: d.callback(None))
        reactor.fireSystemEvent('shutdown') # radix's suggestion
        # As long as TestCase does crap stuff with the reactor we need to
        # manually shutdown the reactor here, and that requires util.wait
        # :(
        # so that the shutdown event completes
        unittest.TestCase('mktemp')._wait(d)
    def run(self, result):
        try:
            TestSuite.run(self, result)
        finally:
            # Always shut the reactor down, even if the run raised.
            self._bail()
def name(thing):
    """
    Return a sortable display name for C{thing}.

    @param thing: an object from modules (instance of PythonModule,
        PythonAttribute), a TestCase subclass, or an instance of a TestCase.
    """
    if isTestCase(thing):
        # A TestCase subclass: use its fully-qualified class name.
        return reflect.qual(thing)
    # Anything else is either a trial test (which has id()) or an object
    # from twisted.python.modules (which has a name attribute); the
    # fallback spares modules' objects from having to implement id().
    try:
        return thing.id()
    except AttributeError:
        return thing.name
def isTestCase(obj):
    """
    Determine whether C{obj} is a class containing test cases.

    @return: C{True} for subclasses of C{unittest.TestCase}, C{False} for
        everything else (including non-class objects). Used to find all the
        tests in a module.
    """
    try:
        result = issubclass(obj, pyunit.TestCase)
    except TypeError:
        # issubclass raises TypeError when obj is not a class at all.
        result = False
    return result
class TestHolder(object):
    """
    Placeholder for a L{TestCase} inside a reporter. As far as a L{TestResult}
    is concerned, this looks exactly like a unit test.
    """
    implements(ITestCase)
    # Holders never fail, so there is no failure exception type.
    failureException = None
    def __init__(self, description):
        """
        @param description: A string to be displayed L{TestResult}.
        """
        self.description = description
    def __call__(self, result):
        return self.run(result)
    def id(self):
        # The description doubles as the test's unique identifier.
        return self.description
    def countTestCases(self):
        # A holder is not a real test, so it contributes no test count.
        return 0
    def run(self, result):
        """
        This test is just a placeholder. Run the test successfully.
        @param result: The C{TestResult} to store the results in.
        @type result: L{twisted.trial.itrial.ITestResult}.
        """
        result.startTest(self)
        result.addSuccess(self)
        result.stopTest(self)
    def shortDescription(self):
        return self.description
class ErrorHolder(TestHolder):
    """
    Used to insert arbitrary errors into a test suite run. Provides enough
    methods to look like a C{TestCase}, however, when it is run, it simply adds
    an error to the C{TestResult}. The most common use-case is for when a
    module fails to import.
    """
    def __init__(self, description, error):
        """
        @param description: A string used by C{TestResult}s to identify this
            error. Generally, this is the name of a module that failed to
            import.
        @param error: The error to be added to the result. Can be an `exc_info`
            tuple or a L{twisted.python.failure.Failure}.
        """
        super(ErrorHolder, self).__init__(description)
        # Normalize to an exc_info triple regardless of the input form.
        self.error = util.excInfoOrFailureToExcInfo(error)
    def __repr__(self):
        return "<ErrorHolder description=%r error=%s%s>" % (
            # Format the exception type and arguments explicitly, as exception
            # objects do not have nice looking string formats on Python 2.4.
            self.description, self.error[0].__name__, self.error[1].args)
    def run(self, result):
        """
        Run the test, reporting the error.
        @param result: The C{TestResult} to store the results in.
        @type result: L{twisted.trial.itrial.ITestResult}.
        """
        result.startTest(self)
        result.addError(self, self.error)
        result.stopTest(self)
    def visit(self, visitor):
        """
        See L{unittest.TestCase.visit}.
        """
        visitor(self)
class TestLoader(object):
    """
    I find tests inside function, modules, files -- whatever -- then return
    them wrapped inside a Test (either a L{TestSuite} or a L{TestCase}).
    @ivar methodPrefix: A string prefix. C{TestLoader} will assume that all the
    methods in a class that begin with C{methodPrefix} are test cases.
    @ivar modulePrefix: A string prefix. Every module in a package that begins
    with C{modulePrefix} is considered a module full of tests.
    @ivar forceGarbageCollection: A flag applied to each C{TestCase} loaded.
    See L{unittest.TestCase} for more information.
    @ivar sorter: A key function used to sort C{TestCase}s, test classes,
    modules and packages.
    @ivar suiteFactory: A callable which is passed a list of tests (which
    themselves may be suites of tests). Must return a test suite.
    """
    methodPrefix = 'test'
    modulePrefix = 'test_'
    def __init__(self):
        """
        Initialise with the default suite factory (L{TestSuite}) and sorter
        (the module-level L{name} function).
        """
        self.suiteFactory = TestSuite
        self.sorter = name
        self._importErrors = []
    def sort(self, xs):
        """
        Sort the given things using L{sorter}.
        @param xs: A list of test cases, class or modules.
        """
        return sorted(xs, key=self.sorter)
    def findTestClasses(self, module):
        """Given a module, return all Trial test classes"""
        classes = []
        for name, val in inspect.getmembers(module):
            if isTestCase(val):
                classes.append(val)
        return self.sort(classes)
    def findByName(self, name):
        """
        Return a Python object given a string describing it.
        @param name: a string which may be either a filename or a
        fully-qualified Python name.
        @return: If C{name} is a filename, return the module. If C{name} is a
        fully-qualified Python name, return the object it refers to.
        """
        if os.path.exists(name):
            return filenameToModule(name)
        return reflect.namedAny(name)
    def loadModule(self, module):
        """
        Return a test suite with all the tests from a module.
        Included are TestCase subclasses and doctests listed in the module's
        __doctests__ module. If that's not good for you, put a function named
        either C{testSuite} or C{test_suite} in your module that returns a
        TestSuite, and I'll use the results of that instead.
        If C{testSuite} and C{test_suite} are both present, then I'll use
        C{testSuite}.
        """
        ## XXX - should I add an optional parameter to disable the check for
        ## a custom suite.
        ## OR, should I add another method
        if not isinstance(module, types.ModuleType):
            raise TypeError("%r is not a module" % (module,))
        # A module-supplied suite function takes precedence over discovery.
        if hasattr(module, 'testSuite'):
            return module.testSuite()
        elif hasattr(module, 'test_suite'):
            return module.test_suite()
        suite = self.suiteFactory()
        for testClass in self.findTestClasses(module):
            suite.addTest(self.loadClass(testClass))
        if not hasattr(module, '__doctests__'):
            return suite
        docSuite = self.suiteFactory()
        for doctest in module.__doctests__:
            docSuite.addTest(self.loadDoctests(doctest))
        return self.suiteFactory([suite, docSuite])
    loadTestsFromModule = loadModule
    def loadClass(self, klass):
        """
        Given a class which contains test cases, return a sorted list of
        C{TestCase} instances.
        """
        if not (isinstance(klass, type) or isinstance(klass, types.ClassType)):
            raise TypeError("%r is not a class" % (klass,))
        if not isTestCase(klass):
            raise ValueError("%r is not a test case" % (klass,))
        names = self.getTestCaseNames(klass)
        tests = self.sort([self._makeCase(klass, self.methodPrefix+name)
                           for name in names])
        return self.suiteFactory(tests)
    loadTestsFromTestCase = loadClass
    def getTestCaseNames(self, klass):
        """
        Given a class that contains C{TestCase}s, return a list of names of
        methods that probably contain tests.
        """
        return reflect.prefixedMethodNames(klass, self.methodPrefix)
    def loadMethod(self, method):
        """
        Given a method of a C{TestCase} that represents a test, return a
        C{TestCase} instance for that test.
        """
        if not isinstance(method, types.MethodType):
            raise TypeError("%r not a method" % (method,))
        # _getMethodNameInClass copes with decorated methods whose __name__
        # no longer matches their attribute name.
        return self._makeCase(method.im_class, _getMethodNameInClass(method))
    def _makeCase(self, klass, methodName):
        # Instantiate the test case for a single named test method.
        return klass(methodName)
    def loadPackage(self, package, recurse=False):
        """
        Load tests from a module object representing a package, and return a
        TestSuite containing those tests.
        Tests are only loaded from modules whose name begins with 'test_'
        (or whatever C{modulePrefix} is set to).
        @param package: a types.ModuleType object (or reasonable facsimilie
        obtained by importing) which may contain tests.
        @param recurse: A boolean.  If True, inspect modules within packages
        within the given package (and so on), otherwise, only inspect modules
        in the package itself.
        @raise: TypeError if 'package' is not a package.
        @return: a TestSuite created with my suiteFactory, containing all the
        tests.
        """
        if not isPackage(package):
            raise TypeError("%r is not a package" % (package,))
        pkgobj = modules.getModule(package.__name__)
        if recurse:
            discovery = pkgobj.walkModules()
        else:
            discovery = pkgobj.iterModules()
        discovered = []
        for disco in discovery:
            # Only modules matching the configured prefix count as tests.
            if disco.name.split(".")[-1].startswith(self.modulePrefix):
                discovered.append(disco)
        suite = self.suiteFactory()
        for modinfo in self.sort(discovered):
            try:
                module = modinfo.load()
            except:
                # An unimportable test module becomes an ErrorHolder so the
                # failure is reported rather than silently dropped.
                thingToAdd = ErrorHolder(modinfo.name, failure.Failure())
            else:
                thingToAdd = self.loadModule(module)
            suite.addTest(thingToAdd)
        return suite
    def loadDoctests(self, module):
        """
        Return a suite of tests for all the doctests defined in C{module}.
        @param module: A module object or a module name.
        """
        if isinstance(module, str):
            try:
                module = reflect.namedAny(module)
            except:
                return ErrorHolder(module, failure.Failure())
        if not inspect.ismodule(module):
            warnings.warn("trial only supports doctesting modules")
            return
        extraArgs = {}
        if sys.version_info > (2, 4):
            # Work around Python issue2604: DocTestCase.tearDown clobbers globs
            def saveGlobals(test):
                """
                Save C{test.globs} and replace it with a copy so that if
                necessary, the original will be available for the next test
                run.
                """
                test._savedGlobals = getattr(test, '_savedGlobals', test.globs)
                test.globs = test._savedGlobals.copy()
            extraArgs['setUp'] = saveGlobals
        return doctest.DocTestSuite(module, **extraArgs)
    def loadAnything(self, thing, recurse=False):
        """
        Given a Python object, return whatever tests that are in it. Whatever
        'in' might mean.
        @param thing: A Python object. A module, method, class or package.
        @param recurse: Whether or not to look in subpackages of packages.
        Defaults to False.
        @return: A C{TestCase} or C{TestSuite}.
        """
        if isinstance(thing, types.ModuleType):
            if isPackage(thing):
                return self.loadPackage(thing, recurse)
            return self.loadModule(thing)
        elif isinstance(thing, types.ClassType):
            # Old-style (Python 2) classes.
            return self.loadClass(thing)
        elif isinstance(thing, type):
            # New-style classes.
            return self.loadClass(thing)
        elif isinstance(thing, types.MethodType):
            return self.loadMethod(thing)
        raise TypeError("No loader for %r. Unrecognized type" % (thing,))
    def loadByName(self, name, recurse=False):
        """
        Given a string representing a Python object, return whatever tests
        are in that object.
        If C{name} is somehow inaccessible (e.g. the module can't be imported,
        there is no Python object with that name etc) then return an
        L{ErrorHolder}.
        @param name: The fully-qualified name of a Python object.
        """
        try:
            thing = self.findByName(name)
        except:
            return ErrorHolder(name, failure.Failure())
        return self.loadAnything(thing, recurse)
    loadTestsFromName = loadByName
    def loadByNames(self, names, recurse=False):
        """
        Construct a TestSuite containing all the tests found in 'names', where
        names is a list of fully qualified python names and/or filenames. The
        suite returned will have no duplicate tests, even if the same object
        is named twice.
        """
        things = []
        errors = []
        for name in names:
            try:
                things.append(self.findByName(name))
            except:
                errors.append(ErrorHolder(name, failure.Failure()))
        suites = [self.loadAnything(thing, recurse)
                  for thing in self._uniqueTests(things)]
        suites.extend(errors)
        return self.suiteFactory(suites)
    def _uniqueTests(self, things):
        """
        Gather unique suite objects from loaded things. This will guarantee
        uniqueness of inherited methods on TestCases which would otherwise hash
        to same value and collapse to one test unexpectedly if using simpler
        means: e.g. set().
        """
        # Pair each method with its class so that the same method inherited
        # by two classes is kept for both of them.
        entries = []
        for thing in things:
            if isinstance(thing, types.MethodType):
                entries.append((thing, thing.im_class))
            else:
                entries.append((thing,))
        return [entry[0] for entry in set(entries)]
class DryRunVisitor(object):
    """
    A visitor which reports every test it is shown as having run and passed,
    without actually executing anything.
    """
    def __init__(self, reporter):
        """
        @param reporter: A C{TestResult} object that will receive the fake
            start/success/stop events.
        """
        self.reporter = reporter
    def markSuccessful(self, testCase):
        """
        Tell the reporter that C{testCase} started, succeeded and stopped.
        """
        target = self.reporter
        target.startTest(testCase)
        target.addSuccess(testCase)
        target.stopTest(testCase)
class TrialRunner(object):
"""
A specialised runner that the trial front end uses.
"""
DEBUG = 'debug'
DRY_RUN = 'dry-run'
def _getDebugger(self):
dbg = pdb.Pdb()
try:
import readline
except ImportError:
print "readline module not available"
sys.exc_clear()
for path in ('.pdbrc', 'pdbrc'):
if os.path.exists(path):
try:
rcFile = file(path, 'r')
except IOError:
sys.exc_clear()
else:
dbg.rcLines.extend(rcFile.readlines())
return dbg
    def _setUpTestdir(self):
        """
        Close any open log file, create and lock the working directory for
        this run, chdir into it, and return the directory we came from so
        L{_tearDownTestdir} can restore it.
        """
        self._tearDownLogFile()
        currentDir = os.getcwd()
        base = filepath.FilePath(self.workingDirectory)
        testdir, self._testDirLock = util._unusedTestDirectory(base)
        os.chdir(testdir.path)
        return currentDir
    def _tearDownTestdir(self, oldDir):
        """
        Restore the pre-run working directory and release the test-directory
        lock acquired by L{_setUpTestdir}.
        """
        os.chdir(oldDir)
        self._testDirLock.unlock()
    # The log module handed to the reporter factory; kept as a class
    # attribute, presumably so it can be substituted (e.g. by tests) —
    # TODO confirm against callers.
    _log = log
    def _makeResult(self):
        """
        Build the reporter for a run, optionally wrapped so that unclean
        reactor warnings are aggregated.
        """
        reporter = self.reporterFactory(self.stream, self.tbformat,
                                        self.rterrors, self._log)
        if self.uncleanWarnings:
            reporter = UncleanWarningsReporterWrapper(reporter)
        return reporter
    def __init__(self, reporterFactory,
                 mode=None,
                 logfile='test.log',
                 stream=sys.stdout,
                 profile=False,
                 tracebackFormat='default',
                 realTimeErrors=False,
                 uncleanWarnings=False,
                 workingDirectory=None,
                 forceGarbageCollection=False):
        """
        @param reporterFactory: a callable producing the result reporter.
        @param mode: C{None}, L{TrialRunner.DEBUG} or L{TrialRunner.DRY_RUN}.
        @param logfile: path of the log file, or '-' for stdout.
        @param profile: if true, wrap C{run} in a profiler that writes its
            data to 'profile.data'.
        @param workingDirectory: directory in which tests run; defaults to
            '_trial_temp'.
        """
        self.reporterFactory = reporterFactory
        self.logfile = logfile
        self.mode = mode
        self.stream = stream
        self.tbformat = tracebackFormat
        self.rterrors = realTimeErrors
        self.uncleanWarnings = uncleanWarnings
        self._result = None
        self.workingDirectory = workingDirectory or '_trial_temp'
        self._logFileObserver = None
        self._logFileObject = None
        self._forceGarbageCollection = forceGarbageCollection
        if profile:
            self.run = util.profiled(self.run, 'profile.data')
def _tearDownLogFile(self):
if self._logFileObserver is not None:
log.removeObserver(self._logFileObserver.emit)
self._logFileObserver = None
if self._logFileObject is not None:
self._logFileObject.close()
self._logFileObject = None
def _setUpLogFile(self):
self._tearDownLogFile()
if self.logfile == '-':
logFile = sys.stdout
else:
logFile = file(self.logfile, 'a')
self._logFileObject = logFile
self._logFileObserver = log.FileLogObserver(logFile)
log.startLoggingWithObserver(self._logFileObserver.emit, 0)
def run(self, test):
"""
Run the test or suite and return a result object.
"""
test = unittest.decorate(test, ITestCase)
if self._forceGarbageCollection:
test = unittest.decorate(
test, unittest._ForceGarbageCollectionDecorator)
return self._runWithoutDecoration(test)
def _runWithoutDecoration(self, test):
"""
Private helper that runs the given test but doesn't decorate it.
"""
result = self._makeResult()
# decorate the suite with reactor cleanup and log starting
# This should move out of the runner and be presumed to be
# present
suite = TrialSuite([test])
startTime = time.time()
if self.mode == self.DRY_RUN:
for single in unittest._iterateTests(suite):
result.startTest(single)
result.addSuccess(single)
result.stopTest(single)
else:
if self.mode == self.DEBUG:
# open question - should this be self.debug() instead.
debugger = self._getDebugger()
run = lambda: debugger.runcall(suite.run, result)
else:
run = lambda: suite.run(result)
oldDir = self._setUpTestdir()
try:
self._setUpLogFile()
run()
finally:
self._tearDownLogFile()
self._tearDownTestdir(oldDir)
endTime = time.time()
done = getattr(result, 'done', None)
if done is None:
warnings.warn(
"%s should implement done() but doesn't. Falling back to "
"printErrors() and friends." % reflect.qual(result.__class__),
category=DeprecationWarning, stacklevel=3)
result.printErrors()
result.writeln(result.separator)
result.writeln('Ran %d tests in %.3fs', result.testsRun,
endTime - startTime)
result.write('\n')
result.printSummary()
else:
result.done()
return result
def runUntilFailure(self, test):
"""
Repeatedly run C{test} until it fails.
"""
count = 0
while True:
count += 1
self.stream.write("Test Pass %d\n" % (count,))
if count == 1:
result = self.run(test)
else:
result = self._runWithoutDecoration(test)
if result.testsRun == 0:
break
if not result.wasSuccessful():
break
return result
| gpl-2.0 |
sebrandon1/nova | nova/tests/unit/virt/libvirt/fake_imagebackend.py | 3 | 7680 | # Copyright 2012 Grid Dynamics
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import fixtures
import functools
import mock
import os
import six
from nova.virt.libvirt import config
from nova.virt.libvirt import imagebackend
from nova.virt.libvirt import utils as libvirt_utils
class ImageBackendFixture(fixtures.Fixture):
def __init__(self, got_files=None, imported_files=None, exists=None):
"""This fixture mocks imagebackend.Backend.backend, which is the
only entry point to libvirt.imagebackend from libvirt.driver.
:param got_files: A list of {'filename': path, 'size': size} for every
file which was created.
:param imported_files: A list of (local_filename, remote_filename) for
every invocation of import_file().
:param exists: An optional lambda which takes the disk name as an
argument, and returns True if the disk exists,
False otherwise.
"""
self.got_files = got_files
self.imported_files = imported_files
self.disks = collections.defaultdict(self._mock_disk)
"""A dict of name -> Mock image object. This is a defaultdict,
so tests may access it directly before a disk has been created."""
self._exists = exists
def setUp(self):
super(ImageBackendFixture, self).setUp()
self.useFixture(fixtures.MonkeyPatch(
'nova.virt.libvirt.imagebackend.Backend.backend',
self._mock_backend))
@property
def created_disks(self):
"""disks, filtered to contain only disks which were actually created
by calling a relevant method.
"""
# A disk was created iff either cache() or import_file() was called.
return {name: disk for name, disk in six.iteritems(self.disks)
if any([disk.cache.called, disk.import_file.called])}
def _mock_disk(self):
# This is the generator passed to the disks defaultdict. It returns
# a mocked Image object, but note that the returned object has not
# yet been 'constructed'. We don't know at this stage what arguments
# will be passed to the constructor, so we don't know, eg, its type
# or path.
#
# The reason for this 2 phase construction is to allow tests to
# manipulate mocks for disks before they have been created. eg a
# test can do the following before executing the method under test:
#
# disks['disk'].cache.side_effect = ImageNotFound...
#
# When the 'constructor' (image_init in _mock_backend) later runs,
# it will return the same object we created here, and when the
# caller calls cache() it will raise the requested exception.
disk = mock.create_autospec(imagebackend.Image)
# NOTE(mdbooth): fake_cache and fake_import_file are for compatiblity
# with existing tests which test got_files and imported_files. They
# should be removed when they have no remaining users.
disk.cache.side_effect = self._fake_cache
disk.import_file.side_effect = self._fake_import_file
# NOTE(mdbooth): test_virt_drivers assumes libvirt_info has functional
# output
disk.libvirt_info.side_effect = \
functools.partial(self._fake_libvirt_info, disk)
return disk
def _mock_backend(self, backend_self, image_type=None):
# This method mocks Backend.backend, which returns a subclass of Image
# (it returns a class, not an instance). This mocked method doesn't
# return a class; it returns a function which returns a Mock. IOW,
# instead of the getting a QCow2, the caller gets image_init,
# so instead of:
#
# QCow2(instance, disk_name='disk')
#
# the caller effectively does:
#
# image_init(instance, disk_name='disk')
#
# Therefore image_init() must have the same signature as an Image
# subclass constructor, and return a mocked Image object.
#
# The returned mocked Image object has the following additional
# properties which are useful for testing:
#
# * Calls with the same disk_name return the same object from
# self.disks. This means tests can assert on multiple calls for
# the same disk without worrying about whether they were also on
# the same object.
#
# * Mocked objects have an additional image_type attribute set to
# the image_type originally passed to Backend.backend() during
# their construction. Tests can use this to assert that disks were
# created of the expected type.
def image_init(instance=None, disk_name=None, path=None):
# There's nothing special about this path except that it's
# predictable and unique for (instance, disk).
if path is None:
path = os.path.join(
libvirt_utils.get_instance_path(instance), disk_name)
else:
disk_name = os.path.basename(path)
disk = self.disks[disk_name]
# Used directly by callers. These would have been set if called
# the real constructor.
setattr(disk, 'path', path)
setattr(disk, 'is_block_dev', False)
# Used by tests. Note that image_init is a closure over image_type.
setattr(disk, 'image_type', image_type)
# Used by tests to manipulate which disks exist.
if self._exists is not None:
# We don't just cache the return value here because the
# caller may want, eg, a test where the disk initially does not
# exist and later exists.
disk.exists.side_effect = lambda: self._exists(disk_name)
else:
disk.exists.return_value = True
return disk
return image_init
def _fake_cache(self, fetch_func, filename, size=None, *args, **kwargs):
# For legacy tests which use got_files
if self.got_files is not None:
self.got_files.append({'filename': filename, 'size': size})
def _fake_import_file(self, instance, local_filename, remote_filename):
# For legacy tests which use imported_files
if self.imported_files is not None:
self.imported_files.append((local_filename, remote_filename))
def _fake_libvirt_info(self, mock_disk, disk_bus, disk_dev, device_type,
cache_mode, extra_specs, hypervisor_version):
# For tests in test_virt_drivers which expect libvirt_info to be
# functional
info = config.LibvirtConfigGuestDisk()
info.source_type = 'file'
info.source_device = device_type
info.target_bus = disk_bus
info.target_dev = disk_dev
info.driver_cache = cache_mode
info.driver_format = 'raw'
info.source_path = mock_disk.path
return info
| apache-2.0 |
repotvsupertuga/tvsupertuga.repository | instal/script.module.universalscrapers/lib/universalscrapers/modules/js2py/prototypes/jsarray.py | 15 | 14886 | import six
if six.PY3:
xrange = range
import functools
def to_arr(this):
"""Returns Python array from Js array"""
return [this.get(str(e)) for e in xrange(len(this))]
ARR_STACK = set({})
class ArrayPrototype:
def toString():
# this function is wrong but I will leave it here fore debugging purposes.
func = this.get('join')
if not func.is_callable():
@this.Js
def func():
return '[object %s]'%this.Class
return func.call(this, ())
def toLocaleString():
array = this.to_object()
arr_len = array.get('length').to_uint32()
# separator is simply a comma ','
if not arr_len:
return ''
res = []
for i in xrange(arr_len):
element = array[str(i)]
if element.is_undefined() or element.is_null():
res.append('')
else:
cand = element.to_object()
str_func = element.get('toLocaleString')
if not str_func.is_callable():
raise this.MakeError('TypeError', 'toLocaleString method of item at index %d is not callable'%i)
res.append(element.callprop('toLocaleString').value)
return ','.join(res)
def concat():
array = this.to_object()
A = this.Js([])
items = [array]
items.extend(to_arr(arguments))
n = 0
for E in items:
if E.Class=='Array':
k = 0
e_len = len(E)
while k<e_len:
if E.has_property(str(k)):
A.put(str(n), E.get(str(k)))
n+=1
k+=1
else:
A.put(str(n), E)
n+=1
return A
def join(separator):
ARR_STACK.add(this)
array = this.to_object()
arr_len = array.get('length').to_uint32()
separator = ',' if separator.is_undefined() else separator.to_string().value
elems = []
for e in xrange(arr_len):
elem = array.get(str(e))
if elem in ARR_STACK:
s = ''
else:
s = elem.to_string().value
elems.append(s if not (elem.is_undefined() or elem.is_null()) else '')
res = separator.join(elems)
ARR_STACK.remove(this)
return res
def pop(): #todo check
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not arr_len:
array.put('length', this.Js(arr_len))
return None
ind = str(arr_len-1)
element = array.get(ind)
array.delete(ind)
array.put('length', this.Js(arr_len-1))
return element
def push(item): # todo check
array = this.to_object()
arr_len = array.get('length').to_uint32()
to_put = arguments.to_list()
i = arr_len
for i, e in enumerate(to_put, arr_len):
array.put(str(i), e)
if to_put:
i+=1
array.put('length', this.Js(i))
return i
def reverse():
array = this.to_object() # my own algorithm
vals = to_arr(array)
has_props = [array.has_property(str(e)) for e in xrange(len(array))]
vals.reverse()
has_props.reverse()
for i, val in enumerate(vals):
if has_props[i]:
array.put(str(i), val)
else:
array.delete(str(i))
return array
def shift(): #todo check
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not arr_len:
array.put('length', this.Js(0))
return None
first = array.get('0')
for k in xrange(1, arr_len):
from_s, to_s = str(k), str(k-1)
if array.has_property(from_s):
array.put(to_s, array.get(from_s))
else:
array.delete(to)
array.delete(str(arr_len-1))
array.put('length', this.Js(str(arr_len-1)))
return first
def slice(start, end): # todo check
array = this.to_object()
arr_len = array.get('length').to_uint32()
relative_start = start.to_int()
k = max((arr_len + relative_start), 0) if relative_start<0 else min(relative_start, arr_len)
relative_end = arr_len if end.is_undefined() else end.to_int()
final = max((arr_len + relative_end), 0) if relative_end<0 else min(relative_end, arr_len)
res = []
n = 0
while k<final:
pk = str(k)
if array.has_property(pk):
res.append(array.get(pk))
k += 1
n += 1
return res
def sort(cmpfn):
if not this.Class in ['Array', 'Arguments']:
return this.to_object() # do nothing
arr = []
for i in xrange(len(this)):
arr.append(this.get(six.text_type(i)))
if not arr:
return this
if not cmpfn.is_callable():
cmpfn = None
cmp = lambda a,b: sort_compare(a, b, cmpfn)
if six.PY3:
key = functools.cmp_to_key(cmp)
arr.sort(key=key)
else:
arr.sort(cmp=cmp)
for i in xrange(len(arr)):
this.put(six.text_type(i), arr[i])
return this
def splice(start, deleteCount):
# 1-8
array = this.to_object()
arr_len = array.get('length').to_uint32()
relative_start = start.to_int()
actual_start = max((arr_len + relative_start),0) if relative_start<0 else min(relative_start, arr_len)
actual_delete_count = min(max(deleteCount.to_int(),0 ), arr_len - actual_start)
k = 0
A = this.Js([])
# 9
while k<actual_delete_count:
if array.has_property(str(actual_start+k)):
A.put(str(k), array.get(str(actual_start+k)))
k += 1
# 10-11
items = to_arr(arguments)[2:]
items_len = len(items)
# 12
if items_len<actual_delete_count:
k = actual_start
while k < (arr_len-actual_delete_count):
fr = str(k+actual_delete_count)
to = str(k+items_len)
if array.has_property(fr):
array.put(to, array.get(fr))
else:
array.delete(to)
k += 1
k = arr_len
while k > (arr_len - actual_delete_count + items_len):
array.delete(str(k-1))
k -= 1
# 13
elif items_len>actual_delete_count:
k = arr_len - actual_delete_count
while k>actual_start:
fr = str(k + actual_delete_count - 1)
to = str(k + items_len - 1)
if array.has_property(fr):
array.put(to, array.get(fr))
else:
array.delete(to)
k -= 1
# 14-17
k = actual_start
while items:
E = items.pop(0)
array.put(str(k), E)
k += 1
array.put('length', this.Js(arr_len - actual_delete_count + items_len))
return A
def unshift():
array = this.to_object()
arr_len = array.get('length').to_uint32()
argCount = len(arguments)
k = arr_len
while k > 0:
fr = str(k - 1)
to = str(k + argCount - 1)
if array.has_property(fr):
array.put(to, array.get(fr))
else:
array.delete(to)
k -= 1
j = 0
items = to_arr(arguments)
while items:
E = items.pop(0)
array.put(str(j), E)
j += 1
array.put('length', this.Js(arr_len + argCount))
return arr_len + argCount
def indexOf(searchElement):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if arr_len == 0:
return -1
if len(arguments)>1:
n = arguments[1].to_int()
else:
n = 0
if n >= arr_len:
return -1
if n >= 0:
k = n
else:
k = arr_len - abs(n)
if k < 0:
k = 0
while k < arr_len:
if array.has_property(str(k)):
elementK = array.get(str(k))
if searchElement.strict_equality_comparison(elementK):
return k
k += 1
return -1
def lastIndexOf(searchElement):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if arr_len == 0:
return -1
if len(arguments)>1:
n = arguments[1].to_int()
else:
n = arr_len - 1
if n >= 0:
k = min(n, arr_len-1)
else:
k = arr_len - abs(n)
while k >= 0:
if array.has_property(str(k)):
elementK = array.get(str(k))
if searchElement.strict_equality_comparison(elementK):
return k
k -= 1
return -1
def every(callbackfn):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not callbackfn.is_callable():
raise this.MakeError('TypeError', 'callbackfn must be a function')
T = arguments[1]
k = 0
while k<arr_len:
if array.has_property(str(k)):
kValue = array.get(str(k))
if not callbackfn.call(T, (kValue, this.Js(k), array)).to_boolean().value:
return False
k += 1
return True
def some(callbackfn):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not callbackfn.is_callable():
raise this.MakeError('TypeError', 'callbackfn must be a function')
T = arguments[1]
k = 0
while k<arr_len:
if array.has_property(str(k)):
kValue = array.get(str(k))
if callbackfn.call(T, (kValue, this.Js(k), array)).to_boolean().value:
return True
k += 1
return False
def forEach(callbackfn):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not callbackfn.is_callable():
raise this.MakeError('TypeError', 'callbackfn must be a function')
T = arguments[1]
k = 0
while k<arr_len:
if array.has_property(str(k)):
kValue = array.get(str(k))
callbackfn.call(T, (kValue, this.Js(k), array))
k+=1
def map(callbackfn):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not callbackfn.is_callable():
raise this.MakeError('TypeError', 'callbackfn must be a function')
T = arguments[1]
A = this.Js([])
k = 0
while k<arr_len:
Pk = str(k)
if array.has_property(Pk):
kValue = array.get(Pk)
mappedValue = callbackfn.call(T, (kValue, this.Js(k), array))
A.define_own_property(Pk, {'value': mappedValue, 'writable': True,
'enumerable': True, 'configurable': True})
k += 1
return A
def filter(callbackfn):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not callbackfn.is_callable():
raise this.MakeError('TypeError', 'callbackfn must be a function')
T = arguments[1]
res = []
k = 0
while k<arr_len:
if array.has_property(str(k)):
kValue = array.get(str(k))
if callbackfn.call(T, (kValue, this.Js(k), array)).to_boolean().value:
res.append(kValue)
k += 1
return res # converted to js array automatically
def reduce(callbackfn):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not callbackfn.is_callable():
raise this.MakeError('TypeError', 'callbackfn must be a function')
if not arr_len and len(arguments)<2:
raise this.MakeError('TypeError', 'Reduce of empty array with no initial value')
k = 0
if len(arguments)>1: # initial value present
accumulator = arguments[1]
else:
kPresent = False
while not kPresent and k<arr_len:
kPresent = array.has_property(str(k))
if kPresent:
accumulator = array.get(str(k))
k += 1
if not kPresent:
raise this.MakeError('TypeError', 'Reduce of empty array with no initial value')
while k<arr_len:
if array.has_property(str(k)):
kValue = array.get(str(k))
accumulator = callbackfn.call(this.undefined, (accumulator, kValue, this.Js(k), array))
k += 1
return accumulator
def reduceRight(callbackfn):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not callbackfn.is_callable():
raise this.MakeError('TypeError', 'callbackfn must be a function')
if not arr_len and len(arguments)<2:
raise this.MakeError('TypeError', 'Reduce of empty array with no initial value')
k = arr_len - 1
if len(arguments)>1: # initial value present
accumulator = arguments[1]
else:
kPresent = False
while not kPresent and k>=0:
kPresent = array.has_property(str(k))
if kPresent:
accumulator = array.get(str(k))
k -= 1
if not kPresent:
raise this.MakeError('TypeError', 'Reduce of empty array with no initial value')
while k>=0:
if array.has_property(str(k)):
kValue = array.get(str(k))
accumulator = callbackfn.call(this.undefined, (accumulator, kValue, this.Js(k), array))
k -= 1
return accumulator
def sort_compare(a, b, comp):
if a is None:
if b is None:
return 0
return 1
if b is None:
if a is None:
return 0
return -1
if a.is_undefined():
if b.is_undefined():
return 0
return 1
if b.is_undefined():
if a.is_undefined():
return 0
return -1
if comp is not None:
res = comp.call(a.undefined, (a, b))
return res.to_int()
x, y = a.to_string(), b.to_string()
if x<y:
return -1
elif x>y:
return 1
return 0
| gpl-2.0 |
mogproject/javactl | src/javactl/setting/setting.py | 1 | 3543 | from __future__ import division, print_function, absolute_import, unicode_literals
import subprocess
from itertools import chain
import yaml
import six
from mog_commons.case_class import CaseClass
from mog_commons.functional import oget
from javactl.setting import arg_parser
from javactl.setting.app_setting import AppSetting
from javactl.setting.java_setting import JavaSetting
from javactl.setting.log_setting import LogSetting
from javactl.setting.os_setting import OSSetting
class Setting(CaseClass):
"""Manages all settings."""
def __init__(self,
config_path=None,
extra_args=None,
dry_run=False,
debug=False,
app_setting=None,
java_setting=None,
log_setting=None,
os_setting=None,
pre_commands=None,
post_commands=None):
"""
:param config_path:
:param extra_args: arguments for Java application
:param dry_run:
:param debug: debug mode if true
:param app_setting:
:param java_setting:
:param log_setting:
:param os_setting:
:param pre_commands:
:param post_commands:
:return:
"""
CaseClass.__init__(
self,
('config_path', config_path),
('extra_args', oget(extra_args, [])),
('dry_run', dry_run),
('debug', debug),
('app_setting', app_setting),
('java_setting', java_setting),
('log_setting', log_setting),
('os_setting', os_setting),
('pre_commands', oget(pre_commands, [])),
('post_commands', oget(post_commands, []))
)
def parse_args(self, argv):
option, args = arg_parser.parser.parse_args(argv[1:])
if not args:
arg_parser.parser.print_help()
arg_parser.parser.exit(2)
return self.copy(config_path=args[0], extra_args=args[1:], dry_run=option.dry_run, debug=option.debug)
def load_config(self):
if not self.config_path:
return self
with open(self.config_path, 'rb') as f:
data = yaml.load(f.read().decode('utf-8'))
app_setting = AppSetting(**data.get('app', {}))
java_setting = JavaSetting(**data.get('java', {}))
log_setting = LogSetting(app_setting.home, **data.get('log', {}))
os_setting = OSSetting(**data.get('os', {}))
pre_commands = data.get('pre', [])
post_commands = data.get('post', [])
for commands in [pre_commands, post_commands]:
assert isinstance(commands, list), 'pre/post must be lists or empty'
assert all(isinstance(s, six.string_types) for s in commands), 'each element of pre/post must be a string'
return self.copy(app_setting=app_setting, java_setting=java_setting, log_setting=log_setting,
os_setting=os_setting, pre_commands=pre_commands, post_commands=post_commands)
def get_args(self, now):
return list(chain.from_iterable([
self.app_setting.get_args(self.java_setting.get_args() + self.log_setting.get_opts(now)),
self.extra_args,
]))
def get_environ(self, now):
d = {
'JAVA_HOME': self.java_setting.home,
'JAVA_OPTS': subprocess.list2cmdline(self.java_setting.get_opts() + self.log_setting.get_opts(now))
}
d.update(self.os_setting.env)
return d
| apache-2.0 |
gurneyalex/OpenUpgrade | addons/purchase_double_validation/purchase_double_validation_installer.py | 432 | 2315 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class purchase_config_settings(osv.osv_memory):
_inherit = 'purchase.config.settings'
_columns = {
'limit_amount': fields.integer('limit to require a second approval',required=True,
help="Amount after which validation of purchase is required."),
}
_defaults = {
'limit_amount': 5000,
}
def get_default_limit_amount(self, cr, uid, fields, context=None):
ir_model_data = self.pool.get('ir.model.data')
transition = ir_model_data.get_object(cr, uid, 'purchase_double_validation', 'trans_confirmed_double_lt')
field, value = transition.condition.split('<', 1)
return {'limit_amount': int(value)}
def set_limit_amount(self, cr, uid, ids, context=None):
ir_model_data = self.pool.get('ir.model.data')
config = self.browse(cr, uid, ids[0], context)
waiting = ir_model_data.get_object(cr, uid, 'purchase_double_validation', 'trans_confirmed_double_gt')
waiting.write({'condition': 'amount_total >= %s' % config.limit_amount})
confirm = ir_model_data.get_object(cr, uid, 'purchase_double_validation', 'trans_confirmed_double_lt')
confirm.write({'condition': 'amount_total < %s' % config.limit_amount})
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
hellhovnd/django | tests/validation/models.py | 56 | 4426 | from __future__ import unicode_literals
from datetime import datetime
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
def validate_answer_to_universe(value):
if value != 42:
raise ValidationError('This is not the answer to life, universe and everything!', code='not42')
class ModelToValidate(models.Model):
name = models.CharField(max_length=100)
created = models.DateTimeField(default=datetime.now)
number = models.IntegerField(db_column='number_val')
parent = models.ForeignKey('self', blank=True, null=True, limit_choices_to={'number': 10})
email = models.EmailField(blank=True)
url = models.URLField(blank=True)
f_with_custom_validator = models.IntegerField(blank=True, null=True, validators=[validate_answer_to_universe])
def clean(self):
super(ModelToValidate, self).clean()
if self.number == 11:
raise ValidationError('Invalid number supplied!')
class UniqueFieldsModel(models.Model):
unique_charfield = models.CharField(max_length=100, unique=True)
unique_integerfield = models.IntegerField(unique=True)
non_unique_field = models.IntegerField()
class CustomPKModel(models.Model):
my_pk_field = models.CharField(max_length=100, primary_key=True)
class UniqueTogetherModel(models.Model):
cfield = models.CharField(max_length=100)
ifield = models.IntegerField()
efield = models.EmailField()
class Meta:
unique_together = (('ifield', 'cfield',), ['ifield', 'efield'])
class UniqueForDateModel(models.Model):
start_date = models.DateField()
end_date = models.DateTimeField()
count = models.IntegerField(unique_for_date="start_date", unique_for_year="end_date")
order = models.IntegerField(unique_for_month="end_date")
name = models.CharField(max_length=100)
class CustomMessagesModel(models.Model):
other = models.IntegerField(blank=True, null=True)
number = models.IntegerField(db_column='number_val',
error_messages={'null': 'NULL', 'not42': 'AAARGH', 'not_equal': '%s != me'},
validators=[validate_answer_to_universe]
)
class Author(models.Model):
name = models.CharField(max_length=100)
class Article(models.Model):
title = models.CharField(max_length=100)
author = models.ForeignKey(Author)
pub_date = models.DateTimeField(blank=True)
def clean(self):
if self.pub_date is None:
self.pub_date = datetime.now()
@python_2_unicode_compatible
class Post(models.Model):
title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
posted = models.DateField()
def __str__(self):
return self.name
class FlexibleDatePost(models.Model):
title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
posted = models.DateField(blank=True, null=True)
class UniqueErrorsModel(models.Model):
name = models.CharField(max_length=100, unique=True, error_messages={'unique': 'Custom unique name message.'})
no = models.IntegerField(unique=True, error_messages={'unique': 'Custom unique number message.'})
class GenericIPAddressTestModel(models.Model):
generic_ip = models.GenericIPAddressField(blank=True, null=True, unique=True)
v4_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv4")
v6_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv6")
ip_verbose_name = models.GenericIPAddressField("IP Address Verbose",
blank=True, null=True)
class GenericIPAddrUnpackUniqueTest(models.Model):
generic_v4unpack_ip = models.GenericIPAddressField(blank=True, unique=True, unpack_ipv4=True)
# A model can't have multiple AutoFields
# Refs #12467.
assertion_error = None
try:
class MultipleAutoFields(models.Model):
auto1 = models.AutoField(primary_key=True)
auto2 = models.AutoField(primary_key=True)
except AssertionError as exc:
assertion_error = exc
assert str(assertion_error) == "A model can't have more than one AutoField."
| bsd-3-clause |
OS2World/APP-INTERNET-torpak_2 | Lib/repr.py | 2 | 3743 | """Redo the `...` (representation) but with limits on most sizes."""
__all__ = ["Repr","repr"]
import sys
class Repr:
def __init__(self):
self.maxlevel = 6
self.maxtuple = 6
self.maxlist = 6
self.maxarray = 5
self.maxdict = 4
self.maxstring = 30
self.maxlong = 40
self.maxother = 20
def repr(self, x):
return self.repr1(x, self.maxlevel)
def repr1(self, x, level):
typename = type(x).__name__
if ' ' in typename:
parts = typename.split()
typename = '_'.join(parts)
if hasattr(self, 'repr_' + typename):
return getattr(self, 'repr_' + typename)(x, level)
else:
s = `x`
if len(s) > self.maxother:
i = max(0, (self.maxother-3)//2)
j = max(0, self.maxother-3-i)
s = s[:i] + '...' + s[len(s)-j:]
return s
def repr_tuple(self, x, level):
n = len(x)
if n == 0: return '()'
if level <= 0: return '(...)'
s = ''
for i in range(min(n, self.maxtuple)):
if s: s = s + ', '
s = s + self.repr1(x[i], level-1)
if n > self.maxtuple: s = s + ', ...'
elif n == 1: s = s + ','
return '(' + s + ')'
def repr_list(self, x, level):
n = len(x)
if n == 0: return '[]'
if level <= 0: return '[...]'
s = ''
for i in range(min(n, self.maxlist)):
if s: s = s + ', '
s = s + self.repr1(x[i], level-1)
if n > self.maxlist: s = s + ', ...'
return '[' + s + ']'
def repr_array(self, x, level):
n = len(x)
header = "array('%s', [" % x.typecode
if n == 0:
return header + "])"
if level <= 0:
return header + "...])"
s = ''
for i in range(min(n, self.maxarray)):
if s:
s += ', '
s += self.repr1(x[i], level-1)
if n > self.maxarray:
s += ', ...'
return header + s + "])"
def repr_dict(self, x, level):
n = len(x)
if n == 0: return '{}'
if level <= 0: return '{...}'
s = ''
keys = x.keys()
keys.sort()
for i in range(min(n, self.maxdict)):
if s: s = s + ', '
key = keys[i]
s = s + self.repr1(key, level-1)
s = s + ': ' + self.repr1(x[key], level-1)
if n > self.maxdict: s = s + ', ...'
return '{' + s + '}'
def repr_str(self, x, level):
s = `x[:self.maxstring]`
if len(s) > self.maxstring:
i = max(0, (self.maxstring-3)//2)
j = max(0, self.maxstring-3-i)
s = `x[:i] + x[len(x)-j:]`
s = s[:i] + '...' + s[len(s)-j:]
return s
def repr_long(self, x, level):
s = `x` # XXX Hope this isn't too slow...
if len(s) > self.maxlong:
i = max(0, (self.maxlong-3)//2)
j = max(0, self.maxlong-3-i)
s = s[:i] + '...' + s[len(s)-j:]
return s
def repr_instance(self, x, level):
try:
s = `x`
# Bugs in x.__repr__() can cause arbitrary
# exceptions -- then make up something
except:
# On some systems (RH10) id() can be a negative number.
# work around this.
MAX = 2L*sys.maxint+1
return '<' + x.__class__.__name__ + ' instance at %x>'%(id(x)&MAX)
if len(s) > self.maxstring:
i = max(0, (self.maxstring-3)//2)
j = max(0, self.maxstring-3-i)
s = s[:i] + '...' + s[len(s)-j:]
return s
aRepr = Repr()
repr = aRepr.repr
| mit |
jjmleiro/hue | desktop/core/ext-py/Babel-0.9.6/setup.py | 40 | 2769 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
from distutils.cmd import Command
import doctest
from glob import glob
import os
# Prefer setuptools (required for the entry_points below); fall back to
# plain distutils when it is unavailable.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
import sys

# Make the optional documentation build/test commands importable.
sys.path.append(os.path.join('doc', 'common'))
try:
    from doctools import build_doc, test_doc
except ImportError:
    # Doc tooling is optional; disable those commands when it is missing.
    build_doc = test_doc = None

setup(
    name = 'Babel',
    version = '0.9.6',
    description = 'Internationalization utilities',
    long_description = \
"""A collection of tools for internationalizing Python applications.""",
    author = 'Edgewall Software',
    author_email = 'info@edgewall.org',
    license = 'BSD',
    url = 'http://babel.edgewall.org/',
    download_url = 'http://babel.edgewall.org/wiki/Download',

    zip_safe = False,
    classifiers = [
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    packages = ['babel', 'babel.messages'],
    package_data = {'babel': ['global.dat', 'localedata/*.dat']},
    test_suite = 'babel.tests.suite',
    tests_require = ['pytz'],

    # Console script plus distutils command hooks and Babel's own plugin
    # registries (checkers/extractors), declared as entry points.
    entry_points = """
    [console_scripts]
    pybabel = babel.messages.frontend:main

    [distutils.commands]
    compile_catalog = babel.messages.frontend:compile_catalog
    extract_messages = babel.messages.frontend:extract_messages
    init_catalog = babel.messages.frontend:init_catalog
    update_catalog = babel.messages.frontend:update_catalog

    [distutils.setup_keywords]
    message_extractors = babel.messages.frontend:check_message_extractors

    [babel.checkers]
    num_plurals = babel.messages.checkers:num_plurals
    python_format = babel.messages.checkers:python_format

    [babel.extractors]
    ignore = babel.messages.extract:extract_nothing
    python = babel.messages.extract:extract_python
    javascript = babel.messages.extract:extract_javascript
    """,

    cmdclass = {'build_doc': build_doc, 'test_doc': test_doc}
)
| apache-2.0 |
lumig242/Hue-Integration-with-CDAP | desktop/core/ext-py/Django-1.6.10/tests/check/tests.py | 51 | 5087 | from django.core.checks.compatibility import base
from django.core.checks.compatibility import django_1_6_0
from django.core.management.commands import check
from django.core.management import call_command
from django.db.models.fields import NOT_PROVIDED
from django.test import TestCase
from .models import Book
class StubCheckModule(object):
    """Stand-in for a compatibility-check module.

    Deliberately lacks a ``run_checks`` attribute, so the check framework
    should emit a warning when it encounters this object.
    """
    __name__ = 'StubCheckModule'
class FakeWarnings(object):
    """Test double for the ``warnings`` module that records messages."""

    def __init__(self):
        # Messages captured by warn(), oldest first.
        self._warnings = []

    def warn(self, message):
        # Record the message instead of emitting a real warning.
        self._warnings.append(message)
class CompatChecksTestCase(TestCase):
    """
    Tests for the Django 1.6 compatibility checks and for the ``check``
    management command that reports them.
    """

    def setUp(self):
        super(CompatChecksTestCase, self).setUp()

        # We're going to override the list of checks to perform for test
        # consistency in the future.
        self.old_compat_checks = base.COMPAT_CHECKS
        base.COMPAT_CHECKS = [
            django_1_6_0,
        ]

    def tearDown(self):
        # Restore what's supposed to be in ``COMPAT_CHECKS``.
        base.COMPAT_CHECKS = self.old_compat_checks
        super(CompatChecksTestCase, self).tearDown()

    def test_check_test_runner_new_default(self):
        with self.settings(TEST_RUNNER='django.test.runner.DiscoverRunner'):
            result = django_1_6_0.check_test_runner()
            self.assertTrue("Django 1.6 introduced a new default test runner" in result)

    def test_check_test_runner_overridden(self):
        with self.settings(TEST_RUNNER='myapp.test.CustomRunnner'):
            self.assertEqual(django_1_6_0.check_test_runner(), None)

    def test_run_checks_new_default(self):
        with self.settings(TEST_RUNNER='django.test.runner.DiscoverRunner'):
            result = django_1_6_0.run_checks()
            self.assertEqual(len(result), 1)
            self.assertTrue("Django 1.6 introduced a new default test runner" in result[0])

    def test_run_checks_overridden(self):
        with self.settings(TEST_RUNNER='myapp.test.CustomRunnner'):
            self.assertEqual(len(django_1_6_0.run_checks()), 0)

    def test_boolean_field_default_value(self):
        with self.settings(TEST_RUNNER='myapp.test.CustomRunnner'):
            # We patch the field's default value to trigger the warning
            boolean_field = Book._meta.get_field('is_published')
            old_default = boolean_field.default

            try:
                boolean_field.default = NOT_PROVIDED
                result = django_1_6_0.run_checks()
                self.assertEqual(len(result), 1)
                self.assertTrue("You have not set a default value for one or more BooleanFields" in result[0])
                self.assertTrue('check.Book: "is_published"' in result[0])
                # We did not patch the BlogPost.is_published field so
                # there should not be a warning about it
                self.assertFalse('check.BlogPost' in result[0])
            finally:
                # Restore the ``default``
                boolean_field.default = old_default

    def test_check_compatibility(self):
        with self.settings(TEST_RUNNER='django.test.runner.DiscoverRunner'):
            result = base.check_compatibility()
            self.assertEqual(len(result), 1)
            self.assertTrue("Django 1.6 introduced a new default test runner" in result[0])

        with self.settings(TEST_RUNNER='myapp.test.CustomRunnner'):
            self.assertEqual(len(base.check_compatibility()), 0)

    def test_check_compatibility_warning(self):
        # First, we're patching over the ``COMPAT_CHECKS`` with a stub which
        # will trigger the warning.
        base.COMPAT_CHECKS = [
            StubCheckModule(),
        ]

        # Next, we unfortunately have to patch out ``warnings``.
        old_warnings = base.warnings
        base.warnings = FakeWarnings()

        self.assertEqual(len(base.warnings._warnings), 0)

        with self.settings(TEST_RUNNER='myapp.test.CustomRunnner'):
            self.assertEqual(len(base.check_compatibility()), 0)

        self.assertEqual(len(base.warnings._warnings), 1)
        self.assertTrue("The 'StubCheckModule' module lacks a 'run_checks'" in base.warnings._warnings[0])

        # Restore the ``warnings``.
        base.warnings = old_warnings

    def test_management_command(self):
        # Again, we unfortunately have to patch out ``warnings``. Different
        old_warnings = check.warnings
        check.warnings = FakeWarnings()

        self.assertEqual(len(check.warnings._warnings), 0)

        # Should not produce any warnings.
        with self.settings(TEST_RUNNER='myapp.test.CustomRunnner'):
            call_command('check')

        self.assertEqual(len(check.warnings._warnings), 0)

        with self.settings(TEST_RUNNER='django.test.runner.DiscoverRunner'):
            call_command('check')

        self.assertEqual(len(check.warnings._warnings), 1)
        self.assertTrue("Django 1.6 introduced a new default test runner" in check.warnings._warnings[0])

        # Restore ``warnings`` on the module we actually patched (``check``,
        # not ``base``).  Previously this line restored ``base.warnings``,
        # which left ``check.warnings`` permanently replaced by the fake for
        # any test that ran afterwards.
        check.warnings = old_warnings
| apache-2.0 |
Gota7/Miyamoto | loading.py | 2 | 29315 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Miyamoto! Level Editor - New Super Mario Bros. U Level Editor
# Copyright (C) 2009-2021 Treeki, Tempus, angelsl, JasonP27, Kinnay,
# MalStar1000, RoadrunnerWMC, MrRean, Grop, AboodXD, Gota7, John10v10,
# mrbengtsson
# This file is part of Miyamoto!.
# Miyamoto! is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Miyamoto! is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Miyamoto!. If not, see <http://www.gnu.org/licenses/>.
################################################################
################################################################
############ Imports ############
from PyQt5 import QtGui, QtWidgets
import struct
from xml.etree import ElementTree as etree
import globals
import spritelib as SLib
from gamedefs import MiyamotoGameDefinition, GetPath
from misc import SpriteDefinition, BGName, setting, setSetting
import SarcLib
from strings import MiyamotoTranslation
from tileset import TilesetTile, ObjectDef
from tileset import loadGTX, ProcessOverrides
from tileset import CascadeTilesetNames_Category
from tileset import SortTilesetNames_Category
from ui import MiyamotoTheme
#################################
def LoadTheme():
    """
    Loads the theme
    """
    # 'Classic' is the fallback when no theme preference is stored.
    # (Local renamed so we don't shadow the builtin id().)
    theme_id = setting('Theme')
    if theme_id is None:
        theme_id = 'Classic'
    print('THEME ID: ' + str(theme_id))
    globals.theme = MiyamotoTheme(theme_id)
def LoadBGNames():
    """
    Loads the BG names and their translations
    """
    # Walk the internal-name file and its translation file in lockstep;
    # only rows present (non-empty) in both files produce an entry.
    globals.names_bg = []
    with open(GetPath('bg'), 'r') as names_file, open(GetPath('bgTrans'), 'r') as trans_file:
        for raw_name, raw_trans in zip(names_file.readlines(), trans_file.readlines()):
            name = raw_name.rstrip()
            trans = raw_trans.rstrip()
            if name and trans:
                globals.names_bg.append(BGName(name, trans))

    # The "Custom" pseudo-entry always comes last.
    globals.names_bg.append(BGName.Custom())
def LoadLevelNames():
    """
    Ensures that the level name info is loaded
    """
    paths, isPatch = globals.gamedef.recursiveFiles('levelnames', True)
    if isPatch:
        # Patches apply on top of the base translation's level list,
        # so the base file must be processed first.
        paths = [globals.trans.files['levelnames']] + paths

    globals.LevelNames = []
    for path in paths:
        # Parse the file
        tree = etree.parse( path )
        root = tree.getroot()

        # Parse the nodes (root acts like a large category)
        patchLevelNames = LoadLevelNames_Category(root)
        # Apply renames to existing entries first, then append anything new.
        LoadLevelNames_ReplaceCategory(globals.LevelNames, patchLevelNames)
        LoadLevelNames_AddMissingCategories(globals.LevelNames, patchLevelNames)
def LoadLevelNames_ReplaceCategory(node, node_patch):
    """
    Applies a level-name patch to ``node`` in place.

    Entries are [name, contents] pairs: contents is a list for a category
    or a filename string for a level.  Categories with a matching name are
    recursed into; levels with a matching filename get the patch's name.
    """
    for entry in node:
        for patch_entry in node_patch:
            same_category = (isinstance(entry[1], list)
                             and isinstance(patch_entry[1], list)
                             and entry[0] == patch_entry[0])
            if same_category:
                LoadLevelNames_ReplaceCategory(entry[1], patch_entry[1])
                break

            same_level_file = (isinstance(entry[1], str)
                               and isinstance(patch_entry[1], str)
                               and entry[1] == patch_entry[1])
            if same_level_file:
                entry[0] = patch_entry[0]
                break
def LoadLevelNames_AddMissingCategories(node, node_patch):
    """
    Merges patch entries into ``node`` in place.

    Categories already present (by name) are recursed into; missing
    categories are appended wholesale.  Levels are appended unless a level
    with the same filename already exists at this nesting level.
    """
    for patch_entry in node_patch:
        if isinstance(patch_entry[1], list):
            # Category: recurse into the existing category of the same
            # name, otherwise append the whole patch category.
            for entry in node:
                if isinstance(entry[1], list) and entry[0] == patch_entry[0]:
                    LoadLevelNames_AddMissingCategories(entry[1], patch_entry[1])
                    break
            else:
                node.append(patch_entry)
        else:
            # Level: only append when no level here has the same filename.
            already_present = any(
                isinstance(entry[1], str) and entry[1] == patch_entry[1]
                for entry in node
            )
            if not already_present:
                node.append(patch_entry)
def LoadLevelNames_Category(node):
    """
    Parses one LevelNames XML category element into a nested list.

    Returns a list of [name, contents] pairs where contents is either a
    recursively-parsed sub-category list or a level's file string.
    """
    entries = []
    for child in node:
        tag = child.tag.lower()
        if tag == 'category':
            entries.append([str(child.attrib['name']), LoadLevelNames_Category(child)])
        elif tag == 'level':
            entries.append([str(child.attrib['name']), str(child.attrib['file'])])
    return entries
def LoadTilesetNames(reload_=False):
    """
    Ensures that the tileset name info is loaded

    Reads every 'tilesets' XML (base translation first, then gamedef
    patches) into globals.TilesetNames: one [entries, sorted-flag] pair
    per tileset slot 0-3.  reload_: reload even if already loaded.
    """
    if (globals.TilesetNames is not None) and (not reload_): return

    # Get paths; the base translation file comes first so patches override it
    paths = globals.gamedef.recursiveFiles('tilesets')
    new = [globals.trans.files['tilesets']]
    for path in paths: new.append(path)
    paths = new

    # Read each file
    globals.TilesetNames = [[[], False], [[], False], [[], False], [[], False]]
    for path in paths:
        tree = etree.parse(path)
        root = tree.getroot()

        # Go through each slot
        for node in root:
            if node.tag.lower() != 'slot': continue
            try:
                slot = int(node.attrib['num'])
            except ValueError:
                continue
            if slot > 3: continue

            # Parse the category data into a list
            newlist = [LoadTilesetNames_Category(node), ]
            if 'sorted' in node.attrib:
                newlist.append(node.attrib['sorted'].lower() == 'true')
            else:
                newlist.append(globals.TilesetNames[slot][1]) # inherit

            # Apply it as a patch over the current entry
            newlist[0] = CascadeTilesetNames_Category(globals.TilesetNames[slot][0], newlist[0])

            # Sort it alphabetically unless the file declared its own order
            if not newlist[1]:
                newlist[0] = SortTilesetNames_Category(newlist[0])

            globals.TilesetNames[slot] = newlist
def LoadTilesetNames_Category(node):
    """
    Parses one TilesetNames XML slot/category element.

    Returns a list whose items are either [name, sub-entries, sorted-flag]
    lists for nested categories or (filename, display-name) tuples for
    individual tilesets.
    """
    entries = []
    for child in node:
        tag = child.tag.lower()
        if tag == 'category':
            # Default the sorted flag to False when the attribute is absent.
            sorted_flag = False
            if 'sorted' in child.attrib:
                sorted_flag = str(child.attrib['sorted'].lower()) == 'true'
            entries.append([
                str(child.attrib['name']),
                LoadTilesetNames_Category(child),
                sorted_flag,
            ])
        elif tag == 'tileset':
            entries.append((str(child.attrib['filename']), str(child.attrib['name'])))
    return list(entries)
def LoadObjDescriptions(reload_=False):
    """
    Ensures that the object description is loaded

    Reads every 'ts1_descriptions' file (base translation first, then any
    gamedef patches, so later files override earlier ones) into the
    globals.ObjDesc dict mapping object number -> description string.

    reload_: when True, reload even if descriptions are already loaded.
    """
    if (globals.ObjDesc is not None) and not reload_: return

    paths, isPatch = globals.gamedef.recursiveFiles('ts1_descriptions', True)
    if isPatch:
        # Patches apply on top of the base translation file.
        paths = [globals.trans.files['ts1_descriptions']] + list(paths)

    globals.ObjDesc = {}
    for path in paths:
        # 'with' guarantees the handle is closed even if parsing fails.
        with open(path) as f:
            for line in f:
                line = line.strip()
                if not line:
                    # Tolerate blank lines (e.g. a trailing newline).
                    continue
                # Split on the first '=' only, so that descriptions may
                # themselves contain '=' characters without being cut off.
                num, desc = line.split('=', 1)
                globals.ObjDesc[int(num)] = desc
def LoadSpriteData():
    """
    Ensures that the sprite data info is loaded
    """
    errors = []
    errortext = []
    spriteIds = [-1]

    # It works this way so that it can overwrite settings based on order of precedence
    paths = [(globals.trans.files['spritedata'], None)]
    for pathtuple in globals.gamedef.multipleRecursiveFiles('spritedata', 'spritenames'):
        paths.append(pathtuple)

    # First pass: scan every XML only to find the highest sprite ID, so
    # globals.Sprites can be allocated with the right size up front.
    for sdpath, snpath in paths:

        # Add XML sprite data, if there is any
        if sdpath not in (None, ''):
            path = sdpath if isinstance(sdpath, str) else sdpath.path
            tree = etree.parse(path)
            root = tree.getroot()

            for sprite in root:
                if sprite.tag.lower() != 'sprite':
                    continue

                try:
                    spriteIds.append(int(sprite.attrib['id']))

                except ValueError:
                    continue

    globals.NumSprites = max(spriteIds) + 1
    globals.Sprites = [None] * globals.NumSprites

    # Second pass: actually parse each sprite definition; later paths
    # override earlier ones for the same ID.
    for sdpath, snpath in paths:

        # Add XML sprite data, if there is any
        if sdpath not in (None, ''):
            path = sdpath if isinstance(sdpath, str) else sdpath.path
            tree = etree.parse(path)
            root = tree.getroot()

            for sprite in root:
                if sprite.tag.lower() != 'sprite':
                    continue

                try:
                    spriteid = int(sprite.attrib['id'])

                except ValueError:
                    continue

                spritename = sprite.attrib['name']
                notes = None
                relatedObjFiles = None

                if 'notes' in sprite.attrib:
                    notes = globals.trans.string('SpriteDataEditor', 2, '[notes]', sprite.attrib['notes'])

                if 'files' in sprite.attrib:
                    relatedObjFiles = globals.trans.string('SpriteDataEditor', 8, '[list]',
                                                           sprite.attrib['files'].replace(';', '<br>'))

                sdef = SpriteDefinition()
                sdef.id = spriteid
                sdef.name = spritename
                sdef.notes = notes
                sdef.relatedObjFiles = relatedObjFiles

                try:
                    sdef.loadFrom(sprite)

                except Exception as e:
                    # Collect parse failures and keep going; report them all at the end
                    errors.append(str(spriteid))
                    errortext.append(str(e))

                globals.Sprites[spriteid] = sdef

        # Add TXT sprite names, if there are any
        # This code is only ever run when a custom
        # gamedef is loaded, because spritenames.txt
        # is a file only ever used by custom gamedefs.
        if (snpath is not None) and (snpath.path is not None):
            with open(snpath.path) as snfile:
                data = snfile.read()

            # Split the data
            data = data.split('\n')
            for i, line in enumerate(data):
                data[i] = line.split(':')

            # Apply it
            for spriteid, name in data:
                try:
                    globals.Sprites[int(spriteid)].name = name

                except Exception as e:
                    errors.append(spriteid)
                    errortext.append(str(e))

    # Warn the user if errors occurred
    if len(errors) > 0:
        QtWidgets.QMessageBox.warning(None, globals.trans.string('Err_BrokenSpriteData', 0),
                                      globals.trans.string('Err_BrokenSpriteData', 1, '[sprites]', ', '.join(errors)),
                                      QtWidgets.QMessageBox.Ok)
        QtWidgets.QMessageBox.warning(None, globals.trans.string('Err_BrokenSpriteData', 2), repr(errortext))
def LoadSpriteCategories(reload_=False):
    """
    Ensures that the sprite category info is loaded

    Builds globals.SpriteCategories as a list of (view-name, categories,
    extra) tuples; each category is a (name, sprite-id-list) pair.
    Later files merge into views/categories created by earlier ones.

    reload_: when True, reload even if the categories are already loaded.
    """
    if (globals.SpriteCategories is not None) and not reload_: return

    paths, isPatch = globals.gamedef.recursiveFiles('spritecategories', True)
    if isPatch:
        # The base translation file must come first so patches extend it.
        new = []
        new.append(globals.trans.files['spritecategories'])
        for path in paths: new.append(path)
        paths = new

    globals.SpriteCategories = []

    # Add a Search category containing every sprite ID
    globals.SpriteCategories.append((globals.trans.string('Sprites', 19), [(globals.trans.string('Sprites', 16), list(range(globals.NumSprites)))], []))
    globals.SpriteCategories[-1][1][0][1].append(9999) # 'no results' special case

    for path in paths:
        tree = etree.parse(path)
        root = tree.getroot()

        CurrentView = None
        for view in root:
            if view.tag.lower() != 'view': continue

            viewname = view.attrib['name']

            # See if it's in there already; reuse the existing mutable
            # category list so sprites merge into the same view.
            CurrentView = []
            for potentialview in globals.SpriteCategories:
                if potentialview[0] == viewname: CurrentView = potentialview[1]
            if CurrentView == []: globals.SpriteCategories.append((viewname, CurrentView, []))

            CurrentCategory = None
            for category in view:
                if category.tag.lower() != 'category': continue

                catname = category.attrib['name']

                # See if it's in there already (same aliasing trick as above)
                CurrentCategory = []
                for potentialcat in CurrentView:
                    if potentialcat[0] == catname: CurrentCategory = potentialcat[1]
                if CurrentCategory == []: CurrentView.append((catname, CurrentCategory))

                for attach in category:
                    if attach.tag.lower() != 'attach': continue

                    sprite = attach.attrib['sprite']
                    if '-' not in sprite:
                        if int(sprite) not in CurrentCategory:
                            CurrentCategory.append(int(sprite))
                    else:
                        # Ranges like "10-20" are inclusive at both ends
                        x = sprite.split('-')
                        for i in range(int(x[0]), int(x[1]) + 1):
                            if i not in CurrentCategory:
                                CurrentCategory.append(i)
def LoadSpriteListData(reload_=False):
    """
    Ensures that the sprite list modifier data is loaded

    Reads 'spritelistdata.txt' (the bundled base file first, then any
    gamedef patches) into globals.SpriteListData: 24 sorted lists of
    sprite IDs.  The file format is ';'-separated lines with
    ','-separated IDs; non-numeric items are ignored.

    reload_: when True, reload even if the data is already loaded.
    """
    if (globals.SpriteListData is not None) and not reload_: return

    # Base file first so patch files merge on top of it.
    paths = ['miyamotodata/spritelistdata.txt'] + list(globals.gamedef.recursiveFiles('spritelistdata'))

    globals.SpriteListData = [[] for _ in range(24)]
    for path in paths:
        # 'with' closes the file even if parsing below raises.
        with open(path) as f:
            data = f.read()

        split = data.replace('\n', '').split(';')
        for lineidx in range(24):
            for item in split[lineidx].split(','):
                try:
                    newitem = int(item)
                except ValueError:
                    # Skip blanks and anything non-numeric.
                    continue
                if newitem not in globals.SpriteListData[lineidx]:
                    globals.SpriteListData[lineidx].append(newitem)

            globals.SpriteListData[lineidx].sort()
def LoadEntranceNames(reload_=False):
    """
    Ensures that the entrance names are loaded

    Reads every 'entrancetypes' file ("id:name" per line; base translation
    first, then gamedef patches so patches override) and builds
    globals.EntranceTypeNames, one display string per consecutive ID
    starting at 0.

    reload_: when True, reload even if the names are already loaded.
    """
    if (globals.EntranceTypeNames is not None) and not reload_: return

    paths, isPatch = globals.gamedef.recursiveFiles('entrancetypes', True)
    if isPatch:
        # The base translation file must be processed first.
        paths = [globals.trans.files['entrancetypes']] + list(paths)

    NameList = {}
    for path in paths:
        with open(path, 'r') as f:
            for line in f:
                line = line.rstrip('\n')
                if not line:
                    # Tolerate blank lines (e.g. a trailing newline).
                    continue
                # Split on the first ':' only, so entrance names may
                # themselves contain colons without being truncated.
                id_str, name = line.split(':', 1)
                NameList[int(id_str)] = name

    # Build display strings for consecutive IDs from 0; stop at the first
    # gap in the ID sequence (same contract as the original loop).
    globals.EntranceTypeNames = []
    idx = 0
    while idx in NameList:
        globals.EntranceTypeNames.append(globals.trans.string('EntranceDataEditor', 28, '[id]', idx, '[name]', NameList[idx]))
        idx += 1
def _LoadTileset(idx, name):
    """
    Load in a tileset into a specific slot

    idx: tileset slot (0-3); slot N owns tile IDs N*256 .. N*256+255.
    name: archive name inside globals.szsData.
    Returns False on corrupted data, None otherwise.
    """
    # if this file's not found, return
    if name not in globals.szsData: return

    sarcdata = globals.szsData[name]
    sarc = SarcLib.SARC_Archive()
    sarc.load(sarcdata)

    # Decompress the textures (color map, normal map and collision data)
    try:
        comptiledata = sarc['BG_tex/%s.gtx' % name].data
        nmldata = sarc['BG_tex/%s_nml.gtx' % name].data
        colldata = sarc['BG_chk/d_bgchk_%s.bin' % name].data
    except KeyError:
        QtWidgets.QMessageBox.warning(
            None, globals.trans.string('Err_CorruptedTilesetData', 0),
            globals.trans.string('Err_CorruptedTilesetData', 1, '[file]', name),
        )
        return False

    # load in the textures
    img = loadGTX(comptiledata)
    nml = loadGTX(nmldata)

    # Divide it into individual tiles and
    # add collisions at the same time
    def getTileFromImage(tilemap, xtilenum, ytilenum):
        # Each tile occupies a 64x64 cell; crop to the inner 60x60 to
        # drop the 2px border around every tile in the atlas.
        return tilemap.copy((xtilenum * 64) + 2, (ytilenum * 64) + 2, 60, 60)

    dest = QtGui.QPixmap.fromImage(img)
    dest2 = QtGui.QPixmap.fromImage(nml)

    sourcex = 0
    sourcey = 0
    tileoffset = idx * 256
    # Walk the 32-tiles-wide atlas row by row; each tile's collision is an
    # 8-byte little-endian value at its index in the collision blob.
    for i in range(tileoffset, tileoffset + 256):
        T = TilesetTile(getTileFromImage(dest, sourcex, sourcey), getTileFromImage(dest2, sourcex, sourcey))
        T.setCollisions(struct.unpack_from('<Q', colldata, (i - tileoffset) * 8)[0])
        globals.Tiles[i] = T

        sourcex += 1
        if sourcex >= 32:
            sourcex = 0
            sourcey += 1

    # Load the tileset animations, if there are any (slot 0 only)
    if idx == 0:
        hatena_anime = None
        block_anime = None
        tuka_coin_anime = None
        belt_conveyor_anime = None

        # Each animation texture is optional; missing ones stay None.
        try:
            hatena_anime = loadGTX(sarc['BG_tex/hatena_anime.gtx'].data)

        except:
            pass

        try:
            block_anime = loadGTX(sarc['BG_tex/block_anime.gtx'].data)

        except:
            pass

        try:
            tuka_coin_anime = loadGTX(sarc['BG_tex/tuka_coin_anime.gtx'].data)

        except:
            pass

        try:
            belt_conveyor_anime = loadGTX(sarc['BG_tex/belt_conveyor_anime.gtx'].data, True)

        except:
            pass

        # Attach animations per tile core type; the numeric core types are
        # matched below (7, 6, 2, 17) -- presumably ? block, brick, coin and
        # conveyor respectively; TODO confirm against the tileset module.
        for i in range(256):
            if globals.Tiles[i].coreType == 7:
                if hatena_anime:
                    globals.Tiles[i].addAnimationData(hatena_anime)

            elif globals.Tiles[i].coreType == 6:
                if block_anime:
                    globals.Tiles[i].addAnimationData(block_anime)

            elif globals.Tiles[i].coreType == 2:
                if tuka_coin_anime:
                    globals.Tiles[i].addAnimationData(tuka_coin_anime)

            elif globals.Tiles[i].coreType == 17:
                if belt_conveyor_anime:
                    # Conveyor tiles come in two rows of six (reverse and
                    # forward variants at three positions each).
                    for x in range(2):
                        if i == 144 + x * 16:
                            globals.Tiles[i].addConveyorAnimationData(belt_conveyor_anime, 0, True)

                        elif i == 145 + x * 16:
                            globals.Tiles[i].addConveyorAnimationData(belt_conveyor_anime, 1, True)

                        elif i == 146 + x * 16:
                            globals.Tiles[i].addConveyorAnimationData(belt_conveyor_anime, 2, True)

                        elif i == 147 + x * 16:
                            globals.Tiles[i].addConveyorAnimationData(belt_conveyor_anime, 0)

                        elif i == 148 + x * 16:
                            globals.Tiles[i].addConveyorAnimationData(belt_conveyor_anime, 1)

                        elif i == 149 + x * 16:
                            globals.Tiles[i].addConveyorAnimationData(belt_conveyor_anime, 2)

        # The override tiles mirror the ?-block/brick animations too.
        for tile in globals.Overrides:
            if tile.coreType == 7:
                if hatena_anime:
                    tile.addAnimationData(hatena_anime)

            elif tile.coreType == 6:
                if block_anime:
                    tile.addAnimationData(block_anime)

    # Load the object definitions: the index file holds 6-byte records
    # (offset, width, height, random byte) pointing into the data file.
    defs = [None] * 256

    indexfile = sarc['BG_unt/%s_hd.bin' % name].data
    deffile = sarc['BG_unt/%s.bin' % name].data
    objcount = len(indexfile) // 6
    indexstruct = struct.Struct('>HBBH')

    for i in range(objcount):
        data = indexstruct.unpack_from(indexfile, i * 6)
        obj = ObjectDef()
        obj.width = data[1]
        obj.height = data[2]
        obj.randByte = data[3]
        obj.load(deffile, data[0])
        defs[i] = obj

    globals.ObjectDefinitions[idx] = defs

    ProcessOverrides(name)
def LoadTileset(idx, name, reload=False):
    # Thin public wrapper around _LoadTileset.  The 'reload' flag is
    # accepted for API compatibility but is currently unused.
    return _LoadTileset(idx, name)
def LoadOverrides():
    """
    Load overrides

    Slices miyamotodata/overrides.png into TileWidth-sized tiles and
    stores them in globals.Overrides.
    """
    OverrideBitmap = QtGui.QPixmap('miyamotodata/overrides.png')
    idx = 0
    xcount = OverrideBitmap.width() // globals.TileWidth
    ycount = OverrideBitmap.height() // globals.TileWidth
    globals.Overrides = [None] * (xcount * ycount)
    sourcex = 0
    sourcey = 0

    # Walk the sheet row by row, cutting out one tile per cell.
    for y in range(ycount):
        for x in range(xcount):
            bmp = OverrideBitmap.copy(sourcex, sourcey, globals.TileWidth, globals.TileWidth)
            globals.Overrides[idx] = TilesetTile(bmp)

            # Set collisions if it's a brick or question
            if (x < 11 or x == 14) and y == 2: globals.Overrides[idx].setQuestionCollisions()
            elif x < 12 and y == 1: globals.Overrides[idx].setBrickCollisions()

            idx += 1
            sourcex += globals.TileWidth
        sourcex = 0
        sourcey += globals.TileWidth

    # Round idx up to the next multiple of 16 (one full row of overrides).
    # NOTE(review): idx is not read after this point -- looks vestigial.
    if idx % 16 != 0:
        idx -= (idx % 16)
        idx += 16

    # ? Block for Sprite 59
    bmp = OverrideBitmap.copy(14 * globals.TileWidth, 2 * globals.TileWidth, globals.TileWidth, globals.TileWidth)
    globals.Overrides.append(TilesetTile(bmp))
def LoadTranslation():
    """
    Loads the translation
    """
    name = setting('Translation')
    # Any of these stored values means "use the built-in English strings".
    english_markers = (None, 'None', 'English', '', 0)
    globals.trans = MiyamotoTranslation(None if name in english_markers else name)

    if globals.generateStringsXML:
        globals.trans.generateXML()
def LoadGameDef(name=None, dlg=None):
    """
    Loads a game definition

    name: gamedef name to load, or None for the default definition.
    dlg: when truthy, the chosen gamedef is remembered in the settings.
    Returns True on success (currently always, since failures re-raise).
    """
    # Put the whole thing into a try-except clause
    # to catch whatever errors may happen
    try:
        # Load the gamedef
        globals.gamedef = MiyamotoGameDefinition(name)
        if globals.gamedef.custom and (not globals.settings.contains('GamePath_' + globals.gamedef.name)) and globals.mainWindow:
            # First-time usage of this gamedef. Have the
            # user pick a stage folder so we can load stages
            # and tilesets from there
            QtWidgets.QMessageBox.information(None, globals.trans.string('Gamedefs', 2),
                                              globals.trans.string('Gamedefs', 3, '[game]', globals.gamedef.name),
                                              QtWidgets.QMessageBox.Ok)
            result = globals.mainWindow.HandleChangeGamePath(True)
            if result is not True:
                QtWidgets.QMessageBox.information(None, globals.trans.string('Gamedefs', 4),
                                                  globals.trans.string('Gamedefs', 5, '[game]', globals.gamedef.name),
                                                  QtWidgets.QMessageBox.Ok)
            else:
                QtWidgets.QMessageBox.information(None, globals.trans.string('Gamedefs', 6),
                                                  globals.trans.string('Gamedefs', 7, '[game]', globals.gamedef.name),
                                                  QtWidgets.QMessageBox.Ok)

        # Load BG names
        LoadBGNames()

        # Load spritedata.xml and spritecategories.xml
        LoadSpriteData()
        LoadSpriteListData(True)
        LoadSpriteCategories(True)
        if globals.mainWindow:
            # Refresh every sprite-related UI widget for the new gamedef
            globals.mainWindow.spriteViewPicker.clear()
            for cat in globals.SpriteCategories:
                globals.mainWindow.spriteViewPicker.addItem(cat[0])
            globals.mainWindow.sprPicker.LoadItems() # Reloads the sprite picker list items
            globals.mainWindow.spriteViewPicker.setCurrentIndex(0) # Sets the sprite picker to category 0 (enemies)
            globals.mainWindow.spriteDataEditor.setSprite(globals.mainWindow.spriteDataEditor.spritetype,
                                                          True) # Reloads the sprite data editor fields
            globals.mainWindow.spriteDataEditor.update()

        # Reload tilesets
        LoadObjDescriptions(True) # reloads ts1_descriptions
        LoadTilesetNames(True) # reloads tileset names

        # Load sprites.py
        SLib.SpritesFolders = globals.gamedef.recursiveFiles('sprites', False, True)

        SLib.ImageCache.clear()
        SLib.SpriteImagesLoaded.clear()

        SLib.loadVines()

        if globals.Area is not None:
            # Load images only for sprite types actually present in the area
            spriteClasses = globals.gamedef.getImageClasses()

            for s in globals.Area.sprites:
                if s.type in SLib.SpriteImagesLoaded: continue
                if s.type not in spriteClasses: continue

                spriteClasses[s.type].loadImages()

                SLib.SpriteImagesLoaded.add(s.type)

            for s in globals.Area.sprites:
                if s.type in spriteClasses:
                    s.setImageObj(spriteClasses[s.type])

                else:
                    # Fall back to the generic image for unknown types
                    s.setImageObj(SLib.SpriteImage)

            # Reload the sprite-picker text
            for spr in globals.Area.sprites:
                spr.UpdateListItem() # Reloads the sprite-picker text

        # Load entrance names
        LoadEntranceNames(True)

    except Exception as e:
        # NOTE(review): the handler currently just re-raises; the friendly
        # fallback below is disabled.
        raise
    #    # Something went wrong.
    #    QtWidgets.QMessageBox.information(None, globals.trans.string('Gamedefs', 17), globals.trans.string('Gamedefs', 18, '[error]', str(e)))
    #    if name is not None: LoadGameDef(None)
    #    return False

    # Success!
    if dlg: setSetting('LastGameDef', name)
    return True
def LoadActionsLists():
    """
    Populates the global menu-item tables used by the Preferences dialog
    and the toolbar.  Each entry is (label, on-toolbar-by-default, key
    into globals.mainWindow.actions).
    """
    # Define the menu items, their default settings and their globals.mainWindow.actions keys
    # These are used both in the Preferences Dialog and when init'ing the toolbar.
    globals.FileActions = (
        (globals.trans.string('MenuItems', 0), True, 'newlevel'),
        (globals.trans.string('MenuItems', 2), True, 'openfromname'),
        (globals.trans.string('MenuItems', 4), False, 'openfromfile'),
        (globals.trans.string('MenuItems', 6), False, 'openrecent'),
        (globals.trans.string('MenuItems', 8), True, 'save'),
        (globals.trans.string('MenuItems', 10), False, 'saveas'),
        (globals.trans.string('MenuItems', 12), False, 'metainfo'),
        (globals.trans.string('MenuItems', 14), True, 'screenshot'),
        (globals.trans.string('MenuItems', 16), False, 'changegamepath'),
        (globals.trans.string('MenuItems', 132), False, 'changeobjpath'),
        # (globals.trans.string('MenuItems', 16), False, 'changesavepath'),
        (globals.trans.string('MenuItems', 18), False, 'preferences'),
        (globals.trans.string('MenuItems', 20), False, 'exit'),
    )
    globals.EditActions = (
        (globals.trans.string('MenuItems', 22), False, 'selectall'),
        (globals.trans.string('MenuItems', 24), False, 'deselect'),
        (globals.trans.string('MenuItems', 26), True, 'cut'),
        (globals.trans.string('MenuItems', 28), True, 'copy'),
        (globals.trans.string('MenuItems', 30), True, 'paste'),
        (globals.trans.string('MenuItems', 146), True, 'raise'),
        (globals.trans.string('MenuItems', 148), True, 'lower'),
        (globals.trans.string('MenuItems', 32), False, 'shiftitems'),
        (globals.trans.string('MenuItems', 34), False, 'mergelocations'),
        (globals.trans.string('MenuItems', 38), False, 'freezeobjects'),
        (globals.trans.string('MenuItems', 40), False, 'freezesprites'),
        (globals.trans.string('MenuItems', 42), False, 'freezeentrances'),
        (globals.trans.string('MenuItems', 44), False, 'freezelocations'),
        (globals.trans.string('MenuItems', 46), False, 'freezepaths'),
    )
    globals.ViewActions = (
        (globals.trans.string('MenuItems', 48), True, 'showlay0'),
        (globals.trans.string('MenuItems', 50), True, 'showlay1'),
        (globals.trans.string('MenuItems', 52), True, 'showlay2'),
        (globals.trans.string('MenuItems', 54), True, 'showsprites'),
        (globals.trans.string('MenuItems', 56), False, 'showspriteimages'),
        (globals.trans.string('MenuItems', 150), True, 'showrotation'),
        (globals.trans.string('MenuItems', 58), True, 'showlocations'),
        (globals.trans.string('MenuItems', 138), True, 'showpaths'),
        (globals.trans.string('MenuItems', 60), True, 'grid'),
        (globals.trans.string('MenuItems', 62), True, 'zoommax'),
        (globals.trans.string('MenuItems', 64), True, 'zoomin'),
        (globals.trans.string('MenuItems', 66), True, 'zoomactual'),
        (globals.trans.string('MenuItems', 68), True, 'zoomout'),
        (globals.trans.string('MenuItems', 70), True, 'zoommin'),
    )
    globals.SettingsActions = (
        (globals.trans.string('MenuItems', 72), True, 'areaoptions'),
        (globals.trans.string('MenuItems', 74), True, 'zones'),
        (globals.trans.string('MenuItems', 78), True, 'addarea'),
        (globals.trans.string('MenuItems', 80), False, 'importarea'),
        (globals.trans.string('MenuItems', 82), True, 'deletearea'),
        (globals.trans.string('MenuItems', 128), False, 'reloaddata'),
    )
    globals.HelpActions = (
        (globals.trans.string('MenuItems', 86), False, 'infobox'),
        (globals.trans.string('MenuItems', 88), False, 'helpbox'),
        (globals.trans.string('MenuItems', 90), False, 'tipbox'),
        (globals.trans.string('MenuItems', 92), False, 'aboutqt'),
    )
| gpl-3.0 |
Havner/smack-namespace | scripts/analyze_suspend.py | 1537 | 120394 | #!/usr/bin/python
#
# Tool for analyzing suspend/resume timing
# Copyright (c) 2013, Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU General Public License,
# version 2, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
#
# Authors:
# Todd Brandt <todd.e.brandt@linux.intel.com>
#
# Description:
# This tool is designed to assist kernel and OS developers in optimizing
# their linux stack's suspend/resume time. Using a kernel image built
# with a few extra options enabled, the tool will execute a suspend and
# will capture dmesg and ftrace data until resume is complete. This data
# is transformed into a device timeline and a callgraph to give a quick
# and detailed view of which devices and callbacks are taking the most
# time in suspend/resume. The output is a single html file which can be
# viewed in firefox or chrome.
#
# The following kernel build options are required:
# CONFIG_PM_DEBUG=y
# CONFIG_PM_SLEEP_DEBUG=y
# CONFIG_FTRACE=y
# CONFIG_FUNCTION_TRACER=y
# CONFIG_FUNCTION_GRAPH_TRACER=y
#
# For kernel versions older than 3.15:
# The following additional kernel parameters are required:
# (e.g. in file /etc/default/grub)
# GRUB_CMDLINE_LINUX_DEFAULT="... initcall_debug log_buf_len=16M ..."
#
# ----------------- LIBRARIES --------------------
import sys
import time
import os
import string
import re
import platform
from datetime import datetime
import struct
# ----------------- CLASSES --------------------
# Class: SystemValues
# Description:
# A global, single-instance container used to
# store system values and test parameters
class SystemValues:
	"""Global, single-instance container for system values and test
	parameters; the one shared instance is ``sysvals`` (module scope)."""
	version = 3.0
	verbose = False
	testdir = '.'
	tpath = '/sys/kernel/debug/tracing/'
	fpdtpath = '/sys/firmware/acpi/tables/FPDT'
	epath = '/sys/kernel/debug/tracing/events/power/'
	traceevents = [
		'suspend_resume',
		'device_pm_callback_end',
		'device_pm_callback_start'
	]
	modename = {
		'freeze': 'Suspend-To-Idle (S0)',
		'standby': 'Power-On Suspend (S1)',
		'mem': 'Suspend-to-RAM (S3)',
		'disk': 'Suspend-to-disk (S4)'
	}
	mempath = '/dev/mem'
	powerfile = '/sys/power/state'
	suspendmode = 'mem'
	hostname = 'localhost'
	prefix = 'test'
	teststamp = ''
	dmesgfile = ''
	ftracefile = ''
	htmlfile = ''
	rtcwake = False
	rtcwaketime = 10
	rtcpath = ''
	android = False
	adb = 'adb'
	devicefilter = []
	stamp = 0
	execcount = 1
	x2delay = 0
	usecallgraph = False
	usetraceevents = False
	usetraceeventsonly = False
	notestrun = False
	altdevname = dict()
	postresumetime = 0
	tracertypefmt = '# tracer: (?P<t>.*)'
	firmwarefmt = '# fwsuspend (?P<s>[0-9]*) fwresume (?P<r>[0-9]*)$'
	postresumefmt = '# post resume time (?P<t>[0-9]*)$'
	stampfmt = '# suspend-(?P<m>[0-9]{2})(?P<d>[0-9]{2})(?P<y>[0-9]{2})-'+\
				'(?P<H>[0-9]{2})(?P<M>[0-9]{2})(?P<S>[0-9]{2})'+\
				' (?P<host>.*) (?P<mode>.*) (?P<kernel>.*)$'
	def __init__(self):
		"""Detect the hostname and probe for a usable RTC wakealarm device."""
		self.hostname = platform.node()
		if(self.hostname == ''):
			self.hostname = 'localhost'
		rtc = "rtc0"
		if os.path.exists('/dev/rtc'):
			rtc = os.readlink('/dev/rtc')
		rtc = '/sys/class/rtc/'+rtc
		# rtcpath stays '' (rtcwake unsupported) unless all sysfs files exist
		if os.path.exists(rtc) and os.path.exists(rtc+'/date') and \
			os.path.exists(rtc+'/time') and os.path.exists(rtc+'/wakealarm'):
			self.rtcpath = rtc
	def setOutputFile(self):
		"""Derive the html output filename from the dmesg/ftrace inputs."""
		if((self.htmlfile == '') and (self.dmesgfile != '')):
			m = re.match('(?P<name>.*)_dmesg\.txt$', self.dmesgfile)
			if(m):
				self.htmlfile = m.group('name')+'.html'
		if((self.htmlfile == '') and (self.ftracefile != '')):
			m = re.match('(?P<name>.*)_ftrace\.txt$', self.ftracefile)
			if(m):
				self.htmlfile = m.group('name')+'.html'
		if(self.htmlfile == ''):
			self.htmlfile = 'output.html'
	def initTestOutput(self, subdir):
		"""Create the test output directory and generate the timestamped
		output filenames and teststamp string for this test run."""
		if(not self.android):
			self.prefix = self.hostname
			v = open('/proc/version', 'r').read().strip()
			kver = string.split(v)[2]
		else:
			self.prefix = 'android'
			# query kernel version from the device over adb
			v = os.popen(self.adb+' shell cat /proc/version').read().strip()
			kver = string.split(v)[2]
		testtime = datetime.now().strftime('suspend-%m%d%y-%H%M%S')
		if(subdir != "."):
			self.testdir = subdir+"/"+testtime
		else:
			self.testdir = testtime
		self.teststamp = \
			'# '+testtime+' '+self.prefix+' '+self.suspendmode+' '+kver
		self.dmesgfile = \
			self.testdir+'/'+self.prefix+'_'+self.suspendmode+'_dmesg.txt'
		self.ftracefile = \
			self.testdir+'/'+self.prefix+'_'+self.suspendmode+'_ftrace.txt'
		self.htmlfile = \
			self.testdir+'/'+self.prefix+'_'+self.suspendmode+'.html'
		os.mkdir(self.testdir)
	def setDeviceFilter(self, devnames):
		"""Store a whitespace-separated device-name filter as a list."""
		self.devicefilter = string.split(devnames)
	def rtcWakeAlarm(self):
		"""Program the RTC to wake the system rtcwaketime seconds from now."""
		os.system('echo 0 > '+self.rtcpath+'/wakealarm')
		outD = open(self.rtcpath+'/date', 'r').read().strip()
		outT = open(self.rtcpath+'/time', 'r').read().strip()
		mD = re.match('^(?P<y>[0-9]*)-(?P<m>[0-9]*)-(?P<d>[0-9]*)', outD)
		mT = re.match('^(?P<h>[0-9]*):(?P<m>[0-9]*):(?P<s>[0-9]*)', outT)
		if(mD and mT):
			# get the current time from hardware
			utcoffset = int((datetime.now() - datetime.utcnow()).total_seconds())
			dt = datetime(\
				int(mD.group('y')), int(mD.group('m')), int(mD.group('d')),
				int(mT.group('h')), int(mT.group('m')), int(mT.group('s')))
			nowtime = int(dt.strftime('%s')) + utcoffset
		else:
			# if hardware time fails, use the software time
			nowtime = int(datetime.now().strftime('%s'))
		alarm = nowtime + self.rtcwaketime
		os.system('echo %d > %s/wakealarm' % (alarm, self.rtcpath))
# the single global SystemValues instance shared by all code below
sysvals = SystemValues()
# Class: DeviceNode
# Description:
#	 A container used to create a device hierarchy, with a single root node
# and a tree of child nodes. Used by Data.deviceTopology()
class DeviceNode:
	"""One node of the device-hierarchy tree built by Data.deviceTopology():
	a name, a depth, and a list of child DeviceNode objects."""
	name = ''
	children = 0
	depth = 0
	def __init__(self, nodename, nodedepth):
		# every instance gets its own (initially empty) child list
		self.children = []
		self.name = nodename
		self.depth = nodedepth
# Class: Data
# Description:
# The primary container for suspend/resume test data. There is one for
#	 each test run. The data is organized into a chronological hierarchy:
# Data.dmesg {
# root structure, started as dmesg & ftrace, but now only ftrace
# contents: times for suspend start/end, resume start/end, fwdata
# phases {
# 10 sequential, non-overlapping phases of S/R
# contents: times for phase start/end, order/color data for html
# devlist {
# device callback or action list for this phase
# device {
# a single device callback or generic action
# contents: start/stop times, pid/cpu/driver info
# parents/children, html id for timeline/callgraph
# optionally includes an ftrace callgraph
# optionally includes intradev trace events
# }
# }
# }
# }
#
class Data:
	"""Primary container for a single suspend/resume test run.

	self.dmesg holds the 10 sequential, non-overlapping phases; each
	phase holds a 'list' dict of device callbacks/actions keyed by
	name (see the hierarchy description in the comment block above).
	"""
	dmesg = {} # root data structure
	phases = [] # ordered list of phases
	start = 0.0 # test start
	end = 0.0 # test end
	tSuspended = 0.0 # low-level suspend start
	tResumed = 0.0 # low-level resume start
	tLow = 0.0 # time spent in low-level suspend (standby/freeze)
	fwValid = False # is firmware data available
	fwSuspend = 0 # time spent in firmware suspend
	fwResume = 0 # time spent in firmware resume
	dmesgtext = [] # dmesg text file in memory
	testnumber = 0
	idstr = ''
	html_device_id = 0
	stamp = 0
	outfile = ''
	def __init__(self, num):
		# html device ids are prefixed with a per-test letter (a, b, c, ...)
		idchar = 'abcdefghijklmnopqrstuvwxyz'
		self.testnumber = num
		self.idstr = idchar[num]
		self.dmesgtext = []
		self.phases = []
		self.dmesg = { # fixed list of 10 phases
			'suspend_prepare': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#CCFFCC', 'order': 0},
			        'suspend': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#88FF88', 'order': 1},
			   'suspend_late': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#00AA00', 'order': 2},
			  'suspend_noirq': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#008888', 'order': 3},
			'suspend_machine': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#0000FF', 'order': 4},
			 'resume_machine': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#FF0000', 'order': 5},
			   'resume_noirq': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#FF9900', 'order': 6},
			   'resume_early': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#FFCC00', 'order': 7},
			         'resume': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#FFFF88', 'order': 8},
			'resume_complete': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#FFFFCC', 'order': 9}
		}
		self.phases = self.sortedPhases()
	def getStart(self):
		# test start == start of the first phase
		return self.dmesg[self.phases[0]]['start']
	def setStart(self, time):
		self.start = time
		self.dmesg[self.phases[0]]['start'] = time
	def getEnd(self):
		# test end == end of the last phase
		return self.dmesg[self.phases[-1]]['end']
	def setEnd(self, time):
		self.end = time
		self.dmesg[self.phases[-1]]['end'] = time
	def isTraceEventOutsideDeviceCalls(self, pid, time):
		# True if no device callback of this pid spans the given time
		for phase in self.phases:
			list = self.dmesg[phase]['list']
			for dev in list:
				d = list[dev]
				if(d['pid'] == pid and time >= d['start'] and
					time <= d['end']):
					return False
		return True
	def addIntraDevTraceEvent(self, action, name, pid, time):
		# attach a TraceEvent to the device callback that spans (pid, time)
		if(action == 'mutex_lock_try'):
			color = 'red'
		elif(action == 'mutex_lock_pass'):
			color = 'green'
		elif(action == 'mutex_unlock'):
			color = 'blue'
		else:
			# create separate colors based on the name
			v1 = len(name)*10 % 256
			v2 = string.count(name, 'e')*100 % 256
			v3 = ord(name[0])*20 % 256
			color = '#%06X' % ((v1*0x10000) + (v2*0x100) + v3)
		for phase in self.phases:
			list = self.dmesg[phase]['list']
			for dev in list:
				d = list[dev]
				if(d['pid'] == pid and time >= d['start'] and
					time <= d['end']):
					e = TraceEvent(action, name, color, time)
					if('traceevents' not in d):
						d['traceevents'] = []
					d['traceevents'].append(e)
					return d
				# NOTE(review): this break is unreachable; the return
				# above always exits the loop first
				break
		return 0
	def capIntraDevTraceEvent(self, action, name, pid, time):
		# close out (set length of) the matching unfinished TraceEvent
		for phase in self.phases:
			list = self.dmesg[phase]['list']
			for dev in list:
				d = list[dev]
				if(d['pid'] == pid and time >= d['start'] and
					time <= d['end']):
					if('traceevents' not in d):
						return
					for e in d['traceevents']:
						if(e.action == action and
							e.name == name and not e.ready):
							e.length = time - e.time
							e.ready = True
							break
					return
	def trimTimeVal(self, t, t0, dT, left):
		# shift time t to remove a dead window of length dT at t0;
		# left=True trims times after t0, left=False shifts times forward
		if left:
			if(t > t0):
				if(t - dT < t0):
					return t0
				return t - dT
			else:
				return t
		else:
			if(t < t0 + dT):
				if(t > t0):
					return t0 + dT
				return t + dT
			else:
				return t
	def trimTime(self, t0, dT, left):
		# apply trimTimeVal to every timestamp stored in this Data object
		self.tSuspended = self.trimTimeVal(self.tSuspended, t0, dT, left)
		self.tResumed = self.trimTimeVal(self.tResumed, t0, dT, left)
		self.start = self.trimTimeVal(self.start, t0, dT, left)
		self.end = self.trimTimeVal(self.end, t0, dT, left)
		for phase in self.phases:
			p = self.dmesg[phase]
			p['start'] = self.trimTimeVal(p['start'], t0, dT, left)
			p['end'] = self.trimTimeVal(p['end'], t0, dT, left)
			list = p['list']
			for name in list:
				d = list[name]
				d['start'] = self.trimTimeVal(d['start'], t0, dT, left)
				d['end'] = self.trimTimeVal(d['end'], t0, dT, left)
				if('ftrace' in d):
					cg = d['ftrace']
					cg.start = self.trimTimeVal(cg.start, t0, dT, left)
					cg.end = self.trimTimeVal(cg.end, t0, dT, left)
					for line in cg.list:
						line.time = self.trimTimeVal(line.time, t0, dT, left)
				if('traceevents' in d):
					for e in d['traceevents']:
						e.time = self.trimTimeVal(e.time, t0, dT, left)
	def normalizeTime(self, tZero):
		# remove any low-power-state clock gap, then rebase all
		# timestamps so that tZero becomes time 0
		# first trim out any standby or freeze clock time
		if(self.tSuspended != self.tResumed):
			if(self.tResumed > tZero):
				self.trimTime(self.tSuspended, \
					self.tResumed-self.tSuspended, True)
			else:
				self.trimTime(self.tSuspended, \
					self.tResumed-self.tSuspended, False)
		# shift the timeline so that tZero is the new 0
		self.tSuspended -= tZero
		self.tResumed -= tZero
		self.start -= tZero
		self.end -= tZero
		for phase in self.phases:
			p = self.dmesg[phase]
			p['start'] -= tZero
			p['end'] -= tZero
			list = p['list']
			for name in list:
				d = list[name]
				d['start'] -= tZero
				d['end'] -= tZero
				if('ftrace' in d):
					cg = d['ftrace']
					cg.start -= tZero
					cg.end -= tZero
					for line in cg.list:
						line.time -= tZero
				if('traceevents' in d):
					for e in d['traceevents']:
						e.time -= tZero
	def newPhaseWithSingleAction(self, phasename, devname, start, end, color):
		# prepend a new phase (order 0) containing exactly one action
		for phase in self.phases:
			self.dmesg[phase]['order'] += 1
		self.html_device_id += 1
		devid = '%s%d' % (self.idstr, self.html_device_id)
		list = dict()
		list[devname] = \
			{'start': start, 'end': end, 'pid': 0, 'par': '',
			'length': (end-start), 'row': 0, 'id': devid, 'drv': '' };
		self.dmesg[phasename] = \
			{'list': list, 'start': start, 'end': end,
			'row': 0, 'color': color, 'order': 0}
		self.phases = self.sortedPhases()
	def newPhase(self, phasename, start, end, color, order):
		# insert an empty phase at the given order (or append if order < 0),
		# clipping the neighboring phases to the new boundaries
		if(order < 0):
			order = len(self.phases)
		for phase in self.phases[order:]:
			self.dmesg[phase]['order'] += 1
		if(order > 0):
			p = self.phases[order-1]
			self.dmesg[p]['end'] = start
		if(order < len(self.phases)):
			p = self.phases[order]
			self.dmesg[p]['start'] = end
		list = dict()
		self.dmesg[phasename] = \
			{'list': list, 'start': start, 'end': end,
			'row': 0, 'color': color, 'order': order}
		self.phases = self.sortedPhases()
	def setPhase(self, phase, ktime, isbegin):
		# set the start (isbegin) or end timestamp of a phase
		if(isbegin):
			self.dmesg[phase]['start'] = ktime
		else:
			self.dmesg[phase]['end'] = ktime
	def dmesgSortVal(self, phase):
		# sort key: the phase's 'order' field
		return self.dmesg[phase]['order']
	def sortedPhases(self):
		return sorted(self.dmesg, key=self.dmesgSortVal)
	def sortedDevices(self, phase):
		# device names of a phase sorted by their start time
		list = self.dmesg[phase]['list']
		slist = []
		tmp = dict()
		for devname in list:
			dev = list[devname]
			tmp[dev['start']] = devname
		for t in sorted(tmp):
			slist.append(tmp[t])
		return slist
	def fixupInitcalls(self, phase, end):
		# if any calls never returned, clip them at system resume end
		phaselist = self.dmesg[phase]['list']
		for devname in phaselist:
			dev = phaselist[devname]
			if(dev['end'] < 0):
				dev['end'] = end
				vprint('%s (%s): callback didnt return' % (devname, phase))
	def deviceFilter(self, devicefilter):
		# remove all but the relatives of the filter devnames
		filter = []
		for phase in self.phases:
			list = self.dmesg[phase]['list']
			for name in devicefilter:
				dev = name
				# keep the device plus its whole ancestor chain
				while(dev in list):
					if(dev not in filter):
						filter.append(dev)
					dev = list[dev]['par']
				# and all of its descendants
				children = self.deviceDescendants(name, phase)
				for dev in children:
					if(dev not in filter):
						filter.append(dev)
		for phase in self.phases:
			list = self.dmesg[phase]['list']
			rmlist = []
			for name in list:
				pid = list[name]['pid']
				# negative-pid entries are synthetic actions; keep them
				if(name not in filter and pid >= 0):
					rmlist.append(name)
			for name in rmlist:
				del list[name]
	def fixupInitcallsThatDidntReturn(self):
		# if any calls never returned, clip them at system resume end
		for phase in self.phases:
			self.fixupInitcalls(phase, self.getEnd())
	def newActionGlobal(self, name, start, end):
		# add an action to whichever phase it overlaps the most;
		# returns False if it overlaps no phase at all
		targetphase = "none"
		overlap = 0.0
		for phase in self.phases:
			pstart = self.dmesg[phase]['start']
			pend = self.dmesg[phase]['end']
			o = max(0, min(end, pend) - max(start, pstart))
			if(o > overlap):
				targetphase = phase
				overlap = o
		if targetphase in self.phases:
			self.newAction(targetphase, name, -1, '', start, end, '')
			return True
		return False
	def newAction(self, phase, name, pid, parent, start, end, drv):
		# new device callback for a specific phase
		self.html_device_id += 1
		devid = '%s%d' % (self.idstr, self.html_device_id)
		list = self.dmesg[phase]['list']
		length = -1.0
		if(start >= 0 and end >= 0):
			length = end - start
		list[name] = {'start': start, 'end': end, 'pid': pid, 'par': parent,
					  'length': length, 'row': 0, 'id': devid, 'drv': drv }
	def deviceIDs(self, devlist, phase):
		# html ids of the given devices within a phase
		idlist = []
		list = self.dmesg[phase]['list']
		for devname in list:
			if devname in devlist:
				idlist.append(list[devname]['id'])
		return idlist
	def deviceParentID(self, devname, phase):
		# html id of a device's parent (or the raw parent name if the
		# parent has no entry in this phase)
		pdev = ''
		pdevid = ''
		list = self.dmesg[phase]['list']
		if devname in list:
			pdev = list[devname]['par']
		if pdev in list:
			return list[pdev]['id']
		return pdev
	def deviceChildren(self, devname, phase):
		# direct children of a device within a phase
		devlist = []
		list = self.dmesg[phase]['list']
		for child in list:
			if(list[child]['par'] == devname):
				devlist.append(child)
		return devlist
	def deviceDescendants(self, devname, phase):
		# recursive list of all descendants of a device within a phase
		children = self.deviceChildren(devname, phase)
		family = children
		for child in children:
			family += self.deviceDescendants(child, phase)
		return family
	def deviceChildrenIDs(self, devname, phase):
		devlist = self.deviceChildren(devname, phase)
		return self.deviceIDs(devlist, phase)
	def printDetails(self):
		# verbose dump of phase boundaries and device counts
		vprint('          test start: %f' % self.start)
		for phase in self.phases:
			dc = len(self.dmesg[phase]['list'])
			vprint('    %16s: %f - %f (%d devices)' % (phase, \
				self.dmesg[phase]['start'], self.dmesg[phase]['end'], dc))
		vprint('            test end: %f' % self.end)
	def masterTopology(self, name, list, depth):
		# recursively build the DeviceNode tree from the 'resume' phase
		node = DeviceNode(name, depth)
		for cname in list:
			clist = self.deviceChildren(cname, 'resume')
			cnode = self.masterTopology(cname, clist, depth+1)
			node.children.append(cnode)
		return node
	def printTopology(self, node):
		# render the DeviceNode tree as nested html lists
		html = ''
		if node.name:
			info = ''
			drv = ''
			for phase in self.phases:
				list = self.dmesg[phase]['list']
				if node.name in list:
					s = list[node.name]['start']
					e = list[node.name]['end']
					if list[node.name]['drv']:
						drv = ' {'+list[node.name]['drv']+'}'
					info += ('<li>%s: %.3fms</li>' % (phase, (e-s)*1000))
			html += '<li><b>'+node.name+drv+'</b>'
			if info:
				html += '<ul>'+info+'</ul>'
			html += '</li>'
		if len(node.children) > 0:
			html += '<ul>'
			for cnode in node.children:
				html += self.printTopology(cnode)
			html += '</ul>'
		return html
	def rootDeviceList(self):
		# parents referenced by graphed devices which are not themselves
		# graphed, i.e. the top-most roots of the device tree
		# list of devices graphed
		real = []
		for phase in self.dmesg:
			list = self.dmesg[phase]['list']
			for dev in list:
				if list[dev]['pid'] >= 0 and dev not in real:
					real.append(dev)
		# list of top-most root devices
		rootlist = []
		for phase in self.dmesg:
			list = self.dmesg[phase]['list']
			for dev in list:
				pdev = list[dev]['par']
				# skip usb-style bus addresses, they aren't real roots
				if(re.match('[0-9]*-[0-9]*\.[0-9]*[\.0-9]*\:[\.0-9]*$', pdev)):
					continue
				if pdev and pdev not in real and pdev not in rootlist:
					rootlist.append(pdev)
		return rootlist
	def deviceTopology(self):
		# full device topology rendered as html
		rootlist = self.rootDeviceList()
		master = self.masterTopology('', rootlist, 0)
		return self.printTopology(master)
# Class: TraceEvent
# Description:
# A container for trace event data found in the ftrace file
class TraceEvent:
	"""A single intra-device trace event parsed from the ftrace log;
	length/ready are filled in later when the matching end is found."""
	ready = False
	name = ''
	time = 0.0
	color = '#FFFFFF'
	length = 0.0
	action = ''
	def __init__(self, a, n, c, t):
		# a: action string, n: event name, c: html color, t: timestamp
		self.name = n
		self.action = a
		self.time = t
		self.color = c
# Class: FTraceLine
# Description:
# A container for a single line of ftrace data. There are six basic types:
# callgraph line:
# call: " dpm_run_callback() {"
# return: " }"
# leaf: " dpm_run_callback();"
# trace event:
# tracing_mark_write: SUSPEND START or RESUME COMPLETE
# suspend_resume: phase or custom exec block data
# device_pm_callback: device callback info
class FTraceLine:
	"""One parsed line of ftrace data.

	Six basic types (see the comment block above this class): callgraph
	call / return / leaf lines, and trace-event lines (tracing_mark_write,
	suspend_resume, device_pm_callback).
	"""
	time = 0.0
	length = 0.0
	fcall = False
	freturn = False
	fevent = False
	depth = 0
	name = ''
	type = ''
	def __init__(self, t, m, d):
		# t: timestamp string; m: message body; d: duration string, or
		# 'traceevent' to mark a nop-format trace event line
		self.time = float(t)
		# is this a trace event
		if(d == 'traceevent' or re.match('^ *\/\* *(?P<msg>.*) \*\/ *$', m)):
			if(d == 'traceevent'):
				# nop format trace event
				msg = m
			else:
				# function_graph format trace event
				em = re.match('^ *\/\* *(?P<msg>.*) \*\/ *$', m)
				msg = em.group('msg')
			# split an optional "<call>: <msg>" prefix into type/name
			emm = re.match('^(?P<call>.*?): (?P<msg>.*)', msg)
			if(emm):
				self.name = emm.group('msg')
				self.type = emm.group('call')
			else:
				self.name = msg
			self.fevent = True
			return
		# convert the duration to seconds
		if(d):
			self.length = float(d)/1000000
		# the indentation determines the depth
		match = re.match('^(?P<d> *)(?P<o>.*)$', m)
		if(not match):
			return
		self.depth = self.getDepth(match.group('d'))
		m = match.group('o')
		# function return
		if(m[0] == '}'):
			self.freturn = True
			if(len(m) > 1):
				# includes comment with function name
				match = re.match('^} *\/\* *(?P<n>.*) *\*\/$', m)
				if(match):
					self.name = match.group('n')
		# function call
		else:
			self.fcall = True
			# function call with children
			if(m[-1] == '{'):
				match = re.match('^(?P<n>.*) *\(.*', m)
				if(match):
					self.name = match.group('n')
			# function call with no children (leaf)
			elif(m[-1] == ';'):
				# leaf: a call and a return on the same line
				self.freturn = True
				match = re.match('^(?P<n>.*) *\(.*', m)
				if(match):
					self.name = match.group('n')
			# something else (possibly a trace marker)
			else:
				self.name = m
	def getDepth(self, str):
		# two spaces of indentation per nesting level
		# NOTE(review): integer division under python 2; under python 3
		# this would yield a float
		return len(str)/2
	def debugPrint(self, dev):
		# print this line in a human-readable one-line form
		if(self.freturn and self.fcall):
			print('%s -- %f (%02d): %s(); (%.3f us)' % (dev, self.time, \
				self.depth, self.name, self.length*1000000))
		elif(self.freturn):
			print('%s -- %f (%02d): %s} (%.3f us)' % (dev, self.time, \
				self.depth, self.name, self.length*1000000))
		else:
			print('%s -- %f (%02d): %s() { (%.3f us)' % (dev, self.time, \
				self.depth, self.name, self.length*1000000))
# Class: FTraceCallGraph
# Description:
# A container for the ftrace callgraph of a single recursive function.
# This can be a dpm_run_callback, dpm_prepare, or dpm_complete callgraph
# Each instance is tied to a single device in a single phase, and is
# comprised of an ordered list of FTraceLine objects
class FTraceCallGraph:
	"""The ftrace callgraph of a single recursive function
	(dpm_run_callback, dpm_prepare, or dpm_complete).

	Each instance is tied to a single device in a single phase and is
	an ordered list of FTraceLine objects.
	"""
	start = -1.0
	end = -1.0
	list = []
	invalid = False
	depth = 0
	def __init__(self):
		self.start = -1.0
		self.end = -1.0
		self.list = []
		self.depth = 0
	def setDepth(self, line):
		# track nesting depth: calls descend, returns ascend
		if(line.fcall and not line.freturn):
			line.depth = self.depth
			self.depth += 1
		elif(line.freturn and not line.fcall):
			self.depth -= 1
			line.depth = self.depth
		else:
			line.depth = self.depth
	def addLine(self, line, match):
		# append a parsed line; returns True when the line completes
		# the callgraph (a return at depth 0)
		if(not self.invalid):
			self.setDepth(line)
		if(line.depth == 0 and line.freturn):
			if(self.start < 0):
				self.start = line.time
			self.end = line.time
			self.list.append(line)
			return True
		if(self.invalid):
			return False
		if(len(self.list) >= 1000000 or self.depth < 0):
			# runaway data: keep only the first line and mark invalid
			if(len(self.list) > 0):
				first = self.list[0]
				self.list = []
				self.list.append(first)
			self.invalid = True
			if(not match):
				return False
			id = 'task %s cpu %s' % (match.group('pid'), match.group('cpu'))
			window = '(%f - %f)' % (self.start, line.time)
			if(self.depth < 0):
				print('Too much data for '+id+\
					' (buffer overflow), ignoring this callback')
			else:
				print('Too much data for '+id+\
					' '+window+', ignoring this callback')
			return False
		self.list.append(line)
		if(self.start < 0):
			self.start = line.time
		return False
	def slice(self, t0, tN):
		# return a new callgraph containing only the lines in [t0, tN],
		# rebased so the first included call is at depth 0
		minicg = FTraceCallGraph()
		count = -1
		firstdepth = 0
		for l in self.list:
			if(l.time < t0 or l.time > tN):
				continue
			if(count < 0):
				if(not l.fcall or l.name == 'dev_driver_string'):
					continue
				firstdepth = l.depth
				count = 0
			l.depth -= firstdepth
			minicg.addLine(l, 0)
			if((count == 0 and l.freturn and l.fcall) or
				(count > 0 and l.depth <= 0)):
				break
			count += 1
		return minicg
	def sanityCheck(self):
		# verify that every call has a matching return (balanced graph),
		# and copy each return's duration onto its matching call line
		stack = dict()
		cnt = 0
		for l in self.list:
			if(l.fcall and not l.freturn):
				stack[l.depth] = l
				cnt += 1
			elif(l.freturn and not l.fcall):
				if(l.depth not in stack):
					return False
				stack[l.depth].length = l.length
				stack[l.depth] = 0
				l.length = 0
				cnt -= 1
		if(cnt == 0):
			return True
		return False
	def debugPrint(self, filename):
		# dump the callgraph to stdout or to a file
		if(filename == 'stdout'):
			# BUGFIX: format the header string before printing; the
			# original applied % to the return value of print() (None),
			# which raised a TypeError
			print('[%f - %f]' % (self.start, self.end))
			for l in self.list:
				if(l.freturn and l.fcall):
					print('%f (%02d): %s(); (%.3f us)' % (l.time, \
						l.depth, l.name, l.length*1000000))
				elif(l.freturn):
					print('%f (%02d): %s} (%.3f us)' % (l.time, \
						l.depth, l.name, l.length*1000000))
				else:
					print('%f (%02d): %s() { (%.3f us)' % (l.time, \
						l.depth, l.name, l.length*1000000))
			print(' ')
		else:
			fp = open(filename, 'w')
			print(filename)
			for l in self.list:
				if(l.freturn and l.fcall):
					fp.write('%f (%02d): %s(); (%.3f us)\n' % (l.time, \
						l.depth, l.name, l.length*1000000))
				elif(l.freturn):
					fp.write('%f (%02d): %s} (%.3f us)\n' % (l.time, \
						l.depth, l.name, l.length*1000000))
				else:
					fp.write('%f (%02d): %s() { (%.3f us)\n' % (l.time, \
						l.depth, l.name, l.length*1000000))
			fp.close()
# Class: Timeline
# Description:
# A container for a suspend/resume html timeline. In older versions
# of the script there were multiple timelines, but in the latest
# there is only one.
class Timeline:
	"""Container for the single suspend/resume html timeline (older
	versions of this script produced several timelines, this one only)."""
	html = {}
	scaleH = 0.0 # height of the row as a percent of the timeline height
	rowH = 0.0 # height of each row in percent of the timeline height
	row_height_pixels = 30
	maxrows = 0
	height = 0
	def __init__(self):
		# the three html fragments that make up the rendered timeline
		self.html = dict(timeline='', legend='', scale='')
	def setRows(self, rows):
		# given the row count, derive the pixel height of the timeline
		# and the percentage heights of the scale row and device rows
		self.maxrows = int(rows)
		self.height = self.row_height_pixels * self.maxrows
		self.scaleH = 100.0 / float(self.maxrows)
		remaining = max(float(self.maxrows - 1), 1.0)
		self.rowH = (100.0 - self.scaleH) / remaining
# Class: TestRun
# Description:
# A container for a suspend/resume test run. This is necessary as
# there could be more than one, and they need to be separate.
class TestRun:
	"""Per-test-run parsing state for the ftrace log; kept separate from
	Data because more than one test run can appear in a single log."""
	ftrace_line_fmt_fg = \
		'^ *(?P<time>[0-9\.]*) *\| *(?P<cpu>[0-9]*)\)'+\
		' *(?P<proc>.*)-(?P<pid>[0-9]*) *\|'+\
		'[ +!]*(?P<dur>[0-9\.]*) .*\|  (?P<msg>.*)'
	ftrace_line_fmt_nop = \
		' *(?P<proc>.*)-(?P<pid>[0-9]*) *\[(?P<cpu>[0-9]*)\] *'+\
		'(?P<flags>.{4}) *(?P<time>[0-9\.]*): *'+\
		'(?P<msg>.*)'
	ftrace_line_fmt = ftrace_line_fmt_nop
	cgformat = False
	ftemp = dict()
	ttemp = dict()
	inthepipe = False
	tracertype = ''
	data = 0
	def __init__(self, dataobj):
		self.data = dataobj
		self.ftemp = dict()
		self.ttemp = dict()
	def isReady(self):
		# ready to parse once both a tracer type and a Data object exist
		# BUGFIX: the original referenced the bare names "tracertype" and
		# "data" (NameError); they are instance attributes
		if(self.tracertype == '' or not self.data):
			return False
		return True
	def setTracerType(self, tracer):
		# select the line format matching the tracer that produced the log
		self.tracertype = tracer
		if(tracer == 'function_graph'):
			self.cgformat = True
			self.ftrace_line_fmt = self.ftrace_line_fmt_fg
		elif(tracer == 'nop'):
			self.ftrace_line_fmt = self.ftrace_line_fmt_nop
		else:
			doError('Invalid tracer format: [%s]' % tracer, False)
# ----------------- FUNCTIONS --------------------
# Function: vprint
# Description:
# verbose print (prints only with -verbose option)
# Arguments:
# msg: the debug/log message to print
def vprint(msg):
	"""Print msg only when the -verbose option was given."""
	global sysvals
	if not sysvals.verbose:
		return
	print(msg)
# Function: initFtrace
# Description:
# Configure ftrace to use trace events and/or a callgraph
def initFtrace():
	"""Configure the host's ftrace interface for the upcoming test:
	trace events and/or a function_graph callgraph, per sysvals flags.
	All configuration is done by writing to the tracing debugfs files."""
	global sysvals

	tp = sysvals.tpath
	cf = 'dpm_run_callback'
	if(sysvals.usetraceeventsonly):
		cf = '-e dpm_prepare -e dpm_complete -e dpm_run_callback'
	if(sysvals.usecallgraph or sysvals.usetraceevents):
		print('INITIALIZING FTRACE...')
		# turn trace off
		os.system('echo 0 > '+tp+'tracing_on')
		# set the trace clock to global
		os.system('echo global > '+tp+'trace_clock')
		# set trace buffer to a huge value
		os.system('echo nop > '+tp+'current_tracer')
		os.system('echo 100000 > '+tp+'buffer_size_kb')
		# initialize the callgraph trace, unless this is an x2 run
		if(sysvals.usecallgraph and sysvals.execcount == 1):
			# set trace type
			os.system('echo function_graph > '+tp+'current_tracer')
			os.system('echo "" > '+tp+'set_ftrace_filter')
			# set trace format options
			os.system('echo funcgraph-abstime > '+tp+'trace_options')
			os.system('echo funcgraph-proc > '+tp+'trace_options')
			# focus only on device suspend and resume
			os.system('cat '+tp+'available_filter_functions | grep '+\
				cf+' > '+tp+'set_graph_function')
		if(sysvals.usetraceevents):
			# turn trace events on
			events = iter(sysvals.traceevents)
			for e in events:
				os.system('echo 1 > '+sysvals.epath+e+'/enable')
		# clear the trace buffer
		os.system('echo "" > '+tp+'trace')
# Function: initFtraceAndroid
# Description:
# Configure ftrace to capture trace events
def initFtraceAndroid():
	"""Configure ftrace on an android device (over adb) to capture
	trace events; callgraph tracing is not supported on android."""
	global sysvals

	tp = sysvals.tpath
	if(sysvals.usetraceevents):
		print('INITIALIZING FTRACE...')
		# turn trace off
		os.system(sysvals.adb+" shell 'echo 0 > "+tp+"tracing_on'")
		# set the trace clock to global
		os.system(sysvals.adb+" shell 'echo global > "+tp+"trace_clock'")
		# set trace buffer to a huge value
		os.system(sysvals.adb+" shell 'echo nop > "+tp+"current_tracer'")
		os.system(sysvals.adb+" shell 'echo 10000 > "+tp+"buffer_size_kb'")
		# turn trace events on
		events = iter(sysvals.traceevents)
		for e in events:
			os.system(sysvals.adb+" shell 'echo 1 > "+\
				sysvals.epath+e+"/enable'")
		# clear the trace buffer
		os.system(sysvals.adb+" shell 'echo \"\" > "+tp+"trace'")
# Function: verifyFtrace
# Description:
# Check that ftrace is working on the system
# Output:
# True or False
def verifyFtrace():
	"""Verify that the ftrace debugfs interface is present and usable.

	Checks (on the host, or over adb when sysvals.android is set) that
	every tracing file this tool relies on exists. Returns True when
	all required files are found, False otherwise.
	"""
	global sysvals
	tp = sysvals.tpath
	# files needed for any trace data
	required = ['buffer_size_kb', 'current_tracer', 'trace', 'trace_clock',
		'trace_marker', 'trace_options', 'tracing_on']
	# files needed only when callgraph trace data is requested
	if(sysvals.usecallgraph):
		required.append('available_filter_functions')
		required.append('set_ftrace_filter')
		required.append('set_graph_function')
	for name in required:
		path = tp+name
		if(sysvals.android):
			# on android, "ls <file>" echoes the path back only if it exists
			out = os.popen(sysvals.adb+' shell ls '+path).read().strip()
			if(out != path):
				return False
		elif(not os.path.exists(path)):
			return False
	return True
# Function: parseStamp
# Description:
# Pull in the stamp comment line from the data file(s),
# create the stamp, and add it to the global sysvals object
# Arguments:
# m: the valid re.match output for the stamp line
def parseStamp(m, data):
	"""Build data.stamp from a matched stamp comment line.

	m is the re.match result of sysvals.stampfmt against the stamp
	line; data is the Data object for the test run the stamp opens.
	Also records the suspend mode (and the first stamp) in sysvals.
	"""
	global sysvals
	# the stamp encodes a two-digit year, so add the century back
	dt = datetime(int(m.group('y'))+2000, int(m.group('m')),
		int(m.group('d')), int(m.group('H')), int(m.group('M')),
		int(m.group('S')))
	data.stamp = {
		'time': dt.strftime('%B %d %Y, %I:%M:%S %p'),
		'host': m.group('host'),
		'mode': m.group('mode'),
		'kernel': m.group('kernel')
	}
	sysvals.suspendmode = data.stamp['mode']
	if not sysvals.stamp:
		sysvals.stamp = data.stamp
# Function: diffStamp
# Description:
# compare the host, kernel, and mode fields in 3 stamps
# Arguments:
# stamp1: string array with mode, kernel, and host
# stamp2: string array with mode, kernel, and host
# Return:
# True if stamps differ, False if they're the same
def diffStamp(stamp1, stamp2):
	"""Compare the host, kernel, and mode fields of two stamps.

	Returns True if any field present in both stamps differs,
	False otherwise (missing fields are ignored).
	"""
	for field in ['host', 'kernel', 'mode']:
		if field in stamp1 and field in stamp2 and \
			stamp1[field] != stamp2[field]:
			return True
	return False
# Function: doesTraceLogHaveTraceEvents
# Description:
# Quickly determine if the ftrace log has some or all of the trace events
# required for primary parsing. Set the usetraceevents and/or
# usetraceeventsonly flags in the global sysvals object
def doesTraceLogHaveTraceEvents():
	"""Quickly scan the ftrace log for the required trace events and set
	sysvals.usetraceevents / usetraceeventsonly accordingly: "only" means
	every required event is present; "usetraceevents" requires at least
	the suspend_resume event."""
	global sysvals

	sysvals.usetraceeventsonly = True
	sysvals.usetraceevents = False
	for e in sysvals.traceevents:
		# grep the log for each required event name
		out = os.popen('cat '+sysvals.ftracefile+' | grep "'+e+': "').read()
		if(not out):
			sysvals.usetraceeventsonly = False
		if(e == 'suspend_resume' and out):
			sysvals.usetraceevents = True
# Function: appendIncompleteTraceLog
# Description:
# [deprecated for kernel 3.15 or newer]
# Legacy support of ftrace outputs that lack the device_pm_callback
# and/or suspend_resume trace events. The primary data should be
# taken from dmesg, and this ftrace is used only for callgraph data
# or custom actions in the timeline. The data is appended to the Data
# objects provided.
# Arguments:
# testruns: the array of Data objects obtained from parseKernelLog
def appendIncompleteTraceLog(testruns):
	"""Append callgraph/traceevent data from a legacy ftrace log.

	[deprecated for kernel 3.15 or newer] The primary timeline data
	comes from dmesg (parseKernelLog); this ftrace log only supplies
	callgraph data and/or custom timeline actions, which are appended
	to the Data objects in testruns.
	"""
	global sysvals

	# create TestRun vessels for ftrace parsing
	testcnt = len(testruns)
	testidx = -1
	testrun = []
	for data in testruns:
		testrun.append(TestRun(data))

	# extract the callgraph and traceevent data
	vprint('Analyzing the ftrace data...')
	tf = open(sysvals.ftracefile, 'r')
	for line in tf:
		# remove any latent carriage returns
		line = line.replace('\r\n', '')
		# grab the time stamp first (signifies the start of the test run)
		m = re.match(sysvals.stampfmt, line)
		if(m):
			testidx += 1
			parseStamp(m, testrun[testidx].data)
			continue
		# pull out any firmware data
		if(re.match(sysvals.firmwarefmt, line)):
			continue
		# if we havent found a test time stamp yet keep spinning til we do
		if(testidx < 0):
			continue
		# determine the trace data type (required for further parsing)
		m = re.match(sysvals.tracertypefmt, line)
		if(m):
			tracer = m.group('t')
			testrun[testidx].setTracerType(tracer)
			continue
		# parse only valid lines, if this isnt one move on
		m = re.match(testrun[testidx].ftrace_line_fmt, line)
		if(not m):
			continue
		# gather the basic message data from the line
		m_time = m.group('time')
		m_pid = m.group('pid')
		m_msg = m.group('msg')
		if(testrun[testidx].cgformat):
			m_param3 = m.group('dur')
		else:
			m_param3 = 'traceevent'
		if(m_time and m_pid and m_msg):
			t = FTraceLine(m_time, m_msg, m_param3)
			pid = int(m_pid)
		else:
			continue
		# the line should be a call, return, or event
		if(not t.fcall and not t.freturn and not t.fevent):
			continue
		# only parse the ftrace data during suspend/resume
		data = testrun[testidx].data
		if(not testrun[testidx].inthepipe):
			# look for the suspend start marker
			if(t.fevent):
				if(t.name == 'SUSPEND START'):
					testrun[testidx].inthepipe = True
					data.setStart(t.time)
				continue
		else:
			# trace event processing
			if(t.fevent):
				if(t.name == 'RESUME COMPLETE'):
					testrun[testidx].inthepipe = False
					data.setEnd(t.time)
					if(testidx == testcnt - 1):
						break
					continue
				# general trace events have two types, begin and end
				if(re.match('(?P<name>.*) begin$', t.name)):
					isbegin = True
				elif(re.match('(?P<name>.*) end$', t.name)):
					isbegin = False
				else:
					continue
				# events carry an optional "[val]" suffix; only keep it
				# when it is non-zero
				m = re.match('(?P<name>.*)\[(?P<val>[0-9]*)\] .*', t.name)
				if(m):
					val = m.group('val')
					if val == '0':
						name = m.group('name')
					else:
						name = m.group('name')+'['+val+']'
				else:
					m = re.match('(?P<name>.*) .*', t.name)
					name = m.group('name')
				# special processing for trace events
				if re.match('dpm_prepare\[.*', name):
					continue
				elif re.match('machine_suspend.*', name):
					continue
				elif re.match('suspend_enter\[.*', name):
					if(not isbegin):
						data.dmesg['suspend_prepare']['end'] = t.time
					continue
				elif re.match('dpm_suspend\[.*', name):
					if(not isbegin):
						data.dmesg['suspend']['end'] = t.time
					continue
				elif re.match('dpm_suspend_late\[.*', name):
					if(isbegin):
						data.dmesg['suspend_late']['start'] = t.time
					else:
						data.dmesg['suspend_late']['end'] = t.time
					continue
				elif re.match('dpm_suspend_noirq\[.*', name):
					if(isbegin):
						data.dmesg['suspend_noirq']['start'] = t.time
					else:
						data.dmesg['suspend_noirq']['end'] = t.time
					continue
				elif re.match('dpm_resume_noirq\[.*', name):
					if(isbegin):
						data.dmesg['resume_machine']['end'] = t.time
						data.dmesg['resume_noirq']['start'] = t.time
					else:
						data.dmesg['resume_noirq']['end'] = t.time
					continue
				elif re.match('dpm_resume_early\[.*', name):
					if(isbegin):
						data.dmesg['resume_early']['start'] = t.time
					else:
						data.dmesg['resume_early']['end'] = t.time
					continue
				elif re.match('dpm_resume\[.*', name):
					if(isbegin):
						data.dmesg['resume']['start'] = t.time
					else:
						data.dmesg['resume']['end'] = t.time
					continue
				elif re.match('dpm_complete\[.*', name):
					if(isbegin):
						data.dmesg['resume_complete']['start'] = t.time
					else:
						data.dmesg['resume_complete']['end'] = t.time
					continue
				# is this trace event outside of the devices calls
				if(data.isTraceEventOutsideDeviceCalls(pid, t.time)):
					# global events (outside device calls) are simply graphed
					if(isbegin):
						# store each trace event in ttemp
						if(name not in testrun[testidx].ttemp):
							testrun[testidx].ttemp[name] = []
						testrun[testidx].ttemp[name].append(\
							{'begin': t.time, 'end': t.time})
					else:
						# finish off matching trace event in ttemp
						if(name in testrun[testidx].ttemp):
							testrun[testidx].ttemp[name][-1]['end'] = t.time
				else:
					if(isbegin):
						data.addIntraDevTraceEvent('', name, pid, t.time)
					else:
						data.capIntraDevTraceEvent('', name, pid, t.time)
			# call/return processing
			elif sysvals.usecallgraph:
				# create a callgraph object for the data
				if(pid not in testrun[testidx].ftemp):
					testrun[testidx].ftemp[pid] = []
					testrun[testidx].ftemp[pid].append(FTraceCallGraph())
				# when the call is finished, see which device matches it
				cg = testrun[testidx].ftemp[pid][-1]
				if(cg.addLine(t, m)):
					testrun[testidx].ftemp[pid].append(FTraceCallGraph())
	tf.close()

	for test in testrun:
		# add the traceevent data to the device hierarchy
		if(sysvals.usetraceevents):
			for name in test.ttemp:
				for event in test.ttemp[name]:
					begin = event['begin']
					end = event['end']
					# if event starts before timeline start, expand timeline
					if(begin < test.data.start):
						test.data.setStart(begin)
					# if event ends after timeline end, expand the timeline
					if(end > test.data.end):
						test.data.setEnd(end)
					test.data.newActionGlobal(name, begin, end)

		# add the callgraph data to the device hierarchy
		for pid in test.ftemp:
			for cg in test.ftemp[pid]:
				if(not cg.sanityCheck()):
					# NOTE(review): m here is the leftover match from the
					# last parsed ftrace line; verify 'cpu' is still the
					# intended value at this point
					id = 'task %s cpu %s' % (pid, m.group('cpu'))
					vprint('Sanity check failed for '+\
						id+', ignoring this callback')
					continue
				callstart = cg.start
				callend = cg.end
				for p in test.data.phases:
					if(test.data.dmesg[p]['start'] <= callstart and
						callstart <= test.data.dmesg[p]['end']):
						list = test.data.dmesg[p]['list']
						for devname in list:
							dev = list[devname]
							if(pid == dev['pid'] and
								callstart <= dev['start'] and
								callend >= dev['end']):
								dev['ftrace'] = cg
						break
		if(sysvals.verbose):
			test.data.printDetails()

	# add the time in between the tests as a new phase so we can see it
	if(len(testruns) > 1):
		t1e = testruns[0].getEnd()
		t2s = testruns[-1].getStart()
		testruns[-1].newPhaseWithSingleAction('user mode', \
			'user mode', t1e, t2s, '#FF9966')
# Function: parseTraceLog
# Description:
# Analyze an ftrace log output file generated from this app during
# the execution phase. Used when the ftrace log is the primary data source
# and includes the suspend_resume and device_pm_callback trace events
# The ftrace filename is taken from sysvals
# Output:
# An array of Data objects
def parseTraceLog():
	"""Parse the ftrace log (sysvals.ftracefile) into per-test Data objects.

	Primary parser for kernels whose ftrace output includes the
	suspend_resume and device_pm_callback trace events. Callgraph data
	(when sysvals.usecallgraph is set) is attached to matching devices.
	Exits via doError if the ftrace file does not exist.

	Returns:
		list of Data objects, one per test run found in the log
	"""
	global sysvals
	vprint('Analyzing the ftrace data...')
	if(os.path.exists(sysvals.ftracefile) == False):
		doError('%s doesnt exist' % sysvals.ftracefile, False)
	# extract the callgraph and traceevent data
	testruns = []
	testdata = []
	testrun = 0
	data = 0
	tf = open(sysvals.ftracefile, 'r')
	phase = 'suspend_prepare'
	for line in tf:
		# remove any latent carriage returns
		line = line.replace('\r\n', '')
		# stamp line: each stamp means a new test run
		m = re.match(sysvals.stampfmt, line)
		if(m):
			data = Data(len(testdata))
			testdata.append(data)
			testrun = TestRun(data)
			testruns.append(testrun)
			parseStamp(m, data)
			continue
		# ignore everything until the first stamp is seen
		if(not data):
			continue
		# firmware line: pull out any firmware data
		m = re.match(sysvals.firmwarefmt, line)
		if(m):
			data.fwSuspend = int(m.group('s'))
			data.fwResume = int(m.group('r'))
			if(data.fwSuspend > 0 or data.fwResume > 0):
				data.fwValid = True
			continue
		# tracer type line: determine the trace data type
		m = re.match(sysvals.tracertypefmt, line)
		if(m):
			tracer = m.group('t')
			testrun.setTracerType(tracer)
			continue
		# post resume time line: did this test run include post-resume data
		m = re.match(sysvals.postresumefmt, line)
		if(m):
			t = int(m.group('t'))
			if(t > 0):
				sysvals.postresumetime = t
			continue
		# ftrace line: parse only valid lines
		m = re.match(testrun.ftrace_line_fmt, line)
		if(not m):
			continue
		# gather the basic message data from the line
		m_time = m.group('time')
		m_pid = m.group('pid')
		m_msg = m.group('msg')
		if(testrun.cgformat):
			m_param3 = m.group('dur')
		else:
			m_param3 = 'traceevent'
		if(m_time and m_pid and m_msg):
			t = FTraceLine(m_time, m_msg, m_param3)
			pid = int(m_pid)
		else:
			continue
		# the line should be a call, return, or event
		if(not t.fcall and not t.freturn and not t.fevent):
			continue
		# only parse the ftrace data during suspend/resume
		if(not testrun.inthepipe):
			# look for the suspend start marker
			if(t.fevent):
				if(t.name == 'SUSPEND START'):
					testrun.inthepipe = True
					data.setStart(t.time)
			continue
		# trace event processing
		if(t.fevent):
			if(t.name == 'RESUME COMPLETE'):
				if(sysvals.postresumetime > 0):
					phase = 'post_resume'
					data.newPhase(phase, t.time, t.time, '#FF9966', -1)
				else:
					testrun.inthepipe = False
				data.setEnd(t.time)
				continue
			if(phase == 'post_resume'):
				data.setEnd(t.time)
			if(t.type == 'suspend_resume'):
				# suspend_resume trace events have two types, begin and end
				if(re.match('(?P<name>.*) begin$', t.name)):
					isbegin = True
				elif(re.match('(?P<name>.*) end$', t.name)):
					isbegin = False
				else:
					continue
				# names with a non-zero [val] suffix keep the suffix
				m = re.match('(?P<name>.*)\[(?P<val>[0-9]*)\] .*', t.name)
				if(m):
					val = m.group('val')
					if val == '0':
						name = m.group('name')
					else:
						name = m.group('name')+'['+val+']'
				else:
					m = re.match('(?P<name>.*) .*', t.name)
					name = m.group('name')
				# ignore these events
				if(re.match('acpi_suspend\[.*', t.name) or
					re.match('suspend_enter\[.*', name)):
					continue
				# -- phase changes --
				# suspend_prepare start
				if(re.match('dpm_prepare\[.*', t.name)):
					phase = 'suspend_prepare'
					if(not isbegin):
						data.dmesg[phase]['end'] = t.time
					continue
				# suspend start
				elif(re.match('dpm_suspend\[.*', t.name)):
					phase = 'suspend'
					data.setPhase(phase, t.time, isbegin)
					continue
				# suspend_late start
				elif(re.match('dpm_suspend_late\[.*', t.name)):
					phase = 'suspend_late'
					data.setPhase(phase, t.time, isbegin)
					continue
				# suspend_noirq start
				elif(re.match('dpm_suspend_noirq\[.*', t.name)):
					phase = 'suspend_noirq'
					data.setPhase(phase, t.time, isbegin)
					if(not isbegin):
						phase = 'suspend_machine'
						data.dmesg[phase]['start'] = t.time
					continue
				# suspend_machine/resume_machine
				elif(re.match('machine_suspend\[.*', t.name)):
					if(isbegin):
						phase = 'suspend_machine'
						data.dmesg[phase]['end'] = t.time
						data.tSuspended = t.time
					else:
						if(sysvals.suspendmode in ['mem', 'disk']):
							data.dmesg['suspend_machine']['end'] = t.time
							data.tSuspended = t.time
						phase = 'resume_machine'
						data.dmesg[phase]['start'] = t.time
						data.tResumed = t.time
						data.tLow = data.tResumed - data.tSuspended
					continue
				# resume_noirq start
				elif(re.match('dpm_resume_noirq\[.*', t.name)):
					phase = 'resume_noirq'
					data.setPhase(phase, t.time, isbegin)
					if(isbegin):
						data.dmesg['resume_machine']['end'] = t.time
					continue
				# resume_early start
				elif(re.match('dpm_resume_early\[.*', t.name)):
					phase = 'resume_early'
					data.setPhase(phase, t.time, isbegin)
					continue
				# resume start
				elif(re.match('dpm_resume\[.*', t.name)):
					phase = 'resume'
					data.setPhase(phase, t.time, isbegin)
					continue
				# resume complete start
				elif(re.match('dpm_complete\[.*', t.name)):
					phase = 'resume_complete'
					if(isbegin):
						data.dmesg[phase]['start'] = t.time
					continue
				# is this trace event outside of the devices calls
				if(data.isTraceEventOutsideDeviceCalls(pid, t.time)):
					# global events (outside device calls) are simply graphed
					if(name not in testrun.ttemp):
						testrun.ttemp[name] = []
					if(isbegin):
						# create a new list entry
						testrun.ttemp[name].append(\
							{'begin': t.time, 'end': t.time})
					else:
						if(len(testrun.ttemp[name]) > 0):
							# if an entry exists, assume this is its end
							testrun.ttemp[name][-1]['end'] = t.time
						elif(phase == 'post_resume'):
							# post resume events can just have ends
							testrun.ttemp[name].append({
								'begin': data.dmesg[phase]['start'],
								'end': t.time})
				else:
					if(isbegin):
						data.addIntraDevTraceEvent('', name, pid, t.time)
					else:
						data.capIntraDevTraceEvent('', name, pid, t.time)
			# device callback start
			elif(t.type == 'device_pm_callback_start'):
				m = re.match('(?P<drv>.*) (?P<d>.*), parent: *(?P<p>.*), .*',\
					t.name);
				if(not m):
					continue
				drv = m.group('drv')
				n = m.group('d')
				p = m.group('p')
				if(n and p):
					data.newAction(phase, n, pid, p, t.time, -1, drv)
			# device callback finish
			elif(t.type == 'device_pm_callback_end'):
				m = re.match('(?P<drv>.*) (?P<d>.*), err.*', t.name);
				if(not m):
					continue
				n = m.group('d')
				list = data.dmesg[phase]['list']
				if(n in list):
					dev = list[n]
					dev['length'] = t.time - dev['start']
					dev['end'] = t.time
		# callgraph processing
		elif sysvals.usecallgraph:
			# this shouldn't happen, but JIC, ignore callgraph data post-res
			if(phase == 'post_resume'):
				continue
			# create a callgraph object for the data
			if(pid not in testrun.ftemp):
				testrun.ftemp[pid] = []
				testrun.ftemp[pid].append(FTraceCallGraph())
			# when the call is finished, see which device matches it
			cg = testrun.ftemp[pid][-1]
			if(cg.addLine(t, m)):
				testrun.ftemp[pid].append(FTraceCallGraph())
	tf.close()
	for test in testruns:
		# add the traceevent data to the device hierarchy
		if(sysvals.usetraceevents):
			for name in test.ttemp:
				for event in test.ttemp[name]:
					begin = event['begin']
					end = event['end']
					# if event starts before timeline start, expand timeline
					if(begin < test.data.start):
						test.data.setStart(begin)
					# if event ends after timeline end, expand the timeline
					if(end > test.data.end):
						test.data.setEnd(end)
					test.data.newActionGlobal(name, begin, end)
		# add the callgraph data to the device hierarchy
		# callgraphs rooted at these functions map to whole border phases
		borderphase = {
			'dpm_prepare': 'suspend_prepare',
			'dpm_complete': 'resume_complete'
		}
		for pid in test.ftemp:
			for cg in test.ftemp[pid]:
				if len(cg.list) < 2:
					continue
				if(not cg.sanityCheck()):
					# NOTE(review): 'm' is the last ftrace line matched in the
					# parse loop above — presumably it still has a 'cpu' group
					# from ftrace_line_fmt; confirm this is intentional
					id = 'task %s cpu %s' % (pid, m.group('cpu'))
					vprint('Sanity check failed for '+\
						id+', ignoring this callback')
					continue
				callstart = cg.start
				callend = cg.end
				if(cg.list[0].name in borderphase):
					p = borderphase[cg.list[0].name]
					list = test.data.dmesg[p]['list']
					for devname in list:
						dev = list[devname]
						if(pid == dev['pid'] and
							callstart <= dev['start'] and
							callend >= dev['end']):
							dev['ftrace'] = cg.slice(dev['start'], dev['end'])
					continue
				if(cg.list[0].name != 'dpm_run_callback'):
					continue
				for p in test.data.phases:
					if(test.data.dmesg[p]['start'] <= callstart and
						callstart <= test.data.dmesg[p]['end']):
						list = test.data.dmesg[p]['list']
						for devname in list:
							dev = list[devname]
							if(pid == dev['pid'] and
								callstart <= dev['start'] and
								callend >= dev['end']):
								dev['ftrace'] = cg
						break
	# fill in any missing phases
	for data in testdata:
		lp = data.phases[0]
		for p in data.phases:
			if(data.dmesg[p]['start'] < 0 and data.dmesg[p]['end'] < 0):
				print('WARNING: phase "%s" is missing!' % p)
			if(data.dmesg[p]['start'] < 0):
				data.dmesg[p]['start'] = data.dmesg[lp]['end']
				if(p == 'resume_machine'):
					data.tSuspended = data.dmesg[lp]['end']
					data.tResumed = data.dmesg[lp]['end']
					data.tLow = 0
			if(data.dmesg[p]['end'] < 0):
				data.dmesg[p]['end'] = data.dmesg[p]['start']
			lp = p
		if(len(sysvals.devicefilter) > 0):
			data.deviceFilter(sysvals.devicefilter)
		data.fixupInitcallsThatDidntReturn()
		if(sysvals.verbose):
			data.printDetails()
	# add the time in between the tests as a new phase so we can see it
	if(len(testdata) > 1):
		t1e = testdata[0].getEnd()
		t2s = testdata[-1].getStart()
		testdata[-1].newPhaseWithSingleAction('user mode', \
			'user mode', t1e, t2s, '#FF9966')
	return testdata
# Function: loadKernelLog
# Description:
# [deprecated for kernel 3.15.0 or newer]
# load the dmesg file into memory and fix up any ordering issues
# The dmesg filename is taken from sysvals
# Output:
# An array of empty Data objects with only their dmesgtext attributes set
def loadKernelLog():
	"""Load the dmesg log (sysvals.dmesgfile) into memory.

	[deprecated for kernel 3.15.0 or newer]
	Splits the log into test runs delineated by stamp lines and fixes up
	ordering issues where a call/return pair with the same timestamp is
	swapped. Exits via doError/sys.exit on a missing or headerless log.

	Returns:
		list of Data objects with only their dmesgtext attributes set
	"""
	global sysvals
	vprint('Analyzing the dmesg data...')
	if(os.path.exists(sysvals.dmesgfile) == False):
		doError('%s doesnt exist' % sysvals.dmesgfile, False)
	# there can be multiple test runs in a single file delineated by stamps
	testruns = []
	data = 0
	lf = open(sysvals.dmesgfile, 'r')
	for line in lf:
		line = line.replace('\r\n', '')
		# strip any console prefix ahead of the '[' timestamp bracket
		idx = line.find('[')
		if idx > 1:
			line = line[idx:]
		m = re.match(sysvals.stampfmt, line)
		if(m):
			if(data):
				testruns.append(data)
			data = Data(len(testruns))
			parseStamp(m, data)
			continue
		# ignore everything before the first stamp
		if(not data):
			continue
		m = re.match(sysvals.firmwarefmt, line)
		if(m):
			data.fwSuspend = int(m.group('s'))
			data.fwResume = int(m.group('r'))
			if(data.fwSuspend > 0 or data.fwResume > 0):
				data.fwValid = True
			continue
		m = re.match('[ \t]*(\[ *)(?P<ktime>[0-9\.]*)(\]) (?P<msg>.*)', line)
		if(m):
			data.dmesgtext.append(line)
			# mwait resume means this was actually a freeze, not the
			# requested suspend mode
			if(re.match('ACPI: resume from mwait', m.group('msg'))):
				print('NOTE: This suspend appears to be freeze rather than'+\
					' %s, it will be treated as such' % sysvals.suspendmode)
				sysvals.suspendmode = 'freeze'
		else:
			vprint('ignoring dmesg line: %s' % line.replace('\n', ''))
	testruns.append(data)
	lf.close()
	if(not data):
		print('ERROR: analyze_suspend header missing from dmesg log')
		sys.exit()
	# fix lines with same timestamp/function with the call and return swapped
	for data in testruns:
		last = ''
		for line in data.dmesgtext:
			mc = re.match('.*(\[ *)(?P<t>[0-9\.]*)(\]) calling '+\
				'(?P<f>.*)\+ @ .*, parent: .*', line)
			mr = re.match('.*(\[ *)(?P<t>[0-9\.]*)(\]) call '+\
				'(?P<f>.*)\+ returned .* after (?P<dt>.*) usecs', last)
			if(mc and mr and (mc.group('t') == mr.group('t')) and
				(mc.group('f') == mr.group('f'))):
				i = data.dmesgtext.index(last)
				j = data.dmesgtext.index(line)
				data.dmesgtext[i] = line
				data.dmesgtext[j] = last
			last = line
	return testruns
# Function: parseKernelLog
# Description:
# [deprecated for kernel 3.15.0 or newer]
# Analyse a dmesg log output file generated from this app during
# the execution phase. Create a set of device structures in memory
# for subsequent formatting in the html output file
# This call is only for legacy support on kernels where the ftrace
# data lacks the suspend_resume or device_pm_callbacks trace events.
# Arguments:
# data: an empty Data object (with dmesgtext) obtained from loadKernelLog
# Output:
# The filled Data object
def parseKernelLog(data):
	"""Fill a Data object by analyzing its raw dmesg text.

	[deprecated for kernel 3.15.0 or newer]
	Walks data.dmesgtext, detecting phase transitions from well-known
	dmesg messages (table 'dm' below) and creating device entries from
	the initcall_debug calling/returned lines. This is only for legacy
	support when the ftrace log lacks the suspend_resume or
	device_pm_callback trace events.

	Args:
		data: an empty Data object (with dmesgtext) from loadKernelLog

	Returns:
		True (the filled Data object is modified in place)
	"""
	global sysvals
	phase = 'suspend_runtime'
	if(data.fwValid):
		vprint('Firmware Suspend = %u ns, Firmware Resume = %u ns' % \
			(data.fwSuspend, data.fwResume))
	# dmesg phase match table: phase name -> regex that starts/marks it
	dm = {
		'suspend_prepare': 'PM: Syncing filesystems.*',
		'suspend': 'PM: Entering [a-z]* sleep.*',
		'suspend_late': 'PM: suspend of devices complete after.*',
		'suspend_noirq': 'PM: late suspend of devices complete after.*',
		'suspend_machine': 'PM: noirq suspend of devices complete after.*',
		'resume_machine': 'ACPI: Low-level resume complete.*',
		'resume_noirq': 'ACPI: Waking up from system sleep state.*',
		'resume_early': 'PM: noirq resume of devices complete after.*',
		'resume': 'PM: early resume of devices complete after.*',
		'resume_complete': 'PM: resume of devices complete after.*',
		'post_resume': '.*Restarting tasks \.\.\..*',
	}
	# some suspend modes log different messages
	if(sysvals.suspendmode == 'standby'):
		dm['resume_machine'] = 'PM: Restoring platform NVS memory'
	elif(sysvals.suspendmode == 'disk'):
		dm['suspend_late'] = 'PM: freeze of devices complete after.*'
		dm['suspend_noirq'] = 'PM: late freeze of devices complete after.*'
		dm['suspend_machine'] = 'PM: noirq freeze of devices complete after.*'
		dm['resume_machine'] = 'PM: Restoring platform NVS memory'
		dm['resume_early'] = 'PM: noirq restore of devices complete after.*'
		dm['resume'] = 'PM: early restore of devices complete after.*'
		dm['resume_complete'] = 'PM: restore of devices complete after.*'
	elif(sysvals.suspendmode == 'freeze'):
		dm['resume_machine'] = 'ACPI: resume from mwait'
	# action table (expected events that occur and show up in dmesg)
	at = {
		'sync_filesystems': {
			'smsg': 'PM: Syncing filesystems.*',
			'emsg': 'PM: Preparing system for mem sleep.*' },
		'freeze_user_processes': {
			'smsg': 'Freezing user space processes .*',
			'emsg': 'Freezing remaining freezable tasks.*' },
		'freeze_tasks': {
			'smsg': 'Freezing remaining freezable tasks.*',
			'emsg': 'PM: Entering (?P<mode>[a-z,A-Z]*) sleep.*' },
		'ACPI prepare': {
			'smsg': 'ACPI: Preparing to enter system sleep state.*',
			'emsg': 'PM: Saving platform NVS memory.*' },
		'PM vns': {
			'smsg': 'PM: Saving platform NVS memory.*',
			'emsg': 'Disabling non-boot CPUs .*' },
	}
	t0 = -1.0
	cpu_start = -1.0
	prevktime = -1.0
	actions = dict()
	for line in data.dmesgtext:
		# -- preprocessing --
		# parse each dmesg line into the time and message
		m = re.match('[ \t]*(\[ *)(?P<ktime>[0-9\.]*)(\]) (?P<msg>.*)', line)
		if(m):
			val = m.group('ktime')
			try:
				ktime = float(val)
			except:
				doWarning('INVALID DMESG LINE: '+\
					line.replace('\n', ''), 'dmesg')
				continue
			msg = m.group('msg')
			# initialize data start to first line time
			if t0 < 0:
				data.setStart(ktime)
				t0 = ktime
		else:
			continue
		# hack for determining resume_machine end for freeze
		if(not sysvals.usetraceevents and sysvals.suspendmode == 'freeze' \
			and phase == 'resume_machine' and \
			re.match('calling (?P<f>.*)\+ @ .*, parent: .*', msg)):
			data.dmesg['resume_machine']['end'] = ktime
			phase = 'resume_noirq'
			data.dmesg[phase]['start'] = ktime
		# -- phase changes --
		# suspend_prepare start
		if(re.match(dm['suspend_prepare'], msg)):
			phase = 'suspend_prepare'
			data.dmesg[phase]['start'] = ktime
			data.setStart(ktime)
		# suspend start
		elif(re.match(dm['suspend'], msg)):
			data.dmesg['suspend_prepare']['end'] = ktime
			phase = 'suspend'
			data.dmesg[phase]['start'] = ktime
		# suspend_late start
		elif(re.match(dm['suspend_late'], msg)):
			data.dmesg['suspend']['end'] = ktime
			phase = 'suspend_late'
			data.dmesg[phase]['start'] = ktime
		# suspend_noirq start
		elif(re.match(dm['suspend_noirq'], msg)):
			data.dmesg['suspend_late']['end'] = ktime
			phase = 'suspend_noirq'
			data.dmesg[phase]['start'] = ktime
		# suspend_machine start
		elif(re.match(dm['suspend_machine'], msg)):
			data.dmesg['suspend_noirq']['end'] = ktime
			phase = 'suspend_machine'
			data.dmesg[phase]['start'] = ktime
		# resume_machine start
		elif(re.match(dm['resume_machine'], msg)):
			# freeze/standby resume is detected one line late, so use the
			# previous line's timestamp as the suspend end
			if(sysvals.suspendmode in ['freeze', 'standby']):
				data.tSuspended = prevktime
				data.dmesg['suspend_machine']['end'] = prevktime
			else:
				data.tSuspended = ktime
				data.dmesg['suspend_machine']['end'] = ktime
			phase = 'resume_machine'
			data.tResumed = ktime
			data.tLow = data.tResumed - data.tSuspended
			data.dmesg[phase]['start'] = ktime
		# resume_noirq start
		elif(re.match(dm['resume_noirq'], msg)):
			data.dmesg['resume_machine']['end'] = ktime
			phase = 'resume_noirq'
			data.dmesg[phase]['start'] = ktime
		# resume_early start
		elif(re.match(dm['resume_early'], msg)):
			data.dmesg['resume_noirq']['end'] = ktime
			phase = 'resume_early'
			data.dmesg[phase]['start'] = ktime
		# resume start
		elif(re.match(dm['resume'], msg)):
			data.dmesg['resume_early']['end'] = ktime
			phase = 'resume'
			data.dmesg[phase]['start'] = ktime
		# resume complete start
		elif(re.match(dm['resume_complete'], msg)):
			data.dmesg['resume']['end'] = ktime
			phase = 'resume_complete'
			data.dmesg[phase]['start'] = ktime
		# post resume start
		elif(re.match(dm['post_resume'], msg)):
			data.dmesg['resume_complete']['end'] = ktime
			data.setEnd(ktime)
			phase = 'post_resume'
			break
		# -- device callbacks --
		if(phase in data.phases):
			# device init call
			if(re.match('calling (?P<f>.*)\+ @ .*, parent: .*', msg)):
				sm = re.match('calling (?P<f>.*)\+ @ '+\
					'(?P<n>.*), parent: (?P<p>.*)', msg);
				f = sm.group('f')
				n = sm.group('n')
				p = sm.group('p')
				if(f and n and p):
					data.newAction(phase, f, int(n), p, ktime, -1, '')
			# device init return
			elif(re.match('call (?P<f>.*)\+ returned .* after '+\
				'(?P<t>.*) usecs', msg)):
				sm = re.match('call (?P<f>.*)\+ returned .* after '+\
					'(?P<t>.*) usecs(?P<a>.*)', msg);
				f = sm.group('f')
				t = sm.group('t')
				list = data.dmesg[phase]['list']
				if(f in list):
					dev = list[f]
					dev['length'] = int(t)
					dev['end'] = ktime
		# -- non-devicecallback actions --
		# if trace events are not available, these are better than nothing
		if(not sysvals.usetraceevents):
			# look for known actions
			for a in at:
				if(re.match(at[a]['smsg'], msg)):
					if(a not in actions):
						actions[a] = []
					actions[a].append({'begin': ktime, 'end': ktime})
				# NOTE(review): if the end message appears without a prior
				# start message this raises KeyError on actions[a] — confirm
				# whether truncated logs need guarding here
				if(re.match(at[a]['emsg'], msg)):
					actions[a][-1]['end'] = ktime
			# now look for CPU on/off events
			if(re.match('Disabling non-boot CPUs .*', msg)):
				# start of first cpu suspend
				cpu_start = ktime
			elif(re.match('Enabling non-boot CPUs .*', msg)):
				# start of first cpu resume
				cpu_start = ktime
			elif(re.match('smpboot: CPU (?P<cpu>[0-9]*) is now offline', msg)):
				# end of a cpu suspend, start of the next
				m = re.match('smpboot: CPU (?P<cpu>[0-9]*) is now offline', msg)
				cpu = 'CPU'+m.group('cpu')
				if(cpu not in actions):
					actions[cpu] = []
				actions[cpu].append({'begin': cpu_start, 'end': ktime})
				cpu_start = ktime
			elif(re.match('CPU(?P<cpu>[0-9]*) is up', msg)):
				# end of a cpu resume, start of the next
				m = re.match('CPU(?P<cpu>[0-9]*) is up', msg)
				cpu = 'CPU'+m.group('cpu')
				if(cpu not in actions):
					actions[cpu] = []
				actions[cpu].append({'begin': cpu_start, 'end': ktime})
				cpu_start = ktime
		prevktime = ktime
	# fill in any missing phases
	lp = data.phases[0]
	for p in data.phases:
		if(data.dmesg[p]['start'] < 0 and data.dmesg[p]['end'] < 0):
			print('WARNING: phase "%s" is missing, something went wrong!' % p)
			print(' In %s, this dmesg line denotes the start of %s:' % \
				(sysvals.suspendmode, p))
			print(' "%s"' % dm[p])
		if(data.dmesg[p]['start'] < 0):
			data.dmesg[p]['start'] = data.dmesg[lp]['end']
			if(p == 'resume_machine'):
				data.tSuspended = data.dmesg[lp]['end']
				data.tResumed = data.dmesg[lp]['end']
				data.tLow = 0
		if(data.dmesg[p]['end'] < 0):
			data.dmesg[p]['end'] = data.dmesg[p]['start']
		lp = p
	# fill in any actions we've found
	for name in actions:
		for event in actions[name]:
			begin = event['begin']
			end = event['end']
			# if event starts before timeline start, expand timeline
			if(begin < data.start):
				data.setStart(begin)
			# if event ends after timeline end, expand the timeline
			if(end > data.end):
				data.setEnd(end)
			data.newActionGlobal(name, begin, end)
	if(sysvals.verbose):
		data.printDetails()
	if(len(sysvals.devicefilter) > 0):
		data.deviceFilter(sysvals.devicefilter)
	data.fixupInitcallsThatDidntReturn()
	return True
# Function: setTimelineRows
# Description:
# Organize the timeline entries into the smallest
# number of rows possible, with no entry overlapping
# Arguments:
# list: the list of devices/actions for a single phase
# sortedkeys: chronologically sorted key list to use
# Output:
# The total number of rows needed to display this phase of the timeline
def setTimelineRows(list, sortedkeys):
	"""Pack timeline entries into the fewest rows with no overlap.

	Greedy first-fit packing: each pass fills one row with as many
	non-overlapping ranges as possible, in sortedkeys order. Mutates
	each entry by setting its 'row' index.

	Args:
		list: dict of devices/actions for a single phase; each value is a
			dict with at least 'start' and 'end' times
		sortedkeys: chronologically sorted key list to use

	Returns:
		The total number of rows needed to display this phase
	"""
	# clear all rows and set them to undefined
	remaining = len(list)
	rowdata = dict()
	row = 0
	for item in list:
		list[item]['row'] = -1
	# try to pack each row with as many ranges as possible
	while(remaining > 0):
		if(row not in rowdata):
			rowdata[row] = []
		for item in sortedkeys:
			if(list[item]['row'] < 0):
				s = list[item]['start']
				e = list[item]['end']
				valid = True
				for ritem in rowdata[row]:
					rs = ritem['start']
					# renamed from 're': that name shadowed the regex
					# module imported at file scope
					rend = ritem['end']
					# valid only if entirely before or after ritem
					if(not (((s <= rs) and (e <= rs)) or
						((s >= rend) and (e >= rend)))):
						valid = False
						break
				if(valid):
					rowdata[row].append(list[item])
					list[item]['row'] = row
					remaining -= 1
		row += 1
	return row
# Function: createTimeScale
# Description:
# Create the timescale header for the html timeline
# Arguments:
# t0: start time (suspend begin)
# tMax: end time (resume end)
# tSuspend: time when suspend occurs, i.e. the zero time
# Output:
# The html code needed to display the time scale
def createTimeScale(t0, tMax, tSuspended):
	"""Build the html time-scale header for the timeline.

	Args:
		t0: start time (suspend begin)
		tMax: end time (resume end)
		tSuspended: time when suspend occurs (the zero point), or a
			negative value when there is no suspend marker to draw

	Returns:
		The html markup for the time scale (an empty scale when the
		span is zero or negative)
	"""
	cell = '<div class="t" style="right:{0}%">{1}</div>\n'
	header = '<div id="timescale">\n'
	span = tMax - t0
	# a degenerate span gets just the opening div, matching legacy output
	if span <= 0:
		return header
	# 100ms ticks for short timelines, 1s ticks beyond 4 seconds
	step = 1 if span > 4 else 0.1
	parts = [header]
	if tSuspended < 0:
		# no suspend marker: plain scale counting up from the left
		for idx in range(int(span/step) + 1):
			where = '%0.3f' % (100 - ((float(idx)*step*100)/span))
			label = '%0.fms' % (float(idx)*step*1000) if idx > 0 else ''
			parts.append(cell.format(where, label))
	else:
		# scale centered on the suspend/resume point, labeled 'S/R'
		offset = tSuspended - t0
		nDivs = int(span/step) + 1
		suspendDiv = int(offset/step)
		shift = (offset - step*suspendDiv)*100/span
		for idx in range(nDivs):
			where = '%0.3f' % (100 - ((float(idx)*step*100)/span) - shift)
			if idx == 0 and shift < 3:
				label = ''
			elif idx == suspendDiv:
				label = 'S/R'
			else:
				label = '%0.fms' % (float(idx-suspendDiv)*step*1000)
			parts.append(cell.format(where, label))
	parts.append('</div>\n')
	return ''.join(parts)
# Function: createHTMLSummarySimple
# Description:
# Create summary html file for a series of tests
# Arguments:
# testruns: array of Data objects from parseTraceLog
def createHTMLSummarySimple(testruns, htmlfile):
	"""Write a summary html file for a series of tests.

	Emits one table row per test with suspend/resume times and a link to
	each test's detail page, plus a final averages row. Extra host/kernel/
	mode columns are shown only when the runs' stamps differ.

	Args:
		testruns: array of Data objects from parseTraceLog
		htmlfile: path of the summary html file to write
	"""
	global sysvals
	# print out the basic summary of all the tests
	hf = open(htmlfile, 'w')
	# write the html header first (html head, css code, up to body start)
	html = '<!DOCTYPE html>\n<html>\n<head>\n\
	<meta http-equiv="content-type" content="text/html; charset=UTF-8">\n\
	<title>AnalyzeSuspend Summary</title>\n\
	<style type=\'text/css\'>\n\
	body {overflow-y: scroll;}\n\
	.stamp {width: 100%;text-align:center;background-color:#495E09;line-height:30px;color:white;font: 25px Arial;}\n\
	table {width:100%;border-collapse: collapse;}\n\
	.summary {font: 22px Arial;border:1px solid;}\n\
	th {border: 1px solid black;background-color:#A7C942;color:white;}\n\
	td {text-align: center;}\n\
	tr.alt td {background-color:#EAF2D3;}\n\
	tr.avg td {background-color:#BDE34C;}\n\
	a:link {color: #90B521;}\n\
	a:visited {color: #495E09;}\n\
	a:hover {color: #B1DF28;}\n\
	a:active {color: #FFFFFF;}\n\
	</style>\n</head>\n<body>\n'
	# group test header
	count = len(testruns)
	headline_stamp = '<div class="stamp">{0} {1} {2} {3} ({4} tests)</div>\n'
	html += headline_stamp.format(sysvals.stamp['host'],
		sysvals.stamp['kernel'], sysvals.stamp['mode'],
		sysvals.stamp['time'], count)
	# check to see if all the tests have the same value
	stampcolumns = False
	for data in testruns:
		if diffStamp(sysvals.stamp, data.stamp):
			stampcolumns = True
			break
	th = '\t<th>{0}</th>\n'
	td = '\t<td>{0}</td>\n'
	tdlink = '\t<td><a href="{0}">Click Here</a></td>\n'
	# table header
	html += '<table class="summary">\n<tr>\n'
	html += th.format("Test #")
	if stampcolumns:
		html += th.format("Hostname")
		html += th.format("Kernel Version")
		html += th.format("Suspend Mode")
	html += th.format("Test Time")
	html += th.format("Suspend Time")
	html += th.format("Resume Time")
	html += th.format("Detail")
	html += '</tr>\n'
	# test data, 1 row per test
	sTimeAvg = 0.0
	rTimeAvg = 0.0
	num = 1
	for data in testruns:
		# data.end is the end of post_resume
		resumeEnd = data.dmesg['resume_complete']['end']
		# alternate row shading for readability
		if num % 2 == 1:
			html += '<tr class="alt">\n'
		else:
			html += '<tr>\n'
		# test num
		html += td.format("test %d" % num)
		num += 1
		if stampcolumns:
			# host name
			val = "unknown"
			if('host' in data.stamp):
				val = data.stamp['host']
			html += td.format(val)
			# host kernel
			val = "unknown"
			if('kernel' in data.stamp):
				val = data.stamp['kernel']
			html += td.format(val)
			# suspend mode
			val = "unknown"
			if('mode' in data.stamp):
				val = data.stamp['mode']
			html += td.format(val)
		# test time
		val = "unknown"
		if('time' in data.stamp):
			val = data.stamp['time']
		html += td.format(val)
		# suspend time
		sTime = (data.tSuspended - data.start)*1000
		sTimeAvg += sTime
		html += td.format("%3.3f ms" % sTime)
		# resume time
		rTime = (resumeEnd - data.tResumed)*1000
		rTimeAvg += rTime
		html += td.format("%3.3f ms" % rTime)
		# link to the output html
		html += tdlink.format(data.outfile)
		html += '</tr>\n'
	# last line: test average
	if(count > 0):
		sTimeAvg /= count
		rTimeAvg /= count
	html += '<tr class="avg">\n'
	html += td.format('Average')	# name
	if stampcolumns:
		html += td.format('')	# host
		html += td.format('')	# kernel
		html += td.format('')	# mode
	html += td.format('')	# time
	html += td.format("%3.3f ms" % sTimeAvg)	# suspend time
	html += td.format("%3.3f ms" % rTimeAvg)	# resume time
	html += td.format('')	# output link
	html += '</tr>\n'
	# flush the data to file
	hf.write(html+'</table>\n')
	hf.write('</body>\n</html>\n')
	hf.close()
# Function: createHTML
# Description:
# Create the output html file from the resident test data
# Arguments:
# testruns: array of Data objects from parseKernelLog or parseTraceLog
# Output:
# True if the html file was created, false if it failed
def createHTML(testruns):
global sysvals
for data in testruns:
data.normalizeTime(testruns[-1].tSuspended)
x2changes = ['', 'absolute']
if len(testruns) > 1:
x2changes = ['1', 'relative']
# html function templates
headline_stamp = '<div class="stamp">{0} {1} {2} {3}</div>\n'
html_devlist1 = '<button id="devlist1" class="devlist" style="float:left;">Device Detail%s</button>' % x2changes[0]
html_zoombox = '<center><button id="zoomin">ZOOM IN</button><button id="zoomout">ZOOM OUT</button><button id="zoomdef">ZOOM 1:1</button></center>\n'
html_devlist2 = '<button id="devlist2" class="devlist" style="float:right;">Device Detail2</button>\n'
html_timeline = '<div id="dmesgzoombox" class="zoombox">\n<div id="{0}" class="timeline" style="height:{1}px">\n'
html_device = '<div id="{0}" title="{1}" class="thread" style="left:{2}%;top:{3}%;height:{4}%;width:{5}%;">{6}</div>\n'
html_traceevent = '<div title="{0}" class="traceevent" style="left:{1}%;top:{2}%;height:{3}%;width:{4}%;border:1px solid {5};background-color:{5}">{6}</div>\n'
html_phase = '<div class="phase" style="left:{0}%;width:{1}%;top:{2}%;height:{3}%;background-color:{4}">{5}</div>\n'
html_phaselet = '<div id="{0}" class="phaselet" style="left:{1}%;width:{2}%;background-color:{3}"></div>\n'
html_legend = '<div class="square" style="left:{0}%;background-color:{1}"> {2}</div>\n'
html_timetotal = '<table class="time1">\n<tr>'\
'<td class="green">{2} Suspend Time: <b>{0} ms</b></td>'\
'<td class="yellow">{2} Resume Time: <b>{1} ms</b></td>'\
'</tr>\n</table>\n'
html_timetotal2 = '<table class="time1">\n<tr>'\
'<td class="green">{3} Suspend Time: <b>{0} ms</b></td>'\
'<td class="gray">'+sysvals.suspendmode+' time: <b>{1} ms</b></td>'\
'<td class="yellow">{3} Resume Time: <b>{2} ms</b></td>'\
'</tr>\n</table>\n'
html_timegroups = '<table class="time2">\n<tr>'\
'<td class="green">{4}Kernel Suspend: {0} ms</td>'\
'<td class="purple">{4}Firmware Suspend: {1} ms</td>'\
'<td class="purple">{4}Firmware Resume: {2} ms</td>'\
'<td class="yellow">{4}Kernel Resume: {3} ms</td>'\
'</tr>\n</table>\n'
# device timeline
vprint('Creating Device Timeline...')
devtl = Timeline()
# Generate the header for this timeline
textnum = ['First', 'Second']
for data in testruns:
tTotal = data.end - data.start
tEnd = data.dmesg['resume_complete']['end']
if(tTotal == 0):
print('ERROR: No timeline data')
sys.exit()
if(data.tLow > 0):
low_time = '%.0f'%(data.tLow*1000)
if data.fwValid:
suspend_time = '%.0f'%((data.tSuspended-data.start)*1000 + \
(data.fwSuspend/1000000.0))
resume_time = '%.0f'%((tEnd-data.tSuspended)*1000 + \
(data.fwResume/1000000.0))
testdesc1 = 'Total'
testdesc2 = ''
if(len(testruns) > 1):
testdesc1 = testdesc2 = textnum[data.testnumber]
testdesc2 += ' '
if(data.tLow == 0):
thtml = html_timetotal.format(suspend_time, \
resume_time, testdesc1)
else:
thtml = html_timetotal2.format(suspend_time, low_time, \
resume_time, testdesc1)
devtl.html['timeline'] += thtml
sktime = '%.3f'%((data.dmesg['suspend_machine']['end'] - \
data.getStart())*1000)
sftime = '%.3f'%(data.fwSuspend / 1000000.0)
rftime = '%.3f'%(data.fwResume / 1000000.0)
rktime = '%.3f'%((data.getEnd() - \
data.dmesg['resume_machine']['start'])*1000)
devtl.html['timeline'] += html_timegroups.format(sktime, \
sftime, rftime, rktime, testdesc2)
else:
suspend_time = '%.0f'%((data.tSuspended-data.start)*1000)
resume_time = '%.0f'%((tEnd-data.tSuspended)*1000)
testdesc = 'Kernel'
if(len(testruns) > 1):
testdesc = textnum[data.testnumber]+' '+testdesc
if(data.tLow == 0):
thtml = html_timetotal.format(suspend_time, \
resume_time, testdesc)
else:
thtml = html_timetotal2.format(suspend_time, low_time, \
resume_time, testdesc)
devtl.html['timeline'] += thtml
# time scale for potentially multiple datasets
t0 = testruns[0].start
tMax = testruns[-1].end
tSuspended = testruns[-1].tSuspended
tTotal = tMax - t0
# determine the maximum number of rows we need to draw
timelinerows = 0
for data in testruns:
for phase in data.dmesg:
list = data.dmesg[phase]['list']
rows = setTimelineRows(list, list)
data.dmesg[phase]['row'] = rows
if(rows > timelinerows):
timelinerows = rows
# calculate the timeline height and create bounding box, add buttons
devtl.setRows(timelinerows + 1)
devtl.html['timeline'] += html_devlist1
if len(testruns) > 1:
devtl.html['timeline'] += html_devlist2
devtl.html['timeline'] += html_zoombox
devtl.html['timeline'] += html_timeline.format('dmesg', devtl.height)
# draw the colored boxes for each of the phases
for data in testruns:
for b in data.dmesg:
phase = data.dmesg[b]
length = phase['end']-phase['start']
left = '%.3f' % (((phase['start']-t0)*100.0)/tTotal)
width = '%.3f' % ((length*100.0)/tTotal)
devtl.html['timeline'] += html_phase.format(left, width, \
'%.3f'%devtl.scaleH, '%.3f'%(100-devtl.scaleH), \
data.dmesg[b]['color'], '')
# draw the time scale, try to make the number of labels readable
devtl.html['scale'] = createTimeScale(t0, tMax, tSuspended)
devtl.html['timeline'] += devtl.html['scale']
for data in testruns:
for b in data.dmesg:
phaselist = data.dmesg[b]['list']
for d in phaselist:
name = d
drv = ''
dev = phaselist[d]
if(d in sysvals.altdevname):
name = sysvals.altdevname[d]
if('drv' in dev and dev['drv']):
drv = ' {%s}' % dev['drv']
height = (100.0 - devtl.scaleH)/data.dmesg[b]['row']
top = '%.3f' % ((dev['row']*height) + devtl.scaleH)
left = '%.3f' % (((dev['start']-t0)*100)/tTotal)
width = '%.3f' % (((dev['end']-dev['start'])*100)/tTotal)
length = ' (%0.3f ms) ' % ((dev['end']-dev['start'])*1000)
color = 'rgba(204,204,204,0.5)'
devtl.html['timeline'] += html_device.format(dev['id'], \
d+drv+length+b, left, top, '%.3f'%height, width, name+drv)
# draw any trace events found
for data in testruns:
for b in data.dmesg:
phaselist = data.dmesg[b]['list']
for name in phaselist:
dev = phaselist[name]
if('traceevents' in dev):
vprint('Debug trace events found for device %s' % name)
vprint('%20s %20s %10s %8s' % ('action', \
'name', 'time(ms)', 'length(ms)'))
for e in dev['traceevents']:
vprint('%20s %20s %10.3f %8.3f' % (e.action, \
e.name, e.time*1000, e.length*1000))
height = (100.0 - devtl.scaleH)/data.dmesg[b]['row']
top = '%.3f' % ((dev['row']*height) + devtl.scaleH)
left = '%.3f' % (((e.time-t0)*100)/tTotal)
width = '%.3f' % (e.length*100/tTotal)
color = 'rgba(204,204,204,0.5)'
devtl.html['timeline'] += \
html_traceevent.format(e.action+' '+e.name, \
left, top, '%.3f'%height, \
width, e.color, '')
# timeline is finished
devtl.html['timeline'] += '</div>\n</div>\n'
# draw a legend which describes the phases by color
data = testruns[-1]
devtl.html['legend'] = '<div class="legend">\n'
pdelta = 100.0/len(data.phases)
pmargin = pdelta / 4.0
for phase in data.phases:
order = '%.2f' % ((data.dmesg[phase]['order'] * pdelta) + pmargin)
name = string.replace(phase, '_', ' ')
devtl.html['legend'] += html_legend.format(order, \
data.dmesg[phase]['color'], name)
devtl.html['legend'] += '</div>\n'
hf = open(sysvals.htmlfile, 'w')
thread_height = 0
# write the html header first (html head, css code, up to body start)
html_header = '<!DOCTYPE html>\n<html>\n<head>\n\
<meta http-equiv="content-type" content="text/html; charset=UTF-8">\n\
<title>AnalyzeSuspend</title>\n\
<style type=\'text/css\'>\n\
body {overflow-y: scroll;}\n\
.stamp {width: 100%;text-align:center;background-color:gray;line-height:30px;color:white;font: 25px Arial;}\n\
.callgraph {margin-top: 30px;box-shadow: 5px 5px 20px black;}\n\
.callgraph article * {padding-left: 28px;}\n\
h1 {color:black;font: bold 30px Times;}\n\
t0 {color:black;font: bold 30px Times;}\n\
t1 {color:black;font: 30px Times;}\n\
t2 {color:black;font: 25px Times;}\n\
t3 {color:black;font: 20px Times;white-space:nowrap;}\n\
t4 {color:black;font: bold 30px Times;line-height:60px;white-space:nowrap;}\n\
table {width:100%;}\n\
.gray {background-color:rgba(80,80,80,0.1);}\n\
.green {background-color:rgba(204,255,204,0.4);}\n\
.purple {background-color:rgba(128,0,128,0.2);}\n\
.yellow {background-color:rgba(255,255,204,0.4);}\n\
.time1 {font: 22px Arial;border:1px solid;}\n\
.time2 {font: 15px Arial;border-bottom:1px solid;border-left:1px solid;border-right:1px solid;}\n\
td {text-align: center;}\n\
r {color:#500000;font:15px Tahoma;}\n\
n {color:#505050;font:15px Tahoma;}\n\
.tdhl {color: red;}\n\
.hide {display: none;}\n\
.pf {display: none;}\n\
.pf:checked + label {background: url(\'data:image/svg+xml;utf,<?xml version="1.0" standalone="no"?><svg xmlns="http://www.w3.org/2000/svg" height="18" width="18" version="1.1"><circle cx="9" cy="9" r="8" stroke="black" stroke-width="1" fill="white"/><rect x="4" y="8" width="10" height="2" style="fill:black;stroke-width:0"/><rect x="8" y="4" width="2" height="10" style="fill:black;stroke-width:0"/></svg>\') no-repeat left center;}\n\
.pf:not(:checked) ~ label {background: url(\'data:image/svg+xml;utf,<?xml version="1.0" standalone="no"?><svg xmlns="http://www.w3.org/2000/svg" height="18" width="18" version="1.1"><circle cx="9" cy="9" r="8" stroke="black" stroke-width="1" fill="white"/><rect x="4" y="8" width="10" height="2" style="fill:black;stroke-width:0"/></svg>\') no-repeat left center;}\n\
.pf:checked ~ *:not(:nth-child(2)) {display: none;}\n\
.zoombox {position: relative; width: 100%; overflow-x: scroll;}\n\
.timeline {position: relative; font-size: 14px;cursor: pointer;width: 100%; overflow: hidden; background-color:#dddddd;}\n\
.thread {position: absolute; height: '+'%.3f'%thread_height+'%; overflow: hidden; line-height: 30px; border:1px solid;text-align:center;white-space:nowrap;background-color:rgba(204,204,204,0.5);}\n\
.thread:hover {background-color:white;border:1px solid red;z-index:10;}\n\
.hover {background-color:white;border:1px solid red;z-index:10;}\n\
.traceevent {position: absolute;opacity: 0.3;height: '+'%.3f'%thread_height+'%;width:0;overflow:hidden;line-height:30px;text-align:center;white-space:nowrap;}\n\
.phase {position: absolute;overflow: hidden;border:0px;text-align:center;}\n\
.phaselet {position:absolute;overflow:hidden;border:0px;text-align:center;height:100px;font-size:24px;}\n\
.t {position:absolute;top:0%;height:100%;border-right:1px solid black;}\n\
.legend {position: relative; width: 100%; height: 40px; text-align: center;margin-bottom:20px}\n\
.legend .square {position:absolute;top:10px; width: 0px;height: 20px;border:1px solid;padding-left:20px;}\n\
button {height:40px;width:200px;margin-bottom:20px;margin-top:20px;font-size:24px;}\n\
.devlist {position:'+x2changes[1]+';width:190px;}\n\
#devicedetail {height:100px;box-shadow: 5px 5px 20px black;}\n\
</style>\n</head>\n<body>\n'
hf.write(html_header)
# write the test title and general info header
if(sysvals.stamp['time'] != ""):
hf.write(headline_stamp.format(sysvals.stamp['host'],
sysvals.stamp['kernel'], sysvals.stamp['mode'], \
sysvals.stamp['time']))
# write the device timeline
hf.write(devtl.html['timeline'])
hf.write(devtl.html['legend'])
hf.write('<div id="devicedetailtitle"></div>\n')
hf.write('<div id="devicedetail" style="display:none;">\n')
# draw the colored boxes for the device detail section
for data in testruns:
hf.write('<div id="devicedetail%d">\n' % data.testnumber)
for b in data.phases:
phase = data.dmesg[b]
length = phase['end']-phase['start']
left = '%.3f' % (((phase['start']-t0)*100.0)/tTotal)
width = '%.3f' % ((length*100.0)/tTotal)
hf.write(html_phaselet.format(b, left, width, \
data.dmesg[b]['color']))
hf.write('</div>\n')
hf.write('</div>\n')
# write the ftrace data (callgraph)
data = testruns[-1]
if(sysvals.usecallgraph):
hf.write('<section id="callgraphs" class="callgraph">\n')
# write out the ftrace data converted to html
html_func_top = '<article id="{0}" class="atop" style="background-color:{1}">\n<input type="checkbox" class="pf" id="f{2}" checked/><label for="f{2}">{3} {4}</label>\n'
html_func_start = '<article>\n<input type="checkbox" class="pf" id="f{0}" checked/><label for="f{0}">{1} {2}</label>\n'
html_func_end = '</article>\n'
html_func_leaf = '<article>{0} {1}</article>\n'
num = 0
for p in data.phases:
list = data.dmesg[p]['list']
for devname in data.sortedDevices(p):
if('ftrace' not in list[devname]):
continue
name = devname
if(devname in sysvals.altdevname):
name = sysvals.altdevname[devname]
devid = list[devname]['id']
cg = list[devname]['ftrace']
flen = '<r>(%.3f ms @ %.3f to %.3f)</r>' % \
((cg.end - cg.start)*1000, cg.start*1000, cg.end*1000)
hf.write(html_func_top.format(devid, data.dmesg[p]['color'], \
num, name+' '+p, flen))
num += 1
for line in cg.list:
if(line.length < 0.000000001):
flen = ''
else:
flen = '<n>(%.3f ms @ %.3f)</n>' % (line.length*1000, \
line.time*1000)
if(line.freturn and line.fcall):
hf.write(html_func_leaf.format(line.name, flen))
elif(line.freturn):
hf.write(html_func_end)
else:
hf.write(html_func_start.format(num, line.name, flen))
num += 1
hf.write(html_func_end)
hf.write('\n\n </section>\n')
# write the footer and close
addScriptCode(hf, testruns)
hf.write('</body>\n</html>\n')
hf.close()
return True
# Function: addScriptCode
# Description:
# Adds the javascript code to the output html
# Arguments:
# hf: the open html file pointer
# testruns: array of Data objects from parseKernelLog or parseTraceLog
def addScriptCode(hf, testruns):
	"""Write the interactive javascript (zoom, hover, device-detail, and
	callgraph filtering) into the open html file hf.

	The script embeds a per-test device topology table (devtable) and the
	timeline time bounds so the browser-side code can rescale the timeline
	and populate the device detail pane without any server round trips.
	"""
	t0 = (testruns[0].start - testruns[-1].tSuspended) * 1000
	tMax = (testruns[-1].end - testruns[-1].tSuspended) * 1000
	# create an array in javascript memory with the device details
	detail = '	var devtable = [];\n'
	for data in testruns:
		topo = data.deviceTopology()
		detail += '	devtable[%d] = "%s";\n' % (data.testnumber, topo)
	detail += '	var bounds = [%f,%f];\n' % (t0, tMax)
	# add the code which will manipulate the data in the browser
	script_code = \
	'<script type="text/javascript">\n'+detail+\
	'	function zoomTimeline() {\n'\
	'		var timescale = document.getElementById("timescale");\n'\
	'		var dmesg = document.getElementById("dmesg");\n'\
	'		var zoombox = document.getElementById("dmesgzoombox");\n'\
	'		var val = parseFloat(dmesg.style.width);\n'\
	'		var newval = 100;\n'\
	'		var sh = window.outerWidth / 2;\n'\
	'		if(this.id == "zoomin") {\n'\
	'			newval = val * 1.2;\n'\
	'			if(newval > 40000) newval = 40000;\n'\
	'			dmesg.style.width = newval+"%";\n'\
	'			zoombox.scrollLeft = ((zoombox.scrollLeft + sh) * newval / val) - sh;\n'\
	'		} else if (this.id == "zoomout") {\n'\
	'			newval = val / 1.2;\n'\
	'			if(newval < 100) newval = 100;\n'\
	'			dmesg.style.width = newval+"%";\n'\
	'			zoombox.scrollLeft = ((zoombox.scrollLeft + sh) * newval / val) - sh;\n'\
	'		} else {\n'\
	'			zoombox.scrollLeft = 0;\n'\
	'			dmesg.style.width = "100%";\n'\
	'		}\n'\
	'		var html = "";\n'\
	'		var t0 = bounds[0];\n'\
	'		var tMax = bounds[1];\n'\
	'		var tTotal = tMax - t0;\n'\
	'		var wTotal = tTotal * 100.0 / newval;\n'\
	'		for(var tS = 1000; (wTotal / tS) < 3; tS /= 10);\n'\
	'		if(tS < 1) tS = 1;\n'\
	'		for(var s = ((t0 / tS)|0) * tS; s < tMax; s += tS) {\n'\
	'			var pos = (tMax - s) * 100.0 / tTotal;\n'\
	'			var name = (s == 0)?"S/R":(s+"ms");\n'\
	'			html += "<div class=\\"t\\" style=\\"right:"+pos+"%\\">"+name+"</div>";\n'\
	'		}\n'\
	'		timescale.innerHTML = html;\n'\
	'	}\n'\
	'	function deviceHover() {\n'\
	'		var name = this.title.slice(0, this.title.indexOf(" ("));\n'\
	'		var dmesg = document.getElementById("dmesg");\n'\
	'		var dev = dmesg.getElementsByClassName("thread");\n'\
	'		var cpu = -1;\n'\
	'		if(name.match("CPU_ON\[[0-9]*\]"))\n'\
	'			cpu = parseInt(name.slice(7));\n'\
	'		else if(name.match("CPU_OFF\[[0-9]*\]"))\n'\
	'			cpu = parseInt(name.slice(8));\n'\
	'		for (var i = 0; i < dev.length; i++) {\n'\
	'			dname = dev[i].title.slice(0, dev[i].title.indexOf(" ("));\n'\
	'			if((cpu >= 0 && dname.match("CPU_O[NF]*\\\[*"+cpu+"\\\]")) ||\n'\
	'				(name == dname))\n'\
	'			{\n'\
	'				dev[i].className = "thread hover";\n'\
	'			} else {\n'\
	'				dev[i].className = "thread";\n'\
	'			}\n'\
	'		}\n'\
	'	}\n'\
	'	function deviceUnhover() {\n'\
	'		var dmesg = document.getElementById("dmesg");\n'\
	'		var dev = dmesg.getElementsByClassName("thread");\n'\
	'		for (var i = 0; i < dev.length; i++) {\n'\
	'			dev[i].className = "thread";\n'\
	'		}\n'\
	'	}\n'\
	'	function deviceTitle(title, total, cpu) {\n'\
	'		var prefix = "Total";\n'\
	'		if(total.length > 3) {\n'\
	'			prefix = "Average";\n'\
	'			total[1] = (total[1]+total[3])/2;\n'\
	'			total[2] = (total[2]+total[4])/2;\n'\
	'		}\n'\
	'		var devtitle = document.getElementById("devicedetailtitle");\n'\
	'		var name = title.slice(0, title.indexOf(" "));\n'\
	'		if(cpu >= 0) name = "CPU"+cpu;\n'\
	'		var driver = "";\n'\
	'		var tS = "<t2>(</t2>";\n'\
	'		var tR = "<t2>)</t2>";\n'\
	'		if(total[1] > 0)\n'\
	'			tS = "<t2>("+prefix+" Suspend:</t2><t0> "+total[1].toFixed(3)+" ms</t0> ";\n'\
	'		if(total[2] > 0)\n'\
	'			tR = " <t2>"+prefix+" Resume:</t2><t0> "+total[2].toFixed(3)+" ms<t2>)</t2></t0>";\n'\
	'		var s = title.indexOf("{");\n'\
	'		var e = title.indexOf("}");\n'\
	'		if((s >= 0) && (e >= 0))\n'\
	'			driver = title.slice(s+1, e) + " <t1>@</t1> ";\n'\
	'		if(total[1] > 0 && total[2] > 0)\n'\
	'			devtitle.innerHTML = "<t0>"+driver+name+"</t0> "+tS+tR;\n'\
	'		else\n'\
	'			devtitle.innerHTML = "<t0>"+title+"</t0>";\n'\
	'		return name;\n'\
	'	}\n'\
	'	function deviceDetail() {\n'\
	'		var devinfo = document.getElementById("devicedetail");\n'\
	'		devinfo.style.display = "block";\n'\
	'		var name = this.title.slice(0, this.title.indexOf(" ("));\n'\
	'		var cpu = -1;\n'\
	'		if(name.match("CPU_ON\[[0-9]*\]"))\n'\
	'			cpu = parseInt(name.slice(7));\n'\
	'		else if(name.match("CPU_OFF\[[0-9]*\]"))\n'\
	'			cpu = parseInt(name.slice(8));\n'\
	'		var dmesg = document.getElementById("dmesg");\n'\
	'		var dev = dmesg.getElementsByClassName("thread");\n'\
	'		var idlist = [];\n'\
	'		var pdata = [[]];\n'\
	'		var pd = pdata[0];\n'\
	'		var total = [0.0, 0.0, 0.0];\n'\
	'		for (var i = 0; i < dev.length; i++) {\n'\
	'			dname = dev[i].title.slice(0, dev[i].title.indexOf(" ("));\n'\
	'			if((cpu >= 0 && dname.match("CPU_O[NF]*\\\[*"+cpu+"\\\]")) ||\n'\
	'				(name == dname))\n'\
	'			{\n'\
	'				idlist[idlist.length] = dev[i].id;\n'\
	'				var tidx = 1;\n'\
	'				if(dev[i].id[0] == "a") {\n'\
	'					pd = pdata[0];\n'\
	'				} else {\n'\
	'					if(pdata.length == 1) pdata[1] = [];\n'\
	'					if(total.length == 3) total[3]=total[4]=0.0;\n'\
	'					pd = pdata[1];\n'\
	'					tidx = 3;\n'\
	'				}\n'\
	'				var info = dev[i].title.split(" ");\n'\
	'				var pname = info[info.length-1];\n'\
	'				pd[pname] = parseFloat(info[info.length-3].slice(1));\n'\
	'				total[0] += pd[pname];\n'\
	'				if(pname.indexOf("suspend") >= 0)\n'\
	'					total[tidx] += pd[pname];\n'\
	'				else\n'\
	'					total[tidx+1] += pd[pname];\n'\
	'			}\n'\
	'		}\n'\
	'		var devname = deviceTitle(this.title, total, cpu);\n'\
	'		var left = 0.0;\n'\
	'		for (var t = 0; t < pdata.length; t++) {\n'\
	'			pd = pdata[t];\n'\
	'			devinfo = document.getElementById("devicedetail"+t);\n'\
	'			var phases = devinfo.getElementsByClassName("phaselet");\n'\
	'			for (var i = 0; i < phases.length; i++) {\n'\
	'				if(phases[i].id in pd) {\n'\
	'					var w = 100.0*pd[phases[i].id]/total[0];\n'\
	'					var fs = 32;\n'\
	'					if(w < 8) fs = 4*w | 0;\n'\
	'					var fs2 = fs*3/4;\n'\
	'					phases[i].style.width = w+"%";\n'\
	'					phases[i].style.left = left+"%";\n'\
	'					phases[i].title = phases[i].id+" "+pd[phases[i].id]+" ms";\n'\
	'					left += w;\n'\
	'					var time = "<t4 style=\\"font-size:"+fs+"px\\">"+pd[phases[i].id]+" ms<br></t4>";\n'\
	'					var pname = "<t3 style=\\"font-size:"+fs2+"px\\">"+phases[i].id.replace("_", " ")+"</t3>";\n'\
	'					phases[i].innerHTML = time+pname;\n'\
	'				} else {\n'\
	'					phases[i].style.width = "0%";\n'\
	'					phases[i].style.left = left+"%";\n'\
	'				}\n'\
	'			}\n'\
	'		}\n'\
	'		var cglist = document.getElementById("callgraphs");\n'\
	'		if(!cglist) return;\n'\
	'		var cg = cglist.getElementsByClassName("atop");\n'\
	'		for (var i = 0; i < cg.length; i++) {\n'\
	'			if(idlist.indexOf(cg[i].id) >= 0) {\n'\
	'				cg[i].style.display = "block";\n'\
	'			} else {\n'\
	'				cg[i].style.display = "none";\n'\
	'			}\n'\
	'		}\n'\
	'	}\n'\
	'	function devListWindow(e) {\n'\
	'		var sx = e.clientX;\n'\
	'		if(sx > window.innerWidth - 440)\n'\
	'			sx = window.innerWidth - 440;\n'\
	'		var cfg="top="+e.screenY+", left="+sx+", width=440, height=720, scrollbars=yes";\n'\
	'		var win = window.open("", "_blank", cfg);\n'\
	'		if(window.chrome) win.moveBy(sx, 0);\n'\
	'		var html = "<title>"+e.target.innerHTML+"</title>"+\n'\
	'			"<style type=\\"text/css\\">"+\n'\
	'			"   ul {list-style-type:circle;padding-left:10px;margin-left:10px;}"+\n'\
	'			"</style>"\n'\
	'		var dt = devtable[0];\n'\
	'		if(e.target.id != "devlist1")\n'\
	'			dt = devtable[1];\n'\
	'		win.document.write(html+dt);\n'\
	'	}\n'\
	'	window.addEventListener("load", function () {\n'\
	'		var dmesg = document.getElementById("dmesg");\n'\
	'		dmesg.style.width = "100%"\n'\
	'		document.getElementById("zoomin").onclick = zoomTimeline;\n'\
	'		document.getElementById("zoomout").onclick = zoomTimeline;\n'\
	'		document.getElementById("zoomdef").onclick = zoomTimeline;\n'\
	'		var devlist = document.getElementsByClassName("devlist");\n'\
	'		for (var i = 0; i < devlist.length; i++)\n'\
	'			devlist[i].onclick = devListWindow;\n'\
	'		var dev = dmesg.getElementsByClassName("thread");\n'\
	'		for (var i = 0; i < dev.length; i++) {\n'\
	'			dev[i].onclick = deviceDetail;\n'\
	'			dev[i].onmouseover = deviceHover;\n'\
	'			dev[i].onmouseout = deviceUnhover;\n'\
	'		}\n'\
	'		zoomTimeline();\n'\
	'	});\n'\
	'</script>\n'
	# emit the assembled script just before the closing body tag
	hf.write(script_code);
# Function: executeSuspend
# Description:
# Execute system suspend through the sysfs interface, then copy the output
# dmesg and ftrace files to the test output directory.
def executeSuspend():
	"""Run the requested number of suspend/resume cycles on the local
	machine via /sys/power/state, capturing ftrace and dmesg output into
	the test output files after each resume.
	"""
	global sysvals
	detectUSB(False)
	t0 = time.time()*1000
	tp = sysvals.tpath
	# execute however many s/r runs requested
	for count in range(1,sysvals.execcount+1):
		# clear the kernel ring buffer just as we start
		os.system('dmesg -C')
		# enable callgraph ftrace only for the second run
		if(sysvals.usecallgraph and count == 2):
			# set trace type
			os.system('echo function_graph > '+tp+'current_tracer')
			os.system('echo "" > '+tp+'set_ftrace_filter')
			# set trace format options
			os.system('echo funcgraph-abstime > '+tp+'trace_options')
			os.system('echo funcgraph-proc > '+tp+'trace_options')
			# focus only on device suspend and resume
			os.system('cat '+tp+'available_filter_functions | '+\
				'grep dpm_run_callback > '+tp+'set_graph_function')
		# if this is test2 and there's a delay, start here
		if(count > 1 and sysvals.x2delay > 0):
			tN = time.time()*1000
			while (tN - t0) < sysvals.x2delay:
				tN = time.time()*1000
				time.sleep(0.001)
		# start ftrace
		if(sysvals.usecallgraph or sysvals.usetraceevents):
			print('START TRACING')
			os.system('echo 1 > '+tp+'tracing_on')
		# initiate suspend
		if(sysvals.usecallgraph or sysvals.usetraceevents):
			os.system('echo SUSPEND START > '+tp+'trace_marker')
		if(sysvals.rtcwake):
			print('SUSPEND START')
			print('will autoresume in %d seconds' % sysvals.rtcwaketime)
			sysvals.rtcWakeAlarm()
		else:
			print('SUSPEND START (press a key to resume)')
		# writing the mode to /sys/power/state blocks until resume
		pf = open(sysvals.powerfile, 'w')
		pf.write(sysvals.suspendmode)
		# execution will pause here
		pf.close()
		t0 = time.time()*1000
		# return from suspend
		print('RESUME COMPLETE')
		if(sysvals.usecallgraph or sysvals.usetraceevents):
			os.system('echo RESUME COMPLETE > '+tp+'trace_marker')
		# see if there's firmware timing data to be had
		t = sysvals.postresumetime
		if(t > 0):
			print('Waiting %d seconds for POST-RESUME trace events...' % t)
			time.sleep(t)
		# stop ftrace
		if(sysvals.usecallgraph or sysvals.usetraceevents):
			os.system('echo 0 > '+tp+'tracing_on')
			print('CAPTURING TRACE')
			writeDatafileHeader(sysvals.ftracefile)
			os.system('cat '+tp+'trace >> '+sysvals.ftracefile)
			os.system('echo "" > '+tp+'trace')
		# grab a copy of the dmesg output
		print('CAPTURING DMESG')
		writeDatafileHeader(sysvals.dmesgfile)
		os.system('dmesg -c >> '+sysvals.dmesgfile)
def writeDatafileHeader(filename):
	"""Append the test stamp line to filename, followed by optional
	firmware suspend/resume timing and post-resume-time comment lines.
	"""
	global sysvals
	firmware = getFPDT(False)
	posttime = sysvals.postresumetime
	with open(filename, 'a') as out:
		out.write(sysvals.teststamp+'\n')
		if firmware:
			out.write('# fwsuspend %u fwresume %u\n' % (firmware[0], firmware[1]))
		if posttime > 0:
			out.write('# post resume time %u\n' % posttime)
# Function: executeAndroidSuspend
# Description:
# Execute system suspend through the sysfs interface
# on a remote android device, then transfer the output
# dmesg and ftrace files to the local output directory.
def executeAndroidSuspend():
	"""Run the requested number of suspend/resume cycles on a remote
	android device over adb, then pull the ftrace and dmesg output back
	into the local test output files.
	"""
	global sysvals
	# check to see if the display is currently off
	tp = sysvals.tpath
	out = os.popen(sysvals.adb+\
		' shell dumpsys power | grep mScreenOn').read().strip()
	# if so we need to turn it on so we can issue a new suspend
	if(out.endswith('false')):
		print('Waking the device up for the test...')
		# send the KEYPAD_POWER keyevent to wake it up
		os.system(sysvals.adb+' shell input keyevent 26')
		# wait a few seconds so the user can see the device wake up
		time.sleep(3)
	# execute however many s/r runs requested
	for count in range(1,sysvals.execcount+1):
		# clear the kernel ring buffer just as we start
		os.system(sysvals.adb+' shell dmesg -c > /dev/null 2>&1')
		# start ftrace
		if(sysvals.usetraceevents):
			print('START TRACING')
			os.system(sysvals.adb+" shell 'echo 1 > "+tp+"tracing_on'")
		# initiate suspend
		for count in range(1,sysvals.execcount+1):
			if(sysvals.usetraceevents):
				os.system(sysvals.adb+\
					" shell 'echo SUSPEND START > "+tp+"trace_marker'")
			print('SUSPEND START (press a key on the device to resume)')
			os.system(sysvals.adb+" shell 'echo "+sysvals.suspendmode+\
				" > "+sysvals.powerfile+"'")
			# execution will pause here, then adb will exit
			# poll until adb reconnects, which signals the device resumed
			while(True):
				check = os.popen(sysvals.adb+\
					' shell pwd 2>/dev/null').read().strip()
				if(len(check) > 0):
					break
				time.sleep(1)
			if(sysvals.usetraceevents):
				os.system(sysvals.adb+" shell 'echo RESUME COMPLETE > "+tp+\
					"trace_marker'")
		# return from suspend
		print('RESUME COMPLETE')
		# stop ftrace
		if(sysvals.usetraceevents):
			os.system(sysvals.adb+" shell 'echo 0 > "+tp+"tracing_on'")
			print('CAPTURING TRACE')
			os.system('echo "'+sysvals.teststamp+'" > '+sysvals.ftracefile)
			os.system(sysvals.adb+' shell cat '+tp+\
				'trace >> '+sysvals.ftracefile)
		# grab a copy of the dmesg output
		print('CAPTURING DMESG')
		os.system('echo "'+sysvals.teststamp+'" > '+sysvals.dmesgfile)
		os.system(sysvals.adb+' shell dmesg >> '+sysvals.dmesgfile)
# Function: setUSBDevicesAuto
# Description:
# Set the autosuspend control parameter of all USB devices to auto
# This can be dangerous, so use at your own risk, most devices are set
# to always-on since the kernel cant determine if the device can
# properly autosuspend
def setUSBDevicesAuto():
	"""Set the runtime power 'control' attribute of every USB device under
	/sys/devices to 'auto' and print the resulting state of each device.
	Requires root; can be dangerous on devices that misreport autosuspend
	support.
	"""
	global sysvals
	rootCheck()
	for dirname, dirnames, filenames in os.walk('/sys/devices'):
		# a USB device dir is identified by its idVendor/idProduct files
		if(re.match('.*/usb[0-9]*.*', dirname) and
			'idVendor' in filenames and 'idProduct' in filenames):
			os.system('echo auto > %s/power/control' % dirname)
			name = dirname.split('/')[-1]
			desc = os.popen('cat %s/product 2>/dev/null' % \
				dirname).read().replace('\n', '')
			ctrl = os.popen('cat %s/power/control 2>/dev/null' % \
				dirname).read().replace('\n', '')
			print('control is %s for %6s: %s' % (ctrl, name, desc))
# Function: yesno
# Description:
# Print out an equivalent Y or N for a set of known parameter values
# Output:
# 'Y', 'N', or ' ' if the value is unknown
def yesno(val):
	"""Map a sysfs power attribute value to a single display character.

	Returns 'Y' for affirmative values, 'N' for negative ones, and a
	space for anything unrecognized. Note 'on' is negative here: for
	power/control, 'on' means runtime suspend is disallowed.
	"""
	affirmative = ('auto', 'enabled', 'active', '1')
	negative = ('on', 'disabled', 'suspended', 'forbidden', 'unsupported')
	if val in affirmative:
		return 'Y'
	if val in negative:
		return 'N'
	return ' '
# Function: ms2nice
# Description:
# Print out a very concise time string in minutes and seconds
# Output:
# The time string, e.g. "1901m16s"
def ms2nice(val):
	"""Format a millisecond count as a concise minutes/seconds string.

	Arguments:
		val: a millisecond count (int or numeric string)
	Returns:
		a string like '  1m 5s', or 0.0 if val is not a valid integer
		(kept for backward compatibility with existing callers)
	"""
	try:
		ms = int(val)
	except:
		return 0.0
	# use floor division: true division would produce floats and break
	# the seconds arithmetic under python 3 semantics
	m = ms // 60000
	s = (ms // 1000) - (m * 60)
	return '%3dm%2ds' % (m, s)
# Function: detectUSB
# Description:
# Detect all the USB hosts and devices currently connected and add
# a list of USB device names to sysvals for better timeline readability
# Arguments:
# output: True to output the info to stdout, False otherwise
def detectUSB(output):
	"""Scan /sys/devices for USB hosts/devices, record friendly names in
	sysvals.altdevname for timeline readability, and optionally print a
	power-management summary table for each device.

	Arguments:
		output: True to print the legend and per-device table to stdout
	"""
	global sysvals
	field = {'idVendor':'', 'idProduct':'', 'product':'', 'speed':''}
	power = {'async':'', 'autosuspend':'', 'autosuspend_delay_ms':'',
			 'control':'', 'persist':'', 'runtime_enabled':'',
			 'runtime_status':'', 'runtime_usage':'',
			'runtime_active_time':'',
			'runtime_suspended_time':'',
			'active_duration':'',
			'connected_duration':''}
	if(output):
		print('LEGEND')
		print('---------------------------------------------------------------------------------------------')
		print('  A = async/sync PM queue Y/N                       D = autosuspend delay (seconds)')
		print('  S = autosuspend Y/N                         rACTIVE = runtime active (min/sec)')
		print('  P = persist across suspend Y/N              rSUSPEN = runtime suspend (min/sec)')
		print('  E = runtime suspend enabled/forbidden Y/N    ACTIVE = active duration (min/sec)')
		print('  R = runtime status active/suspended Y/N     CONNECT = connected duration (min/sec)')
		print('  U = runtime usage count')
		print('---------------------------------------------------------------------------------------------')
		print('  NAME       ID      DESCRIPTION         SPEED A S P E R U D rACTIVE rSUSPEN  ACTIVE CONNECT')
		print('---------------------------------------------------------------------------------------------')
	for dirname, dirnames, filenames in os.walk('/sys/devices'):
		# a USB device dir is identified by its idVendor/idProduct files
		if(re.match('.*/usb[0-9]*.*', dirname) and
			'idVendor' in filenames and 'idProduct' in filenames):
			for i in field:
				field[i] = os.popen('cat %s/%s 2>/dev/null' % \
					(dirname, i)).read().replace('\n', '')
			name = dirname.split('/')[-1]
			if(len(field['product']) > 0):
				sysvals.altdevname[name] = \
					'%s [%s]' % (field['product'], name)
			else:
				sysvals.altdevname[name] = \
					'%s:%s [%s]' % (field['idVendor'], \
						field['idProduct'], name)
			if(output):
				for i in power:
					power[i] = os.popen('cat %s/power/%s 2>/dev/null' % \
						(dirname, i)).read().replace('\n', '')
				# host controllers (usbN) are left-aligned, devices right
				if(re.match('usb[0-9]*', name)):
					first = '%-8s' % name
				else:
					first = '%8s' % name
				print('%s [%s:%s] %-20s %-4s %1s %1s %1s %1s %1s %1s %1s %s %s %s %s' % \
					(first, field['idVendor'], field['idProduct'], \
					field['product'][0:20], field['speed'], \
					yesno(power['async']), \
					yesno(power['control']), \
					yesno(power['persist']), \
					yesno(power['runtime_enabled']), \
					yesno(power['runtime_status']), \
					power['runtime_usage'], \
					power['autosuspend'], \
					ms2nice(power['runtime_active_time']), \
					ms2nice(power['runtime_suspended_time']), \
					ms2nice(power['active_duration']), \
					ms2nice(power['connected_duration'])))
# Function: getModes
# Description:
# Determine the supported power modes on this system
# Output:
# A string list of the available modes
def getModes():
	"""Return the list of power modes supported by the target system,
	read from the sysfs power file locally or over adb for android.
	"""
	global sysvals
	modes = ''
	if(not sysvals.android):
		if(os.path.exists(sysvals.powerfile)):
			fp = open(sysvals.powerfile, 'r')
			modes = string.split(fp.read())
			fp.close()
	else:
		line = os.popen(sysvals.adb+' shell cat '+\
			sysvals.powerfile).read().strip()
		modes = string.split(line)
	return modes
# Function: getFPDT
# Description:
# Read the acpi bios tables and pull out FPDT, the firmware data
# Arguments:
# output: True to output the info to stdout, False otherwise
def getFPDT(output):
	"""Read the ACPI FPDT table and extract the firmware suspend/resume
	timing data.

	Arguments:
		output: True to pretty-print table contents to stdout, False to
			stay quiet and just return the data
	Returns:
		[fwsuspend, fwresume] in nanoseconds on success, False on any
		failure to locate or parse the table
	"""
	global sysvals
	rectype = {}
	rectype[0] = 'Firmware Basic Boot Performance Record'
	rectype[1] = 'S3 Performance Table Record'
	prectype = {}
	prectype[0] = 'Basic S3 Resume Performance Record'
	prectype[1] = 'Basic S3 Suspend Performance Record'
	rootCheck()
	if(not os.path.exists(sysvals.fpdtpath)):
		if(output):
			doError('file doesnt exist: %s' % sysvals.fpdtpath, False)
		return False
	if(not os.access(sysvals.fpdtpath, os.R_OK)):
		if(output):
			doError('file isnt readable: %s' % sysvals.fpdtpath, False)
		return False
	if(not os.path.exists(sysvals.mempath)):
		if(output):
			doError('file doesnt exist: %s' % sysvals.mempath, False)
		return False
	if(not os.access(sysvals.mempath, os.R_OK)):
		if(output):
			doError('file isnt readable: %s' % sysvals.mempath, False)
		return False
	fp = open(sysvals.fpdtpath, 'rb')
	buf = fp.read()
	fp.close()
	# the ACPI table header alone is 36 bytes
	if(len(buf) < 36):
		if(output):
			doError('Invalid FPDT table data, should '+\
				'be at least 36 bytes', False)
		return False
	table = struct.unpack('4sIBB6s8sI4sI', buf[0:36])
	if(output):
		print('')
		print('Firmware Performance Data Table (%s)' % table[0])
		print('                  Signature : %s' % table[0])
		print('               Table Length : %u' % table[1])
		print('                   Revision : %u' % table[2])
		print('                   Checksum : 0x%x' % table[3])
		print('                     OEM ID : %s' % table[4])
		print('               OEM Table ID : %s' % table[5])
		print('               OEM Revision : %u' % table[6])
		print('                 Creator ID : %s' % table[7])
		print('           Creator Revision : 0x%x' % table[8])
		print('')
	if(table[0] != 'FPDT'):
		if(output):
			doError('Invalid FPDT table')
		return False
	if(len(buf) <= 36):
		return False
	i = 0
	fwData = [0, 0]
	records = buf[36:]
	fp = open(sysvals.mempath, 'rb')
	# walk the variable-length performance records that follow the header
	while(i < len(records)):
		header = struct.unpack('HBB', records[i:i+4])
		if(header[1] == 0):
			# malformed zero-length record: bail out rather than loop forever
			break
		if(header[0] not in rectype):
			# unknown record type: skip past it (previously this continued
			# without advancing i, spinning forever)
			i += header[1]
			continue
		if(header[1] != 16):
			# unexpected record length: skip past it
			i += header[1]
			continue
		addr = struct.unpack('Q', records[i+8:i+16])[0]
		try:
			fp.seek(addr)
			first = fp.read(8)
		except:
			doError('Bad address 0x%x in %s' % (addr, sysvals.mempath), False)
		rechead = struct.unpack('4sI', first)
		recdata = fp.read(rechead[1]-8)
		if(rechead[0] == 'FBPT'):
			record = struct.unpack('HBBIQQQQQ', recdata)
			if(output):
				print('%s (%s)' % (rectype[header[0]], rechead[0]))
				print('                  Reset END : %u ns' % record[4])
				print('  OS Loader LoadImage Start : %u ns' % record[5])
				print(' OS Loader StartImage Start : %u ns' % record[6])
				print('     ExitBootServices Entry : %u ns' % record[7])
				print('      ExitBootServices Exit : %u ns' % record[8])
		elif(rechead[0] == 'S3PT'):
			if(output):
				print('%s (%s)' % (rectype[header[0]], rechead[0]))
			j = 0
			# walk the sub-records inside the S3 performance table
			while(j < len(recdata)):
				prechead = struct.unpack('HBB', recdata[j:j+4])
				if(prechead[0] not in prectype):
					# unknown sub-record: its length field is unreliable,
					# so stop parsing this table (previously this continued
					# without advancing j, spinning forever)
					break
				if(prechead[0] == 0):
					record = struct.unpack('IIQQ', recdata[j:j+prechead[1]])
					fwData[1] = record[2]
					if(output):
						print('    %s' % prectype[prechead[0]])
						print('               Resume Count : %u' % \
							record[1])
						print('                 FullResume : %u ns' % \
							record[2])
						print('              AverageResume : %u ns' % \
							record[3])
				elif(prechead[0] == 1):
					record = struct.unpack('QQ', recdata[j+4:j+prechead[1]])
					fwData[0] = record[1] - record[0]
					if(output):
						print('    %s' % prectype[prechead[0]])
						print('               SuspendStart : %u ns' % \
							record[0])
						print('                 SuspendEnd : %u ns' % \
							record[1])
						print('                SuspendTime : %u ns' % \
							fwData[0])
				j += prechead[1]
		if(output):
			print('')
		i += header[1]
	fp.close()
	return fwData
# Function: statusCheck
# Description:
# Verify that the requested command and options will work, and
# print the results to the terminal
# Output:
# True if the test will work, False if not
def statusCheck():
	"""Verify that the requested command and options will work on the
	target system (local or android), printing each check's result.

	Returns:
		True if the test can run, False otherwise
	"""
	global sysvals
	status = True
	if(sysvals.android):
		print('Checking the android system ...')
	else:
		print('Checking this system (%s)...' % platform.node())
	# check if adb is connected to a device
	if(sysvals.android):
		res = 'NO'
		out = os.popen(sysvals.adb+' get-state').read().strip()
		if(out == 'device'):
			res = 'YES'
		print('    is android device connected: %s' % res)
		if(res != 'YES'):
			print('    Please connect the device before using this tool')
			return False
	# check we have root access
	res = 'NO (No features of this tool will work!)'
	if(sysvals.android):
		out = os.popen(sysvals.adb+' shell id').read().strip()
		if('root' in out):
			res = 'YES'
	else:
		if(os.environ['USER'] == 'root'):
			res = 'YES'
	print('    have root access: %s' % res)
	if(res != 'YES'):
		if(sysvals.android):
			print('    Try running "adb root" to restart the daemon as root')
		else:
			print('    Try running this script with sudo')
		return False
	# check sysfs is mounted
	res = 'NO (No features of this tool will work!)'
	if(sysvals.android):
		out = os.popen(sysvals.adb+' shell ls '+\
			sysvals.powerfile).read().strip()
		if(out == sysvals.powerfile):
			res = 'YES'
	else:
		if(os.path.exists(sysvals.powerfile)):
			res = 'YES'
	print('    is sysfs mounted: %s' % res)
	if(res != 'YES'):
		return False
	# check target mode is a valid mode
	res = 'NO'
	modes = getModes()
	if(sysvals.suspendmode in modes):
		res = 'YES'
	else:
		status = False
	print('    is "%s" a valid power mode: %s' % (sysvals.suspendmode, res))
	if(res == 'NO'):
		print('      valid power modes are: %s' % modes)
		print('      please choose one with -m')
	# check if the tool can unlock the device
	if(sysvals.android):
		res = 'YES'
		out1 = os.popen(sysvals.adb+\
			' shell dumpsys power | grep mScreenOn').read().strip()
		out2 = os.popen(sysvals.adb+\
			' shell input').read().strip()
		if(not out1.startswith('mScreenOn') or not out2.startswith('usage')):
			res = 'NO (wake the android device up before running the test)'
		print('    can I unlock the screen: %s' % res)
	# check if ftrace is available
	res = 'NO'
	ftgood = verifyFtrace()
	if(ftgood):
		res = 'YES'
	elif(sysvals.usecallgraph):
		status = False
	print('    is ftrace supported: %s' % res)
	# what data source are we using
	res = 'DMESG'
	if(ftgood):
		# only use pure-ftrace mode if every required trace event exists
		sysvals.usetraceeventsonly = True
		sysvals.usetraceevents = False
		for e in sysvals.traceevents:
			check = False
			if(sysvals.android):
				out = os.popen(sysvals.adb+' shell ls -d '+\
					sysvals.epath+e).read().strip()
				if(out == sysvals.epath+e):
					check = True
			else:
				if(os.path.exists(sysvals.epath+e)):
					check = True
			if(not check):
				sysvals.usetraceeventsonly = False
			if(e == 'suspend_resume' and check):
				sysvals.usetraceevents = True
		if(sysvals.usetraceevents and sysvals.usetraceeventsonly):
			res = 'FTRACE (all trace events found)'
		elif(sysvals.usetraceevents):
			res = 'DMESG and FTRACE (suspend_resume trace event found)'
	print('    timeline data source: %s' % res)
	# check if rtcwake
	res = 'NO'
	if(sysvals.rtcpath != ''):
		res = 'YES'
	elif(sysvals.rtcwake):
		status = False
	print('    is rtcwake supported: %s' % res)
	return status
# Function: doError
# Description:
# generic error function for catastrphic failures
# Arguments:
# msg: the error message to print
# help: True if printHelp should be called after, False otherwise
def doError(msg, help):
	"""Print a fatal error message and exit the script.

	Arguments:
		msg: the error message to print
		help: True if printHelp should be called first, False otherwise
	"""
	if(help == True):
		printHelp()
	# format inside the call: print('...') % msg only worked by accident
	# of the python 2 print-statement grammar and breaks as a function call
	print('ERROR: %s\n' % msg)
	sys.exit()
# Function: doWarning
# Description:
# generic warning function for non-catastrophic anomalies
# Arguments:
# msg: the warning message to print
# file: If not empty, a filename to request be sent to the owner for debug
def doWarning(msg, file):
	"""Print a non-fatal warning message.

	Arguments:
		msg: the warning message to print
		file: if non-empty, a filename to request be sent upstream for debug
	"""
	# format inside the call: print('...') % msg only worked by accident
	# of the python 2 print-statement grammar and breaks as a function call
	print('/* %s */' % msg)
	if(file):
		print('/* For a fix, please send this'+\
			' %s file to <todd.e.brandt@intel.com> */' % file)
# Function: rootCheck
# Description:
# quick check to see if we have root access
def rootCheck():
	"""Abort with an error unless the script is being run as root."""
	# os.environ.get avoids a KeyError when USER is unset (e.g. under
	# cron or a stripped-down sudo environment); a missing USER is
	# treated the same as a non-root user
	if(os.environ.get('USER') != 'root'):
		doError('This script must be run as root', False)
# Function: getArgInt
# Description:
# pull out an integer argument from the command line with checks
def getArgInt(name, args, min, max):
	"""Pull the next command-line argument as a range-checked integer.

	Arguments:
		name: the option name, used in error messages
		args: an iterator over the remaining command-line arguments
		min: the smallest acceptable value (inclusive)
		max: the largest acceptable value (inclusive)
	Returns:
		the parsed integer; exits via doError on any failure
	"""
	try:
		# builtin next() works on both python 2.6+ and python 3 iterators,
		# unlike the py2-only args.next() method
		arg = next(args)
	except:
		doError(name+': no argument supplied', True)
	try:
		val = int(arg)
	except:
		doError(name+': non-integer value given', True)
	if(val < min or val > max):
		doError(name+': value should be between %d and %d' % (min, max), True)
	return val
# Function: rerunTest
# Description:
# generate an output from an existing set of ftrace/dmesg logs
def rerunTest():
	"""Regenerate the html output from an existing set of ftrace/dmesg
	logs (set in sysvals) without running a new suspend/resume.
	"""
	global sysvals
	if(sysvals.ftracefile != ''):
		doesTraceLogHaveTraceEvents()
	if(sysvals.dmesgfile == '' and not sysvals.usetraceeventsonly):
		doError('recreating this html output '+\
			'requires a dmesg file', False)
	sysvals.setOutputFile()
	vprint('Output file: %s' % sysvals.htmlfile)
	print('PROCESSING DATA')
	if(sysvals.usetraceeventsonly):
		testruns = parseTraceLog()
	else:
		testruns = loadKernelLog()
		for data in testruns:
			parseKernelLog(data)
		if(sysvals.ftracefile != ''):
			appendIncompleteTraceLog(testruns)
	createHTML(testruns)
# Function: runTest
# Description:
# execute a suspend/resume, gather the logs, and generate the output
def runTest(subdir):
	"""Execute a full suspend/resume test: configure ftrace, run the
	suspend, gather the logs, and generate the html output.

	Arguments:
		subdir: the subdirectory name in which to place the output files
	"""
	global sysvals
	# prepare for the test
	if(not sysvals.android):
		initFtrace()
	else:
		initFtraceAndroid()
	sysvals.initTestOutput(subdir)
	vprint('Output files:\n    %s' % sysvals.dmesgfile)
	if(sysvals.usecallgraph or
		sysvals.usetraceevents or
		sysvals.usetraceeventsonly):
		vprint('    %s' % sysvals.ftracefile)
	vprint('    %s' % sysvals.htmlfile)
	# execute the test
	if(not sysvals.android):
		executeSuspend()
	else:
		executeAndroidSuspend()
	# analyze the data and create the html output
	print('PROCESSING DATA')
	if(sysvals.usetraceeventsonly):
		# data for kernels 3.15 or newer is entirely in ftrace
		testruns = parseTraceLog()
	else:
		# data for kernels older than 3.15 is primarily in dmesg
		testruns = loadKernelLog()
		for data in testruns:
			parseKernelLog(data)
		if(sysvals.usecallgraph or sysvals.usetraceevents):
			appendIncompleteTraceLog(testruns)
	createHTML(testruns)
# Function: runSummary
# Description:
# create a summary of tests in a sub-directory
def runSummary(subdir, output):
	"""Create a summary html page from all the tests found in a
	sub-directory tree.

	Arguments:
		subdir: the directory to scan recursively for *_ftrace.txt files
		output: True to print progress information to stdout
	"""
	global sysvals
	# get a list of ftrace output files
	files = []
	for dirname, dirnames, filenames in os.walk(subdir):
		for filename in filenames:
			if(re.match('.*_ftrace.txt', filename)):
				files.append("%s/%s" % (dirname, filename))
	# process the files in order and get an array of data objects
	testruns = []
	for file in sorted(files):
		if output:
			print("Test found in %s" % os.path.dirname(file))
		sysvals.ftracefile = file
		sysvals.dmesgfile = file.replace('_ftrace.txt', '_dmesg.txt')
		doesTraceLogHaveTraceEvents()
		sysvals.usecallgraph = False
		if not sysvals.usetraceeventsonly:
			if(not os.path.exists(sysvals.dmesgfile)):
				print("Skipping %s: not a valid test input" % file)
				continue
			else:
				if output:
					f = os.path.basename(sysvals.ftracefile)
					d = os.path.basename(sysvals.dmesgfile)
					print("\tInput files: %s and %s" % (f, d))
				testdata = loadKernelLog()
				data = testdata[0]
				parseKernelLog(data)
				testdata = [data]
				appendIncompleteTraceLog(testdata)
		else:
			if output:
				print("\tInput file: %s" % os.path.basename(sysvals.ftracefile))
			testdata = parseTraceLog()
			data = testdata[0]
		data.normalizeTime(data.tSuspended)
		link = file.replace(subdir+'/', '').replace('_ftrace.txt', '.html')
		data.outfile = link
		testruns.append(data)
	createHTMLSummarySimple(testruns, subdir+'/summary.html')
# Function: printHelp
# Description:
# print out the help text
def printHelp():
	"""Print the command-line usage text and return True.

	Reads the global sysvals for the tool version and default suspend mode,
	and queries getModes() for the suspend modes available on this system.
	"""
	global sysvals
	modes = getModes()

	print('')
	print('AnalyzeSuspend v%.1f' % sysvals.version)
	print('Usage: sudo analyze_suspend.py <options>')
	print('')
	print('Description:')
	print(' This tool is designed to assist kernel and OS developers in optimizing')
	print(' their linux stack\'s suspend/resume time. Using a kernel image built')
	print(' with a few extra options enabled, the tool will execute a suspend and')
	print(' capture dmesg and ftrace data until resume is complete. This data is')
	print(' transformed into a device timeline and an optional callgraph to give')
	print(' a detailed view of which devices/subsystems are taking the most')
	print(' time in suspend/resume.')
	print('')
	print(' Generates output files in subdirectory: suspend-mmddyy-HHMMSS')
	print(' HTML output: <hostname>_<mode>.html')
	print(' raw dmesg output: <hostname>_<mode>_dmesg.txt')
	print(' raw ftrace output: <hostname>_<mode>_ftrace.txt')
	print('')
	print('Options:')
	print(' [general]')
	print(' -h Print this help text')
	print(' -v Print the current tool version')
	print(' -verbose Print extra information during execution and analysis')
	print(' -status Test to see if the system is enabled to run this tool')
	print(' -modes List available suspend modes')
	# BUGFIX: the format arguments were previously applied OUTSIDE the
	# print(...) call ("print('...') % (...)"), which only parses as intended
	# with the Python 2 print statement and raises TypeError under Python 3.
	# Keeping the % formatting inside the call is correct in both.
	print(' -m mode Mode to initiate for suspend %s (default: %s)' % (modes, sysvals.suspendmode))
	print(' -rtcwake t Use rtcwake to autoresume after <t> seconds (default: disabled)')
	print(' [advanced]')
	print(' -f Use ftrace to create device callgraphs (default: disabled)')
	print(' -filter "d1 d2 ..." Filter out all but this list of dev names')
	print(' -x2 Run two suspend/resumes back to back (default: disabled)')
	print(' -x2delay t Minimum millisecond delay <t> between the two test runs (default: 0 ms)')
	print(' -postres t Time after resume completion to wait for post-resume events (default: 0 S)')
	print(' -multi n d Execute <n> consecutive tests at <d> seconds intervals. The outputs will')
	print(' be created in a new subdirectory with a summary page.')
	print(' [utilities]')
	print(' -fpdt Print out the contents of the ACPI Firmware Performance Data Table')
	print(' -usbtopo Print out the current USB topology with power info')
	print(' -usbauto Enable autosuspend for all connected USB devices')
	print(' [android testing]')
	print(' -adb binary Use the given adb binary to run the test on an android device.')
	print(' The device should already be connected and with root access.')
	print(' Commands will be executed on the device using "adb shell"')
	print(' [re-analyze data from previous runs]')
	print(' -ftrace ftracefile Create HTML output using ftrace input')
	print(' -dmesg dmesgfile Create HTML output using dmesg (not needed for kernel >= 3.15)')
	print(' -summary directory Create a summary of all test in this dir')
	print('')
	return True
# ----------------- MAIN --------------------
# exec start (skipped if script is loaded as library)
if __name__ == '__main__':
	cmd = ''
	cmdarg = ''
	multitest = {'run': False, 'count': 0, 'delay': 0}
	# loop through the command line arguments
	# (manual iterator so that option values can be pulled from inside the
	# loop with args.next() -- a Python 2 idiom, matching the rest of the file)
	args = iter(sys.argv[1:])
	for arg in args:
		if(arg == '-m'):
			try:
				val = args.next()
			except:
				# NOTE(review): doError with True appears to terminate the
				# program (otherwise 'val' would be unbound below) -- confirm.
				doError('No mode supplied', True)
			sysvals.suspendmode = val
		elif(arg == '-adb'):
			try:
				val = args.next()
			except:
				doError('No adb binary supplied', True)
			# the adb binary must exist, be executable, and answer 'version'
			if(not os.path.exists(val)):
				doError('file doesnt exist: %s' % val, False)
			if(not os.access(val, os.X_OK)):
				doError('file isnt executable: %s' % val, False)
			try:
				check = os.popen(val+' version').read().strip()
			except:
				doError('adb version failed to execute', False)
			if(not re.match('Android Debug Bridge .*', check)):
				doError('adb version failed to execute', False)
			sysvals.adb = val
			sysvals.android = True
		elif(arg == '-x2'):
			# -x2 and -postres are mutually exclusive; checked in both orders
			if(sysvals.postresumetime > 0):
				doError('-x2 is not compatible with -postres', False)
			sysvals.execcount = 2
		elif(arg == '-x2delay'):
			sysvals.x2delay = getArgInt('-x2delay', args, 0, 60000)
		elif(arg == '-postres'):
			if(sysvals.execcount != 1):
				doError('-x2 is not compatible with -postres', False)
			sysvals.postresumetime = getArgInt('-postres', args, 0, 3600)
		elif(arg == '-f'):
			sysvals.usecallgraph = True
		elif(arg == '-modes'):
			cmd = 'modes'
		elif(arg == '-fpdt'):
			cmd = 'fpdt'
		elif(arg == '-usbtopo'):
			cmd = 'usbtopo'
		elif(arg == '-usbauto'):
			cmd = 'usbauto'
		elif(arg == '-status'):
			cmd = 'status'
		elif(arg == '-verbose'):
			sysvals.verbose = True
		elif(arg == '-v'):
			print("Version %.1f" % sysvals.version)
			sys.exit()
		elif(arg == '-rtcwake'):
			sysvals.rtcwake = True
			sysvals.rtcwaketime = getArgInt('-rtcwake', args, 0, 3600)
		elif(arg == '-multi'):
			multitest['run'] = True
			multitest['count'] = getArgInt('-multi n (exec count)', args, 2, 1000000)
			multitest['delay'] = getArgInt('-multi d (delay between tests)', args, 0, 3600)
		elif(arg == '-dmesg'):
			# re-analyze mode: use an existing dmesg log instead of running a test
			try:
				val = args.next()
			except:
				doError('No dmesg file supplied', True)
			sysvals.notestrun = True
			sysvals.dmesgfile = val
			if(os.path.exists(sysvals.dmesgfile) == False):
				doError('%s doesnt exist' % sysvals.dmesgfile, False)
		elif(arg == '-ftrace'):
			# re-analyze mode: use an existing ftrace log instead of running a test
			try:
				val = args.next()
			except:
				doError('No ftrace file supplied', True)
			sysvals.notestrun = True
			sysvals.usecallgraph = True
			sysvals.ftracefile = val
			if(os.path.exists(sysvals.ftracefile) == False):
				doError('%s doesnt exist' % sysvals.ftracefile, False)
		elif(arg == '-summary'):
			try:
				val = args.next()
			except:
				doError('No directory supplied', True)
			cmd = 'summary'
			cmdarg = val
			sysvals.notestrun = True
			if(os.path.isdir(val) == False):
				doError('%s isnt accesible' % val, False)
		elif(arg == '-filter'):
			try:
				val = args.next()
			except:
				doError('No devnames supplied', True)
			sysvals.setDeviceFilter(val)
		elif(arg == '-h'):
			printHelp()
			sys.exit()
		else:
			doError('Invalid argument: '+arg, True)
	# just run a utility command and exit
	if(cmd != ''):
		if(cmd == 'status'):
			statusCheck()
		elif(cmd == 'fpdt'):
			if(sysvals.android):
				doError('cannot read FPDT on android device', False)
			getFPDT(True)
		elif(cmd == 'usbtopo'):
			if(sysvals.android):
				doError('cannot read USB topology '+\
					'on an android device', False)
			detectUSB(True)
		elif(cmd == 'modes'):
			modes = getModes()
			# Python 2 print statement (this script targets Python 2)
			print modes
		elif(cmd == 'usbauto'):
			setUSBDevicesAuto()
		elif(cmd == 'summary'):
			print("Generating a summary of folder \"%s\"" % cmdarg)
			runSummary(cmdarg, True)
		sys.exit()
	# run test on android device
	if(sysvals.android):
		if(sysvals.usecallgraph):
			doError('ftrace (-f) is not yet supported '+\
				'in the android kernel', False)
		if(sysvals.notestrun):
			doError('cannot analyze test files on the '+\
				'android device', False)
	# if instructed, re-analyze existing data files
	if(sysvals.notestrun):
		rerunTest()
		sys.exit()
	# verify that we can run a test
	if(not statusCheck()):
		print('Check FAILED, aborting the test run!')
		sys.exit()
	if multitest['run']:
		# run multiple tests in a separte subdirectory
		s = 'x%d' % multitest['count']
		subdir = datetime.now().strftime('suspend-'+s+'-%m%d%y-%H%M%S')
		os.mkdir(subdir)
		for i in range(multitest['count']):
			if(i != 0):
				print('Waiting %d seconds...' % (multitest['delay']))
				time.sleep(multitest['delay'])
			print('TEST (%d/%d) START' % (i+1, multitest['count']))
			runTest(subdir)
			print('TEST (%d/%d) COMPLETE' % (i+1, multitest['count']))
		runSummary(subdir, False)
	else:
		# run the test in the current directory
		runTest(".")
| gpl-2.0 |
mtils/ems | examples/qt4/gui/persondata_form.py | 1 | 1430 |
from PyQt4.QtCore import pyqtSignal, Qt
from PyQt4.QtGui import QTableView, QApplication, QWidget, QVBoxLayout
from PyQt4.QtGui import QLineEdit, QSpinBox, QDoubleSpinBox, QCheckBox
class PersonDataForm(QWidget):
    """Form widget stacking input fields for a person's basic data.

    Builds (top to bottom) line edits for forename and surname, spin boxes
    for age, weight and income, and a married checkbox, all inside a
    QVBoxLayout. Each widget gets an objectName so it can be found later
    (e.g. via QObject.findChild or data-binding by name).
    """

    def __init__(self, parent=None):
        super(PersonDataForm, self).__init__(parent)
        self._setupUi()

    def _setupUi(self):
        """Create the input widgets and add them to a vertical layout."""
        self.setLayout(QVBoxLayout())

        # Free-text name fields.
        self.forenameInput = QLineEdit(self)
        self.forenameInput.setObjectName('forenameInput')
        self.layout().addWidget(self.forenameInput)

        self.surnameInput = QLineEdit(self)
        self.surnameInput.setObjectName('surnameInput')
        self.layout().addWidget(self.surnameInput)

        # Age in whole years, clamped to 0..200.
        self.ageInput = QSpinBox(self)
        self.ageInput.setObjectName('ageInput')
        self.ageInput.setRange(0, 200)
        self.layout().addWidget(self.ageInput)

        # Weight and income share the same 0.0..1000.0 range.
        self.weightInput = QDoubleSpinBox(self)
        self.weightInput.setObjectName('weightInput')
        self.weightInput.setRange(0.0, 1000.0)
        self.layout().addWidget(self.weightInput)

        self.incomeInput = QDoubleSpinBox(self)
        self.incomeInput.setObjectName('incomeInput')
        self.incomeInput.setRange(0.0, 1000.0)
        self.layout().addWidget(self.incomeInput)

        self.marriedInput = QCheckBox(self)
        self.marriedInput.setObjectName('marriedInput')
        self.layout().addWidget(self.marriedInput) | mit |
iocoop/beancount | src/python/beancount/projects/returns.py | 1 | 39336 | #!/usr/bin/env python3
"""Compute the returns of a portfolio.
A document exists to describe the problem in more detail.
http://furius.ca/beancount/doc/portfolio-returns
Calculating the returns is carried out by identifying the entries whose accounts
match a regular expression that defines accounts to consider for valuation, to
compute the returns over. This set of "related accounts" must also cover the
internal flows that occur within that account, that is, the income and expense
accounts that result in the activity of the portfolio itself.
We consider three sets of accounts:
"Assets accounts" or "Value accounts": Accounts whose balances are counted
towards calculating the total value of the portfolio. These are asset
accounts that match the regular expression pattern.
"Internal accounts": Accounts which are not valued, but which are used to post
internal activity of the account. These are income received as a result of
the portfolio activity, such as dividends or realized capital gains, and
expenses incurred as a result of carrying out activity related to the
investment activity, such as commissions and fees. These are income and
expenses accounts.
"External accounts": Accounts that are considered external to the group of
related accounts. These are accounts from which funds will be deposited or
withdrawn. These deposits or withdrawals must be excluded from the portfolio
returns. Their presence is the reason computing portfolio returns isn't just
a trivial exercise!
Given this characterization, we can characterize transactions by which accounts
they have on their postings. Think of it as a Venn diagram with three circles
and all their possible intersections. We will use the following accounts in our
examples below:
;; Value accounts
2014-01-01 open Assets:Invest:Cash USD
2014-01-01 open Assets:Invest:BOOG BOOG
;; Internal accounts (non-value)
2014-01-01 open Income:Invest:PnL USD
2014-01-01 open Income:Invest:Dividends USD
2014-01-01 open Expenses:Commissions USD
2014-01-01 open Expenses:Fees USD
;; External accounts
2014-01-01 open Assets:Bank:Checking USD
2014-01-01 open Income:Salary USD
2014-01-01 open Expenses:Taxes USD
Let us first consider transactions which have at least some value accounts:
VALUE ONLY: All postings are on value accounts. An example would be some cash
converted into units of a stock (with no expenses):
2014-02-01 * "Buying some BOOG"
Assets:Invest:Cash -650.00 USD
Assets:Invest:BOOG 10 BOOG {65 USD}
VALUE + INTERNAL: Such a transaction would be one where, for example, there is
some change that triggers a commission and/or a capital gain:
2014-02-15 * "Selling half my position"
Assets:Invest:BOOG -5 BOOG {65 USD} @ 70 USD
Assets:Invest:Cash 340.05 USD
Expenses:Commissions 9.95 USD
Income:Invest:PnL -25.00 USD
Or the receipt of a dividend:
2014-02-20 * "Dividends from BOOG position"
Assets:Invest:Cash 12.00 USD
Income:Invest:Dividends -12.00 USD
Both of these type of transactions represents transfers within asset accounts
and as such do not present any challenges or events in terms of calculating the
returns. Since internal flow accounts are meant to be considered as revenue or
costs internal to the portfolio, they can just be processed without having to
revalue the portfolio across them.
Other transactions need special treatment , however:
VALUE + EXTERNAL: These would be transactions either with a deposit or a
withdrawal from/to one of the value accounts:
2014-01-10 * "Transferring money for investing"
Assets:Bank:Checking -500.00 USD
Assets:Invest:Cash 500.00 USD
2014-06-30 * "Taking some money out for car repairs"
Assets:Invest:Cash -400.00 USD
Assets:Bank:Checking 400.00 USD
These transactions require special treatment: We need to compute the value of
the asset accounts before they get applied, book the returns for the previous
leg, then apply the transaction to its accounts and revalue the value accounts,
and begin a new piecewise returns segment.
Other transactions are a bit more problematic:
VALUE + INTERNAL + EXTERNAL: Those transactions with external flows may
sometimes involve posting amounts to one of the internal flow accounts:
2014-04-01 * "Transferring money by wire"
Assets:Bank:Checking -500.00 USD
Assets:Invest:Cash 480.00 USD
Expenses:Fees 20.00 USD
The question here is whether the postings on internal flow accounts should be internalized
or not, e.g., whether the 20.00 USD wire fee in the transaction above should be
considered a cost within the portfolio activity or not. We will assume that they
always are, and in order to keep our algorithm simple, we will internalize the
postings by splitting the transaction like this:
2014-04-01 * "Transferring money by wire" ^internalized-27356
Assets:Bank:Checking -500.00 USD
Equity:Internalized 500.00 USD
2014-04-01 * "Transferring money by wire" ^internalized-27356
Equity:Internalized -500.00 USD
Assets:Invest:Cash 480.00 USD
Expenses:Fees 20.00 USD
Here we have created a new "transfer" account called "Equity:Internalized" which
is automatically added to the set of value accounts. Now we have two
transactions, one with only VALUE + EXTERNAL accounts and one with VALUE +
INTERNAL accounts. The 20$ effectively reduces the returns of the segment that
includes the second transaction.
Then, we turn to other groups that don't include value accounts:
EXTERNAL ONLY: These are other types of transactions on completely unrelated
accounts. We simply ignore other transactions that do not affect our value
nor internal flow accounts. Within our limited context above, here is such a
transaction:
2014-01-02 * "Salary Pay"
Income:Salary -3461.54 USD
Expenses:Taxes 1176.92 USD
Assets:Bank:Checking 2284.62 USD
INTERNAL + EXTERNAL: Then we may have transactions that affect some internal
accounts and some external accounts. The treatment for these is less clear.
Some internal accounts are clearly tied to our investment portfolio, such as
"Income:Invest:Dividends" and others are more general and can be used
outside of the context of our investment portfolio, such as "Expenses:Fees"
which could be used to book a monthly bank fee, for example, like this:
2014-03-17 * "Monthly fees"
Assets:Bank:Checking -4.00 USD
Expenses:Fees 4.00 USD
Such a transaction should clearly not be considered as part of our portfolio
in any way. The only relation is the common use of the "Expenses:Fees"
account between transactions in the portfolio and transactions outside the
portfolio. However, consider this transaction where an income account that
is clearly associated with our portfolio is used to receive a dividend in an
external account:
2014-03-20 * "Dividend payment correction with fee"
Income:Invest:Dividends -9.00 USD
Assets:Bank:Checking 9.00 USD
This should clearly be included in the portfolio. The problem here is that
there is no distinction between an internal flow account tied to this
portfolio, such as "Income:Invest:Dividends" and one that is not and which
is used widely outside of this context, such as "Expenses:Fees".
In the context of this example, such transactions never occur. But...
consider what would happen if we were attempting to compute the value of
the portfolio excepting cash: the "Assets:Invest:Cash" account and a
regular dividend contribution becomes one of these transactions:
2014-03-20 * "Dividend payment"
Income:Invest:Dividends -9.00 USD ;; Internal
Assets:Invest:Cash 9.00 USD ;; External
So we will have to do something about those transactions: we provide the
user with the ability to specify a list of accounts that will force
internalization ("accounts_internalize"). When specified, transactions with
no value accounts but with some postings matching one of these accounts
will be internalized explicitly.
INTERNAL ONLY: Finally, consider this contrived example transaction where a
dividend happens to equal exactly some fee:
2014-03-20 * "Dividend payment with fee"
Income:Invest:Dividends -9.00 USD
Expenses:Fees 9.00 USD
It is unclear whether that should be in the portfolio or not. We have no
way to know. In either case, the transaction would have no impact on the
value of the portfolio, so we choose to ignore these transactions safely.
(Examples of these are rare.)
"""
__author__ = "Martin Blais <blais@furius.ca>"
import argparse
import copy
import re
import logging
from dateutil.parser import parse as parse_datetime
from beancount.core.number import ZERO
from beancount import loader
from beancount.parser import printer
from beancount.parser import options
from beancount.core import data
from beancount.core import inventory
from beancount.core import getters
from beancount.core import flags
from beancount.ops import prices
from beancount.utils import misc_utils
def sum_balances_for_accounts(balance, entry, accounts):
    """Accumulate an entry's matching postings into a running balance.

    Only postings whose account belongs to 'accounts' contribute; any
    directive that is not a Transaction is passed through untouched.

    Args:
      balance: An instance of Inventory, modified in place.
      entry: A directive (directives other than Transactions are ignored).
      accounts: A set of strings, the names of accounts whose postings to include.
    Returns:
      The same (destructively modified) balance, for convenient chaining.
    """
    # Non-transaction directives carry no postings to sum.
    if not isinstance(entry, data.Transaction):
        return balance
    for posting in entry.postings:
        if posting.account not in accounts:
            continue
        balance.add_position(posting.position)
    return balance
def segment_periods(entries, accounts_value, accounts_intflows,
                    date_begin=None, date_end=None):
    """Segment entries in terms of piecewise periods of internal flow.

    This function iterates through the given entries and computes balances at
    the beginning and end of periods without external flow entries. You should be
    able to then compute the returns from these informations.

    Args:
      entries: A list of directives. The list may contain directives other than
        than transactions as well as directives with no relation to the assets or
        internal flow accounts (the function simply ignores that which is not
        relevant).
      accounts_value: A set of the asset accounts in the related group.
      accounts_intflows: A set of the internal flow accounts in the related group.
      date_begin: A datetime.date instance, the beginning date of the period to compute
        returns over.
      date_end: A datetime.date instance, the end date of the period to compute returns
        over.
    Returns:
      A pair of
        periods: A list of period tuples, each of which contains:
          period_begin: A datetime.date instance, the first day of the period.
          period_end: A datetime.date instance, the last day of the period.
          balance_begin: An Inventory instance, the balance at the beginning of the period.
          balance_end: An Inventory instance, the balance at the end of the period.
        portfolio_entries: A list of the entries that we used in computing the portfolio.
    Raises:
      ValueError: If the dates create an impossible situation, the beginning
        must come before the requested end, if specified.
    """
    logging.info("Segmenting periods.")
    logging.info("Date begin: %s", date_begin)
    logging.info("Date end: %s", date_end)

    if date_begin and date_end and date_begin >= date_end:
        raise ValueError("Dates are not ordered correctly: {} >= {}".format(
            date_begin, date_end))

    accounts_related = accounts_value | accounts_intflows
    # A transaction is an "external flow" if any of its postings touches an
    # account outside the related group (value accounts + internal flows).
    is_external_flow_entry = lambda entry: (isinstance(entry, data.Transaction) and
                                            any(posting.account not in accounts_related
                                                for posting in entry.postings))

    # Create an iterator over the entries we care about.
    portfolio_entries = [entry
                         for entry in entries
                         if getters.get_entry_accounts(entry) & accounts_value]
    iter_entries = iter(portfolio_entries)
    entry = next(iter_entries)

    # If a beginning cut-off has been specified, skip the entries before then
    # (and make sure to accumulate the initial balance correctly).
    balance = inventory.Inventory()
    if date_begin is not None:
        period_begin = date_begin
        try:
            while True:
                if entry.date >= date_begin:
                    break
                if date_end and entry.date >= date_end:
                    break
                balance = sum_balances_for_accounts(balance, entry, accounts_value)
                entry = next(iter_entries)
        except StopIteration:
            # No periods found! Just return an empty list.
            return [(date_begin, date_end or date_begin, balance, balance)], []
    else:
        period_begin = entry.date

    # Main loop over the entries. Each outer iteration produces one period
    # delimited by external flow entries (or the requested date boundaries).
    periods = []
    entry_logger = misc_utils.LineFileProxy(logging.debug, ' ')
    done = False
    while True:
        balance_begin = copy.copy(balance)

        logging.debug(",-----------------------------------------------------------")
        logging.debug(" Begin: %s", period_begin)
        logging.debug(" Balance: %s", balance_begin.units())
        logging.debug("")

        # Consume all internal flow entries, simply accumulating the total balance.
        while True:
            period_end = entry.date
            if is_external_flow_entry(entry):
                break
            if date_end and entry.date >= date_end:
                period_end = date_end
                done = True
                break
            # NOTE(review): 'entry' is always truthy at this point (it is a
            # directive tuple); the guard looks redundant -- confirm.
            if entry:
                printer.print_entry(entry, file=entry_logger)
            balance = sum_balances_for_accounts(balance, entry, accounts_value)
            try:
                entry = next(iter_entries)
            except StopIteration:
                done = True
                if date_end:
                    period_end = date_end
                break
        else:
            # NOTE(review): unreachable -- a 'while True' loop only exits via
            # break, which skips this else clause; kept as-is for fidelity.
            done = True

        balance_end = copy.copy(balance)

        ## FIXME: Bring this back in, this fails for now. Something about the
        ## initialization fails it. assert period_begin <= period_end,
        ## (period_begin, period_end)
        periods.append((period_begin, period_end, balance_begin, balance_end))

        logging.debug(" Balance: %s", balance_end.units())
        logging.debug(" End: %s", period_end)
        logging.debug("`-----------------------------------------------------------")
        logging.debug("")

        if done:
            break

        # Absorb the balance of the external flow entry.
        assert is_external_flow_entry(entry), entry
        if entry:
            printer.print_entry(entry, file=entry_logger)
        balance = sum_balances_for_accounts(balance, entry, accounts_value)
        try:
            entry = next(iter_entries)
        except StopIteration:
            # If there is an end date, insert that final period to cover the end
            # date, with no changes.
            if date_end:
                periods.append((period_end, date_end, balance, balance))
            break

        period_begin = period_end

    ## FIXME: Bring this back in, this fails for now.
    # assert all(period_begin <= period_end
    #            for period_begin, period_end, _, _ in periods), periods
    return periods, portfolio_entries
def compute_period_returns(date_begin, date_end,
                           balance_begin, balance_end, price_map):
    """Compute the returns of the given begin/end balances.

    Args:
      date_begin: A datetime.date instance, the beginning date of the period.
      date_end: A datetime.date instance, the end date of the period.
      balance_begin: An instance of the Inventory at the beginning of the period.
      balance_end: An instance of the Inventory at the end of the period.
      price_map: An instance of PriceMap as computed by prices.build_price_map().
    Returns:
      A pair of:
        returns: A dict of currency -> floating-point return for the period,
          over the union of all currencies seen at either boundary (this makes
          it possible to report returns in multiple currencies).
        (mktvalue_begin, mktvalue_end): Both instances of Inventory, the balance
          of the portfolio evaluated at market value at the beginning and end
          of the period.
    """
    # Evaluate both boundary balances at market value.
    mktvalue_begin = prices.get_inventory_market_value(balance_begin, date_begin, price_map)
    mktvalue_end = prices.get_inventory_market_value(balance_end, date_end, price_map)

    def flatten(mktvalue):
        # Map currency -> number for the cost-free positions of an inventory.
        # Positions still held at cost cannot be reduced to a simple value
        # here (the price database should normally have covered them), so we
        # log an error and skip them.
        flat = {}
        for pos in mktvalue.get_positions():
            if pos.lot.cost:
                logging.error('Could not reduce position "%s" to its value', pos)
                continue
            assert pos.lot.currency not in flat
            flat[pos.lot.currency] = pos.number
        return flat

    single_begin = flatten(mktvalue_begin)
    single_end = flatten(mktvalue_end)

    # Compute a return per currency over the union of currencies, treating a
    # currency missing at a boundary as a zero value for that currency. A
    # currency absent (or zero) at the start yields a neutral 1.0 return.
    #
    # Note: In the future, we should instead require more information about
    # the desired currency for valuation and convert all contents to a single
    # currency above, so this is not needed except to handle really odd cases.
    returns = {}
    for currency in set(single_begin) | set(single_end):
        begin = single_begin.get(currency, ZERO)
        returns[currency] = (1.0 if begin == ZERO
                             else float(single_end.get(currency, ZERO) / begin))

    return returns, (mktvalue_begin, mktvalue_end)
def annualize_returns(returns, date_first, date_last):
    """Convert period return rates into equivalent annual rates.

    Args:
      returns: A dict of floats, the calculated returns over the period.
      date_first: A datetime.date instance, the beginning of the period.
      date_last: A datetime.date instance, the end of the period.
    Returns:
      A dict of float, each rate compounded to an equivalent 365-day year.
    Raises:
      ValueError: If the period spans zero days but carries a non-flat
        (!= 1) return, which cannot be meaningfully annualized.
    """
    num_days = (date_last - date_first).days
    if num_days != 0:
        exponent = 365. / num_days
    else:
        # A zero-length period is only acceptable when nothing moved.
        for rate in returns.values():
            if rate != 1:
                raise ValueError("Invalid period for return: {} days for {}".format(
                    num_days, rate))
        exponent = 1.
    annualized = {}
    for currency, rate in returns.items():
        annualized[currency] = rate ** exponent
    return annualized
# The format of the links that are added to internalized transactions.
# Rendered as, e.g., "internalized-00001"; internalize() numbers the pairs of
# split entries sequentially starting at 1 so both halves share one link.
LINK_FORMAT = 'internalized-{:05d}'
def internalize(entries, transfer_account,
                accounts_value, accounts_intflows, accounts_internalize=None):
    """Internalize internal flows that would be lost because booked against external
    flow accounts. This splits up entries that have accounts both in internal
    flows and external flows. A new set of entries is returned, along with a
    list of entries that were split and replaced by a pair of entries.

    Args:
      entries: A list of directives to process for internalization.
      transfer_account: A string, the name of an account to use for internalizing entries
        which need to be split between internal and external flows. A good default value
        would be an equity account, 'Equity:Internalized' or something like that.
      accounts_value: A set of account name strings, the names of the asset accounts
        included in valuing the portfolio.
      accounts_intflows: A set of account name strings, the names of internal flow
        accounts (normally income and expenses) that aren't external flows.
      accounts_internalize: A set of account name strings to trigger explicit
        internalization of transactions with no value account. If a transaction
        is found that has only internal accounts and external accounts, the
        postings whose accounts are in this set of accounts will be internalized.
        This is a method that can be used to pull dividends in the portfolio
        when valuing portfolios without their cash component. See docstring and
        documentation for details. If specified, this set of accounts must be a
        subset of the internal flows accounts.
    Returns:
      A pair of the new list of internalized entries, including all the other entries, and
      a short list of just the original entries that were removed and replaced by pairs of
      entries.
    """
    # Verify that external flow entries only affect balance sheet accounts and
    # not income or expenses accounts (internal flows). We do this because we
    # want to ensure that all income and expenses are incurred against assets
    # that live within the assets group. An example of something we'd like to
    # avoid is an external flow paying for fees incurred within the account that
    # should diminish the returns of the related accounts. To fix this, we split
    # the entry into two entries, one without external flows against a transfer
    # account that we consider an assets account, and just the external flows
    # against this same transfer account.
    assert(isinstance(transfer_account, str)), (
        "Invalid transfer account: {}".format(transfer_account))

    if accounts_internalize and not (accounts_internalize <= accounts_intflows):
        raise ValueError(
            "Internalization accounts is not a subset of internal flows accounts.")

    new_entries = []
    replaced_entries = []
    index = 1
    for entry in entries:
        if not isinstance(entry, data.Transaction):
            new_entries.append(entry)
            continue

        # Break up postings into the three categories.
        postings_assets = []
        postings_intflows = []
        postings_extflows = []
        postings_internalize = []
        for posting in entry.postings:
            if posting.account in accounts_value:
                postings_list = postings_assets
            elif posting.account in accounts_intflows:
                postings_list = postings_intflows
            else:
                postings_list = postings_extflows
            postings_list.append(posting)

            if accounts_internalize and posting.account in accounts_internalize:
                postings_internalize.append(posting)

        # Check if the entry is to be internalized and split it up in two
        # entries and replace the entry if that's the case.
        if (postings_intflows and postings_extflows and
            (postings_assets or postings_internalize)):

            replaced_entries.append(entry)

            # We will attach a link to each of the split entries.
            link = LINK_FORMAT.format(index)
            index += 1

            # Calculate the weight of the balance to transfer.
            balance_transfer = inventory.Inventory()
            for posting in postings_extflows:
                balance_transfer.add_amount(posting.position.get_weight(posting.price))

            prototype_entry = entry._replace(flag=flags.FLAG_RETURNS,
                                             links=(entry.links or set()) | set([link]))

            # Create internal flows posting.
            postings_transfer_int = [
                data.Posting(transfer_account, position_, None, None, None)
                for position_ in balance_transfer.get_positions()]
            new_entries.append(prototype_entry._replace(
                postings=(postings_assets + postings_intflows + postings_transfer_int)))

            # Create external flows posting.
            postings_transfer_ext = [
                data.Posting(transfer_account, -position_, None, None, None)
                for position_ in balance_transfer.get_positions()]
            new_entries.append(prototype_entry._replace(
                postings=(postings_transfer_ext + postings_extflows)))
        else:
            new_entries.append(entry)

    # The transfer account does not have an Open entry, insert one. (This is
    # just us being pedantic about Beancount requirements, this will not change
    # the returns, but if someone looks at internalized entries it produces a
    # correct set of entries you can load cleanly).
    open_close_map = getters.get_account_open_close(new_entries)
    if transfer_account not in open_close_map:
        open_transfer_entry = data.Open(data.new_metadata("beancount.projects.returns", 0),
                                        new_entries[0].date,
                                        transfer_account, None, None)
        new_entries.insert(0, open_transfer_entry)

    return new_entries, replaced_entries
def compute_returns(entries, transfer_account,
                    accounts_value, accounts_intflows, accounts_internalize=None,
                    price_map=None,
                    date_begin=None, date_end=None):
    """Compute the returns of a portfolio of accounts.

    Args:
      entries: A list of directives that may affect the account.
      transfer_account: A string, the name of an account to use for internalizing entries
        which need to be split between internal and external flows. A good default value
        would be an equity account, 'Equity:Internalized' or something like that.
      accounts_value: A set of account name strings, the names of the asset accounts
        included in valuing the portfolio. This set is not mutated.
      accounts_intflows: A set of account name strings, the names of internal flow
        accounts (normally income and expenses) that aren't external flows.
      accounts_internalize: A set of account name strings used to force internalization.
        See internalize() for details.
      price_map: An instance of PriceMap as computed by prices.build_price_map(). If left
        to its default value of None, we derive the price_map from the entries themselves.
      date_begin: A datetime.date instance, the beginning date of the period to compute
        returns over.
      date_end: A datetime.date instance, the end date of the period to compute returns
        over.
    Returns:
      A triple of
        returns: A dict of currency -> float total returns.
        dates: A pair of (date_first, date_last) datetime.date instances.
        internalized_entries: A short list of the entries that were required to be split
          up in order to internalize their flow. (This is mostly returns to be used by
          tests, you can otherwise safely discard this.)
    """
    if not accounts_value:
        raise ValueError("Cannot calculate returns without assets accounts to value")
    if price_map is None:
        price_map = prices.build_price_map(entries)

    # Remove unrealized entries, if any are found. (Note that unrealized gains
    # only inserted at the end of the list of entries have no effect because
    # this module never creates a period after these. This may change in the future).
    entries = [entry
               for entry in entries
               if not (isinstance(entry, data.Transaction) and
                       entry.flag == flags.FLAG_UNREALIZED)]

    # Internalize entries with internal/external flows.
    entries, internalized_entries = internalize(
        entries, transfer_account,
        accounts_value, accounts_intflows, accounts_internalize)
    # Bug fix: rebind to a copy instead of mutating the caller's set in place
    # (the original called accounts_value.add(), a surprising side effect).
    accounts_value = set(accounts_value) | set([transfer_account])

    # Segment the entries, splitting at entries with external flow and computing
    # the balances before and after. This returns all such periods with the
    # balances at their beginning and end.
    periods, portfolio_entries = segment_periods(entries,
                                                 accounts_value, accounts_intflows,
                                                 date_begin, date_end)

    # From the period balances, compute the returns.
    logging.info("Calculating period returns.")
    logging.info("")
    all_returns = []
    for (period_begin, period_end, balance_begin, balance_end) in periods:
        period_returns, mktvalues = compute_period_returns(period_begin, period_end,
                                                           balance_begin, balance_end,
                                                           price_map)
        mktvalue_begin, mktvalue_end = mktvalues
        all_returns.append(period_returns)

        try:
            # Zero-length periods cannot be annualized; report empty returns.
            annual_returns = (annualize_returns(period_returns, period_begin, period_end)
                              if period_end != period_begin
                              else {})
        except OverflowError:
            # Very short periods can produce huge annualization exponents;
            # report them instead of crashing.
            annual_returns = 'OVERFLOW'

        logging.info("From %s to %s", period_begin, period_end)
        logging.info(" Begin %s => %s", balance_begin.units(), mktvalue_begin)
        logging.info(" End %s => %s", balance_end.units(), mktvalue_end)
        logging.info(" Returns %s", period_returns)
        logging.info(" Annualized %s", annual_returns)
        logging.info("")

    # Compute the piecewise returns. Note that we have to be careful to handle
    # all available currencies.
    currencies = set(currency
                     for returns in all_returns
                     for currency in returns.keys())
    total_returns = {}
    for currency in currencies:
        total_return = 1.
        for returns in all_returns:
            total_return *= returns.get(currency, 1.)
        total_returns[currency] = total_return

    date_first = periods[0][0]
    date_last = periods[-1][1]
    return total_returns, (date_first, date_last), internalized_entries
def find_matching(entries, acc_types,
                  assets_regexp, intflows_regexp, internalize_regexp=None):
    """Match entries and identify account groups.

    Args:
      entries: A list of directives.
      acc_types: An instance of account_types.AccountTypes (currently unused,
        kept for interface compatibility).
      assets_regexp: A regular expression string matching the names of asset
        accounts to value for the portfolio.
      intflows_regexp: A regular expression string matching the names of
        accounts considered internal flows (typically income and expenses).
      internalize_regexp: A regular expression string matching the names of
        accounts to force internalization of. See internalize() for details.
    Returns:
      A list of all entries with an account matching the given pattern, and a
      tuple of account sets: (accounts_value, accounts_intflows,
      accounts_extflows, accounts_internalize), the last being None when
      internalize_regexp was not provided.
    """
    is_asset = re.compile(assets_regexp).match
    is_intflow = re.compile(intflows_regexp).match
    is_internalized = (re.compile(internalize_regexp).match
                       if internalize_regexp
                       else None)

    accounts_value = set()
    accounts_intflows = set()
    accounts_extflows = set()
    accounts_internalize = set()
    matching_entries = []

    for entry in entries:
        if not isinstance(entry, data.Transaction):
            continue
        # Only transactions touching an asset (or force-internalized) account
        # are relevant to the portfolio.
        relevant = any(is_asset(posting.account) or
                       (is_internalized and is_internalized(posting.account))
                       for posting in entry.postings)
        if not relevant:
            continue
        matching_entries.append(entry)
        for posting in entry.postings:
            account_ = posting.account
            if is_asset(account_):
                accounts_value.add(account_)
            elif is_intflow(account_):
                accounts_intflows.add(account_)
            else:
                accounts_extflows.add(account_)
            if is_internalized and is_internalized(account_):
                accounts_internalize.add(account_)

    return (matching_entries, (accounts_value,
                               accounts_intflows,
                               accounts_extflows,
                               accounts_internalize or None))
def compute_returns_with_regexp(entries, options_map,
                                transfer_account,
                                assets_regexp, intflows_regexp, internalize_regexp=None,
                                date_begin=None, date_end=None):
    """Compute the returns of a portfolio of accounts defined by regexps.

    Args:
      entries: A list of directives.
      options_map: An options dict as produced by the loader.
      transfer_account: A string, the name of an account used to internalize
        entries which need splitting between internal and external flows.
      assets_regexp: Regexp string matching asset account names to value.
      intflows_regexp: Regexp string matching internal-flow account names
        (typically income and expenses accounts).
      internalize_regexp: Regexp string matching account names to force
        internalization of. See internalize() for details.
      date_begin: A datetime.date, start of the period to compute returns over.
      date_end: A datetime.date, end of the period to compute returns over.
    Returns:
      See compute_returns().
    """
    acc_types = options.get_account_types(options_map)
    price_map = prices.build_price_map(entries)

    # Fetch the matching entries and figure out account name groups.
    matching_entries, account_groups = find_matching(
        entries, acc_types, assets_regexp, intflows_regexp, internalize_regexp)
    (accounts_value, accounts_intflows,
     accounts_extflows, accounts_internalize) = account_groups

    def log_account_group(title, accounts):
        # Log a group header followed by its sorted member accounts.
        logging.info(title)
        for account_ in sorted(accounts):
            logging.info(' %s', account_)

    log_account_group('Asset accounts:', accounts_value)
    log_account_group('Internal flows:', accounts_intflows)
    log_account_group('External flows:', accounts_extflows)
    logging.info('')
    if accounts_internalize:
        log_account_group('Explicitly internalized accounts:', accounts_internalize)
        logging.info('')

    return compute_returns(entries, transfer_account,
                           accounts_value, accounts_intflows, accounts_internalize,
                           price_map,
                           date_begin, date_end)
def main():
    """Command-line entry point: load a ledger, compute portfolio returns for
    the accounts matched by the given regexps, and print total and annualized
    returns."""
    # Parse 'YYYY-MM-DD'-style strings into datetime.date values for argparse.
    parse_date = lambda s: parse_datetime(s).date()
    parser = argparse.ArgumentParser()
    parser.add_argument('filename', help='Ledger filename')
    parser.add_argument('assets_regexp', action='store',
                        help=("A regular expression string that matches names of asset "
                              "accounts to value for the portfolio."))
    parser.add_argument('intflows_regexp', action='store',
                        help=("A regular expression string that matches names of accounts "
                              "considered internal flows to the portfolio (typically "
                              "income and expenses accounts)."))
    parser.add_argument('--internalize_regexp', action='store',
                        help=("A regular expression string that matches names of internal "
                              "flow accounts to trigger an internalization."))
    parser.add_argument('--transfer-account', action='store',
                        default='Equity:Internalized',
                        help="Default name for subaccount to use for transfer account.")
    parser.add_argument('-v', '--verbose', action='store_true',
                        help="Output detailed processing information. Useful for debugging")
    parser.add_argument('--date-begin', '--begin-date', action='store', type=parse_date,
                        default=None,
                        help=("Beginning date of the period to compute returns over "
                              "(default is the first related directive)"))
    parser.add_argument('--date-end', '--end-date', action='store', type=parse_date,
                        default=None,
                        help=("End date of the period to compute returns over "
                              "(default is the last related directive)"))
    args = parser.parse_args()

    # Verbose mode lowers the log threshold to DEBUG.
    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO,
                        format='%(levelname)-8s: %(message)s')

    # Load the input file and build the price database.
    entries, errors, options_map = loader.load_file(args.filename, log_errors=logging.error)

    # Compute the returns.
    returns, (date_first, date_last), _ = compute_returns_with_regexp(
        entries, options_map,
        args.transfer_account,
        args.assets_regexp, args.intflows_regexp, args.internalize_regexp,
        date_begin=args.date_begin, date_end=args.date_end)

    # Annualize the returns.
    annual_returns = annualize_returns(returns, date_first, date_last)

    print('Total returns from {} to {}:'.format(date_first, date_last))
    for currency, return_ in sorted(returns.items()):
        print(' {}: {:.3%}'.format(currency, return_ - 1))

    print('Averaged annual returns from {} to {}:'.format(date_first, date_last))
    for currency, return_ in sorted(annual_returns.items()):
        print(' {}: {:.3%}'.format(currency, return_ - 1))


if __name__ == '__main__':
    main()
| gpl-2.0 |
andrew-plunk/docker-registry | depends/docker-registry-core/docker_registry/testing/mock_dict.py | 38 | 1431 | # -*- coding: utf-8 -*-
# Copyright (c) 2014 Docker.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Extend Mock class with dictionary behavior.
Call it as:
mocked_dict = MockDict()
mocked_dict.add_dict_methods()'''
import mock
MagicMock__init__ = mock.MagicMock.__init__
class MockDict(mock.MagicMock):
    """A MagicMock that can additionally behave like a dictionary.

    Usage:
        mocked_dict = MockDict()
        mocked_dict.add_dict_methods()
    """

    def __init__(self, *args, **kwargs):
        # Call the MagicMock initializer captured at import time, then attach
        # the real dict backing the mocked item access.
        MagicMock__init__(self, *args, **kwargs)
        self._mock_dict = {}

    @property
    def get_dict(self):
        """The real dict backing the mocked item access."""
        return self._mock_dict

    def add_dict_methods(self):
        """Routes the item-access dunder mocks to the backing dict."""
        def _get(key):
            return self._mock_dict[key]

        def _set(key, value):
            self._mock_dict[key] = value

        def _delete(key):
            del self._mock_dict[key]

        def _has(key):
            return key in self._mock_dict

        self.__getitem__.side_effect = _get
        self.__setitem__.side_effect = _set
        self.__delitem__.side_effect = _delete
        self.__contains__.side_effect = _has
| apache-2.0 |
qedsoftware/commcare-hq | corehq/ex-submodules/pillowtop/utils.py | 1 | 8673 | from __future__ import division
from collections import namedtuple
from datetime import datetime
import sys
import simplejson
from django.conf import settings
from dimagi.utils.chunked import chunked
from dimagi.utils.modules import to_function
from pillowtop.exceptions import PillowNotFoundError
from pillowtop.logger import pillow_logging
from pillowtop.dao.exceptions import DocumentMismatchError, DocumentNotFoundError
def _get_pillow_instance(full_class_str):
    """Resolves a dotted class path and returns a new instance of it.

    Raises ValueError when the path does not resolve to a class.
    """
    pillow_class = _import_class_or_function(full_class_str)
    if pillow_class is not None:
        return pillow_class()
    raise ValueError('No pillow class found for {}'.format(full_class_str))
def _import_class_or_function(full_class_str):
    """Resolves a dotted path string to a class/function via to_function.

    In DEBUG mode resolution failures raise; otherwise None is returned.
    """
    fail_hard = settings.DEBUG
    return to_function(full_class_str, failhard=fail_hard)
def get_all_pillow_classes():
    """Returns the pillow class for every configured pillow."""
    return [pillow_config.get_class()
            for pillow_config in get_all_pillow_configs()]
def get_all_pillow_instances():
    """Returns a new instance of every configured pillow."""
    return [pillow_config.get_instance()
            for pillow_config in get_all_pillow_configs()]
def get_couch_pillow_instances():
    """Returns instances of all pillows whose change feed is Couch-backed."""
    from pillowtop.feed.couch import CouchChangeFeed
    couch_pillows = []
    for pillow in get_all_pillow_instances():
        if isinstance(pillow.get_change_feed(), CouchChangeFeed):
            couch_pillows.append(pillow)
    return couch_pillows
def get_all_pillow_configs():
    """Yields a PillowConfig for every entry in settings.PILLOWTOPS."""
    pillow_settings = getattr(settings, 'PILLOWTOPS', {})
    return get_pillow_configs_from_settings_dict(pillow_settings)
def get_pillow_configs_from_settings_dict(pillow_settings_dict):
    """Generator over PillowConfig objects for a settings mapping.

    The pillow_settings_dict is expected to be a dict mapping groups to
    lists of pillow configs.
    """
    for section, pillows_in_section in pillow_settings_dict.items():
        for pillow_setting in pillows_in_section:
            yield get_pillow_config_from_setting(section, pillow_setting)
class PillowConfig(namedtuple('PillowConfig',
                              ['section', 'name', 'class_name', 'instance_generator'])):
    """Helper object for getting pillow classes/instances from settings."""

    def get_class(self):
        # Lazily resolve the dotted class path.
        return _import_class_or_function(self.class_name)

    def get_instance(self):
        if not self.instance_generator:
            return _get_pillow_instance(self.class_name)
        # An explicit factory was configured; call it with the pillow name.
        factory = _import_class_or_function(self.instance_generator)
        return factory(self.name)
def get_pillow_config_from_setting(section, pillow_config_string_or_dict):
    """Builds a PillowConfig from either a dotted-path string or a dict."""
    setting = pillow_config_string_or_dict
    if isinstance(setting, basestring):
        # Bare string: use the final dotted component as the pillow name.
        return PillowConfig(
            section,
            setting.rsplit('.', 1)[1],
            setting,
            None,
        )
    # Dict form must at least name the pillow class.
    assert 'class' in setting
    class_name = setting['class']
    return PillowConfig(
        section,
        setting.get('name', class_name),
        class_name,
        setting.get('instance', None),
    )
def get_pillow_by_name(pillow_class_name, instantiate=True):
    """Looks up a pillow by name and returns an instance of it (or just
    its class when instantiate=False)."""
    config = get_pillow_config_by_name(pillow_class_name)
    if instantiate:
        return config.get_instance()
    return config.get_class()
def get_pillow_config_by_name(pillow_name):
    """Returns the PillowConfig whose name matches pillow_name.

    Raises PillowNotFoundError when no configured pillow has that name.
    """
    for config in get_all_pillow_configs():
        if config.name == pillow_name:
            return config
    raise PillowNotFoundError(u'No pillow found with name {}'.format(pillow_name))
def force_seq_int(seq):
    """Best-effort conversion of a change-feed sequence id to an int.

    Returns None for empty/None sequences and for dict (multi-topic) ones;
    strings are parsed up to the first '-'; ints pass through unchanged.
    """
    if seq is None or seq == '':
        return None
    if isinstance(seq, dict):
        # multi-topic checkpoints don't support a single sequence id
        return None
    if isinstance(seq, basestring):
        return int(seq.split('-')[0])
    assert isinstance(seq, int)
    return seq
def get_all_pillows_json():
    """Returns a JSON-able status dict for every configured pillow."""
    return [get_pillow_json(pillow_config)
            for pillow_config in get_all_pillow_configs()]
def get_pillow_json(pillow_config):
    """Builds a status dict (checkpoint sequences, feed offsets, staleness)
    for a single pillow described by a PillowConfig."""
    assert isinstance(pillow_config, PillowConfig)

    pillow = pillow_config.get_instance()
    checkpoint = pillow.get_checkpoint()
    offsets = pillow.get_change_feed().get_current_offsets()

    timestamp = checkpoint.timestamp
    if not timestamp:
        time_since_last = ''
        hours_since_last = None
    else:
        delta = datetime.utcnow() - timestamp
        hours_since_last = delta.total_seconds() // 3600
        time_since_last = str(delta)
        # Drop the microseconds portion, if present.
        dot_index = time_since_last.find('.')
        if dot_index != -1:
            time_since_last = time_since_last[:dot_index]

    def _couch_seq_to_int(chkpt, seq):
        # JSON-format sequences are opaque; others are coerced to ints.
        return force_seq_int(seq) if chkpt.sequence_format != 'json' else seq

    return {
        'name': pillow_config.name,
        'seq_format': checkpoint.sequence_format,
        'seq': _couch_seq_to_int(checkpoint, checkpoint.wrapped_sequence),
        'old_seq': _couch_seq_to_int(checkpoint, checkpoint.old_sequence) or 0,
        'offsets': offsets,
        'time_since_last': time_since_last,
        'hours_since_last': hours_since_last
    }
# A (change, exception) pair recorded when processing a change fails.
ChangeError = namedtuple('ChangeError', ['change', 'exception'])
class ErrorCollector(object):
    """Accumulates ChangeError records produced during bulk processing."""

    def __init__(self):
        # Errors are kept in the order they were reported.
        self.errors = []

    def add_error(self, error):
        """Records one error."""
        self.errors.append(error)
def build_bulk_payload(index_info, changes, doc_transform=None, error_collector=None):
    """Converts a list of changes into an Elasticsearch bulk-API action list.

    Deleted changes with an id become delete actions; live changes become an
    index action followed by the (optionally transformed) document. Failures
    fetching/transforming a document are re-raised, or recorded on
    error_collector as ChangeError when one is provided.
    """
    if doc_transform is None:
        doc_transform = lambda doc: doc
    payload = []
    for change in changes:
        if change.deleted:
            # Deletions without an id cannot be addressed; they are skipped.
            if change.id:
                payload.append({
                    "delete": {
                        "_index": index_info.index,
                        "_type": index_info.type,
                        "_id": change.id
                    }
                })
            continue
        try:
            doc = doc_transform(change.get_document())
            action = {
                "index": {
                    "_index": index_info.index,
                    "_type": index_info.type,
                    "_id": doc['_id']
                }
            }
        except Exception as e:
            if error_collector is None:
                raise
            error_collector.add_error(ChangeError(change, e))
        else:
            payload.append(action)
            payload.append(doc)
    return payload
def prepare_bulk_payloads(bulk_changes, max_size, chunk_size=100):
    """Serializes bulk actions into newline-delimited payload strings, each
    kept under max_size bytes (per sys.getsizeof of the string)."""
    payloads = ['']
    for bulk_chunk in chunked(bulk_changes, chunk_size):
        serialized_chunk = '\n'.join(map(simplejson.dumps, bulk_chunk)) + '\n'
        combined = payloads[-1] + serialized_chunk
        if sys.getsizeof(combined) > max_size:
            # Appending would exceed the cap: start a fresh payload.
            payloads.append(serialized_chunk)
        else:
            payloads[-1] = combined
    # Drop the leading empty payload if nothing was ever appended to it.
    return filter(None, payloads)
def ensure_matched_revisions(change):
    """Ensures the document fetched for a change matches the revision the
    change was pushed to kafka at.

    See http://manage.dimagi.com/default.asp?237983 for more details.

    Raises DocumentMismatchError when the fetched document's revision is
    older than (or incomparable to) the one recorded in the change metadata.
    """
    fetched_document = change.get_document()
    metadata = change.metadata
    # Nothing to compare unless both sides carry a revision.
    if not (metadata and metadata.document_rev is not None):
        return
    if not (fetched_document and '_rev' in fetched_document):
        return

    doc_rev = fetched_document['_rev']
    change_rev = metadata.document_rev
    if doc_rev == change_rev:
        return

    fetched_rev = _convert_rev_to_int(doc_rev)
    stored_rev = _convert_rev_to_int(change_rev)
    if fetched_rev < stored_rev or stored_rev == -1:
        message = u"Mismatched revs for {}: Cloudant rev {} vs. Changes feed rev {}".format(
            change.id,
            doc_rev,
            change_rev
        )
        pillow_logging.warning(message)
        raise DocumentMismatchError(message)
def _convert_rev_to_int(rev):
try:
return int(rev.split('-')[0])
except (ValueError, AttributeError):
return -1
def ensure_document_exists(change):
    """Ensures the document recorded in Kafka exists and is properly returned.

    Raises DocumentNotFoundError (forcing a retry) when the change's document
    cannot be fetched.
    """
    if change.get_document() is not None:
        return
    pillow_logging.warning("Unable to get document from change: {}".format(change))
    raise DocumentNotFoundError()  # force a retry
| bsd-3-clause |
cristiana214/cristianachavez214-cristianachavez | python/gdata/samples/oauth/oauth_on_appengine/appengine_utilities/rotmodel.py | 131 | 2149 | """
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from google.appengine.ext import db
class ROTModel(db.Model):
    """A db.Model whose put() retries on datastore timeouts.

    put() is attempted up to 3 times when db.Timeout is encountered, to
    maximize the chance the data makes it into the datastore. If all
    attempts fail, db.Timeout is raised and the calling application will
    need to handle it.
    """

    def put(self, *args, **kwargs):
        """Saves the entity, retrying up to 3 times on db.Timeout.

        Bug fix: the original override took no arguments and therefore
        silently dropped anything passed to put() (e.g. a config/rpc
        argument); all arguments are now forwarded to db.Model.put.

        Returns:
            The result of db.Model.put (the entity's key).

        Raises:
            db.Timeout: if all 3 attempts time out.
        """
        for _attempt in range(3):
            try:
                return db.Model.put(self, *args, **kwargs)
            except db.Timeout:
                # Swallow and retry; a fresh Timeout is raised below after
                # the final attempt, matching the original behavior.
                pass
        raise db.Timeout()
| apache-2.0 |
joone/chromium-crosswalk | tools/copyright_scanner/copyright_scanner.py | 10 | 16713 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utilities for scanning source files to determine code authorship.
"""
import itertools
def ForwardSlashesToOsPathSeps(input_api, path):
    """Converts forward slashes ('/') in the input path to OS-specific
    path separators. Used when the paths come from outside and are using
    UNIX path separators. Only works for relative paths!

    Args:
      input_api: InputAPI, as in presubmit scripts.
      path: The path to convert.
    Returns:
      Converted path.
    """
    components = path.split('/')
    return input_api.os_path.join(*components)
def FindFiles(input_api, root_dir, start_paths_list, excluded_dirs_list):
    """Similar to UNIX utility find(1), searches for files in the directories.
    Automatically leaves out only source code files and excludes third_party
    directories.
    Args:
      input_api: InputAPI, as in presubmit scripts.
      root_dir: The root directory, to which all other paths are relative.
      start_paths_list: The list of paths to start search from. Each path can
        be a file or a directory.
      excluded_dirs_list: The list of directories to skip.
    Returns:
      The list of source code files found, relative to |root_dir|.
    """
    # Caller-provided third_party exclusions are redundant: a blanket
    # 'third_party' exclusion is appended just below.
    excluded_dirs_list = [d for d in excluded_dirs_list if not 'third_party' in d]
    # Using a common pattern for third-parties makes the ignore regexp shorter
    excluded_dirs_list.append('third_party')

    path_join = input_api.os_path.join
    # Hard-coded exclusions: VCS metadata, build output, test data, and
    # directories that do not exist in the open-source snapshot.
    EXTRA_EXCLUDED_DIRS = [
        # VCS dirs
        path_join('.git'),
        path_join('.svn'),
        # Build output
        path_join('out', 'Debug'),
        path_join('out', 'Release'),
        # 'Copyright' appears in license agreements
        path_join('chrome', 'app', 'resources'),
        # Quickoffice js files from internal src used on buildbots.
        # crbug.com/350472.
        path_join('chrome', 'browser', 'resources', 'chromeos', 'quickoffice'),
        # This is a test output directory
        path_join('chrome', 'tools', 'test', 'reference_build'),
        # blink style copy right headers.
        path_join('content', 'shell', 'renderer', 'test_runner'),
        # blink style copy right headers.
        path_join('content', 'shell', 'tools', 'plugin'),
        # This is tests directory, doesn't exist in the snapshot
        path_join('content', 'test', 'data'),
        # This is a tests directory that doesn't exist in the shipped product.
        path_join('gin', 'test'),
        # This is a test output directory
        path_join('data', 'dom_perf'),
        # This is a tests directory that doesn't exist in the shipped product.
        path_join('tools', 'perf', 'page_sets'),
        path_join('tools', 'perf', 'page_sets', 'tough_animation_cases'),
        # Histogram tools, doesn't exist in the snapshot
        path_join('tools', 'histograms'),
        # Swarming tools, doesn't exist in the snapshot
        path_join('tools', 'swarming_client'),
        # Ignore sysroots.
        path_join('build', 'linux', 'debian_wheezy_amd64-sysroot'),
        path_join('build', 'linux', 'debian_wheezy_arm-sysroot'),
        path_join('build', 'linux', 'debian_wheezy_mips-sysroot'),
        path_join('build', 'linux', 'debian_wheezy_i386-sysroot'),
        # Old location (TODO(sbc): Remove this once it no longer exists on any bots)
        path_join('chrome', 'installer', 'linux', 'debian_wheezy_arm-sysroot'),
        # Data is not part of open source chromium, but are included on some bots.
        path_join('data'),
        # This is not part of open source chromium, but are included on some bots.
        path_join('skia', 'tools', 'clusterfuzz-data'),
        # Not shipped, only relates to Chrome for Android, but not to WebView
        path_join('clank'),
        # Internal-only repository.
        path_join('remoting', 'android', 'internal'),
    ]
    excluded_dirs_list.extend(EXTRA_EXCLUDED_DIRS)

    # Surround the directory names with OS path separators.
    dirs_blacklist = [path_join('.', d, '')[1:] for d in excluded_dirs_list if d]
    def IsBlacklistedDir(d):
        # Substring match: any blacklisted dir (already wrapped in path
        # separators) occurring anywhere in the path disqualifies it.
        for item in dirs_blacklist:
            if item in d:
                return True
        return False

    # Extensions of files considered source code for copyright scanning.
    files_whitelist_re = input_api.re.compile(
        r'\.(asm|c(c|pp|xx)?|h(h|pp|xx)?|p(l|m)|xs|sh|php|py(|x)'
        '|rb|idl|java|el|sc(i|e)|cs|pas|inc|js|pac|html|dtd|xsl|mod|mm?'
        '|tex|mli?)$')
    files = []

    base_path_len = len(root_dir)
    for path in start_paths_list:
        full_path = path_join(root_dir, path)
        if input_api.os_path.isfile(full_path):
            if files_whitelist_re.search(path) and \
                    not IsBlacklistedDir(full_path[base_path_len:]):  # Keep '/' prefix.
                files.append(path)
        else:
            for dirpath, dirnames, filenames in input_api.os_walk(full_path):
                # Remove excluded subdirs for faster scanning.
                for item in dirnames[:]:
                    if IsBlacklistedDir(
                            path_join(dirpath, item)[base_path_len + 1:]):
                        dirnames.remove(item)
                for filename in filenames:
                    filepath = \
                        path_join(dirpath, filename)[base_path_len + 1:]
                    if files_whitelist_re.search(filepath) and \
                            not IsBlacklistedDir(filepath):
                        files.append(filepath)
    return files
class _GeneratedFilesDetector(object):
GENERATED_FILE = 'GENERATED FILE'
NO_COPYRIGHT = '*No copyright*'
def __init__(self, input_api):
self.python_multiline_string_double_re = \
input_api.re.compile(r'"""[^"]*(?:"""|$)', flags=input_api.re.MULTILINE)
self.python_multiline_string_single_re = \
input_api.re.compile(r"'''[^']*(?:'''|$)", flags=input_api.re.MULTILINE)
self.automatically_generated_re = input_api.re.compile(
r'(All changes made in this file will be lost'
'|DO NOT (EDIT|delete this file)'
'|Generated (at|automatically|data)'
'|Automatically generated'
'|\Wgenerated\s+(?:\w+\s+)*file\W)', flags=input_api.re.IGNORECASE)
def IsGeneratedFile(self, header):
header = header.upper()
if '"""' in header:
header = self.python_multiline_string_double_re.sub('', header)
if "'''" in header:
header = self.python_multiline_string_single_re.sub('', header)
# First do simple strings lookup to save time.
if 'ALL CHANGES MADE IN THIS FILE WILL BE LOST' in header:
return True
if 'DO NOT EDIT' in header or 'DO NOT DELETE' in header or \
'GENERATED' in header:
return self.automatically_generated_re.search(header)
return False
class _CopyrightsScanner(object):
    """Stateful line-by-line scanner that extracts copyright attribution
    strings, filtering out '(c)' used as a list item in C++ comments."""

    @staticmethod
    def StaticInit(input_api):
        # Pre-compiles regexps shared by all instances; must be called once
        # before any instance's MatchLine is used.
        # Matches a double-quoted C string literal (with escapes).
        _CopyrightsScanner._c_comment_re = \
            input_api.re.compile(r'''"[^"\\]*(?:\\.[^"\\]*)*"''')
        # Tokens that indicate a copyright statement ('\xc2\xa9' is UTF-8 ©).
        _CopyrightsScanner._copyright_indicator = \
            r'(?:copyright|copr\.|\xc2\xa9|\(c\))'
        # An indicator followed by the attribution text to capture.
        _CopyrightsScanner._full_copyright_indicator_re = input_api.re.compile(
            r'(?:\W|^)' + _CopyrightsScanner._copyright_indicator + \
            r'(?::\s*|\s+)(\w.*)$', input_api.re.IGNORECASE)
        # Phrases after an indicator that mean it is NOT an attribution
        # (e.g. "copyright notice", "copyright information").
        _CopyrightsScanner._copyright_disindicator_re = input_api.re.compile(
            r'\s*\b(?:info(?:rmation)?|notice|and|or)\b', input_api.re.IGNORECASE)

    def __init__(self, input_api):
        # '(a)'/'(b)'/'(c)' list items count as a list only when they appear
        # within this many lines of each other.
        self.max_line_numbers_proximity = 3
        # Sentinels far apart so no real line is "close" to them initially.
        self.last_a_item_line_number = -200
        self.last_b_item_line_number = -100
        self.re = input_api.re

    def _CloseLineNumbers(self, a, b):
        # True when line a follows line b within the proximity window.
        return 0 <= a - b <= self.max_line_numbers_proximity

    def MatchLine(self, line_number, line):
        # Strip string literals so their contents are not mistaken for code
        # structure (e.g. '//' inside a string).
        if '"' in line:
            line = _CopyrightsScanner._c_comment_re.sub('', line)
        upcase_line = line.upper()
        # Record '(a)' and '(b)' last occurences in C++ comments.
        # This is to filter out '(c)' used as a list item inside C++ comments.
        # E.g. "// blah-blah (a) blah\n// blah-blah (b) and (c) blah"
        cpp_comment_idx = upcase_line.find('//')
        if cpp_comment_idx != -1:
            if upcase_line.find('(A)') > cpp_comment_idx:
                self.last_a_item_line_number = line_number
            if upcase_line.find('(B)') > cpp_comment_idx:
                self.last_b_item_line_number = line_number
        # Fast bailout, uses the same patterns as _copyright_indicator regexp.
        if not 'COPYRIGHT' in upcase_line and not 'COPR.' in upcase_line \
                and not '\xc2\xa9' in upcase_line:
            c_item_index = upcase_line.find('(C)')
            if c_item_index == -1:
                return None
            # '(c)' preceded by nearby '(a)' and '(b)' list items in comments
            # is a list marker, not a copyright sign.
            if c_item_index > cpp_comment_idx and \
                    self._CloseLineNumbers(line_number,
                                           self.last_b_item_line_number) and \
                    self._CloseLineNumbers(self.last_b_item_line_number,
                                           self.last_a_item_line_number):
                return None
        copyr = None
        m = _CopyrightsScanner._full_copyright_indicator_re.search(line)
        if m and \
                not _CopyrightsScanner._copyright_disindicator_re.match(m.group(1)):
            copyr = m.group(0)
            # Prettify the authorship string.
            copyr = self.re.sub(r'([,.])?\s*$/', '', copyr)
            copyr = self.re.sub(
                _CopyrightsScanner._copyright_indicator, '', copyr, \
                flags=self.re.IGNORECASE)
            copyr = self.re.sub(r'^\s+', '', copyr)
            copyr = self.re.sub(r'\s{2,}', ' ', copyr)
            copyr = self.re.sub(r'\\@', '@', copyr)
        return copyr
def FindCopyrights(input_api, root_dir, files_to_scan):
    """Determines code authorship, and finds generated files.

    Args:
      input_api: InputAPI, as in presubmit scripts.
      root_dir: The root directory, to which all other paths are relative.
      files_to_scan: The list of file names to scan.
    Returns:
      The list of copyrights associated with each of the files given. If a
      file is generated, its list is the single entry 'GENERATED FILE'; if a
      file has no copyright info, its list is the single entry
      '*No copyright*'.
    """
    generated_files_detector = _GeneratedFilesDetector(input_api)
    _CopyrightsScanner.StaticInit(input_api)
    copyrights = []
    for file_name in files_to_scan:
        contents = input_api.ReadFile(
            input_api.os_path.join(root_dir, file_name), 'r')
        lines = contents.split('\n')
        scanner = _CopyrightsScanner(input_api)
        file_copyrights = []
        for line_number, line in enumerate(lines, start=1):
            match = scanner.MatchLine(line_number, line)
            if match:
                file_copyrights.append(match)
        # Only the first 25 lines are checked for generated-file markers.
        header = '\n'.join(lines[:25])
        if generated_files_detector.IsGeneratedFile(header):
            copyrights.append([_GeneratedFilesDetector.GENERATED_FILE])
        elif file_copyrights:
            copyrights.append(file_copyrights)
        else:
            copyrights.append([_GeneratedFilesDetector.NO_COPYRIGHT])
    return copyrights
def FindCopyrightViolations(input_api, root_dir, files_to_scan):
    """Looks for files that do not belong exclusively to the Chromium Authors.

    Args:
      input_api: InputAPI, as in presubmit scripts.
      root_dir: The root directory, to which all other paths are relative.
      files_to_scan: The list of file names to scan.
    Returns:
      The list of file names that contain non-Chromium copyrights.
    """
    all_copyrights = FindCopyrights(input_api, root_dir, files_to_scan)
    allowed_copyrights_re = input_api.re.compile(
        r'^(?:20[0-9][0-9](?:-20[0-9][0-9])? The Chromium Authors\. '
        'All rights reserved.*)$')
    offending_files = []
    for file_name, file_copyrights in itertools.izip(files_to_scan, all_copyrights):
        # Generated files and files with no copyright at all are not
        # violations.
        if file_copyrights[0] in (_GeneratedFilesDetector.GENERATED_FILE,
                                  _GeneratedFilesDetector.NO_COPYRIGHT):
            continue
        if any(not allowed_copyrights_re.match(c) for c in file_copyrights):
            offending_files.append(input_api.os_path.normpath(file_name))
    return offending_files
def _GetWhitelistFileName(input_api):
return input_api.os_path.join(
'tools', 'copyright_scanner', 'third_party_files_whitelist.txt')
def _ProcessWhitelistedFilesList(input_api, lines):
  """Parses whitelist lines into OS-native file paths.

  Blank lines and lines that start with '#' or whitespace yield no entry;
  only the leading non-comment, non-whitespace token of a line is used.
  """
  parsed_paths = []
  for raw_line in lines:
    token_match = input_api.re.match(r'([^#\s]+)', raw_line)
    if token_match is None:
      continue
    parsed_paths.append(
        ForwardSlashesToOsPathSeps(input_api, token_match.group(1)))
  return parsed_paths
def LoadWhitelistedFilesList(input_api):
  """Loads and parses the 3rd party code whitelist file.
  input_api: InputAPI of presubmit scripts.
  Returns:
    The list of files.
  """
  # The whitelist path is repo-relative; anchor it at the repository root.
  full_file_name = input_api.os_path.join(
      input_api.change.RepositoryRoot(), _GetWhitelistFileName(input_api))
  file_data = input_api.ReadFile(full_file_name, 'rb')
  return _ProcessWhitelistedFilesList(input_api, file_data.splitlines())
def AnalyzeScanResults(input_api, whitelisted_files, offending_files):
  """Compares whitelist contents with the results of file scanning.
  input_api: InputAPI of presubmit scripts.
  whitelisted_files: Whitelisted files list.
  offending_files: Files that contain 3rd party code.
  Returns:
    A triplet of "unknown", "missing", and "stale" file lists.
    "Unknown" are files that contain 3rd party code but not whitelisted.
    "Missing" are files that are whitelisted but doesn't really exist.
    "Stale" are files that are whitelisted unnecessarily.
  """
  whitelisted = set(whitelisted_files)
  offending = set(offending_files)
  repo_root = input_api.change.RepositoryRoot()
  # Preserve the whitelist's ordering for the "missing" report.
  missing = []
  for path in whitelisted_files:
    if not input_api.os_path.isfile(input_api.os_path.join(repo_root, path)):
      missing.append(path)
  unknown = offending - whitelisted
  stale = whitelisted - offending - set(missing)
  return (list(unknown), missing, list(stale))
def _GetDeletedContents(affected_file):
  """Returns a list of all deleted lines.
  AffectedFile class from presubmit_support is lacking this functionality.
  """
  # A deleted line starts with a single '-'; '--' marks the old-file header.
  diff_lines = affected_file.GenerateScmDiff().splitlines()
  return [text[1:] for text in diff_lines
          if text.startswith('-') and not text.startswith('--')]
def _DoScanAtPresubmit(input_api, whitelisted_files, files_to_check):
  """Scans the given files for copyright violations and compares the result
  against the whitelist; returns the (unknown, missing, stale) triplet."""
  # We pass empty 'known third-party' dirs list here. Since this is a patch
  # for the Chromium's src tree, it must contain properly licensed Chromium
  # code. Any third-party code must be put into a directory named 'third_party',
  # and such dirs are automatically excluded by FindFiles.
  files_to_scan = FindFiles(
      input_api, input_api.change.RepositoryRoot(), files_to_check, [])
  offending_files = FindCopyrightViolations(
      input_api, input_api.change.RepositoryRoot(), files_to_scan)
  return AnalyzeScanResults(
      input_api, whitelisted_files, offending_files)
def ScanAtPresubmit(input_api, output_api):
  """Invoked at change presubmit time. Verifies that updated non third-party
  code doesn't contain external copyrighted code.
  input_api: InputAPI of presubmit scripts.
  output_api: OutputAPI of presubmit scripts.
  """
  files_to_check = set([])
  deleted_files = set([])
  whitelist_contents_changed = False
  for f in input_api.AffectedFiles():
    if f.LocalPath() == _GetWhitelistFileName(input_api):
      # The whitelist file itself was edited: remember entries removed from
      # it so they can be re-validated below.
      whitelist_contents_changed = True
      deleted_files |= set(_ProcessWhitelistedFilesList(
          input_api, _GetDeletedContents(f)))
      continue
    if f.Action() != 'D':
      files_to_check.add(f.LocalPath())
    else:
      deleted_files.add(f.LocalPath())
  whitelisted_files = set(LoadWhitelistedFilesList(input_api))
  if not whitelist_contents_changed:
    # Only validate the whitelist entries touched by this change.
    whitelisted_files &= files_to_check | deleted_files
  else:
    # Need to re-check the entire contents of the whitelist file.
    # Also add files removed from the whitelist. If the file has indeed been
    # deleted, the scanner will not complain.
    files_to_check |= whitelisted_files | deleted_files
  (unknown_files, missing_files, stale_files) = _DoScanAtPresubmit(
      input_api, list(whitelisted_files), list(files_to_check))
  results = []
  if unknown_files:
    results.append(output_api.PresubmitError(
        'The following files contain a third-party license but are not in ' \
        'a listed third-party directory and are not whitelisted. You must ' \
        'add the following files to the whitelist file %s\n' \
        '(Note that if the code you are adding does not actually contain ' \
        'any third-party code, it may contain the word "copyright", which ' \
        'should be masked out, e.g. by writing it as "copy-right"):' \
        '' % _GetWhitelistFileName(input_api),
        sorted(unknown_files)))
  if missing_files:
    results.append(output_api.PresubmitPromptWarning(
        'The following files are whitelisted in %s, ' \
        'but do not exist or not files:' % _GetWhitelistFileName(input_api),
        sorted(missing_files)))
  if stale_files:
    results.append(output_api.PresubmitPromptWarning(
        'The following files are whitelisted unnecessarily. You must ' \
        'remove the following files from the whitelist file ' \
        '%s:' % _GetWhitelistFileName(input_api),
        sorted(stale_files)))
  return results
| bsd-3-clause |
temasek/android_external_chromium_org | chrome/common/extensions/docs/server2/host_file_system_provider_test.py | 23 | 1704 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from copy import deepcopy
import unittest
from extensions_paths import API
from file_system import FileNotFoundError
from host_file_system_provider import HostFileSystemProvider
from object_store_creator import ObjectStoreCreator
from test_data.canned_data import CANNED_API_FILE_SYSTEM_DATA
from test_file_system import TestFileSystem
class HostFileSystemProviderTest(unittest.TestCase):
  def setUp(self):
    self._idle_path = '%s/idle.json' % API
    # Deep copy so each test can mutate the canned data without affecting
    # other tests.
    self._canned_data = deepcopy(CANNED_API_FILE_SYSTEM_DATA)
  def _constructor_for_test(self, branch, **optargs):
    # Stand-in constructor that serves the canned data for any branch.
    return TestFileSystem(self._canned_data[branch])
  def testWithCaching(self):
    creator = HostFileSystemProvider(
        ObjectStoreCreator.ForTest(),
        constructor_for_test=self._constructor_for_test)
    fs = creator.GetBranch('1500')
    first_read = fs.ReadSingle(self._idle_path).Get()
    # Mutate the underlying data; the second read must still return the
    # originally cached value.
    self._canned_data['1500']['chrome']['common']['extensions'].get('api'
        )['idle.json'] = 'blah blah blah'
    second_read = fs.ReadSingle(self._idle_path).Get()
    self.assertEqual(first_read, second_read)
  def testWithOffline(self):
    creator = HostFileSystemProvider(
        ObjectStoreCreator.ForTest(),
        offline=True,
        constructor_for_test=self._constructor_for_test)
    fs = creator.GetBranch('1500')
    # Offline file system should raise a FileNotFoundError if read is attempted.
    self.assertRaises(FileNotFoundError, fs.ReadSingle(self._idle_path).Get)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
  unittest.main()
| bsd-3-clause |
jennyzhang0215/incubator-mxnet | python/mxnet/module/python_module.py | 38 | 14791 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=too-many-instance-attributes, too-many-arguments
"""Provide some handy classes for user to implement a simple computation module
in Python easily.
"""
import logging
from .base_module import BaseModule
from ..initializer import Uniform
from .. import ndarray as nd
class PythonModule(BaseModule):
    """A convenient module class that implements many of the module APIs as
    empty functions.
    Parameters
    ----------
    data_names : list of str
        Names of the data expected by the module.
    label_names : list of str
        Names of the labels expected by the module. Could be ``None`` if the
        module does not need labels.
    output_names : list of str
        Names of the outputs.
    """
    def __init__(self, data_names, label_names, output_names, logger=logging):
        super(PythonModule, self).__init__(logger=logger)
        # Normalize tuples to lists so the list comparisons done in `bind`
        # against names extracted from shape pairs behave as expected.
        if isinstance(data_names, tuple):
            data_names = list(data_names)
        if isinstance(label_names, tuple):
            label_names = list(label_names)
        self._data_names = data_names
        self._label_names = label_names
        self._output_names = output_names
        # Shape information is unknown until `bind` is called.
        self._data_shapes = None
        self._label_shapes = None
        self._output_shapes = None
    ################################################################################
    # Symbol information
    ################################################################################
    @property
    def data_names(self):
        """A list of names for data required by this module."""
        return self._data_names
    @property
    def output_names(self):
        """A list of names for the outputs of this module."""
        return self._output_names
    ################################################################################
    # Input/Output information
    ################################################################################
    @property
    def data_shapes(self):
        """A list of (name, shape) pairs specifying the data inputs to this module."""
        return self._data_shapes
    @property
    def label_shapes(self):
        """A list of (name, shape) pairs specifying the label inputs to this module.
        If this module does not accept labels -- either it is a module without loss
        function, or it is not bound for training, then this should return an empty
        list ``[]``.
        """
        return self._label_shapes
    @property
    def output_shapes(self):
        """A list of (name, shape) pairs specifying the outputs of this module."""
        return self._output_shapes
    ################################################################################
    # Parameters of a module
    ################################################################################
    def get_params(self):
        """Gets parameters, those are potentially copies of the the actual parameters used
        to do computation on the device. Subclass should override this method if contains
        parameters.
        Returns
        -------
        ``({}, {})``, a pair of empty dict.
        """
        return (dict(), dict())
    def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        """Initializes the parameters and auxiliary states. By default this function
        does nothing. Subclass should override this method if contains parameters.
        Parameters
        ----------
        initializer : Initializer
            Called to initialize parameters if needed.
        arg_params : dict
            If not ``None``, should be a dictionary of existing `arg_params`. Initialization
            will be copied from that.
        aux_params : dict
            If not ``None``, should be a dictionary of existing `aux_params`. Initialization
            will be copied from that.
        allow_missing : bool
            If ``True``, params could contain missing values, and the initializer will be
            called to fill those missing params.
        force_init : bool
            If ``True``, will force re-initialize even if already initialized.
        allow_extra : boolean, optional
            Whether allow extra parameters that are not needed by symbol.
            If this is True, no error will be thrown when arg_params or aux_params
            contain extra parameters that is not needed by the executor.
        """
        pass
    def update(self):
        """Updates parameters according to the installed optimizer and the gradients computed
        in the previous forward-backward batch. Currently we do nothing here. Subclass should
        override this method if contains parameters.
        """
        pass
    def update_metric(self, eval_metric, labels):
        """Evaluates and accumulates evaluation metric on outputs of the last forward computation.
        Subclass should override this method if needed.
        Parameters
        ----------
        eval_metric : EvalMetric
        labels : list of NDArray
            Typically ``data_batch.label``.
        """
        if self._label_shapes is None:
            # since we do not need labels, we are probably not a module with a loss
            # function or predictions, so just ignore this call
            return
        # by default we expect our outputs are some scores that could be evaluated
        eval_metric.update(labels, self.get_outputs())
    ################################################################################
    # module setup
    ################################################################################
    def bind(self, data_shapes, label_shapes=None, for_training=True,
             inputs_need_grad=False, force_rebind=False, shared_module=None,
             grad_req='write'):
        """Binds the symbols to construct executors. This is necessary before one
        can perform computation with the module.
        Parameters
        ----------
        data_shapes : list of (str, tuple)
            Typically is ``data_iter.provide_data``.
        label_shapes : list of (str, tuple)
            Typically is ``data_iter.provide_label``.
        for_training : bool
            Default is ``True``. Whether the executors should be bind for training.
        inputs_need_grad : bool
            Default is ``False``. Whether the gradients to the input data need to be computed.
            Typically this is not needed. But this might be needed when implementing composition
            of modules.
        force_rebind : bool
            Default is ``False``. This function does nothing if the executors are already
            bound. But with this ``True``, the executors will be forced to rebind.
        shared_module : Module
            Default is ``None``. This is used in bucketing. When not ``None``, the shared module
            essentially corresponds to a different bucket -- a module with different symbol
            but with the same sets of parameters (e.g. unrolled RNNs with different lengths).
        grad_req : str, list of str, dict of str to str
            Requirement for gradient accumulation. Can be 'write', 'add', or 'null'
            (default to 'write').
            Can be specified globally (str) or for each argument (list, dict).
        """
        if self.binded and not force_rebind:
            self.logger.warning('Already bound, ignoring bind()')
            return
        assert grad_req == 'write', "Python module only support write gradient"
        self.for_training = for_training
        self.inputs_need_grad = inputs_need_grad
        # The names embedded in the provided shapes must match the names
        # declared at construction time, in the same order.
        assert len(data_shapes) == len(self._data_names)
        assert [x[0] for x in data_shapes] == self._data_names
        self._data_shapes = data_shapes
        self._label_shapes = label_shapes
        if label_shapes is not None:
            assert self._label_names is not None
            assert len(self._label_names) == len(label_shapes)
            assert [x[0] for x in label_shapes] == self._label_names
        # Output shapes can only be derived once input shapes are known.
        self._output_shapes = self._compute_output_shapes()
    def _compute_output_shapes(self):
        """The subclass should implement this method to compute the shape of
        outputs. This method can assume that the ``data_shapes`` and ``label_shapes``
        are already initialized.
        """
        raise NotImplementedError()
    def init_optimizer(self, kvstore='local', optimizer='sgd',
                       optimizer_params=(('learning_rate', 0.01),), force_init=False):
        """Installs and initializes optimizers. By default we do nothing. Subclass should
        override this method if needed.
        Parameters
        ----------
        kvstore : str or KVStore
            Default `'local'`.
        optimizer : str or Optimizer
            Default `'sgd'`
        optimizer_params : dict
            Default `(('learning_rate', 0.01),)`. The default value is not a dictionary,
            just to avoid pylint warning of dangerous default values.
        force_init : bool
            Default `False`, indicating whether we should force re-initializing the
            optimizer in the case an optimizer is already installed.
        """
        pass
class PythonLossModule(PythonModule):
    """A convenient module class that implements many of the module APIs as
    empty functions.
    Parameters
    ----------
    name : str
        Names of the module. The outputs will be named `[name + '_output']`.
    data_names : list of str
        Defaults to ``['data']``. Names of the data expected by this module.
        Should be a list of only one name.
    label_names : list of str
        Default ``['softmax_label']``. Names of the labels expected by the module.
        Should be a list of only one name.
    grad_func : function
        Optional. If not ``None``, should be a function that takes `scores`
        and `labels`, both of type `NDArray`, and return the gradients with
        respect to the scores according to this loss function. The return
        value could be a numpy array or an `NDArray`.
    """
    def __init__(self, name='pyloss', data_names=('data',), label_names=('softmax_label',),
                 logger=logging, grad_func=None):
        super(PythonLossModule, self).__init__(data_names, label_names,
                                               [name + '_output'], logger=logger)
        self._name = name
        # A loss module consumes exactly one data and one label input.
        assert len(data_names) == 1
        assert len(label_names) == 1
        # References captured during forward() and used by backward().
        self._scores = None
        self._labels = None
        self._scores_grad = None
        if grad_func is not None:
            assert callable(grad_func)
        self._grad_func = grad_func
    def _compute_output_shapes(self):
        """Computes the shapes of outputs. As a loss module with outputs, we simply
        output whatever we receive as inputs (i.e. the scores).
        """
        return [(self._name + '_output', self._data_shapes[0][1])]
    def forward(self, data_batch, is_train=None):
        """Forward computation. Here we do nothing but to keep a reference to
        the scores and the labels so that we can do backward computation.
        Parameters
        ----------
        data_batch : DataBatch
            Could be anything with similar API implemented.
        is_train : bool
            Default is ``None``, which means `is_train` takes the value of ``self.for_training``.
        """
        self._scores = data_batch.data[0]
        if is_train is None:
            is_train = self.for_training
        if is_train:
            # Labels are only needed for the backward pass.
            self._labels = data_batch.label[0]
    def get_outputs(self, merge_multi_context=True):
        """Gets outputs of the previous forward computation. As a output loss module,
        we treat the inputs to this module as scores, and simply return them.
        Parameters
        ----------
        merge_multi_context : bool
            Should always be ``True``, because we do not use multiple contexts for computing.
        """
        assert merge_multi_context is True
        return [self._scores]
    def backward(self, out_grads=None):
        """Backward computation.
        Parameters
        ----------
        out_grads : NDArray or list of NDArray, optional
            Gradient on the outputs to be propagated back.
            This parameter is only needed when bind is called
            on outputs that are not a loss function.
        """
        assert out_grads is None, 'For a loss module, out_grads should be None'
        assert self.for_training
        self._backward_impl()
    def _backward_impl(self):
        """Actual implementation of the backward computation. The computation
        should take ``self._scores`` and ``self._labels`` and then compute the
        gradients with respect to the scores, store it as an `NDArray` in
        ``self._scores_grad``.
        Instead of defining a subclass and overriding this function,
        a more convenient way is to pass in a `grad_func` when constructing
        the module object. Then it will be called to compute the gradients.
        """
        if self._grad_func is not None:
            grad = self._grad_func(self._scores, self._labels)
            # grad_func may return a numpy array; convert to NDArray if so.
            if not isinstance(grad, nd.NDArray):
                grad = nd.array(grad)
            self._scores_grad = grad
        else:
            raise NotImplementedError()
    def get_input_grads(self, merge_multi_context=True):
        """Gets the gradients to the inputs, computed in the previous backward computation.
        Parameters
        ----------
        merge_multi_context : bool
            Should always be ``True`` because we do not use multiple context for computation.
        """
        assert merge_multi_context is True
        return [self._scores_grad]
    def install_monitor(self, mon):
        """Installs monitor on all executors."""
        raise NotImplementedError()
| apache-2.0 |
SpeedBienal/app-bienal | Bienal/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py | 1869 | 1247 | # Copyright 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A clone of the default copy.deepcopy that doesn't handle cyclic
structures or complex types except for dicts and lists. This is
because gyp copies so large structure that small copy overhead ends up
taking seconds in a project the size of Chromium."""
class Error(Exception):
  """Raised when deepcopy encounters a type it has no copier for."""
  pass
# Public API of this module.
__all__ = ["Error", "deepcopy"]
def deepcopy(x):
  """Deep copy operation on gyp objects such as strings, ints, dicts
  and lists. More than twice as fast as copy.deepcopy but much less
  generic.

  Raises:
    Error: if type(x) has no copier registered in _deepcopy_dispatch.
  """
  try:
    return _deepcopy_dispatch[type(x)](x)
  except KeyError:
    # Use implicit string-literal concatenation (no '+') so that '%' formats
    # the whole message.  The previous "'... %s ...' + '...' % type(x)" bound
    # '%' to the second literal only (which has no conversion specifier), so
    # the error path itself raised "TypeError: not all arguments converted"
    # instead of the intended Error.
    raise Error('Unsupported type %s for deepcopy. Use copy.deepcopy '
                'or expand simple_copy support.' % type(x))
# Maps type -> copier function.  The short alias 'd' keeps the registrations
# below terse; it is deleted at the end of the module.
_deepcopy_dispatch = d = {}
def _deepcopy_atomic(x):
  # Immutable values are safe to share rather than copy.
  return x
# NOTE(review): 'long' and 'unicode' exist only on Python 2; this module
# predates Python 3 support.
for x in (type(None), int, long, float,
          bool, str, unicode, type):
  d[x] = _deepcopy_atomic
def _deepcopy_list(x):
  """Copy a list by recursively deep-copying each element."""
  copied = []
  for item in x:
    copied.append(deepcopy(item))
  return copied
d[list] = _deepcopy_list
def _deepcopy_dict(x):
  """Copy a dict by recursively deep-copying every key and value."""
  result = {}
  for k, v in x.iteritems():
    result[deepcopy(k)] = deepcopy(v)
  return result
d[dict] = _deepcopy_dict
# The short registration alias is no longer needed.
del d
| gpl-3.0 |
dmordom/nipype | nipype/interfaces/fsl/tests/test_auto_FNIRT.py | 5 | 3834 | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.fsl.preprocess import FNIRT
def test_FNIRT_inputs():
    """Auto-generated check (see tools/checkspecs.py -- do not hand-edit the
    metadata below) that each FNIRT input trait carries the expected
    command-line/metadata attributes."""
    input_map = dict(affine_file=dict(argstr='--aff=%s',
    ),
    apply_inmask=dict(argstr='--applyinmask=%s',
    sep=',',
    xor=['skip_inmask'],
    ),
    apply_intensity_mapping=dict(argstr='--estint=%s',
    sep=',',
    xor=['skip_intensity_mapping'],
    ),
    apply_refmask=dict(argstr='--applyrefmask=%s',
    sep=',',
    xor=['skip_refmask'],
    ),
    args=dict(argstr='%s',
    ),
    bias_regularization_lambda=dict(argstr='--biaslambda=%f',
    ),
    biasfield_resolution=dict(argstr='--biasres=%d,%d,%d',
    ),
    config_file=dict(argstr='--config=%s',
    ),
    derive_from_ref=dict(argstr='--refderiv',
    ),
    environ=dict(nohash=True,
    usedefault=True,
    ),
    field_file=dict(argstr='--fout=%s',
    hash_files=False,
    ),
    fieldcoeff_file=dict(argstr='--cout=%s',
    ),
    hessian_precision=dict(argstr='--numprec=%s',
    ),
    ignore_exception=dict(nohash=True,
    usedefault=True,
    ),
    in_file=dict(argstr='--in=%s',
    mandatory=True,
    ),
    in_fwhm=dict(argstr='--infwhm=%s',
    sep=',',
    ),
    in_intensitymap_file=dict(argstr='--intin=%s',
    ),
    inmask_file=dict(argstr='--inmask=%s',
    ),
    inmask_val=dict(argstr='--impinval=%f',
    ),
    intensity_mapping_model=dict(argstr='--intmod=%s',
    ),
    intensity_mapping_order=dict(argstr='--intorder=%d',
    ),
    inwarp_file=dict(argstr='--inwarp=%s',
    ),
    jacobian_file=dict(argstr='--jout=%s',
    hash_files=False,
    ),
    jacobian_range=dict(argstr='--jacrange=%f,%f',
    ),
    log_file=dict(argstr='--logout=%s',
    genfile=True,
    hash_files=False,
    ),
    max_nonlin_iter=dict(argstr='--miter=%s',
    sep=',',
    ),
    modulatedref_file=dict(argstr='--refout=%s',
    hash_files=False,
    ),
    out_intensitymap_file=dict(argstr='--intout=%s',
    hash_files=False,
    ),
    output_type=dict(),
    ref_file=dict(argstr='--ref=%s',
    mandatory=True,
    ),
    ref_fwhm=dict(argstr='--reffwhm=%s',
    sep=',',
    ),
    refmask_file=dict(argstr='--refmask=%s',
    ),
    refmask_val=dict(argstr='--imprefval=%f',
    ),
    regularization_lambda=dict(argstr='--lambda=%s',
    sep=',',
    ),
    regularization_model=dict(argstr='--regmod=%s',
    ),
    skip_implicit_in_masking=dict(argstr='--impinm=0',
    ),
    skip_implicit_ref_masking=dict(argstr='--imprefm=0',
    ),
    skip_inmask=dict(argstr='--applyinmask=0',
    xor=['apply_inmask'],
    ),
    skip_intensity_mapping=dict(argstr='--estint=0',
    xor=['apply_intensity_mapping'],
    ),
    skip_lambda_ssq=dict(argstr='--ssqlambda=0',
    ),
    skip_refmask=dict(argstr='--applyrefmask=0',
    xor=['apply_refmask'],
    ),
    spline_order=dict(argstr='--splineorder=%d',
    ),
    subsampling_scheme=dict(argstr='--subsamp=%s',
    sep=',',
    ),
    terminal_output=dict(mandatory=True,
    nohash=True,
    ),
    warp_resolution=dict(argstr='--warpres=%d,%d,%d',
    ),
    warped_file=dict(argstr='--iout=%s',
    genfile=True,
    hash_files=False,
    ),
    )
    inputs = FNIRT.input_spec()
    # Yield one nose-style assertion per (trait, metadata key) pair.
    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_FNIRT_outputs():
    """Auto-generated check (see tools/checkspecs.py -- do not hand-edit the
    metadata below) that the FNIRT output spec declares exactly these traits."""
    output_map = dict(field_file=dict(),
    fieldcoeff_file=dict(),
    jacobian_file=dict(),
    log_file=dict(),
    modulatedref_file=dict(),
    out_intensitymap_file=dict(),
    warped_file=dict(),
    )
    outputs = FNIRT.output_spec()
    # Yield one nose-style assertion per (trait, metadata key) pair.
    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
| bsd-3-clause |
dcramer/django-compositepks | tests/modeltests/many_to_one_null/models.py | 30 | 3262 | """
16. Many-to-one relationships that can be null
To define a many-to-one relationship that can have a null foreign key, use
``ForeignKey()`` with ``null=True`` .
"""
from django.db import models
class Reporter(models.Model):
    # A Reporter may be referenced by zero or more Articles via the nullable
    # Article.reporter foreign key.
    name = models.CharField(max_length=30)
    def __unicode__(self):
        # Python 2 era Django uses __unicode__ for the display representation.
        return self.name
class Article(models.Model):
    headline = models.CharField(max_length=100)
    # null=True makes the FK column nullable: an Article may have no Reporter.
    reporter = models.ForeignKey(Reporter, null=True)
    class Meta:
        # Default queryset ordering; the doctests below rely on it.
        ordering = ('headline',)
    def __unicode__(self):
        return self.headline
# Doctests exercising nullable-ForeignKey behavior; collected and executed by
# Django's test runner via the module-level __test__ mapping.
__test__ = {'API_TESTS':"""
# Create a Reporter.
>>> r = Reporter(name='John Smith')
>>> r.save()
# Create an Article.
>>> a = Article(headline="First", reporter=r)
>>> a.save()
>>> a.reporter.id
1
>>> a.reporter
<Reporter: John Smith>
# Article objects have access to their related Reporter objects.
>>> r = a.reporter
# Create an Article via the Reporter object.
>>> a2 = r.article_set.create(headline="Second")
>>> a2
<Article: Second>
>>> a2.reporter.id
1
# Reporter objects have access to their related Article objects.
>>> r.article_set.all()
[<Article: First>, <Article: Second>]
>>> r.article_set.filter(headline__startswith='Fir')
[<Article: First>]
>>> r.article_set.count()
2
# Create an Article with no Reporter by passing "reporter=None".
>>> a3 = Article(headline="Third", reporter=None)
>>> a3.save()
>>> a3.id
3
>>> print a3.reporter
None
# Need to reget a3 to refresh the cache
>>> a3 = Article.objects.get(pk=3)
>>> print a3.reporter.id
Traceback (most recent call last):
...
AttributeError: 'NoneType' object has no attribute 'id'
# Accessing an article's 'reporter' attribute returns None
# if the reporter is set to None.
>>> print a3.reporter
None
# To retrieve the articles with no reporters set, use "reporter__isnull=True".
>>> Article.objects.filter(reporter__isnull=True)
[<Article: Third>]
# We can achieve the same thing by filtering for the case where the reporter is
# None.
>>> Article.objects.filter(reporter=None)
[<Article: Third>]
# Set the reporter for the Third article
>>> r.article_set.add(a3)
>>> r.article_set.all()
[<Article: First>, <Article: Second>, <Article: Third>]
# Remove an article from the set, and check that it was removed.
>>> r.article_set.remove(a3)
>>> r.article_set.all()
[<Article: First>, <Article: Second>]
>>> Article.objects.filter(reporter__isnull=True)
[<Article: Third>]
# Create another article and reporter
>>> r2 = Reporter(name='Paul Jones')
>>> r2.save()
>>> a4 = r2.article_set.create(headline='Fourth')
>>> r2.article_set.all()
[<Article: Fourth>]
# Try to remove a4 from a set it does not belong to
>>> r.article_set.remove(a4)
Traceback (most recent call last):
...
DoesNotExist: <Article: Fourth> is not related to <Reporter: John Smith>.
>>> r2.article_set.all()
[<Article: Fourth>]
# Use descriptor assignment to allocate ForeignKey. Null is legal, so
# existing members of set that are not in the assignment set are set null
>>> r2.article_set = [a2, a3]
>>> r2.article_set.all()
[<Article: Second>, <Article: Third>]
# Clear the rest of the set
>>> r.article_set.clear()
>>> r.article_set.all()
[]
>>> Article.objects.filter(reporter__isnull=True)
[<Article: First>, <Article: Fourth>]
"""}
| bsd-3-clause |
piotroxp/scibibscan | scib/lib/python3.6/site-packages/pip/_vendor/colorama/ansitowin32.py | 450 | 9668 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
import re
import sys
import os
from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style
from .winterm import WinTerm, WinColor, WinStyle
from .win32 import windll, winapi_test
# winterm is only constructed on Windows; elsewhere windll is None and the
# ANSI-to-win32 conversion machinery stays disabled.
winterm = None
if windll is not None:
    winterm = WinTerm()
def is_stream_closed(stream):
    '''Report whether *stream* is closed; streams with no 'closed' attribute
    cannot be written to safely, so they are treated as closed.'''
    return getattr(stream, 'closed', True)
def is_a_tty(stream):
    '''Report whether *stream* is attached to a terminal; streams without an
    isatty() method are never considered a tty.'''
    try:
        isatty = stream.isatty
    except AttributeError:
        return False
    return isatty()
class StreamWrapper(object):
    '''
    Transparent proxy around a stream (such as sys.stdout): every attribute
    lookup is forwarded to the underlying stream, except 'write()', which is
    routed through the converter instead.
    '''
    def __init__(self, wrapped, converter):
        # Name-mangled (double underscore) so these attributes cannot clash
        # with attribute names on the proxied stream.
        self.__stream = wrapped
        self.__converter = converter

    def __getattr__(self, name):
        # Only invoked for names not found on the wrapper itself.
        return getattr(self.__stream, name)

    def write(self, text):
        self.__converter.write(text)
class AnsiToWin32(object):
'''
Implements a 'write()' method which, on Windows, will strip ANSI character
sequences from the text, and if outputting to a tty, will convert them into
win32 function calls.
'''
ANSI_CSI_RE = re.compile('\001?\033\[((?:\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer
ANSI_OSC_RE = re.compile('\001?\033\]((?:.|;)*?)(\x07)\002?') # Operating System Command
def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
# The wrapped stream (normally sys.stdout or sys.stderr)
self.wrapped = wrapped
# should we reset colors to defaults after every .write()
self.autoreset = autoreset
# create the proxy wrapping our output stream
self.stream = StreamWrapper(wrapped, self)
on_windows = os.name == 'nt'
# We test if the WinAPI works, because even if we are on Windows
# we may be using a terminal that doesn't support the WinAPI
# (e.g. Cygwin Terminal). In this case it's up to the terminal
# to support the ANSI codes.
conversion_supported = on_windows and winapi_test()
# should we strip ANSI sequences from our output?
if strip is None:
strip = conversion_supported or (not is_stream_closed(wrapped) and not is_a_tty(wrapped))
self.strip = strip
# should we should convert ANSI sequences into win32 calls?
if convert is None:
convert = conversion_supported and not is_stream_closed(wrapped) and is_a_tty(wrapped)
self.convert = convert
# dict of ansi codes to win32 functions and parameters
self.win32_calls = self.get_win32_calls()
# are we wrapping stderr?
self.on_stderr = self.wrapped is sys.stderr
def should_wrap(self):
'''
True if this class is actually needed. If false, then the output
stream will not be affected, nor will win32 calls be issued, so
wrapping stdout is not actually required. This will generally be
False on non-Windows platforms, unless optional functionality like
autoreset has been requested using kwargs to init()
'''
return self.convert or self.strip or self.autoreset
def get_win32_calls(self):
if self.convert and winterm:
return {
AnsiStyle.RESET_ALL: (winterm.reset_all, ),
AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
AnsiFore.RED: (winterm.fore, WinColor.RED),
AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
AnsiFore.RESET: (winterm.fore, ),
AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True),
AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True),
AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True),
AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True),
AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True),
AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True),
AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True),
AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True),
AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
AnsiBack.RED: (winterm.back, WinColor.RED),
AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
AnsiBack.WHITE: (winterm.back, WinColor.GREY),
AnsiBack.RESET: (winterm.back, ),
AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True),
AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True),
AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True),
AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True),
AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True),
AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True),
AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True),
AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True),
}
return dict()
    def write(self, text):
        """Write *text* to the wrapped stream.
        When stripping or converting is enabled the text is routed through
        write_and_convert(); otherwise it is passed through untouched.
        If autoreset is on, styling is reset after every write.
        """
        if self.strip or self.convert:
            self.write_and_convert(text)
        else:
            # Pass-through mode: emit the text unchanged.
            self.wrapped.write(text)
            self.wrapped.flush()
        if self.autoreset:
            self.reset_all()
    def reset_all(self):
        """Reset console styling to defaults (win32 call or ANSI reset)."""
        if self.convert:
            # Emit the equivalent of ESC[0m via the win32 API.
            self.call_win32('m', (0,))
        elif not self.strip and not is_stream_closed(self.wrapped):
            self.wrapped.write(Style.RESET_ALL)
    def write_and_convert(self, text):
        '''
        Write the given text to our wrapped stream, stripping any ANSI
        sequences from the text, and optionally converting them into win32
        calls.
        '''
        cursor = 0
        # OSC sequences (e.g. title changes) are handled and removed first.
        text = self.convert_osc(text)
        for match in self.ANSI_CSI_RE.finditer(text):
            start, end = match.span()
            # Emit the plain text between the previous sequence and this one,
            # then act on the escape sequence itself.
            self.write_plain_text(text, cursor, start)
            self.convert_ansi(*match.groups())
            cursor = end
        self.write_plain_text(text, cursor, len(text))
    def write_plain_text(self, text, start, end):
        """Write the slice text[start:end] (if non-empty) and flush."""
        if start < end:
            self.wrapped.write(text[start:end])
            self.wrapped.flush()
    def convert_ansi(self, paramstring, command):
        """Translate one CSI sequence into a win32 call (when converting)."""
        if self.convert:
            params = self.extract_params(command, paramstring)
            self.call_win32(command, params)
def extract_params(self, command, paramstring):
if command in 'Hf':
params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';'))
while len(params) < 2:
# defaults:
params = params + (1,)
else:
params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0)
if len(params) == 0:
# defaults:
if command in 'JKm':
params = (0,)
elif command in 'ABCD':
params = (1,)
return params
    def call_win32(self, command, params):
        """Dispatch one parsed CSI command to the matching win32 console call."""
        if command == 'm':
            # SGR (Select Graphic Rendition): each parameter that has a
            # registered handler in win32_calls is applied in order;
            # unknown parameters are silently ignored.
            for param in params:
                if param in self.win32_calls:
                    func_args = self.win32_calls[param]
                    func = func_args[0]
                    args = func_args[1:]
                    kwargs = dict(on_stderr=self.on_stderr)
                    func(*args, **kwargs)
        elif command in 'J':
            winterm.erase_screen(params[0], on_stderr=self.on_stderr)
        elif command in 'K':
            winterm.erase_line(params[0], on_stderr=self.on_stderr)
        elif command in 'Hf':     # cursor position - absolute
            winterm.set_cursor_position(params, on_stderr=self.on_stderr)
        elif command in 'ABCD':   # cursor position - relative
            n = params[0]
            # A - up, B - down, C - forward, D - back
            x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command]
            winterm.cursor_adjust(x, y, on_stderr=self.on_stderr)
def convert_osc(self, text):
for match in self.ANSI_OSC_RE.finditer(text):
start, end = match.span()
text = text[:start] + text[end:]
paramstring, command = match.groups()
if command in '\x07': # \x07 = BEL
params = paramstring.split(";")
# 0 - change title and icon (we will only change title)
# 1 - change icon (we don't support this)
# 2 - change title
if params[0] in '02':
winterm.set_title(params[1])
return text
| mit |
deklungel/iRulez | old/modules/libraries/mysql/connector/conversion.py | 15 | 19710 | # MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Converting MySQL and Python types
"""
import datetime
import time
from decimal import Decimal
from .constants import FieldType, FieldFlag, CharacterSet
from .catch23 import PY2, NUMERIC_TYPES, struct_unpack
from .custom_types import HexLiteral
class MySQLConverterBase(object):
    """Base class for conversion classes.
    All classes dealing with converting to and from MySQL data types must
    be a subclass of this class.  The base implementation dispatches on
    type names but falls back to returning values unchanged.
    """
    def __init__(self, charset='utf8', use_unicode=True):
        self.python_types = None
        self.mysql_types = None
        self.charset = None
        self.charset_id = 0
        self.use_unicode = None
        self.set_charset(charset)
        self.set_unicode(use_unicode)
        # Maps MySQL field-type codes to bound _<NAME>_to_python methods;
        # built lazily on first use in to_python().
        self._cache_field_types = {}
    def set_charset(self, charset):
        """Set character set (utf8mb4 is treated as utf8; None -> utf8)."""
        if charset == 'utf8mb4':
            charset = 'utf8'
        if charset is not None:
            self.charset = charset
        else:
            # default to utf8
            self.charset = 'utf8'
        self.charset_id = CharacterSet.get_charset_info(self.charset)[0]
    def set_unicode(self, value=True):
        """Set whether to decode string results to Unicode"""
        self.use_unicode = value
    def to_mysql(self, value):
        """Convert a Python value to its MySQL representation.
        Dispatches to _<typename>_to_mysql(); values of types without a
        converter are returned unchanged.
        """
        type_name = value.__class__.__name__.lower()
        try:
            return getattr(self, "_{0}_to_mysql".format(type_name))(value)
        except AttributeError:
            return value
    def to_python(self, vtype, value):
        """Convert a MySQL value to Python using the field type in vtype.
        vtype is a field description sequence; vtype[1] is the type code.
        """
        if (value == b'\x00' or value is None) and vtype[1] != FieldType.BIT:
            # Don't go further when we hit a NULL value
            return None
        if not self._cache_field_types:
            self._cache_field_types = {}
            for name, info in FieldType.desc.items():
                try:
                    self._cache_field_types[info[0]] = getattr(
                        self, '_{0}_to_python'.format(name))
                except AttributeError:
                    # We ignore field types which has no method
                    pass
        try:
            return self._cache_field_types[vtype[1]](value, vtype)
        except KeyError:
            # No converter registered for this type code: return raw value.
            return value
    def escape(self, buf):
        """Escape buffer for sending to MySQL (no-op in the base class)"""
        return buf
    def quote(self, buf):
        """Quote buffer for sending to MySQL (base class: plain str())"""
        return str(buf)
class MySQLConverter(MySQLConverterBase):
    """Default conversion class for MySQL Connector/Python.
    o escape method: for escaping values sent to MySQL
    o quoting method: for quoting values sent to MySQL in statements
    o conversion mapping: maps Python and MySQL data types to
      functions for converting them.
    Whenever one needs to convert values differently, a converter_class
    argument can be given while instantiating a new connection like
    cnx.connect(converter_class=CustomMySQLConverterClass).
    """
    def __init__(self, charset=None, use_unicode=True):
        MySQLConverterBase.__init__(self, charset, use_unicode)
        # Reset the lazily-built type-code -> converter cache for this class.
        self._cache_field_types = {}
def escape(self, value):
"""
Escapes special characters as they are expected to by when MySQL
receives them.
As found in MySQL source mysys/charset.c
Returns the value if not a string, or the escaped string.
"""
if value is None:
return value
elif isinstance(value, NUMERIC_TYPES):
return value
if isinstance(value, (bytes, bytearray)):
value = value.replace(b'\\', b'\\\\')
value = value.replace(b'\n', b'\\n')
value = value.replace(b'\r', b'\\r')
value = value.replace(b'\047', b'\134\047') # single quotes
value = value.replace(b'\042', b'\134\042') # double quotes
value = value.replace(b'\032', b'\134\032') # for Win32
else:
value = value.replace('\\', '\\\\')
value = value.replace('\n', '\\n')
value = value.replace('\r', '\\r')
value = value.replace('\047', '\134\047') # single quotes
value = value.replace('\042', '\134\042') # double quotes
value = value.replace('\032', '\134\032') # for Win32
return value
def quote(self, buf):
"""
Quote the parameters for commands. General rules:
o numbers are returns as bytes using ascii codec
o None is returned as bytearray(b'NULL')
o Everything else is single quoted '<buf>'
Returns a bytearray object.
"""
if isinstance(buf, NUMERIC_TYPES):
if PY2:
if isinstance(buf, float):
return repr(buf)
else:
return str(buf)
else:
return str(buf).encode('ascii')
elif isinstance(buf, type(None)):
return bytearray(b"NULL")
else:
return bytearray(b"'" + buf + b"'")
    def to_mysql(self, value):
        """Convert a Python value to MySQL by dispatching on its type name.
        Raises TypeError when no _<typename>_to_mysql converter exists.
        """
        type_name = value.__class__.__name__.lower()
        try:
            return getattr(self, "_{0}_to_mysql".format(type_name))(value)
        except AttributeError:
            raise TypeError("Python '{0}' cannot be converted to a "
                            "MySQL type".format(type_name))
    def to_python(self, vtype, value):
        """Convert a MySQL result value to Python using vtype[1] (type code).
        Falls back to UTF-8 decoding (or the raw value) when no converter
        is registered for the type code.
        """
        # NOTE(review): on Python 3 a bytes value never compares equal to
        # the int 0, so this guard looks effective mainly on Python 2 —
        # confirm against the protocol layer that feeds this method.
        if value == 0 and vtype[1] != FieldType.BIT: # \x00
            # Don't go further when we hit a NULL value
            return None
        if value is None:
            return None
        if not self._cache_field_types:
            # Build the type-code -> converter cache once, lazily.
            self._cache_field_types = {}
            for name, info in FieldType.desc.items():
                try:
                    self._cache_field_types[info[0]] = getattr(
                        self, '_{0}_to_python'.format(name))
                except AttributeError:
                    # We ignore field types which has no method
                    pass
        try:
            return self._cache_field_types[vtype[1]](value, vtype)
        except KeyError:
            # If one type is not defined, we just return the value as str
            try:
                return value.decode('utf-8')
            except UnicodeDecodeError:
                return value
        except ValueError as err:
            raise ValueError("%s (field %s)" % (err, vtype[0]))
        except TypeError as err:
            raise TypeError("%s (field %s)" % (err, vtype[0]))
        except:
            # Re-raise anything else untouched (bare raise preserves the
            # original traceback).
            raise
    def _int_to_mysql(self, value):
        """Convert value to int"""
        return int(value)
    def _long_to_mysql(self, value):
        """Convert value to int (Python 2 long)"""
        return int(value)
    def _float_to_mysql(self, value):
        """Convert value to float"""
        return float(value)
    def _str_to_mysql(self, value):
        """Convert a str: bytes-as-is on Python 2, encoded text on Python 3."""
        if PY2:
            return str(value)
        return self._unicode_to_mysql(value)
    def _unicode_to_mysql(self, value):
        """Encode a text value using the connection charset.
        For charsets whose encoding can contain a backslash byte (0x5c)
        inside multi-byte characters, a HexLiteral is returned instead so
        escaping cannot corrupt the data.
        """
        charset = self.charset
        charset_id = self.charset_id
        if charset == 'binary':
            # 'binary' has no text encoding; encode as utf8 instead.
            charset = 'utf8'
            charset_id = CharacterSet.get_charset_info(charset)[0]
        encoded = value.encode(charset)
        if charset_id in CharacterSet.slash_charsets:
            if b'\x5c' in encoded:
                return HexLiteral(value, charset)
        return encoded
    def _bytes_to_mysql(self, value):
        """Convert value to bytes (already in wire format)"""
        return value
def _bytearray_to_mysql(self, value):
"""Convert value to bytes"""
return str(value)
    def _bool_to_mysql(self, value):
        """Convert a boolean to the MySQL integers 1 (true) / 0 (false)."""
        if value:
            return 1
        else:
            return 0
    def _nonetype_to_mysql(self, value):
        """
        This would return what None would be in MySQL, but instead we
        leave it None and return it right away. The actual conversion
        from None to NULL happens in the quoting functionality.
        Return None.
        """
        return None
def _datetime_to_mysql(self, value):
"""
Converts a datetime instance to a string suitable for MySQL.
The returned string has format: %Y-%m-%d %H:%M:%S[.%f]
If the instance isn't a datetime.datetime type, it return None.
Returns a bytes.
"""
if value.microsecond:
fmt = '{0:d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}.{6:06d}'
return fmt.format(
value.year, value.month, value.day,
value.hour, value.minute, value.second,
value.microsecond).encode('ascii')
fmt = '{0:d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}'
return fmt.format(
value.year, value.month, value.day,
value.hour, value.minute, value.second).encode('ascii')
def _date_to_mysql(self, value):
"""
Converts a date instance to a string suitable for MySQL.
The returned string has format: %Y-%m-%d
If the instance isn't a datetime.date type, it return None.
Returns a bytes.
"""
return '{0:d}-{1:02d}-{2:02d}'.format(value.year, value.month,
value.day).encode('ascii')
def _time_to_mysql(self, value):
"""
Converts a time instance to a string suitable for MySQL.
The returned string has format: %H:%M:%S[.%f]
If the instance isn't a datetime.time type, it return None.
Returns a bytes.
"""
if value.microsecond:
return value.strftime('%H:%M:%S.%f').encode('ascii')
return value.strftime('%H:%M:%S').encode('ascii')
def _struct_time_to_mysql(self, value):
"""
Converts a time.struct_time sequence to a string suitable
for MySQL.
The returned string has format: %Y-%m-%d %H:%M:%S
Returns a bytes or None when not valid.
"""
return time.strftime('%Y-%m-%d %H:%M:%S', value).encode('ascii')
    def _timedelta_to_mysql(self, value):
        """
        Converts a timedelta instance to a string suitable for MySQL.
        The returned string has format: [-]%H:%M:%S[.%f]
        Returns bytes (str on Python 2).
        """
        # Total magnitude in whole seconds; the sign is re-applied via the
        # format string below.
        seconds = abs(value.days * 86400 + value.seconds)
        if value.microseconds:
            fmt = '{0:02d}:{1:02d}:{2:02d}.{3:06d}'
            if value.days < 0:
                # timedelta normalizes negatives so microseconds is always
                # 0..999999; borrow one second to express the fraction of
                # a negative interval correctly.
                mcs = 1000000 - value.microseconds
                seconds -= 1
            else:
                mcs = value.microseconds
        else:
            fmt = '{0:02d}:{1:02d}:{2:02d}'
        if value.days < 0:
            fmt = '-' + fmt
        (hours, remainder) = divmod(seconds, 3600)
        (mins, secs) = divmod(remainder, 60)
        if value.microseconds:
            result = fmt.format(hours, mins, secs, mcs)
        else:
            result = fmt.format(hours, mins, secs)
        if PY2:
            return result
        else:
            return result.encode('ascii')
    def _decimal_to_mysql(self, value):
        """
        Converts a decimal.Decimal instance to an ASCII bytes literal.
        Returns bytes, or None when value is not a Decimal.
        """
        if isinstance(value, Decimal):
            return str(value).encode('ascii')
        return None
    def row_to_python(self, row, fields):
        """Convert a MySQL text result row to Python types.
        The row argument is a sequence containing text results returned
        by a MySQL server. Each value of the row is converted using the
        field type information in the fields argument.
        Returns a tuple.
        """
        i = 0
        result = [None]*len(fields)
        if not self._cache_field_types:
            # Build the type-code -> converter cache once, lazily
            # (same cache used by to_python()).
            self._cache_field_types = {}
            for name, info in FieldType.desc.items():
                try:
                    self._cache_field_types[info[0]] = getattr(
                        self, '_{0}_to_python'.format(name))
                except AttributeError:
                    # We ignore field types which has no method
                    pass
        for field in fields:
            field_type = field[1]
            # NOTE(review): `row[i] == 0` only matches bytes values on
            # Python 2; on Python 3 only the explicit None check fires —
            # confirm against the protocol layer.
            if (row[i] == 0 and field_type != FieldType.BIT) or row[i] is None:
                # Don't convert NULL value
                i += 1
                continue
            try:
                result[i] = self._cache_field_types[field_type](row[i], field)
            except KeyError:
                # If one type is not defined, we just return the value as str
                try:
                    result[i] = row[i].decode('utf-8')
                except UnicodeDecodeError:
                    result[i] = row[i]
            except (ValueError, TypeError) as err:
                # Attach the offending field name before re-raising.
                err.message = "{0} (field {1})".format(str(err), field[0])
                raise
            i += 1
        return tuple(result)
    def _FLOAT_to_python(self, value, desc=None):  # pylint: disable=C0103
        """
        Returns value as float type.
        """
        return float(value)
    # DOUBLE shares the FLOAT conversion.
    _DOUBLE_to_python = _FLOAT_to_python
    def _INT_to_python(self, value, desc=None):  # pylint: disable=C0103
        """
        Returns value as int type.
        """
        return int(value)
    # All integer widths share the INT conversion.
    _TINY_to_python = _INT_to_python
    _SHORT_to_python = _INT_to_python
    _INT24_to_python = _INT_to_python
    _LONG_to_python = _INT_to_python
    _LONGLONG_to_python = _INT_to_python
    def _DECIMAL_to_python(self, value, desc=None):  # pylint: disable=C0103
        """
        Returns value as a decimal.Decimal, decoding the raw bytes with
        the connection charset first.
        """
        val = value.decode(self.charset)
        return Decimal(val)
    _NEWDECIMAL_to_python = _DECIMAL_to_python
    def _str(self, value, desc=None):
        """
        Returns value as str type.
        """
        return str(value)
    def _BIT_to_python(self, value, dsc=None):  # pylint: disable=C0103
        """Returns BIT columntype as integer.
        The raw value is left-padded with NUL bytes to 8 bytes and read
        as a big-endian unsigned 64-bit integer.
        """
        int_val = value
        if len(int_val) < 8:
            int_val = b'\x00' * (8 - len(int_val)) + int_val
        return struct_unpack('>Q', int_val)[0]
    def _DATE_to_python(self, value, dsc=None):  # pylint: disable=C0103
        """
        Returns DATE column type as datetime.date type, or None for
        invalid dates (e.g. MySQL's zero date '0000-00-00').
        """
        try:
            parts = value.split(b'-')
            return datetime.date(int(parts[0]), int(parts[1]), int(parts[2]))
        except ValueError:
            return None
    _NEWDATE_to_python = _DATE_to_python
    def _TIME_to_python(self, value, dsc=None):  # pylint: disable=C0103
        """
        Returns TIME column type as datetime.timedelta type.
        (MySQL TIME can exceed 24h and be negative, hence timedelta
        rather than datetime.time despite the original docstring.)
        """
        time_val = None
        try:
            (hms, mcs) = value.split(b'.')
            mcs = int(mcs.ljust(6, b'0'))
        except ValueError:
            # No fractional part present.
            hms = value
            mcs = 0
        try:
            (hours, mins, secs) = [int(d) for d in hms.split(b':')]
            # 45 is ord('-'): bytes indexing yields an int on Python 3
            # and a 1-char str on Python 2. int(b'-HH') already made the
            # hours negative; propagate the sign to the other fields.
            if value[0] == 45 or value[0] == '-':  # if PY3 or PY2
                mins, secs, mcs = -mins, -secs, -mcs
            time_val = datetime.timedelta(hours=hours, minutes=mins,
                                          seconds=secs, microseconds=mcs)
        except ValueError:
            raise ValueError(
                "Could not convert {0} to python datetime.timedelta".format(
                    value))
        else:
            return time_val
def _DATETIME_to_python(self, value, dsc=None): # pylint: disable=C0103
"""
Returns DATETIME column type as datetime.datetime type.
"""
datetime_val = None
try:
(date_, time_) = value.split(b' ')
if len(time_) > 8:
(hms, mcs) = time_.split(b'.')
mcs = int(mcs.ljust(6, b'0'))
else:
hms = time_
mcs = 0
dtval = [int(i) for i in date_.split(b'-')] + \
[int(i) for i in hms.split(b':')] + [mcs, ]
datetime_val = datetime.datetime(*dtval)
except ValueError:
datetime_val = None
return datetime_val
_TIMESTAMP_to_python = _DATETIME_to_python
def _YEAR_to_python(self, value, desc=None): # pylint: disable=C0103
"""Returns YEAR column type as integer"""
try:
year = int(value)
except ValueError:
raise ValueError("Failed converting YEAR to int (%s)" % value)
return year
    def _SET_to_python(self, value, dsc=None):  # pylint: disable=C0103
        """Returns SET column type as set.
        Actually, MySQL protocol sees a SET as a string type field. So this
        code isn't called directly, but used by STRING_to_python() method.
        Returns SET column type as a set.
        """
        set_type = None
        val = value.decode(self.charset)
        if not val:
            return set()
        try:
            set_type = set(val.split(','))
        except ValueError:
            raise ValueError("Could not convert set %s to a sequence." % value)
        return set_type
    def _STRING_to_python(self, value, dsc=None):  # pylint: disable=C0103
        """
        Note that a SET is a string too, but using the FieldFlag we can see
        whether we have to split it.
        Returns string typed columns as string type.
        """
        if dsc is not None:
            # dsc[7] holds the field flags bitmask.
            # Check if we deal with a SET
            if dsc[7] & FieldFlag.SET:
                return self._SET_to_python(value, dsc)
            if dsc[7] & FieldFlag.BINARY:
                return value
        if self.charset == 'binary':
            return value
        if isinstance(value, (bytes, bytearray)) and self.use_unicode:
            return value.decode(self.charset)
        return value
    _VAR_STRING_to_python = _STRING_to_python
    def _BLOB_to_python(self, value, dsc=None):  # pylint: disable=C0103
        """Convert BLOB data type to Python.
        Binary-flagged BLOBs are returned as raw bytes; text BLOBs go
        through the STRING conversion (charset decoding).
        """
        if dsc is not None:
            if dsc[7] & FieldFlag.BINARY:
                if PY2:
                    return value
                else:
                    return bytes(value)
        return self._STRING_to_python(value, dsc)
    _LONG_BLOB_to_python = _BLOB_to_python
    _MEDIUM_BLOB_to_python = _BLOB_to_python
    _TINY_BLOB_to_python = _BLOB_to_python
| mit |
brockwhittaker/zulip | zerver/webhooks/beanstalk/view.py | 3 | 3606 | # Webhooks for external integrations.
from django.http import HttpRequest, HttpResponse
from zerver.models import get_client, UserProfile
from zerver.lib.actions import check_send_stream_message
from zerver.lib.response import json_success
from zerver.lib.validator import check_dict
from zerver.decorator import REQ, has_request_variables, authenticated_rest_api_view
import base64
from functools import wraps
from zerver.webhooks.github.view import build_message_from_gitlog
from typing import Any, Callable, Dict, TypeVar, Optional, Text
from zerver.lib.str_utils import force_str, force_bytes
ViewFuncT = TypeVar('ViewFuncT', bound=Callable[..., HttpResponse])
# Beanstalk's web hook UI rejects url with a @ in the username section of a url
# So we ask the user to replace them with %40
# We manually fix the username here before passing it along to @authenticated_rest_api_view
def beanstalk_decoder(view_func):
    # type: (ViewFuncT) -> ViewFuncT
    """Decorator that rewrites '%40' back to '@' in HTTP basic auth.
    Beanstalk's webhook UI rejects URLs containing '@' in the username,
    so users are instructed to write '%40'; this decoder decodes the
    Authorization header and re-encodes it with the real email before
    @authenticated_rest_api_view authenticates the request.
    """
    @wraps(view_func)
    def _wrapped_view_func(request, *args, **kwargs):
        # type: (HttpRequest, *Any, **Any) -> HttpResponse
        try:
            auth_type, encoded_value = request.META['HTTP_AUTHORIZATION'].split()  # type: str, str
            if auth_type.lower() == "basic":
                email, api_key = base64.b64decode(force_bytes(encoded_value)).decode('utf-8').split(":")
                email = email.replace('%40', '@')
                credentials = u"%s:%s" % (email, api_key)
                encoded_credentials = force_str(base64.b64encode(credentials.encode('utf-8')))
                request.META['HTTP_AUTHORIZATION'] = "Basic " + encoded_credentials
        except Exception:
            # Best-effort: any malformed/missing header is left untouched
            # and authentication is allowed to fail downstream instead.
            pass
        return view_func(request, *args, **kwargs)
    return _wrapped_view_func  # type: ignore # https://github.com/python/mypy/issues/1927
@beanstalk_decoder
@authenticated_rest_api_view(is_webhook=True)
@has_request_variables
def api_beanstalk_webhook(request, user_profile,
                          payload=REQ(validator=check_dict([])),
                          branches=REQ(default=None)):
    # type: (HttpRequest, UserProfile, Dict[str, Any], Optional[Text]) -> HttpResponse
    """Handle Beanstalk push webhooks for both git and SVN repositories,
    posting a commit summary to the "commits" stream."""
    # Beanstalk supports both SVN and git repositories
    # We distinguish between the two by checking for a
    # 'uri' key that is only present for git repos
    git_repo = 'uri' in payload
    if git_repo:
        # NOTE(review): substring matching means a filter of "master"
        # also matches e.g. "master-2" — confirm this is intended.
        if branches is not None and branches.find(payload['branch']) == -1:
            return json_success()
        # To get a linkable url,
        # reshape each commit's author into the form build_message_from_gitlog expects.
        for commit in payload['commits']:
            commit['author'] = {'username': commit['author']['name']}
        subject, content = build_message_from_gitlog(user_profile, payload['repository']['name'],
                                                     payload['ref'], payload['commits'],
                                                     payload['before'], payload['after'],
                                                     payload['repository']['url'],
                                                     payload['pusher_name'])
    else:
        # SVN payload: build a one-line revision message by hand.
        author = payload.get('author_full_name')
        url = payload.get('changeset_url')
        revision = payload.get('revision')
        (short_commit_msg, _, _) = payload['message'].partition("\n")
        subject = "svn r%s" % (revision,)
        content = "%s pushed [revision %s](%s):\n\n> %s" % (author, revision, url, short_commit_msg)
    check_send_stream_message(user_profile, get_client("ZulipBeanstalkWebhook"),
                              "commits", subject, content)
    return json_success()
| apache-2.0 |
UKTradeInvestment/export-wins-data | mi/views/sector_views.py | 1 | 13377 | import operator
from collections import defaultdict
from itertools import groupby
from operator import itemgetter
from functools import reduce
from django.db.models import Q
from mi.models import (
HVCGroup,
SectorTeam,
Target,
)
from mi.utils import sort_campaigns_by
from mi.views.base_view import BaseWinMIView, TopNonHvcMixin
def get_campaigns_from_group(g: HVCGroup, **kwargs):
    """All campaign ids of the group; **kwargs is accepted (and ignored)
    so the signature is interchangeable with the per-year variant below."""
    return g.campaign_ids
def get_campaigns_from_group_for_year(g: HVCGroup, fin_year=None):
    """Campaign ids of the group restricted to *fin_year* (a financial year)."""
    return g.fin_year_campaign_ids(fin_year)
class BaseSectorMIView(BaseWinMIView):
    """ Abstract Base for other Sector-related MI endpoints to inherit from """
    def _hvc_groups_for_fin_year(self):
        """ extracts hvc groups from targets for the given financial year """
        return HVCGroup.objects.filter(targets__financial_year=self.fin_year).distinct()
    def _hvc_groups_for_team(self, team):
        """ `HVCGroup` objects for a `SectorTeam` """
        return self._hvc_groups_for_fin_year().filter(sector_team=team)
    def _sector_teams_for_fin_year(self):
        """ Returns sector teams based on hvc groups from Targets for the given financial year """
        return SectorTeam.objects.filter(hvc_groups__targets__financial_year=self.fin_year).distinct()
    def _get_team(self, team_id):
        """ Get SectorTeam object or False if invalid ID """
        try:
            return SectorTeam.objects.get(id=int(team_id))
        except SectorTeam.DoesNotExist:
            return False
    def _team_wins_breakdown(self, sector_team):
        """ Breakdown of team's HVC, non-HVC and non-export Wins """
        return self._breakdowns(
            hvc_wins=self._get_hvc_wins(sector_team),
            non_hvc_wins=self._get_non_hvc_wins(sector_team)
        )
    def _get_group_fn(self, group, fn):
        """
        Overriding default group.campaign_ids, to add a hack to cater for
        cross FY team changes.
        HACK: group names and numeric ids below are hard-coded mappings
        between renamed/merged HVC groups across financial years; they
        will silently break if those rows change in the database.
        """
        campaign_ids = fn(group, fin_year=self.fin_year)
        if group.name == "Consumer and Retail":
            other_group = HVCGroup.objects.get(name="Consumer Goods & Retail")
            campaign_ids.extend(fn(other_group, fin_year=self.fin_year))
        elif group.name == "Creative":
            other_group = HVCGroup.objects.get(name="Creative Industries")
            campaign_ids.extend(fn(other_group, fin_year=self.fin_year))
        elif group.id == 34:  # Sports Economy has same name across
            other_group = HVCGroup.objects.get(id=27)
            campaign_ids.extend(fn(other_group, fin_year=self.fin_year))
        elif group.id == 30:
            fin_group = HVCGroup.objects.get(name="Financial Services")
            campaign_ids.extend(fn(fin_group, fin_year=self.fin_year))
            pro_group = HVCGroup.objects.get(name="Professional Services")
            campaign_ids.extend(fn(pro_group, fin_year=self.fin_year))
        elif group.id == 29:
            fin_group = HVCGroup.objects.get(name="Digital Economy")
            campaign_ids.extend(fn(fin_group, fin_year=self.fin_year))
        return campaign_ids
    def _get_group_campaigns_for_year(self, group):
        """Campaign ids of the group for self.fin_year (with cross-FY hack)."""
        return self._get_group_fn(group, get_campaigns_from_group_for_year)
    def _get_group_campaigns(self, group):
        """All campaign ids of the group (with cross-FY hack)."""
        return self._get_group_fn(group, get_campaigns_from_group)
    def _get_group_wins(self, group):
        """ HVC wins of the HVC Group, for given `FinancialYear` """
        campaigns_for_year = self._get_group_campaigns_for_year(group)
        if not campaigns_for_year:
            return self._hvc_wins().none()
        # Match on the first 4 characters of the campaign code
        # (prefix match across year-suffixed HVC codes).
        group_hvcs = [hvc[:4]
                      for hvc in campaigns_for_year]
        # NOTE: `filter` shadows the builtin; it is an OR-ed Q expression.
        filter = reduce(
            operator.or_, [Q(hvc__startswith=hvc) for hvc in group_hvcs])
        return self._hvc_wins().filter(filter)
    def _get_team_campaigns(self, team):
        """
        Overriding default team.campaign_ids, to add a hack to cater for cross FY team changes
        """
        # hack for Consumer & Creative
        campaign_ids = team.campaign_ids
        if team.name == "Creative, Consumer and Sports":
            other_team = SectorTeam.objects.get(name="Consumer & Creative")
            campaign_ids.extend(other_team.campaign_ids)
        return campaign_ids
    def _get_hvc_wins(self, team):
        """ HVC wins alone for the `SectorTeam`
        A `Win` is considered HVC for this team, when it falls under a Campaign that belongs to this `SectorTeam`
        """
        return self._hvc_wins().filter(hvc__in=self._get_team_campaigns(team))
    def _get_non_hvc_wins(self, team):
        """ non-HVC wins alone for the `SectorTeam`
        A `Win` is a non-HVC, if no HVC was mentioned while recording it
        but it belongs to a CDMS Sector that is within this `SectorTeam`s range
        """
        return self._non_hvc_wins().filter(sector__in=team.sector_ids)
    def _get_all_wins(self, sector_team):
        """ Get HVC and non-HVC Wins of a Sector Team """
        return self._get_hvc_wins(sector_team) | self._get_non_hvc_wins(sector_team)
    def _sector_result(self, team):
        """ Basic data about sector team - name & hvc's """
        return {
            'name': team.name,
            'avg_time_to_confirm': self._average_confirm_time(win__sector__in=team.sector_ids),
            'hvcs': self._hvc_overview(team.fin_year_targets(fin_year=self.fin_year)),
        }
class TopNonHvcSectorCountryWinsView(BaseSectorMIView, TopNonHvcMixin):
    """ Sector Team non-HVC Win data broken down by country """
    entity_name = 'team'
    def __init__(self, **kwargs):
        # Wire the mixin's hooks to this view's team lookup/queryset
        # before the mixin initialises.
        self.entity_getter_fn = self._get_team
        self.non_hvc_qs_getter_fn = self._get_non_hvc_wins
        super().__init__(**kwargs)
class SectorTeamsListView(BaseSectorMIView):
    """ Basic information about all Sector Teams """

    def _hvc_groups_data(self, team):
        """Name-sorted list of {'id', 'name'} dicts for the team's HVC groups."""
        groups_data = [
            {'id': group.id, 'name': group.name}
            for group in self._hvc_groups_for_team(team)
        ]
        groups_data.sort(key=itemgetter('name'))
        return groups_data

    def get(self, request):
        """Return all sector teams (name-sorted) with their HVC groups."""
        teams_data = []
        for team in self._sector_teams_for_fin_year():
            teams_data.append({
                'id': team.id,
                'name': team.name,
                'hvc_groups': self._hvc_groups_data(team),
            })
        teams_data.sort(key=itemgetter('name'))
        return self._success(teams_data)
class SectorTeamDetailView(BaseSectorMIView):
    """ Sector Team name, targets and win-breakdown """

    def get(self, request, team_id):
        """Return the team's basic data plus its HVC/non-HVC win breakdown."""
        team = self._get_team(team_id)
        if not team:
            return self._invalid('team not found')
        payload = dict(self._sector_result(team),
                       wins=self._team_wins_breakdown(team))
        return self._success(payload)
class SectorTeamMonthsView(BaseSectorMIView):
    """ Sector Team name, hvcs and wins broken down by month """

    def get(self, request, team_id):
        """Return the team's basic data plus a month-by-month win breakdown."""
        team = self._get_team(team_id)
        if not team:
            return self._invalid('team not found')
        payload = self._sector_result(team)
        payload['months'] = self._month_breakdowns(self._get_all_wins(team))
        return self._success(payload)
class SectorTeamCampaignsView(BaseSectorMIView):
    """ Sector Team Wins broken down by individual HVC """
    def _campaign_breakdowns(self, team):
        """Per-campaign progress data for the team, sorted for display."""
        wins = self._get_hvc_wins(team)
        targets = team.fin_year_targets(self.fin_year)
        campaign_to_wins = self._group_wins_by_target(wins, targets)
        campaigns = [
            {
                # Campaign names look like "Name: code"; keep the name part.
                'campaign': campaign.name.split(':')[0],
                'campaign_id': campaign.campaign_id,
                'totals': self._progress_breakdown(campaign_wins, campaign.target),
            }
            for campaign, campaign_wins in campaign_to_wins
        ]
        sorted_campaigns = sorted(
            campaigns, key=sort_campaigns_by, reverse=True)
        return sorted_campaigns
    def get(self, request, team_id):
        """Return the team's basic data plus its campaign breakdowns."""
        team = self._get_team(team_id)
        if not team:
            return self._invalid('team not found')
        results = self._sector_result(team)
        results['campaigns'] = self._campaign_breakdowns(team)
        return self._success(results)
class SectorTeamsOverviewView(BaseSectorMIView):
    """ Overview of HVCs, targets etc. for each SectorTeam """
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Per-request caches, filled once in get() so that building the
        # overview does not issue a query per team/group:
        self.team_groups = defaultdict(list)     # SectorTeam -> [HVCGroup]
        self.team_targets = defaultdict(list)    # team/group -> [Target]
        self.sector_to_wins = defaultdict(list)  # sector id -> [non-HVC win]
        self.hvc_to_wins = defaultdict(list)     # hvc code -> [HVC win]
    def _get_cached_hvc_wins(self, campaign_ids):
        """All cached HVC wins whose code is in campaign_ids."""
        return [win
                for code, wins in self.hvc_to_wins.items() if code in campaign_ids
                for win in wins
                ]
    def _get_cached_non_hvc_wins(self, sector_ids):
        """All cached non-HVC wins whose sector is in sector_ids."""
        return [win
                for sector, wins in self.sector_to_wins.items() if sector in sector_ids
                for win in wins
                ]
    def _sector_obj_data(self, sector_obj, campaign_ids):
        """ Get general data from SectorTeam or HVCGroup """
        sector_targets = self.team_targets[sector_obj]
        total_target = sum([t.target for t in sector_targets])
        hvc_wins = self._get_cached_hvc_wins(campaign_ids)
        hvc_confirmed, hvc_unconfirmed = self._confirmed_unconfirmed(hvc_wins)
        hvc_colours_count = self._colours(hvc_wins, sector_targets)
        return {
            'id': sector_obj.id,
            'name': sector_obj.name,
            'values': {
                'hvc': {
                    'current': {
                        'confirmed': hvc_confirmed,
                        'unconfirmed': hvc_unconfirmed
                    },
                    'target': total_target,
                    'target_percent': self._overview_target_percentage(hvc_wins, total_target),
                },
            },
            'hvc_performance': hvc_colours_count,
        }
    def _sector_data(self, sector_team):
        """ Calculate overview for a sector team """
        team_campaign_ids = self._get_team_campaigns(sector_team)
        result = self._sector_obj_data(sector_team, team_campaign_ids)
        hvc_wins = self._get_cached_hvc_wins(team_campaign_ids)
        non_hvc_wins = self._get_cached_non_hvc_wins(sector_team.sector_ids)
        non_hvc_confirmed, non_hvc_unconfirmed = self._confirmed_unconfirmed(
            non_hvc_wins)
        hvc_confirmed = result['values']['hvc']['current']['confirmed']
        hvc_unconfirmed = result['values']['hvc']['current']['unconfirmed']
        total_win_percent = self._overview_win_percentages(
            hvc_wins, non_hvc_wins)
        totals = {
            'confirmed': hvc_confirmed + non_hvc_confirmed,
            'unconfirmed': hvc_unconfirmed + non_hvc_unconfirmed
        }
        non_hvc_data = {
            'total_win_percent': total_win_percent['non_hvc'],
            'current': {
                'confirmed': non_hvc_confirmed,
                'unconfirmed': non_hvc_unconfirmed
            }
        }
        result['values']['totals'] = totals
        result['values']['non_hvc'] = non_hvc_data
        result['values']['hvc']['total_win_percent'] = total_win_percent['hvc']
        # Per-group breakdown nested inside the team's entry.
        groups = self.team_groups[sector_team]
        result['hvc_groups'] = [self._sector_obj_data(
            g, self._get_group_campaigns(g)) for g in groups]
        return result
    def get(self, request):
        """Build the overview for every sector team in self.fin_year."""
        # cache wins to avoid many queries
        hvc_wins, non_hvc_wins = self._wins().hvc(
            fin_year=self.fin_year), self._wins().non_hvc(fin_year=self.fin_year)
        for win in hvc_wins:
            self.hvc_to_wins[win['hvc']].append(win)
        for win in non_hvc_wins:
            self.sector_to_wins[win['sector']].append(win)
        # cache targets (each target is indexed by both its team and group)
        targets = Target.objects.filter(
            financial_year=self.fin_year).select_related('hvc_group', 'sector_team')
        for target in targets:
            self.team_targets[target.sector_team].append(target)
            self.team_targets[target.hvc_group].append(target)
        # cache groups
        for group in self._hvc_groups_for_fin_year():
            self.team_groups[group.sector_team].append(group)
        sector_team_qs = self._sector_teams_for_fin_year().prefetch_related(
            'sectors',
            'targets',
            'hvc_groups',
            'hvc_groups__targets',
        )
        result = [self._sector_data(team) for team in sector_team_qs]
        return self._success(sorted(result, key=itemgetter('name')))
class SectorTeamWinTableView(BaseSectorMIView):
    """ Win-table data (HVC and non-HVC wins) for a single Sector Team """

    def get(self, request, team_id):
        """Return the win table for the team, or 404 when the id is unknown."""
        team = self._get_team(team_id)
        if not team:
            return self._not_found()
        hvc_wins = self._get_hvc_wins(team)
        non_hvc_wins = self._get_non_hvc_wins(team)
        payload = {
            "sector_team": {
                "id": team_id,
                "name": team.name,
            },
            "wins": self._win_table_wins(hvc_wins, non_hvc_wins),
        }
        return self._success(payload)
| gpl-3.0 |
cgstudiomap/cgstudiomap | main/eggs/Django-1.9-py2.7.egg/django/core/cache/backends/db.py | 480 | 8628 | "Database cache backend."
import base64
from datetime import datetime
from django.conf import settings
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
from django.db import DatabaseError, connections, models, router, transaction
from django.utils import six, timezone
from django.utils.encoding import force_bytes
try:
from django.utils.six.moves import cPickle as pickle
except ImportError:
import pickle
class Options(object):
    """Duck-types the ``_meta`` options object of a Django model.

    Database cache operations hand this stand-in to database routers so
    that reads/writes for the cache table can be routed exactly like
    queries for a real model.
    """

    def __init__(self, table):
        # Fixed metadata mirroring what a genuine model ``_meta`` exposes.
        self.__dict__.update(
            db_table=table,
            app_label='django_cache',
            model_name='cacheentry',
            verbose_name='cache entry',
            verbose_name_plural='cache entries',
            object_name='CacheEntry',
            abstract=False,
            managed=True,
            proxy=False,
            swapped=False,
        )
class BaseDatabaseCache(BaseCache):
    """Common base for database-backed caches.

    Stores the raw (unquoted) cache table name and builds a per-instance
    stand-in "model" class whose ``_meta`` is an :class:`Options`, so
    database routers can route cache queries like model queries.
    """
    def __init__(self, table, params):
        BaseCache.__init__(self, params)
        self._table = table

        class CacheEntry(object):
            # Fake model handed to router.db_for_read()/db_for_write().
            _meta = Options(table)
        self.cache_model_class = CacheEntry
class DatabaseCache(BaseDatabaseCache):
    """Cache backend that stores entries as rows of a database table."""

    # This class uses cursors provided by the database connection. This means
    # it reads expiration values as aware or naive datetimes, depending on the
    # value of USE_TZ and whether the database supports time zones. The ORM's
    # conversion and adaptation infrastructure is then used to avoid comparing
    # aware and naive datetimes accidentally.

    def get(self, key, default=None, version=None):
        """Return the cached value for ``key``, or ``default`` on a miss.

        An expired row counts as a miss and is deleted eagerly (on the
        write database) before returning.
        """
        key = self.make_key(key, version=version)
        self.validate_key(key)
        db = router.db_for_read(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute("SELECT cache_key, value, expires FROM %s "
                           "WHERE cache_key = %%s" % table, [key])
            row = cursor.fetchone()
        if row is None:
            return default

        # Run the raw "expires" column through the backend's datetime
        # converters so it compares safely against timezone.now().
        expires = row[2]
        expression = models.Expression(output_field=models.DateTimeField())
        for converter in (connection.ops.get_db_converters(expression) +
                          expression.get_db_converters(connection)):
            expires = converter(expires, expression, connection, {})

        if expires < timezone.now():
            # Lazy expiry: delete the stale row, then report a miss.
            db = router.db_for_write(self.cache_model_class)
            connection = connections[db]
            with connection.cursor() as cursor:
                cursor.execute("DELETE FROM %s "
                               "WHERE cache_key = %%s" % table, [key])
            return default

        # Stored values are base64-encoded pickles.
        value = connection.ops.process_clob(row[1])
        return pickle.loads(base64.b64decode(force_bytes(value)))

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Unconditionally store ``value`` under ``key``."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        self._base_set('set', key, value, timeout)

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Store ``value`` only if ``key`` is absent or expired.

        Returns True on success, False otherwise.
        """
        key = self.make_key(key, version=version)
        self.validate_key(key)
        return self._base_set('add', key, value, timeout)

    def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        """Shared implementation behind set() and add().

        ``mode`` is 'set' (overwrite) or 'add' (only replace expired rows).
        Culls the table first when it has grown past ``_max_entries``.
        """
        timeout = self.get_backend_timeout(timeout)
        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            now = timezone.now()
            # Sub-second precision is dropped so comparisons against the
            # stored (second-resolution) expiry are consistent.
            now = now.replace(microsecond=0)
            if timeout is None:
                exp = datetime.max
            elif settings.USE_TZ:
                exp = datetime.utcfromtimestamp(timeout)
            else:
                exp = datetime.fromtimestamp(timeout)
            exp = exp.replace(microsecond=0)
            if num > self._max_entries:
                self._cull(db, cursor, now)
            pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
            b64encoded = base64.b64encode(pickled)
            # The DB column is expecting a string, so make sure the value is a
            # string, not bytes. Refs #19274.
            if six.PY3:
                b64encoded = b64encoded.decode('latin1')
            try:
                # Note: typecasting for datetimes is needed by some 3rd party
                # database backends. All core backends work without typecasting,
                # so be careful about changes here - test suite will NOT pick
                # regressions.
                with transaction.atomic(using=db):
                    cursor.execute("SELECT cache_key, expires FROM %s "
                                   "WHERE cache_key = %%s" % table, [key])
                    result = cursor.fetchone()

                    if result:
                        current_expires = result[1]
                        expression = models.Expression(output_field=models.DateTimeField())
                        for converter in (connection.ops.get_db_converters(expression) +
                                          expression.get_db_converters(connection)):
                            current_expires = converter(current_expires, expression, connection, {})

                    exp = connection.ops.adapt_datetimefield_value(exp)
                    # UPDATE when the key exists and either we're in 'set'
                    # mode or the existing row has already expired.
                    if result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                        cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                                       "WHERE cache_key = %%s" % table,
                                       [b64encoded, exp, key])
                    else:
                        cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                                       "VALUES (%%s, %%s, %%s)" % table,
                                       [key, b64encoded, exp])
            except DatabaseError:
                # To be threadsafe, updates/inserts are allowed to fail silently
                return False
            else:
                return True

    def delete(self, key, version=None):
        """Remove ``key`` from the cache (no-op if absent)."""
        key = self.make_key(key, version=version)
        self.validate_key(key)

        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % table, [key])

    def has_key(self, key, version=None):
        """Return True if ``key`` exists and has not expired."""
        key = self.make_key(key, version=version)
        self.validate_key(key)

        db = router.db_for_read(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        if settings.USE_TZ:
            now = datetime.utcnow()
        else:
            now = datetime.now()
        now = now.replace(microsecond=0)

        with connection.cursor() as cursor:
            cursor.execute("SELECT cache_key FROM %s "
                           "WHERE cache_key = %%s and expires > %%s" % table,
                           [key, connection.ops.adapt_datetimefield_value(now)])
            return cursor.fetchone() is not None

    def _cull(self, db, cursor, now):
        """Trim the table: drop expired rows, then every Nth remaining key.

        With ``_cull_frequency == 0`` the whole cache is cleared instead.
        """
        if self._cull_frequency == 0:
            self.clear()
        else:
            connection = connections[db]
            table = connection.ops.quote_name(self._table)
            cursor.execute("DELETE FROM %s WHERE expires < %%s" % table,
                           [connection.ops.adapt_datetimefield_value(now)])
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            if num > self._max_entries:
                cull_num = num // self._cull_frequency
                # Backend-specific SQL picks the key at the cull boundary;
                # everything ordered before it is deleted.
                cursor.execute(
                    connection.ops.cache_key_culling_sql() % table,
                    [cull_num])
                cursor.execute("DELETE FROM %s "
                               "WHERE cache_key < %%s" % table,
                               [cursor.fetchone()[0]])

    def clear(self):
        """Delete every row in the cache table."""
        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)
        with connection.cursor() as cursor:
            cursor.execute('DELETE FROM %s' % table)
| agpl-3.0 |
Elettronik/SickRage | lib/html5lib/treewalkers/__init__.py | 354 | 5544 | """A collection of modules for iterating through different kinds of
tree, generating tokens identical to those produced by the tokenizer
module.
To create a tree walker for a new type of tree, you need to do
implement a tree walker object (called TreeWalker by convention) that
implements a 'serialize' method taking a tree as sole argument and
returning an iterator generating tokens.
"""
from __future__ import absolute_import, division, unicode_literals
from .. import constants
from .._utils import default_etree
__all__ = ["getTreeWalker", "pprint", "dom", "etree", "genshi", "etree_lxml"]
treeWalkerCache = {}
def getTreeWalker(treeType, implementation=None, **kwargs):
    """Return a TreeWalker class for the named kind of tree.

    ``treeType`` (case-insensitive) selects among the built-in walkers:

    - "dom": the xml.dom.minidom DOM implementation
    - "etree": generic walker for elementtree-like trees (ElementTree,
      cElementTree, lxml.etree)
    - "lxml": optimized walker for lxml.etree
    - "genshi": a Genshi stream

    ``implementation`` names the elementtree-compatible module to use and
    is only honoured for the "etree" tree type.
    """
    kind = treeType.lower()
    if kind not in treeWalkerCache:
        if kind == "etree":
            from . import etree
            if implementation is None:
                implementation = default_etree
            # XXX: NEVER cache here; caching is done in the etree submodule.
            return etree.getETreeModule(implementation, **kwargs).TreeWalker
        elif kind == "dom":
            from . import dom
            treeWalkerCache[kind] = dom.TreeWalker
        elif kind == "genshi":
            from . import genshi
            treeWalkerCache[kind] = genshi.TreeWalker
        elif kind == "lxml":
            from . import etree_lxml
            treeWalkerCache[kind] = etree_lxml.TreeWalker
    return treeWalkerCache.get(kind)
def concatenateCharacterTokens(tokens):
    """Merge adjacent (Space)Characters tokens into single Characters tokens.

    Any run of "Characters"/"SpaceCharacters" tokens is buffered and
    flushed as one "Characters" token whenever a token of another type
    arrives (or the stream ends).
    """
    buffered = []
    for token in tokens:
        if token["type"] in ("Characters", "SpaceCharacters"):
            buffered.append(token["data"])
            continue
        if buffered:
            yield {"type": "Characters", "data": "".join(buffered)}
            del buffered[:]
        yield token
    if buffered:
        yield {"type": "Characters", "data": "".join(buffered)}
def pprint(walker):
    """Pretty printer for tree walkers.

    Consumes the token stream produced by ``walker`` and returns an
    indented, human-readable text rendering (one node per line, two-space
    indent per nesting level).  Raises ValueError on unknown token types.
    """
    output = []
    indent = 0
    for token in concatenateCharacterTokens(walker):
        type = token["type"]
        if type in ("StartTag", "EmptyTag"):
            # tag name; non-HTML namespaces are shown as a "<prefix name>" pair
            if token["namespace"] and token["namespace"] != constants.namespaces["html"]:
                if token["namespace"] in constants.prefixes:
                    ns = constants.prefixes[token["namespace"]]
                else:
                    ns = token["namespace"]
                name = "%s %s" % (ns, token["name"])
            else:
                name = token["name"]
            output.append("%s<%s>" % (" " * indent, name))
            indent += 2
            # attributes (sorted for consistent ordering)
            attrs = token["data"]
            for (namespace, localname), value in sorted(attrs.items()):
                if namespace:
                    if namespace in constants.prefixes:
                        ns = constants.prefixes[namespace]
                    else:
                        ns = namespace
                    name = "%s %s" % (ns, localname)
                else:
                    name = localname
                output.append("%s%s=\"%s\"" % (" " * indent, name, value))
            # self-closing: EmptyTag opens and closes in one token
            if type == "EmptyTag":
                indent -= 2
        elif type == "EndTag":
            indent -= 2
        elif type == "Comment":
            output.append("%s<!-- %s -->" % (" " * indent, token["data"]))
        elif type == "Doctype":
            # Render publicId/systemId variants of the doctype declaration.
            if token["name"]:
                if token["publicId"]:
                    output.append("""%s<!DOCTYPE %s "%s" "%s">""" %
                                  (" " * indent,
                                   token["name"],
                                   token["publicId"],
                                   token["systemId"] if token["systemId"] else ""))
                elif token["systemId"]:
                    output.append("""%s<!DOCTYPE %s "" "%s">""" %
                                  (" " * indent,
                                   token["name"],
                                   token["systemId"]))
                else:
                    output.append("%s<!DOCTYPE %s>" % (" " * indent,
                                                       token["name"]))
            else:
                output.append("%s<!DOCTYPE >" % (" " * indent,))
        elif type == "Characters":
            output.append("%s\"%s\"" % (" " * indent, token["data"]))
        elif type == "SpaceCharacters":
            assert False, "concatenateCharacterTokens should have got rid of all Space tokens"
        else:
            raise ValueError("Unknown token type, %s" % type)
    return "\n".join(output)
| gpl-3.0 |
siggame/webserver | webserver/codemanagement/migrations/0001_initial.py | 1 | 11020 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: creates the BaseClient and TeamClient tables.

    Auto-generated by South's schemamigration command; the ``models``
    dictionary below is a frozen snapshot of the ORM at generation time and
    must not be edited by hand.
    """

    def forwards(self, orm):
        # Adding model 'BaseClient'
        db.create_table('codemanagement_baseclient', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('competition', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['competition.Competition'])),
            ('language', self.gf('django.db.models.fields.CharField')(max_length=20)),
            ('language_slug', self.gf('django.db.models.fields.SlugField')(max_length=50, blank=True)),
            ('repository', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['greta.Repository'], unique=True)),
        ))
        db.send_create_signal('codemanagement', ['BaseClient'])

        # Adding model 'TeamClient'
        db.create_table('codemanagement_teamclient', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('team', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['competition.Team'], unique=True)),
            ('base', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['codemanagement.BaseClient'])),
            ('repository', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['greta.Repository'], unique=True)),
        ))
        db.send_create_signal('codemanagement', ['TeamClient'])

    def backwards(self, orm):
        # Reverse of forwards(): drop both tables.
        # Deleting model 'BaseClient'
        db.delete_table('codemanagement_baseclient')

        # Deleting model 'TeamClient'
        db.delete_table('codemanagement_teamclient')

    # Frozen ORM definitions used by South to build the ``orm`` argument.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'codemanagement.baseclient': {
            'Meta': {'object_name': 'BaseClient'},
            'competition': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['competition.Competition']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'language_slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'}),
            'repository': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['greta.Repository']", 'unique': 'True'})
        },
        'codemanagement.teamclient': {
            'Meta': {'object_name': 'TeamClient'},
            'base': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['codemanagement.BaseClient']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'repository': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['greta.Repository']", 'unique': 'True'}),
            'team': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['competition.Team']", 'unique': 'True'})
        },
        'competition.avatar': {
            'Meta': {'object_name': 'Avatar'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            'image_height': ('django.db.models.fields.IntegerField', [], {}),
            'image_width': ('django.db.models.fields.IntegerField', [], {}),
            'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            'thumbnail_height': ('django.db.models.fields.IntegerField', [], {}),
            'thumbnail_width': ('django.db.models.fields.IntegerField', [], {})
        },
        'competition.competition': {
            'Meta': {'ordering': "['-is_running', '-is_open', '-start_time']", 'object_name': 'Competition'},
            'avatar': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['competition.Avatar']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
            'cost_per_person': ('django.db.models.fields.FloatField', [], {}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'end_time': ('django.db.models.fields.DateTimeField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_open': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_running': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'max_num_team_members': ('django.db.models.fields.IntegerField', [], {}),
            'min_num_team_members': ('django.db.models.fields.IntegerField', [], {}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
            'questions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['competition.RegistrationQuestion']", 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'blank': 'True'}),
            'start_time': ('django.db.models.fields.DateTimeField', [], {})
        },
        'competition.registrationquestion': {
            'Meta': {'object_name': 'RegistrationQuestion'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'question': ('django.db.models.fields.TextField', [], {}),
            'question_type': ('django.db.models.fields.CharField', [], {'max_length': '2'})
        },
        'competition.team': {
            'Meta': {'ordering': "['name']", 'unique_together': "(('competition', 'slug'),)", 'object_name': 'Team'},
            'avatar': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['competition.Avatar']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
            'competition': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['competition.Competition']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'eligible_to_win': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'paid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'}),
            'time_paid': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'greta.repository': {
            'Meta': {'object_name': 'Repository'},
            'default_branch': ('django.db.models.fields.CharField', [], {'default': "'master'", 'max_length': '30'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'forked_from': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'forks'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['greta.Repository']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'owner_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'owner_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
            'task_id': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True'})
        }
    }
complete_apps = ['codemanagement'] | bsd-3-clause |
mlperf/training_results_v0.6 | Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/onnx-tensorrt/third_party/onnx/onnx/backend/test/case/node/gather.py | 2 | 1223 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np # type: ignore
import onnx
from ..base import Base
from . import expect
class Gather(Base):
    """Test-case generators for the ONNX ``Gather`` operator."""

    @staticmethod
    def export_gather_0():  # type: () -> None
        # Gather along the outermost axis (axis=0).
        gather_node = onnx.helper.make_node(
            'Gather',
            inputs=['data', 'indices'],
            outputs=['y'],
            axis=0,
        )
        source = np.random.randn(5, 4, 3, 2).astype(np.float32)
        idx = np.array([0, 1, 3])
        expected = np.take(source, idx, axis=0)
        expect(gather_node, inputs=[source, idx.astype(np.int64)],
               outputs=[expected], name='test_gather_0')

    @staticmethod
    def export_gather_1():  # type: () -> None
        # Same as above but gathering along axis 1.
        gather_node = onnx.helper.make_node(
            'Gather',
            inputs=['data', 'indices'],
            outputs=['y'],
            axis=1,
        )
        source = np.random.randn(5, 4, 3, 2).astype(np.float32)
        idx = np.array([0, 1, 3])
        expected = np.take(source, idx, axis=1)
        expect(gather_node, inputs=[source, idx.astype(np.int64)],
               outputs=[expected], name='test_gather_1')
| apache-2.0 |
CubicERP/geraldo | site/newsite/django_1_0/django/template/context.py | 14 | 3365 | from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
_standard_context_processors = None
class ContextPopException(Exception):
    """Raised when pop() has been called more times than push()."""
    pass
class Context(object):
    "A stack container for variable context"

    def __init__(self, dict_=None, autoescape=True):
        # dicts[0] is always the innermost (most recently pushed) scope.
        self.dicts = [dict_ or {}]
        self.autoescape = autoescape

    def __repr__(self):
        return repr(self.dicts)

    def __iter__(self):
        for scope in self.dicts:
            yield scope

    def push(self):
        "Enter a new, empty scope; return its backing dict."
        scope = {}
        self.dicts = [scope] + self.dicts
        return scope

    def pop(self):
        "Leave the current scope; the root scope can never be popped."
        if len(self.dicts) < 2:
            raise ContextPopException
        return self.dicts.pop(0)

    def __setitem__(self, key, value):
        "Set a variable in the current context"
        self.dicts[0][key] = value

    def __getitem__(self, key):
        "Get a variable's value, starting at the current context and going upward"
        for scope in self.dicts:
            if key in scope:
                return scope[key]
        raise KeyError(key)

    def __delitem__(self, key):
        "Delete a variable from the current context"
        del self.dicts[0][key]

    def has_key(self, key):
        "True if any scope defines ``key``."
        for scope in self.dicts:
            if key in scope:
                return True
        return False

    __contains__ = has_key

    def get(self, key, otherwise=None):
        "Like __getitem__ but returns ``otherwise`` instead of raising."
        for scope in self.dicts:
            if key in scope:
                return scope[key]
        return otherwise

    def update(self, other_dict):
        "Like dict.update(). Pushes an entire dictionary's keys and values onto the context."
        self.dicts = [other_dict] + self.dicts
        return other_dict
# This is a function rather than module-level procedural code because we only
# want it to execute if somebody uses RequestContext.
def get_standard_processors():
    """Import and return the context processors named in settings.

    Resolves each dotted path in ``TEMPLATE_CONTEXT_PROCESSORS`` to a
    callable, caching the resulting tuple in the module-level
    ``_standard_context_processors`` so the imports happen only once.
    Raises ImproperlyConfigured for unimportable modules or missing
    attributes.
    """
    global _standard_context_processors
    if _standard_context_processors is None:
        processors = []
        for path in settings.TEMPLATE_CONTEXT_PROCESSORS:
            # Split "pkg.module.func" into module path and attribute name.
            i = path.rfind('.')
            module, attr = path[:i], path[i+1:]
            try:
                mod = __import__(module, {}, {}, [attr])
            except ImportError, e:
                raise ImproperlyConfigured('Error importing request processor module %s: "%s"' % (module, e))
            try:
                func = getattr(mod, attr)
            except AttributeError:
                raise ImproperlyConfigured('Module "%s" does not define a "%s" callable request processor' % (module, attr))
            processors.append(func)
        _standard_context_processors = tuple(processors)
    return _standard_context_processors
class RequestContext(Context):
    """
    This subclass of template.Context automatically populates itself using
    the processors defined in TEMPLATE_CONTEXT_PROCESSORS.
    Additional processors can be specified as a list of callables
    using the "processors" keyword argument.
    """
    def __init__(self, request, dict=None, processors=None):
        Context.__init__(self, dict)
        if processors is None:
            processors = ()
        else:
            processors = tuple(processors)
        # Each processor receives the request and returns a dict, which is
        # pushed onto the context stack (later processors shadow earlier ones).
        for processor in get_standard_processors() + processors:
            self.update(processor(request))
| lgpl-3.0 |
eunchong/build | scripts/tools/get_master_config.py | 2 | 3333 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Prints information from master_site_config.py
The sole purpose of this program it to keep the crap inside build/ while
we're moving to the new infra/ repository. By calling it, you get access
to some information contained in master_site_config.py for a given master,
as a json string.
Invocation: runit.py get_master_config.py --master-name <master name>
"""
import argparse
import inspect
import json
import logging
import os
import sys
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
# Directory containing build/
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname(SCRIPT_DIR)))
assert os.path.isdir(os.path.join(ROOT_DIR, 'build')), \
'Script may have moved in the hierarchy'
LOGGER = logging
def get_master_directory(master_name):
  """Given a master name, returns the full path to the corresponding directory.

  This function either returns a path to an existing directory, or None.
  """
  # Accept both 'master.foo' and bare 'foo'.
  if master_name.startswith('master.'):
    master_name = master_name[7:]

  # The master may live in either the public or the internal checkout.
  for checkout in ('build', 'build_internal'):
    candidate = os.path.join(ROOT_DIR,
                             checkout,
                             'masters',
                             'master.' + master_name)
    if os.path.isdir(candidate):
      return candidate
  return None
def read_master_site_config(master_name):
  """Return a dictionary containing master_site_config

  master_name: name of master whose file to parse

  Return: dict (empty dict if there is an error)
     {'master_port': int()}
  """
  master_path = get_master_directory(master_name)
  if not master_path:
    LOGGER.error('full path for master cannot be determined')
    return {}

  master_site_config_path = os.path.join(master_path, 'master_site_config.py')

  if not os.path.isfile(master_site_config_path):
    LOGGER.error('no master_site_config.py file found in %s' % master_path)
    return {}

  # Execute the config file and inspect the names it defines.
  local_vars = {}
  try:
    execfile(master_site_config_path, local_vars)
  except Exception:  # pylint: disable=W0703
    # Naked exceptions are banned by the style guide but we are
    # trying to be resilient here.
    LOGGER.exception("exception occured when exec'ing %s"
                     % master_site_config_path)
    return {}

  # The first class exposing a master_port attribute is taken as the
  # master's config class; optional attributes are copied when present.
  for _, symbol in local_vars.iteritems():
    if inspect.isclass(symbol):
      if not hasattr(symbol, 'master_port'):
        continue
      config = {'master_port': symbol.master_port}
      for attr in ('project_name', 'slave_port', 'master_host',
                   'master_port_alt', 'buildbot_url'):
        if hasattr(symbol, attr):
          config[attr] = getattr(symbol, attr)
      return config

  LOGGER.error('No master port found in %s' % master_site_config_path)
  return {}
def get_options(argv):
  """Parse command-line arguments; --master-name is mandatory."""
  arg_parser = argparse.ArgumentParser()
  arg_parser.add_argument('--master-name', required=True)
  return arg_parser.parse_args(argv)
def main():
  """Entry point: print the named master's site config as a JSON object."""
  options = get_options(sys.argv[1:])
  config = read_master_site_config(options.master_name)
  # Emits {} when the config could not be read; exit code stays 0 either way.
  print json.dumps(config, indent=2, sort_keys=True)
  return 0
| bsd-3-clause |
py4n6/pyflag | utilities/whois_load.py | 7 | 12614 | #!/usr/bin/python
#
# Script to load whois ipv4 data into pyflag master database
# usage:
# ./whois_load [filename]...
#
# Multiple files can be named on the cmd line, but filenames MUST
# be of a particular format:
# RIR Records: delegated-<source>-<latest|date> eg. delegated-arin-latest
# Full Records: <source>.db.inetnum.gz eg. ripe.db.inetnum.gz
# These are the default names of the files provided via ftp, the
# names are used to determine file type and parse the source and date
#
# If called without arguments, script will attempt to download
# the latest versions of the databases via FTP
#
# David Collett <daveco@sourceforge.net>
import sys
import re
import urllib
import time
import gzip
import os.path
import pyflag.DB as DB
import pyflag.conf
config=pyflag.conf.ConfObject()
from optparse import OptionParser
# whois database URL's
# Full databases are available for apnic and ripencc
# Only have 'RIR' stats for lacnic and arin
# ...though you may be able to request full databases from them
# Source URLs per registry.  Full split databases exist for RIPE (and APNIC,
# currently disabled); ARIN and LACNIC only publish delegation stats files.
urls = {#'apnic':'ftp://ftp.apnic.net/apnic/whois-data/APNIC/split/apnic.db.inetnum.gz',
        'ripe':'ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inetnum.gz',
        'arin':'ftp://ftp.arin.net/pub/stats/arin/delegated-arin-latest',
        'lacnic':'ftp://ftp.lacnic.net/pub/stats/lacnic/delegated-lacnic-latest'}

config.set_usage(usage="""%prog [Options]

Downloads some whois repositories.""")

config.optparser.add_option('-d','--delete', action="store_true",
                            help="""Delete previous databases""")

config.optparser.add_option('-a','--all', action="store_true",
                            help="""Load all repositories""")

# One boolean flag per registry (e.g. --ripe, --arin, ...).
for k in urls.keys():
    config.optparser.add_option('','--%s' % k, action="store_true",
                                help = "Load %s databases" % k)

config.parse_options()

# --all is shorthand for enabling every per-registry flag.
if config.all:
    for k in urls.keys():
        setattr(config, k, 1)
MASK32 = 0xffffffffL
import sys
def progress(block, blocksize, totalblocks):
    """urllib reporthook: rewrite a single-line download progress indicator."""
    retrieved = block * blocksize
    sys.stdout.write("Retrieved %skb/%skb %u%%\r"
                     % (retrieved / 1024,
                        totalblocks / 1024,
                        (retrieved * 100) / totalblocks))
    sys.stdout.flush()
# apnic and ripe can be replaced by the below URLs, if only stats are req'd
# ftp://ftp.apnic.net/pub/stats/apnic/delegated-apnic-latest
# ftp://ftp.ripe.net/pub/stats/ripencc/delegated-ripencc-latest
# pow2 list: maps 2**n -> n for n in 0..32, plus a sorted list of the powers.
# NOTE: Python 2 only — long() and sorting the result of dict.keys() in place
# both fail on Python 3.
pow2 = {}
for num in range(33):
    pow2[long(2**num)] = num
pow2list = pow2.keys()
pow2list.sort()
def largest_nm(num):
    """ return highest valid netmask possible

    Finds the largest power of two that still fits within ``num`` hosts
    and converts it to the corresponding 32-bit netmask.
    """
    best = 0
    for hosts in pow2list:          # pow2list is sorted ascending
        if hosts > num:
            break
        best = hosts
    return (best - 1) ^ MASK32
def num_hosts(nm):
    """ return number of hosts possible given netmask """
    # Inverting the mask yields the host-bit pattern; +1 counts the hosts.
    inverted = nm ^ MASK32
    return inverted + 1
def aton(str):
    """ convert dotted decimal IP to int

    Accepts an 'a.b.c.d' string and returns its unsigned 32-bit value.
    Uses int() instead of the Python-2-only long(), and an inline mask
    instead of the module-level MASK32 (a Python-2 long literal), so the
    helper is self-contained and runs under both Python 2 and 3 with
    identical results.
    """
    parts = [int(i) for i in str.split('.')]
    return ((parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3]) & 0xffffffff
class WhoisRec:
""" class representing an ipv4 inetnum whois record """
regex = {'inetnum':re.compile('^inetnum:\s+(.*)$', re.MULTILINE),
'netname':re.compile('^netname:\s+(.*)$', re.MULTILINE),
'descr':re.compile('^descr:\s+(.*)$', re.MULTILINE),
'remarks':re.compile('^remarks:\s+(.*)$', re.MULTILINE),
'country':re.compile('^country:\s+(.*)$', re.MULTILINE),
'status':re.compile('^status:\s+([^\s]*)$', re.MULTILINE),
'adminc':re.compile('^admin-c:\s+(.*)$', re.MULTILINE),
'techc':re.compile('^tech-c:\s+(.*)$', re.MULTILINE),
'notify':re.compile('^notify:\s+(.*)$', re.MULTILINE)}
unfold = re.compile('\n\s+')
def __init__(self, string, type):
if type == 'whois':
self.parse_whois(string)
elif type == 'rir':
self.parse_rir(string)
else:
print "Unknown record type"
def parse_whois(self, string):
# first unfold the string
string = WhoisRec.unfold.sub(' ',string)
# get start_ip, numhosts
self.start_ip = 0L
self.num_hosts = 0
try:
inetnum = self._getsingle('inetnum', string)
self.start_ip, end_ip = [ aton(a.strip()) for a in inetnum.split('-') ]
self.num_hosts = end_ip - self.start_ip + 1
except ValueError, e:
print >>sys.stderr, "ERROR PARSING: %s %s" % (inetnum,e)
self.netname = self._getsingle('netname', string)
self.country = self._getsingle('country', string)
self.adminc = self._getsingle('adminc', string)
self.techc = self._getsingle('techc', string).decode("UTF8","ignore")
self.descr = self._getmulti('descr', string).decode("UTF8","ignore")
self.remarks = self._getmulti('remarks', string).decode("UTF8","ignore")
# get status
status_str = self._getsingle('status', string).lower()
if status_str.find('allocated'):
self.status = 'allocated'
elif status_str.find('assigned'):
self.status = 'assigned'
else:
print "invalid status"
def parse_rir(self, string):
cols = string.split('|')
self.country = cols[1]
self.adminc = ''
self.techc = ''
self.netname=''
self.start_ip = aton(cols[3])
self.num_hosts = int(cols[4])
self.status = cols[6].strip()
self.descr = ''
self.remarks = ''
def _getsingle(self, field, string):
match = WhoisRec.regex[field].search(string)
if match:
return match.groups()[0]
else:
return ""
def _getmulti(self, field, string):
return "\n".join(WhoisRec.regex[field].findall(string))
def __str__(self):
return """
start_ip: %x
netname: %s
num_hosts: %i
country: %s
adminc: %s
techc: %s
status: %s
descr: %s
remarks: %s""" % (self.start_ip,self.netname,self.num_hosts,self.country,self.adminc,self.techc,self.status, self.descr, self.remarks)
class Whois:
    """ class to process a whois database file

    Wraps either a gzipped whois dump (``*.db.inetnum.gz``) or an RIR
    delegation-statistics file (``delegated-*``), downloading it into
    ``config.RESULTDIR`` when not already cached, and iterating over its
    records as :class:`WhoisRec` objects.  Python 2 iterator protocol
    (``next``; Python 3 would need ``__next__``).
    """
    def __init__(self, url):
        # Classify the source from its filename: delegation-statistics files
        # start with 'delegated', whois dumps end with '.db.inetnum.gz'.
        base = os.path.basename(url)
        if base.startswith('delegated'):
            self.whois = 0
            match = re.search('^delegated-(\w+)-(.*)$',base)
            if match:
                self.source, self.date = match.groups()
                if not self.date.isdigit():
                    self.date = ':'.join(["%i"% i for i in time.localtime()[:6]])
            else:
                # NOTE(review): returning None from __init__ does NOT abort
                # construction; later attribute accesses would fail.  Raising
                # would be the explicit way to signal a bad filename.
                return None
        else:
            match = re.search('^(\w+)\.db\.inetnum\.gz$', base)
            if match:
                self.whois = 1
                self.source = match.group(1)
                self.date = ':'.join(["%i"% i for i in time.localtime()[:6]])
            else:
                return None
        # Use the cached copy if present, otherwise download it first.
        fname="%s/%s" % (config.RESULTDIR,base)
        print "searching for %s " % fname
        try:
            self.fp = gzip.open(fname)
        except IOError:
            print "retrieving %s into %s " % (url,fname)
            fname=urllib.urlretrieve(url,fname,progress)[0]
            self.fp=gzip.open(fname)
        # Probe one byte: if the file is not actually gzipped fall back to
        # reading it as plain text.
        try:
            self.fp.read(1)
            self.fp=gzip.open(fname)
        except IOError:
            self.fp=open(fname)
    def next(self):
        # Dispatch to the record parser matching the detected file type.
        if self.whois:
            return self.next_whois()
        else:
            return self.next_rir()
    def next_whois(self):
        """Read one blank-line-delimited whois record; StopIteration at EOF."""
        entry = ""
        while(1):
            line = self.fp.readline()
            if line == '\n':
                break
            elif line == '':
                raise StopIteration
            entry += line
        if entry:
            return WhoisRec(entry, 'whois')
    def next_rir(self):
        """Skip to the next ipv4 statistics line; StopIteration at EOF."""
        while(1):
            line = self.fp.readline()
            cols = line.split('|')
            if len(cols) == 7 and cols[2] == 'ipv4':
                return WhoisRec(line, 'rir')
            if line == '':
                raise StopIteration
    def __iter__(self):
        return self
# Top-level loader: (re)creates the whois tables when config.delete is set,
# then streams every configured whois/RIR source into the database, breaking
# each allocation into CIDR-aligned routes for fast longest-prefix lookup.
dbh = DB.DBO(None)
if config.delete:
    # create tables in master flag database
    # Since the whois entries often get split into many smaller
    # subnets for routing, we will use two tables to reduce space
    ## First drop the old tables
    dbh.execute("drop table if exists whois_sources")
    dbh.execute("drop table if exists whois")
    dbh.execute("drop table if exists whois_routes")
    dbh.execute("drop table if exists whois_cache")
    dbh.execute("""CREATE TABLE IF NOT EXISTS whois_sources (
   `id` int,
   `source` varchar(20),
   `url` varchar(255),
   `updated` datetime)""")
    whois_sources_id = 0
    dbh.execute("""CREATE TABLE IF NOT EXISTS whois (
    `id` int,
    `src_id` int,
    `start_ip` int(10) unsigned,
    `netname` varchar(250),
    `numhosts` int unsigned,
    `country` char(3),
    `adminc` varchar(250),
    `techc` varchar(250),
    `descr` text,
    `remarks` text,
    `status` enum('assigned','allocated','reserved','unallocated'))""")
    whois_id = 0
    dbh.execute("CREATE TABLE IF NOT EXISTS whois_routes ( `network` int(10) unsigned, `netmask` int(10) unsigned, `whois_id` int)")
    dbh.execute("""create table whois_cache (
    `id` int unsigned not NULL,
    `ip` int unsigned not NULL,
    `geoip_country` int unsigned NOT NULL,
    `geoip_city` int unsigned NOT NULL,
    `geoip_isp` int unsigned NOT NULL,
    `geoip_org` int unsigned NOT NULL
    ) engine=MyISAM""")
    # add default (fallthrough) route and reserved ranges
    dbh.insert('whois_sources',
               source='static',
               id = whois_sources_id,
               url='static',
               updated= ':'.join(["%i"% i for i in time.localtime()[:6]]))
    dbh.insert('whois',
               id=whois_id,
               src_id=str(dbh.cursor.lastrowid),
               start_ip=0,
               netname='Default',
               numhosts=0,
               country='--',
               adminc='',
               techc='',
               descr='Default Fallthrough Route: IP INVALID OR UNASSIGNED',
               remarks='',
               status='unallocated'
               )
    # 0.0.0.0/0 route so every lookup matches at least the fallthrough entry.
    dbh.insert('whois_routes',
               network=0,
               netmask = 0,
               whois_id = whois_id)
else:
    # Incremental run: continue id sequences from the existing tables.
    dbh.execute("select max(id) as max from whois_sources")
    row = dbh.fetch()
    if not row or not row['max']:
        whois_sources_id = 1
    else:
        whois_sources_id = row['max'] + 1
    dbh.execute("select max(id) as max from whois")
    row = dbh.fetch()
    if not row:
        whois_id = 1
    else:
        whois_id = row["max"]+1
# process files
source_dbh = dbh.clone()
routes_dbh = dbh.clone()
source_dbh.mass_insert_start('whois_sources')
routes_dbh.mass_insert_start('whois_routes')
dbh.mass_insert_start('whois')
for k,url in urls.items():
# if not getattr(config, k): continue
    print "Processing %s" % url
    try:
        db = Whois(url)
    except Exception, e:
        print "Error %s" % e
        continue
    # add this source to db
    source_dbh.mass_insert(
        source = db.source,
        url = url,
        id = whois_sources_id,
        updated = db.date)
    whois_sources_id+=1
    # process records
    for rec in db:
        ## Get a new whois_id number:
        whois_id += 1
        dbh.mass_insert(
            src_id = whois_sources_id,
            start_ip = "%u" % rec.start_ip,
            netname = rec.netname[:250],
            numhosts = rec.num_hosts,
            country = rec.country[:2],
            adminc = rec.adminc[:250],
            techc = rec.techc[:250],
            id = whois_id,
            __descr = rec.descr,
            remarks = rec.remarks,
            status = rec.status)
        #now process the networks (routes)...
        # split into networks on bit boundaries
        left = rec.num_hosts
        masks = []
        while left:
            nm = largest_nm(left)
            masks.append(nm)
            left = left - num_hosts(nm)
        # sort masks, set initial network address
        network = rec.start_ip
        masks.sort() # smallest netmask (ie. largest network) will be first
        # process networks
        while masks:
            # get indexes of the ones that align
            align = [ x for x in range(len(masks)) if (network & masks[x]) == network ]
            if len(align) == 0:
                # none align, have to split smallest network in half and try again
                masks.append(largest_nm(num_hosts(masks.pop())/2))
                masks.append(masks[-1])
            else:
                # choose the largest network which is aligned and assign it
                routes_dbh.mass_insert(
                    network = network & MASK32,
                    netmask = "%u" % masks[align[0]],
                    whois_id = whois_id)
                # advance network address and remove this from masks
                network = network + num_hosts(masks[align[0]])
                del masks[align[0]]
# add indexes
#dbh.check_index("whois_routes","network")
#dbh.check_index("whois_routes","netmask")
#dbh.check_index("whois","id")
sexroute/commandergenius | project/jni/python/src/Lib/plat-mac/lib-scriptpackages/Finder/Enumerations.py | 81 | 3491 | """Suite Enumerations: Enumerations for the Finder
Level 1, version 1
Generated from /System/Library/CoreServices/Finder.app
AETE/AEUT resource version 0/144, language 0, script 0
"""
import aetools
import MacOS
_code = 'tpnm'
from StdSuites.Type_Names_Suite import *
class Enumerations_Events(Type_Names_Suite_Events):
    """Event suite for the Finder enumerations: inherits every event from
    the Type Names suite and adds none of its own."""
    pass
# Mapping tables from human-readable enumeration names to the four-character
# AppleEvent codes the Finder understands.  Generated from the Finder's
# AETE/AEUT resources; keys are Python identifiers, values are OSType codes.
# Icon arrangement styles.
_Enum_earr = {
    'not_arranged' : 'narr', #
    'snap_to_grid' : 'grda', #
    'arranged_by_name' : 'nama', #
    'arranged_by_modification_date' : 'mdta', #
    'arranged_by_creation_date' : 'cdta', #
    'arranged_by_size' : 'siza', #
    'arranged_by_kind' : 'kina', #
    'arranged_by_label' : 'laba', #
}
# Finder window view styles.
_Enum_ecvw = {
    'icon_view' : 'icnv', #
    'list_view' : 'lsvw', #
    'column_view' : 'clvw', #
}
# On-disk volume formats.
_Enum_edfm = {
    'Mac_OS_format' : 'dfhf', #
    'Mac_OS_Extended_format' : 'dfh+', #
    'UFS_format' : 'dfuf', #
    'NFS_format' : 'dfnf', #
    'audio_format' : 'dfau', #
    'ProDOS_format' : 'dfpr', #
    'MS_2d_DOS_format' : 'dfms', #
    'ISO_9660_format' : 'df96', #
    'High_Sierra_format' : 'dfhs', #
    'QuickTake_format' : 'dfqt', #
    'Apple_Photo_format' : 'dfph', #
    'AppleShare_format' : 'dfas', #
    'UDF_format' : 'dfud', #
    'WebDAV_format' : 'dfwd', #
    'FTP_format' : 'dfft', #
    'Packet_2d_written_UDF_format' : 'dfpu', #
    'unknown_format' : 'df??', #
}
# Columns available in list view.
_Enum_elsv = {
    'name_column' : 'elsn', #
    'modification_date_column' : 'elsm', #
    'creation_date_column' : 'elsc', #
    'size_column' : 'elss', #
    'kind_column' : 'elsk', #
    'label_column' : 'elsl', #
    'version_column' : 'elsv', #
    'comment_column' : 'elsC', #
}
# Panels of the Get Info window.
_Enum_ipnl = {
    'General_Information_panel' : 'gpnl', #
    'Sharing_panel' : 'spnl', #
    'Memory_panel' : 'mpnl', #
    'Preview_panel' : 'vpnl', #
    'Application_panel' : 'apnl', #
    'Languages_panel' : 'pklg', #
    'Plugins_panel' : 'pkpg', #
    'Name__26__Extension_panel' : 'npnl', #
    'Comments_panel' : 'cpnl', #
    'Content_Index_panel' : 'cinl', #
}
# Icon sizes.
_Enum_isiz = {
    'mini' : 'miic', #
    'small' : 'smic', #
    'large' : 'lgic', #
}
# List-view icon sizes.
_Enum_lvic = {
    'small_icon' : 'smic', #
    'large_icon' : 'lgic', #
}
# Access privilege levels.
_Enum_priv = {
    'read_only' : 'read', #
    'read_write' : 'rdwr', #
    'write_only' : 'writ', #
    'none' : 'none', #
}
# Sort directions.
_Enum_sodr = {
    'normal' : 'snrm', #
    'reversed' : 'srvs', #
}
# Properties a window's contents can be viewed/sorted by.
_Enum_vwby = {
    'conflicts' : 'cflc', #
    'existing_items' : 'exsi', #
    'small_icon' : 'smic', #
    'icon' : 'iimg', #
    'name' : 'pnam', #
    'modification_date' : 'asmo', #
    'size' : 'ptsz', #
    'kind' : 'kind', #
    'comment' : 'comt', #
    'label' : 'labi', #
    'version' : 'vers', #
    'creation_date' : 'ascd', #
    'small_button' : 'smbu', #
    'large_button' : 'lgbu', #
    'grid' : 'grid', #
    'all' : 'kyal', #
}
#
# Indices of types declared in this module
#
_classdeclarations = {
}
_propdeclarations = {
}
_compdeclarations = {
}
# Registry of the enumerations above, keyed by their OSType code.
_enumdeclarations = {
    'earr' : _Enum_earr,
    'ecvw' : _Enum_ecvw,
    'edfm' : _Enum_edfm,
    'elsv' : _Enum_elsv,
    'ipnl' : _Enum_ipnl,
    'isiz' : _Enum_isiz,
    'lvic' : _Enum_lvic,
    'priv' : _Enum_priv,
    'sodr' : _Enum_sodr,
    'vwby' : _Enum_vwby,
}
| lgpl-2.1 |
bgris/ODL_bgris | lib/python3.5/site-packages/PIL/FpxImagePlugin.py | 14 | 6388 | #
# THIS IS WORK IN PROGRESS
#
# The Python Imaging Library.
# $Id$
#
# FlashPix support for PIL
#
# History:
# 97-01-25 fl Created (reads uncompressed RGB images only)
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1997.
#
# See the README file for information on usage and redistribution.
#
from __future__ import print_function
from PIL import Image, ImageFile, _binary
import olefile
__version__ = "0.1"
# Little-endian 32-bit and single-byte readers from PIL's binary helpers.
i32 = _binary.i32le
i8 = _binary.i8
# we map from colour field tuples to (mode, rawmode) descriptors
# NOTE: every key must be a tuple, because lookups are always performed as
# MODES[tuple(colors)] in FpxImageFile._open_index.
MODES = {
    # opacity
    # BUGFIX: was written as (0x00007ffe) — a plain int, not a 1-tuple —
    # so the opacity entry could never match a tuple(colors) lookup.
    (0x00007ffe,): ("A", "L"),
    # monochrome
    (0x00010000,): ("L", "L"),
    (0x00018000, 0x00017ffe): ("RGBA", "LA"),
    # photo YCC
    (0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"),
    (0x00028000, 0x00028001, 0x00028002, 0x00027ffe): ("RGBA", "YCCA;P"),
    # standard RGB (NIFRGB)
    (0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"),
    (0x00038000, 0x00038001, 0x00038002, 0x00037ffe): ("RGBA", "RGBA"),
}
#
# --------------------------------------------------------------------
def _accept(prefix):
    """Quick sniff test: does *prefix* open with the OLE2 compound-file
    magic bytes that every FlashPix container starts with?"""
    header = prefix[:8]
    return header == olefile.MAGIC
##
# Image plugin for the FlashPix images.
class FpxImageFile(ImageFile.ImageFile):
    """Image plugin for FlashPix (.fpx) files stored in OLE2 containers.

    Only the highest-resolution subimage is exposed; tiles may be raw,
    fill-compressed (not implemented) or JPEG-compressed.
    """
    format = "FPX"
    format_description = "FlashPix"
    def _open(self):
        #
        # read the OLE directory and see if this is a likely
        # to be a FlashPix file
        try:
            self.ole = olefile.OleFileIO(self.fp)
        except IOError:
            raise SyntaxError("not an FPX file; invalid OLE file")
        # FlashPix files are identified by a fixed root-storage CLSID.
        if self.ole.root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B":
            raise SyntaxError("not an FPX file; bad root CLSID")
        self._open_index(1)
    def _open_index(self, index=1):
        """Read the Image Contents property set of data object *index* and
        set up size, mode and JPEG tables; then open the largest subimage."""
        #
        # get the Image Contents Property Set
        prop = self.ole.getproperties([
            "Data Object Store %06d" % index,
            "\005Image Contents"
        ])
        # size (highest resolution)
        self.size = prop[0x1000002], prop[0x1000003]
        # Count resolution levels: each level halves the largest dimension
        # until it fits in a 64-pixel tile.
        size = max(self.size)
        i = 1
        while size > 64:
            size = size / 2
            i += 1
        self.maxid = i - 1
        # mode. instead of using a single field for this, flashpix
        # requires you to specify the mode for each channel in each
        # resolution subimage, and leaves it to the decoder to make
        # sure that they all match. for now, we'll cheat and assume
        # that this is always the case.
        id = self.maxid << 16
        s = prop[0x2000002 | id]
        colors = []
        for i in range(i32(s, 4)):
            # note: for now, we ignore the "uncalibrated" flag
            colors.append(i32(s, 8+i*4) & 0x7fffffff)
        self.mode, self.rawmode = MODES[tuple(colors)]
        # load JPEG tables, if any
        self.jpeg = {}
        for i in range(256):
            id = 0x3000001 | (i << 16)
            if id in prop:
                self.jpeg[i] = prop[id]
        # print(len(self.jpeg), "tables loaded")
        self._open_subimage(1, self.maxid)
    def _open_subimage(self, index=1, subimage=0):
        """Build the tile descriptor list for one resolution subimage."""
        #
        # setup tile descriptors for a given subimage
        stream = [
            "Data Object Store %06d" % index,
            "Resolution %04d" % subimage,
            "Subimage 0000 Header"
        ]
        fp = self.ole.openstream(stream)
        # skip prefix
        fp.read(28)
        # header stream
        s = fp.read(36)
        size = i32(s, 4), i32(s, 8)
        # tilecount = i32(s, 12)
        tilesize = i32(s, 16), i32(s, 20)
        # channels = i32(s, 24)
        offset = i32(s, 28)
        length = i32(s, 32)
        # print(size, self.mode, self.rawmode)
        if size != self.size:
            raise IOError("subimage mismatch")
        # get tile descriptors
        fp.seek(28 + offset)
        s = fp.read(i32(s, 12) * length)
        # Walk the tile grid left-to-right, top-to-bottom, emitting one
        # PIL tile entry per descriptor.
        x = y = 0
        xsize, ysize = size
        xtile, ytile = tilesize
        self.tile = []
        for i in range(0, len(s), length):
            compression = i32(s, i+8)
            if compression == 0:
                self.tile.append(("raw", (x, y, x+xtile, y+ytile),
                                  i32(s, i) + 28, (self.rawmode)))
            elif compression == 1:
                # FIXME: the fill decoder is not implemented
                self.tile.append(("fill", (x, y, x+xtile, y+ytile),
                                  i32(s, i) + 28, (self.rawmode, s[12:16])))
            elif compression == 2:
                internal_color_conversion = i8(s[14])
                jpeg_tables = i8(s[15])
                rawmode = self.rawmode
                if internal_color_conversion:
                    # The image is stored as usual (usually YCbCr).
                    if rawmode == "RGBA":
                        # For "RGBA", data is stored as YCbCrA based on
                        # negative RGB. The following trick works around
                        # this problem :
                        jpegmode, rawmode = "YCbCrK", "CMYK"
                    else:
                        jpegmode = None  # let the decoder decide
                else:
                    # The image is stored as defined by rawmode
                    jpegmode = rawmode
                self.tile.append(("jpeg", (x, y, x+xtile, y+ytile),
                                  i32(s, i) + 28, (rawmode, jpegmode)))
                # FIXME: jpeg tables are tile dependent; the prefix
                # data must be placed in the tile descriptor itself!
                if jpeg_tables:
                    self.tile_prefix = self.jpeg[jpeg_tables]
            else:
                raise IOError("unknown/invalid compression")
            x = x + xtile
            if x >= xsize:
                x, y = 0, y + ytile
                if y >= ysize:
                    break  # isn't really required
        self.stream = stream
        self.fp = None
    def load(self):
        # Lazily open the pixel data stream the first time pixels are needed.
        if not self.fp:
            self.fp = self.ole.openstream(self.stream[:2] +
                                          ["Subimage 0000 Data"])
        return ImageFile.ImageFile.load(self)
#
# --------------------------------------------------------------------
# Register the plugin so Image.open() can sniff (_accept) and load FlashPix
# files, and map the .fpx extension to this format.
Image.register_open(FpxImageFile.format, FpxImageFile, _accept)
Image.register_extension(FpxImageFile.format, ".fpx")
| gpl-3.0 |
yakky/django | tests/sitemaps_tests/urls/http.py | 311 | 4069 | from datetime import date, datetime
from django.conf.urls import url
from django.conf.urls.i18n import i18n_patterns
from django.contrib.sitemaps import GenericSitemap, Sitemap, views
from django.http import HttpResponse
from django.utils import timezone
from django.views.decorators.cache import cache_page
from ..models import I18nTestModel, TestModel
class SimpleSitemap(Sitemap):
    """One-entry sitemap with a fixed location.

    ``lastmod`` is evaluated once at import time, so every request reports
    the same modification stamp.
    """
    changefreq = "never"
    priority = 0.5
    location = '/location/'
    lastmod = datetime.now()
    def items(self):
        # A single arbitrary object: location/lastmod are class constants,
        # so any item value works.
        return [object()]
class SimpleI18nSitemap(Sitemap):
    """Sitemap with i18n enabled: one URL per (item, language) pair."""
    changefreq = "never"
    priority = 0.5
    i18n = True
    def items(self):
        # All I18nTestModel rows from the test database.
        return I18nTestModel.objects.all()
class EmptySitemap(Sitemap):
    """Sitemap with no items, for testing the empty-page error path."""
    changefreq = "never"
    priority = 0.5
    location = '/location/'
    def items(self):
        return []
class FixedLastmodSitemap(SimpleSitemap):
    """SimpleSitemap variant with a deterministic, fixed lastmod stamp."""
    lastmod = datetime(2013, 3, 13, 10, 0, 0)
class FixedLastmodMixedSitemap(Sitemap):
    """Sitemap whose items disagree about lastmod: the first item carries a
    fixed stamp, the second none — exercises the mixed-lastmod code path."""
    changefreq = "never"
    priority = 0.5
    location = '/location/'
    # NOTE(review): ``loop`` appears unused by the visible code.
    loop = 0
    def items(self):
        o1 = TestModel()
        o1.lastmod = datetime(2013, 3, 13, 10, 0, 0)
        o2 = TestModel()  # deliberately has no lastmod attribute
        return [o1, o2]
class DateSiteMap(SimpleSitemap):
    """SimpleSitemap variant whose lastmod is a date (no time component)."""
    lastmod = date(2013, 3, 13)
class TimezoneSiteMap(SimpleSitemap):
    """SimpleSitemap variant with a timezone-aware lastmod (UTC-05:00)."""
    lastmod = datetime(2013, 3, 13, 10, 0, 0, tzinfo=timezone.get_fixed_timezone(-300))
def testmodelview(request, id):
    # Minimal stand-in view for reversing i18n_testmodel URLs; the id is
    # accepted for URL matching but otherwise unused.
    return HttpResponse()
# Sitemap registries used by the URL patterns below; each maps a section
# name to a Sitemap class (or instance, for the generic case).
simple_sitemaps = {
    'simple': SimpleSitemap,
}
simple_i18nsitemaps = {
    'simple': SimpleI18nSitemap,
}
empty_sitemaps = {
    'empty': EmptySitemap,
}
fixed_lastmod_sitemaps = {
    'fixed-lastmod': FixedLastmodSitemap,
}
# NOTE(review): double underscore in the name looks like a typo but is
# referenced below, so it must stay.
fixed_lastmod__mixed_sitemaps = {
    'fixed-lastmod-mixed': FixedLastmodMixedSitemap,
}
generic_sitemaps = {
    'generic': GenericSitemap({'queryset': TestModel.objects.all()}),
}
# One URL per scenario exercised by the sitemap tests: index vs. section
# views, custom templates, empty/lastmod/timezone variants, caching.
urlpatterns = [
    url(r'^simple/index\.xml$', views.index, {'sitemaps': simple_sitemaps}),
    url(r'^simple/custom-index\.xml$', views.index,
        {'sitemaps': simple_sitemaps, 'template_name': 'custom_sitemap_index.xml'}),
    url(r'^simple/sitemap-(?P<section>.+)\.xml$', views.sitemap,
        {'sitemaps': simple_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^simple/sitemap\.xml$', views.sitemap,
        {'sitemaps': simple_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^simple/i18n\.xml$', views.sitemap,
        {'sitemaps': simple_i18nsitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^simple/custom-sitemap\.xml$', views.sitemap,
        {'sitemaps': simple_sitemaps, 'template_name': 'custom_sitemap.xml'},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^empty/sitemap\.xml$', views.sitemap,
        {'sitemaps': empty_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^lastmod/sitemap\.xml$', views.sitemap,
        {'sitemaps': fixed_lastmod_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^lastmod-mixed/sitemap\.xml$', views.sitemap,
        {'sitemaps': fixed_lastmod__mixed_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^lastmod/date-sitemap.xml$', views.sitemap,
        {'sitemaps': {'date-sitemap': DateSiteMap}},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^lastmod/tz-sitemap.xml$', views.sitemap,
        {'sitemaps': {'tz-sitemap': TimezoneSiteMap}},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^generic/sitemap\.xml$', views.sitemap,
        {'sitemaps': generic_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^cached/index\.xml$', cache_page(1)(views.index),
        {'sitemaps': simple_sitemaps, 'sitemap_url_name': 'cached_sitemap'}),
    url(r'^cached/sitemap-(?P<section>.+)\.xml', cache_page(1)(views.sitemap),
        {'sitemaps': simple_sitemaps}, name='cached_sitemap')
]
# Language-prefixed URL for the i18n sitemap items to reverse against.
urlpatterns += i18n_patterns(
    url(r'^i18n/testmodel/(?P<id>\d+)/$', testmodelview, name='i18n_testmodel'),
)
drpaneas/linuxed.gr | lib/python2.7/site-packages/docutils/parsers/rst/languages/pl.py | 130 | 3427 | # $Id$
# Author: Robert Wojciechowicz <rw@smsnet.pl>
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
Polish-language mappings for language-dependent features of
reStructuredText.
"""
__docformat__ = 'reStructuredText'
# Polish directive names (keys, unicode) mapped to the canonical English
# directive names registered in docutils.parsers.rst.directives.
directives = {
    # language-dependent: fixed
    u'uwaga': 'attention',
    u'ostro\u017cnie': 'caution',
    u'code (translation required)': 'code',
    u'niebezpiecze\u0144stwo': 'danger',
    u'b\u0142\u0105d': 'error',
    u'wskaz\u00f3wka': 'hint',
    u'wa\u017cne': 'important',
    u'przypis': 'note',
    u'rada': 'tip',
    u'ostrze\u017cenie': 'warning',
    u'upomnienie': 'admonition',
    u'ramka': 'sidebar',
    u'temat': 'topic',
    u'blok-linii': 'line-block',
    u'sparsowany-litera\u0142': 'parsed-literal',
    u'rubryka': 'rubric',
    u'epigraf': 'epigraph',
    u'highlights': 'highlights',  # FIXME no polish equivalent?
    u'pull-quote': 'pull-quote',  # FIXME no polish equivalent?
    u'z\u0142o\u017cony': 'compound',
    u'kontener': 'container',
    #'questions': 'questions',
    u'tabela': 'table',
    u'tabela-csv': 'csv-table',
    u'tabela-listowa': 'list-table',
    #'qa': 'questions',
    #'faq': 'questions',
    u'meta': 'meta',
    'math (translation required)': 'math',
    #'imagemap': 'imagemap',
    u'obraz': 'image',
    u'rycina': 'figure',
    u'do\u0142\u0105cz': 'include',
    u'surowe': 'raw',
    u'zast\u0105p': 'replace',
    u'unikod': 'unicode',
    u'data': 'date',
    u'klasa': 'class',
    u'rola': 'role',
    u'rola-domy\u015blna': 'default-role',
    u'tytu\u0142': 'title',
    u'tre\u015b\u0107': 'contents',
    u'sectnum': 'sectnum',
    u'numeracja-sekcji': 'sectnum',
    u'nag\u0142\u00f3wek': 'header',
    u'stopka': 'footer',
    #'footnotes': 'footnotes',
    #'citations': 'citations',
    u'target-notes': 'target-notes',  # FIXME no polish equivalent?
    u'restructuredtext-test-directive': 'restructuredtext-test-directive'}
"""Polish name to registered (in directives/__init__.py) directive name
mapping."""
# Polish role names mapped to the canonical role names used for
# interpreted text.
roles = {
    # language-dependent: fixed
    u'skr\u00f3t': 'abbreviation',
    u'akronim': 'acronym',
    u'code (translation required)': 'code',
    u'indeks': 'index',
    u'indeks-dolny': 'subscript',
    u'indeks-g\u00f3rny': 'superscript',
    u'referencja-tytu\u0142': 'title-reference',
    u'referencja-pep': 'pep-reference',
    u'referencja-rfc': 'rfc-reference',
    u'podkre\u015blenie': 'emphasis',
    u'wyt\u0142uszczenie': 'strong',
    u'dos\u0142ownie': 'literal',
    'math (translation required)': 'math',
    u'referencja-nazwana': 'named-reference',
    u'referencja-anonimowa': 'anonymous-reference',
    u'referencja-przypis': 'footnote-reference',
    u'referencja-cytat': 'citation-reference',
    u'referencja-podstawienie': 'substitution-reference',
    u'cel': 'target',
    u'referencja-uri': 'uri-reference',
    u'uri': 'uri-reference',
    u'url': 'uri-reference',
    u'surowe': 'raw',}
"""Mapping of Polish role names to canonical role names for interpreted text.
"""
| mit |
rosswhitfield/mantid | Framework/PythonInterface/plugins/algorithms/EnggFocus.py | 3 | 12079 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source,
# Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
from mantid.kernel import *
from mantid.api import *
import EnggUtils
class EnggFocus(PythonAlgorithm):
    """Deprecated ENGIN-X focusing algorithm.

    Crops the input run to a bank or spectrum list, optionally applies
    Vanadium corrections and calibrated detector positions, converts to
    d-spacing, sums all spectra into one, converts back to TOF and
    normalises by proton charge.
    """
    # Name of the input property used to select individual spectra.
    INDICES_PROP_NAME = 'SpectrumNumbers'
    def category(self):
        # Category shown in the Mantid algorithm browser.
        return "Diffraction\\Engineering"
    def seeAlso(self):
        # Related algorithms linked from the documentation page.
        return ["AlignDetectors", "DiffractionFocussing"]
    def name(self):
        return "EnggFocus"
    def summary(self):
        # NOTE(review): implicit string concatenation leaves no space between
        # "instead." and "Focuses" in the rendered summary text.
        return ("This algorithm is deprecated as of May 2021, consider using DiffractionFocussing instead."
                "Focuses a run by summing up all the spectra into a single one.")
    def PyInit(self):
        """Declare the input/output properties and group them for the GUI."""
        self.declareProperty(MatrixWorkspaceProperty("InputWorkspace", "", Direction.Input),
                             "Workspace with the run to focus.")
        self.declareProperty(MatrixWorkspaceProperty("OutputWorkspace", "", Direction.Output),
                             "A workspace with focussed data")
        self.declareProperty(ITableWorkspaceProperty('DetectorPositions', '', Direction.Input,
                                                     PropertyMode.Optional),
                             "Calibrated detector positions. If not specified, default ones are used.")
        self.declareProperty(MatrixWorkspaceProperty("VanadiumWorkspace", "", Direction.Input,
                                                     PropertyMode.Optional),
                             doc='Workspace with the Vanadium (correction and calibration) run. '
                                 'Alternatively, when the Vanadium run has been already processed, '
                                 'the properties can be used')
        self.declareProperty(ITableWorkspaceProperty('VanIntegrationWorkspace', '',
                                                     Direction.Input, PropertyMode.Optional),
                             doc='Results of integrating the spectra of a Vanadium run, with one column '
                                 '(integration result) and one row per spectrum. This can be used in '
                                 'combination with OutVanadiumCurveFits from a previous execution and '
                                 'VanadiumWorkspace to provide pre-calculated values for Vanadium correction.')
        self.declareProperty(MatrixWorkspaceProperty('VanCurvesWorkspace', '', Direction.Input,
                                                     PropertyMode.Optional),
                             doc='A workspace2D with the fitting workspaces corresponding to '
                                 'the instrument banks. This workspace has three spectra per bank, as produced '
                                 'by the algorithm Fit. This is meant to be used as an alternative input '
                                 'VanadiumWorkspace for testing and performance reasons. If not given, no '
                                 'workspace is generated.')
        # Group the Vanadium-related properties in the GUI.
        vana_grp = 'Vanadium (open beam) properties'
        self.setPropertyGroup('VanadiumWorkspace', vana_grp)
        self.setPropertyGroup('VanIntegrationWorkspace', vana_grp)
        self.setPropertyGroup('VanCurvesWorkspace', vana_grp)
        self.declareProperty("Bank", '', StringListValidator(EnggUtils.ENGINX_BANKS),
                             direction=Direction.Input,
                             doc="Which bank to focus: It can be specified as 1 or 2, or "
                                 "equivalently, North or South. See also " + self.INDICES_PROP_NAME + " "
                                 "for a more flexible alternative to select specific detectors")
        self.declareProperty(self.INDICES_PROP_NAME, '', direction=Direction.Input,
                             doc='Sets the spectrum numbers for the detectors '
                                 'that should be considered in the focusing operation (all others will be '
                                 'ignored). This option cannot be used together with Bank, as they overlap. '
                                 'You can give multiple ranges, for example: "0-99", or "0-9, 50-59, 100-109".')
        banks_grp = 'Banks / spectra'
        self.setPropertyGroup('Bank', banks_grp)
        self.setPropertyGroup(self.INDICES_PROP_NAME, banks_grp)
        self.declareProperty('NormaliseByCurrent', True, direction=Direction.Input,
                             doc='Normalize the input data by applying the NormaliseByCurrent algorithm '
                                 'which use the log entry gd_proton_charge. If there is no proton charge '
                                 'the data are not normalised.')
        self.declareProperty(FloatArrayProperty('MaskBinsXMins', EnggUtils.ENGINX_MASK_BIN_MINS,
                                                direction=Direction.Input),
                             doc="List of minimum bin values to mask, separated by commas.")
        self.declareProperty(FloatArrayProperty('MaskBinsXMaxs', EnggUtils.ENGINX_MASK_BIN_MAXS,
                                                direction=Direction.Input),
                             doc="List of maximum bin values to mask, separated by commas.")
        prep_grp = 'Data preparation/pre-processing'
        self.setPropertyGroup('NormaliseByCurrent', prep_grp)
        self.setPropertyGroup('MaskBinsXMins', prep_grp)
        self.setPropertyGroup('MaskBinsXMaxs', prep_grp)
    def validateInputs(self):
        """Check MaskBinsXMins/MaskBinsXMaxs are given together and match in length."""
        issues = dict()
        # XOR: exactly one of the two lists given is an error
        # (precedence: (not A and B) or (A and not B)).
        if not self.getPropertyValue('MaskBinsXMins') and self.getPropertyValue('MaskBinsXMaxs') or \
                self.getPropertyValue('MaskBinsXMins') and not self.getPropertyValue('MaskBinsXMaxs'):
            issues['MaskBinsXMins'] = "Both minimum and maximum values need to be given, or none"
        min_list = self.getProperty('MaskBinsXMins').value
        max_list = self.getProperty('MaskBinsXMaxs').value
        if len(min_list) > 0 and len(max_list) > 0:
            len_min = len(min_list)
            len_max = len(max_list)
            if len_min != len_max:
                issues['MaskBinsXMins'] = ("The number of minimum and maximum values must match. Got "
                                           "{0} and {1} for the minimum and maximum, respectively"
                                           .format(len_min, len_max))
        return issues
    def PyExec(self):
        """Run the focusing pipeline: crop, mask, correct, convert, sum, normalise."""
        logger.warning("EnggFocus is deprecated as of May 2021. Please use DiffractionFocussing instead.")
        # Get the run workspace
        input_ws = self.getProperty('InputWorkspace').value
        # Get spectra indices either from bank or direct list of indices, checking for errors
        bank = self.getProperty('Bank').value
        spectra = self.getProperty(self.INDICES_PROP_NAME).value
        indices = EnggUtils.get_ws_indices_from_input_properties(input_ws, bank, spectra)
        detector_positions = self.getProperty("DetectorPositions").value
        n_reports = 8
        prog = Progress(self, start=0, end=1, nreports=n_reports)
        # Leave only the data for the bank/spectra list requested
        prog.report('Selecting spectra from input workspace')
        input_ws = EnggUtils.crop_data(self, input_ws, indices)
        prog.report('Masking some bins if requested')
        self._mask_bins(input_ws, self.getProperty('MaskBinsXMins').value, self.getProperty('MaskBinsXMaxs').value)
        prog.report('Applying vanadium corrections')
        # Leave data for the same bank in the vanadium workspace too
        vanadium_ws = self.getProperty('VanadiumWorkspace').value
        van_integration_ws = self.getProperty('VanIntegrationWorkspace').value
        van_curves_ws = self.getProperty('VanCurvesWorkspace').value
        EnggUtils.apply_vanadium_corrections(parent=self, ws=input_ws, indices=indices, vanadium_ws=vanadium_ws,
                                             van_integration_ws=van_integration_ws, van_curves_ws=van_curves_ws,
                                             progress_range=(0.65, 0.8))
        prog.report("Applying calibration if requested")
        # Apply calibration
        if detector_positions:
            self._apply_calibration(input_ws, detector_positions)
        # Convert to dSpacing
        prog.report("Converting to d")
        input_ws = EnggUtils.convert_to_d_spacing(self, input_ws)
        prog.report('Summing spectra')
        # Sum the values across spectra
        input_ws = EnggUtils.sum_spectra(self, input_ws)
        prog.report('Preparing output workspace')
        # Convert back to time of flight
        input_ws = EnggUtils.convert_to_TOF(self, input_ws)
        prog.report('Normalizing input workspace if needed')
        if self.getProperty('NormaliseByCurrent').value:
            self._normalize_by_current(input_ws)
        # OpenGenie displays distributions instead of pure counts (this is done implicitly when
        # converting units), so I guess that's what users will expect
        self._convert_to_distribution(input_ws)
        self._add_bank_number(input_ws, bank)
        self.setProperty("OutputWorkspace", input_ws)
    def _bank_to_int(self, bank):
        # Map the Bank property value to the numeric id stored in the logs;
        # cropped / texture runs (no named bank) are recorded as "0".
        if bank == "North":
            return "1"
        if bank == "South":
            return "2"
        if bank in ("1", "2"):
            return bank
        # The convention is to set bank ID to 0 for cropped / texture runs
        return "0"
    def _add_bank_number(self, ws, bank):
        # Record the bank id as a numeric sample log on the output workspace.
        alg = self.createChildAlgorithm("AddSampleLog")
        alg.setProperty("Workspace", ws)
        alg.setProperty("LogName", "bankid")
        alg.setProperty("LogText", self._bank_to_int(bank))
        alg.setProperty("LogType", "Number")
        alg.execute()
    def _mask_bins(self, wks, min_bins, max_bins):
        """
        Mask multiple ranges of bins, given multiple pairs min-max

        @param wks :: workspace that will be masked (in/out, masked in place)
        @param min_bins :: list of minimum values for every range to mask
        @param max_bins :: list of maxima
        """
        for min_x, max_x in zip(min_bins, max_bins):
            alg = self.createChildAlgorithm('MaskBins')
            alg.setProperty('InputWorkspace', wks)
            alg.setProperty('OutputWorkspace', wks)
            alg.setProperty('XMin', min_x)
            alg.setProperty('XMax', max_x)
            alg.execute()
    def _normalize_by_current(self, wks):
        """
        Apply the normalize by current algorithm on a workspace

        @param wks :: workspace (in/out, modified in place)
        """
        if wks.getRun().getProtonCharge() > 0:
            alg = self.createChildAlgorithm('NormaliseByCurrent')
            alg.setProperty('InputWorkspace', wks)
            alg.setProperty('OutputWorkspace', wks)
            alg.execute()
        else:
            # Zero/negative charge would divide by nothing useful; warn and skip.
            self.log().warning(f"Cannot normalize by current because workspace {wks.name()} has invalid proton charge")
    def _apply_calibration(self, wks, detector_positions):
        """
        Refines the detector positions using the result of calibration (if one is specified).

        @param wks :: workspace to apply the calibration (on its instrument)
        @param detector_positions :: detector positions (as a table of positions, one row per detector)
        """
        alg = self.createChildAlgorithm('ApplyCalibration')
        alg.setProperty('Workspace', wks)
        alg.setProperty('CalibrationTable', detector_positions)
        alg.execute()
    def _convert_to_distribution(self, wks):
        """
        Convert workspace to distribution

        @param wks :: workspace, which is modified/converted in place
        """
        alg = self.createChildAlgorithm('ConvertToDistribution')
        alg.setProperty('Workspace', wks)
        alg.execute()
# Register the algorithm with Mantid's factory so it is available by name.
AlgorithmFactory.subscribe(EnggFocus)
| gpl-3.0 |
VagrantApe/flaskMicroblog | venv/lib/python2.7/site-packages/flask_wtf/i18n.py | 4 | 1651 | # coding: utf-8
"""
flask_wtf.i18n
~~~~~~~~~~~~~~
Internationalization support for Flask WTF.
:copyright: (c) 2013 by Hsiaoming Yang.
"""
from flask import _request_ctx_stack
from wtforms.ext.i18n.utils import messages_path
from flask.ext.babel import get_locale
from babel import support
__all__ = ('Translations', 'translations')
def _get_translations():
    """Returns the correct gettext translations.
    Copy from flask-babel with some modifications.

    Returns ``None`` outside a request context or when Flask-Babel is not
    initialised; otherwise loads (and caches on the request context, under
    ``wtforms_translations``) the wtforms catalog for the active locale.
    """
    ctx = _request_ctx_stack.top
    if ctx is None:
        return None
    # babel should be in extensions for get_locale
    if 'babel' not in ctx.app.extensions:
        return None
    translations = getattr(ctx, 'wtforms_translations', None)
    if translations is None:
        # Load the wtforms message catalog for the current locale once per
        # request and memoise it on the request context.
        dirname = messages_path()
        translations = support.Translations.load(
            dirname, [get_locale()], domain='wtforms'
        )
        ctx.wtforms_translations = translations
    return translations
class Translations(object):
    """WTForms-compatible translations proxy.

    Delegates to the request-local babel catalog when one is available and
    falls back to returning the untranslated input otherwise.
    """
    def gettext(self, string):
        """Translate *string*, or return it unchanged when no catalog is active."""
        catalog = _get_translations()
        if catalog is None:
            return string
        # Python 2 catalogs expose ugettext; Python 3 only has gettext.
        if hasattr(catalog, 'ugettext'):
            return catalog.ugettext(string)
        return catalog.gettext(string)
    def ngettext(self, singular, plural, n):
        """Translate a pluralizable message, choosing the form for count *n*."""
        catalog = _get_translations()
        if catalog is None:
            return singular if n == 1 else plural
        # Python 2 catalogs expose ungettext; Python 3 only has ngettext.
        if hasattr(catalog, 'ungettext'):
            return catalog.ungettext(singular, plural, n)
        return catalog.ngettext(singular, plural, n)
# Module-level singleton handed to WTForms forms as their translations object.
translations = Translations()
| bsd-3-clause |
docusign/docusign-python-client | docusign_esign/models/in_person_signer.py | 1 | 102224 | # coding: utf-8
"""
DocuSign REST API
The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign. # noqa: E501
OpenAPI spec version: v2.1
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class InPersonSigner(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'access_code': 'str',
'access_code_metadata': 'PropertyMetadata',
'add_access_code_to_email': 'str',
'allow_system_override_for_locked_recipient': 'str',
'auto_navigation': 'str',
'can_sign_offline': 'str',
'client_user_id': 'str',
'completed_count': 'str',
'creation_reason': 'str',
'custom_fields': 'list[str]',
'declined_date_time': 'str',
'declined_reason': 'str',
'default_recipient': 'str',
'delivered_date_time': 'str',
'delivery_method': 'str',
'delivery_method_metadata': 'PropertyMetadata',
'designator_id': 'str',
'designator_id_guid': 'str',
'document_visibility': 'list[DocumentVisibility]',
'email': 'str',
'email_metadata': 'PropertyMetadata',
'email_notification': 'RecipientEmailNotification',
'embedded_recipient_start_url': 'str',
'error_details': 'ErrorDetails',
'excluded_documents': 'list[str]',
'fax_number': 'str',
'fax_number_metadata': 'PropertyMetadata',
'host_email': 'str',
'host_email_metadata': 'PropertyMetadata',
'host_name': 'str',
'host_name_metadata': 'PropertyMetadata',
'id_check_configuration_name': 'str',
'id_check_configuration_name_metadata': 'PropertyMetadata',
'id_check_information_input': 'IdCheckInformationInput',
'identity_verification': 'RecipientIdentityVerification',
'inherit_email_notification_configuration': 'str',
'in_person_signing_type': 'str',
'in_person_signing_type_metadata': 'PropertyMetadata',
'locked_recipient_phone_auth_editable': 'str',
'locked_recipient_sms_editable': 'str',
'name': 'str',
'name_metadata': 'PropertyMetadata',
'notary_host': 'NotaryHost',
'notary_id': 'str',
'note': 'str',
'note_metadata': 'PropertyMetadata',
'offline_attributes': 'OfflineAttributes',
'phone_authentication': 'RecipientPhoneAuthentication',
'recipient_attachments': 'list[RecipientAttachment]',
'recipient_authentication_status': 'AuthenticationStatus',
'recipient_feature_metadata': 'list[FeatureAvailableMetadata]',
'recipient_id': 'str',
'recipient_id_guid': 'str',
'recipient_signature_providers': 'list[RecipientSignatureProvider]',
'recipient_supplies_tabs': 'str',
'recipient_type': 'str',
'recipient_type_metadata': 'PropertyMetadata',
'require_id_lookup': 'str',
'require_id_lookup_metadata': 'PropertyMetadata',
'require_signer_certificate': 'str',
'require_sign_on_paper': 'str',
'require_upload_signature': 'str',
'role_name': 'str',
'routing_order': 'str',
'routing_order_metadata': 'PropertyMetadata',
'sent_date_time': 'str',
'signature_info': 'RecipientSignatureInformation',
'signed_date_time': 'str',
'signer_email': 'str',
'signer_email_metadata': 'PropertyMetadata',
'signer_first_name': 'str',
'signer_first_name_metadata': 'PropertyMetadata',
'signer_last_name': 'str',
'signer_last_name_metadata': 'PropertyMetadata',
'signer_name': 'str',
'signer_name_metadata': 'PropertyMetadata',
'sign_in_each_location': 'str',
'sign_in_each_location_metadata': 'PropertyMetadata',
'signing_group_id': 'str',
'signing_group_id_metadata': 'PropertyMetadata',
'signing_group_name': 'str',
'signing_group_users': 'list[UserInfo]',
'sms_authentication': 'RecipientSMSAuthentication',
'social_authentications': 'list[SocialAuthentication]',
'status': 'str',
'status_code': 'str',
'suppress_emails': 'str',
'tabs': 'Tabs',
'template_locked': 'str',
'template_required': 'str',
'total_tab_count': 'str',
'user_id': 'str'
}
attribute_map = {
'access_code': 'accessCode',
'access_code_metadata': 'accessCodeMetadata',
'add_access_code_to_email': 'addAccessCodeToEmail',
'allow_system_override_for_locked_recipient': 'allowSystemOverrideForLockedRecipient',
'auto_navigation': 'autoNavigation',
'can_sign_offline': 'canSignOffline',
'client_user_id': 'clientUserId',
'completed_count': 'completedCount',
'creation_reason': 'creationReason',
'custom_fields': 'customFields',
'declined_date_time': 'declinedDateTime',
'declined_reason': 'declinedReason',
'default_recipient': 'defaultRecipient',
'delivered_date_time': 'deliveredDateTime',
'delivery_method': 'deliveryMethod',
'delivery_method_metadata': 'deliveryMethodMetadata',
'designator_id': 'designatorId',
'designator_id_guid': 'designatorIdGuid',
'document_visibility': 'documentVisibility',
'email': 'email',
'email_metadata': 'emailMetadata',
'email_notification': 'emailNotification',
'embedded_recipient_start_url': 'embeddedRecipientStartURL',
'error_details': 'errorDetails',
'excluded_documents': 'excludedDocuments',
'fax_number': 'faxNumber',
'fax_number_metadata': 'faxNumberMetadata',
'host_email': 'hostEmail',
'host_email_metadata': 'hostEmailMetadata',
'host_name': 'hostName',
'host_name_metadata': 'hostNameMetadata',
'id_check_configuration_name': 'idCheckConfigurationName',
'id_check_configuration_name_metadata': 'idCheckConfigurationNameMetadata',
'id_check_information_input': 'idCheckInformationInput',
'identity_verification': 'identityVerification',
'inherit_email_notification_configuration': 'inheritEmailNotificationConfiguration',
'in_person_signing_type': 'inPersonSigningType',
'in_person_signing_type_metadata': 'inPersonSigningTypeMetadata',
'locked_recipient_phone_auth_editable': 'lockedRecipientPhoneAuthEditable',
'locked_recipient_sms_editable': 'lockedRecipientSmsEditable',
'name': 'name',
'name_metadata': 'nameMetadata',
'notary_host': 'notaryHost',
'notary_id': 'notaryId',
'note': 'note',
'note_metadata': 'noteMetadata',
'offline_attributes': 'offlineAttributes',
'phone_authentication': 'phoneAuthentication',
'recipient_attachments': 'recipientAttachments',
'recipient_authentication_status': 'recipientAuthenticationStatus',
'recipient_feature_metadata': 'recipientFeatureMetadata',
'recipient_id': 'recipientId',
'recipient_id_guid': 'recipientIdGuid',
'recipient_signature_providers': 'recipientSignatureProviders',
'recipient_supplies_tabs': 'recipientSuppliesTabs',
'recipient_type': 'recipientType',
'recipient_type_metadata': 'recipientTypeMetadata',
'require_id_lookup': 'requireIdLookup',
'require_id_lookup_metadata': 'requireIdLookupMetadata',
'require_signer_certificate': 'requireSignerCertificate',
'require_sign_on_paper': 'requireSignOnPaper',
'require_upload_signature': 'requireUploadSignature',
'role_name': 'roleName',
'routing_order': 'routingOrder',
'routing_order_metadata': 'routingOrderMetadata',
'sent_date_time': 'sentDateTime',
'signature_info': 'signatureInfo',
'signed_date_time': 'signedDateTime',
'signer_email': 'signerEmail',
'signer_email_metadata': 'signerEmailMetadata',
'signer_first_name': 'signerFirstName',
'signer_first_name_metadata': 'signerFirstNameMetadata',
'signer_last_name': 'signerLastName',
'signer_last_name_metadata': 'signerLastNameMetadata',
'signer_name': 'signerName',
'signer_name_metadata': 'signerNameMetadata',
'sign_in_each_location': 'signInEachLocation',
'sign_in_each_location_metadata': 'signInEachLocationMetadata',
'signing_group_id': 'signingGroupId',
'signing_group_id_metadata': 'signingGroupIdMetadata',
'signing_group_name': 'signingGroupName',
'signing_group_users': 'signingGroupUsers',
'sms_authentication': 'smsAuthentication',
'social_authentications': 'socialAuthentications',
'status': 'status',
'status_code': 'statusCode',
'suppress_emails': 'suppressEmails',
'tabs': 'tabs',
'template_locked': 'templateLocked',
'template_required': 'templateRequired',
'total_tab_count': 'totalTabCount',
'user_id': 'userId'
}
def __init__(self, access_code=None, access_code_metadata=None, add_access_code_to_email=None, allow_system_override_for_locked_recipient=None, auto_navigation=None, can_sign_offline=None, client_user_id=None, completed_count=None, creation_reason=None, custom_fields=None, declined_date_time=None, declined_reason=None, default_recipient=None, delivered_date_time=None, delivery_method=None, delivery_method_metadata=None, designator_id=None, designator_id_guid=None, document_visibility=None, email=None, email_metadata=None, email_notification=None, embedded_recipient_start_url=None, error_details=None, excluded_documents=None, fax_number=None, fax_number_metadata=None, host_email=None, host_email_metadata=None, host_name=None, host_name_metadata=None, id_check_configuration_name=None, id_check_configuration_name_metadata=None, id_check_information_input=None, identity_verification=None, inherit_email_notification_configuration=None, in_person_signing_type=None, in_person_signing_type_metadata=None, locked_recipient_phone_auth_editable=None, locked_recipient_sms_editable=None, name=None, name_metadata=None, notary_host=None, notary_id=None, note=None, note_metadata=None, offline_attributes=None, phone_authentication=None, recipient_attachments=None, recipient_authentication_status=None, recipient_feature_metadata=None, recipient_id=None, recipient_id_guid=None, recipient_signature_providers=None, recipient_supplies_tabs=None, recipient_type=None, recipient_type_metadata=None, require_id_lookup=None, require_id_lookup_metadata=None, require_signer_certificate=None, require_sign_on_paper=None, require_upload_signature=None, role_name=None, routing_order=None, routing_order_metadata=None, sent_date_time=None, signature_info=None, signed_date_time=None, signer_email=None, signer_email_metadata=None, signer_first_name=None, signer_first_name_metadata=None, signer_last_name=None, signer_last_name_metadata=None, signer_name=None, signer_name_metadata=None, 
sign_in_each_location=None, sign_in_each_location_metadata=None, signing_group_id=None, signing_group_id_metadata=None, signing_group_name=None, signing_group_users=None, sms_authentication=None, social_authentications=None, status=None, status_code=None, suppress_emails=None, tabs=None, template_locked=None, template_required=None, total_tab_count=None, user_id=None): # noqa: E501
"""InPersonSigner - a model defined in Swagger""" # noqa: E501
self._access_code = None
self._access_code_metadata = None
self._add_access_code_to_email = None
self._allow_system_override_for_locked_recipient = None
self._auto_navigation = None
self._can_sign_offline = None
self._client_user_id = None
self._completed_count = None
self._creation_reason = None
self._custom_fields = None
self._declined_date_time = None
self._declined_reason = None
self._default_recipient = None
self._delivered_date_time = None
self._delivery_method = None
self._delivery_method_metadata = None
self._designator_id = None
self._designator_id_guid = None
self._document_visibility = None
self._email = None
self._email_metadata = None
self._email_notification = None
self._embedded_recipient_start_url = None
self._error_details = None
self._excluded_documents = None
self._fax_number = None
self._fax_number_metadata = None
self._host_email = None
self._host_email_metadata = None
self._host_name = None
self._host_name_metadata = None
self._id_check_configuration_name = None
self._id_check_configuration_name_metadata = None
self._id_check_information_input = None
self._identity_verification = None
self._inherit_email_notification_configuration = None
self._in_person_signing_type = None
self._in_person_signing_type_metadata = None
self._locked_recipient_phone_auth_editable = None
self._locked_recipient_sms_editable = None
self._name = None
self._name_metadata = None
self._notary_host = None
self._notary_id = None
self._note = None
self._note_metadata = None
self._offline_attributes = None
self._phone_authentication = None
self._recipient_attachments = None
self._recipient_authentication_status = None
self._recipient_feature_metadata = None
self._recipient_id = None
self._recipient_id_guid = None
self._recipient_signature_providers = None
self._recipient_supplies_tabs = None
self._recipient_type = None
self._recipient_type_metadata = None
self._require_id_lookup = None
self._require_id_lookup_metadata = None
self._require_signer_certificate = None
self._require_sign_on_paper = None
self._require_upload_signature = None
self._role_name = None
self._routing_order = None
self._routing_order_metadata = None
self._sent_date_time = None
self._signature_info = None
self._signed_date_time = None
self._signer_email = None
self._signer_email_metadata = None
self._signer_first_name = None
self._signer_first_name_metadata = None
self._signer_last_name = None
self._signer_last_name_metadata = None
self._signer_name = None
self._signer_name_metadata = None
self._sign_in_each_location = None
self._sign_in_each_location_metadata = None
self._signing_group_id = None
self._signing_group_id_metadata = None
self._signing_group_name = None
self._signing_group_users = None
self._sms_authentication = None
self._social_authentications = None
self._status = None
self._status_code = None
self._suppress_emails = None
self._tabs = None
self._template_locked = None
self._template_required = None
self._total_tab_count = None
self._user_id = None
self.discriminator = None
if access_code is not None:
self.access_code = access_code
if access_code_metadata is not None:
self.access_code_metadata = access_code_metadata
if add_access_code_to_email is not None:
self.add_access_code_to_email = add_access_code_to_email
if allow_system_override_for_locked_recipient is not None:
self.allow_system_override_for_locked_recipient = allow_system_override_for_locked_recipient
if auto_navigation is not None:
self.auto_navigation = auto_navigation
if can_sign_offline is not None:
self.can_sign_offline = can_sign_offline
if client_user_id is not None:
self.client_user_id = client_user_id
if completed_count is not None:
self.completed_count = completed_count
if creation_reason is not None:
self.creation_reason = creation_reason
if custom_fields is not None:
self.custom_fields = custom_fields
if declined_date_time is not None:
self.declined_date_time = declined_date_time
if declined_reason is not None:
self.declined_reason = declined_reason
if default_recipient is not None:
self.default_recipient = default_recipient
if delivered_date_time is not None:
self.delivered_date_time = delivered_date_time
if delivery_method is not None:
self.delivery_method = delivery_method
if delivery_method_metadata is not None:
self.delivery_method_metadata = delivery_method_metadata
if designator_id is not None:
self.designator_id = designator_id
if designator_id_guid is not None:
self.designator_id_guid = designator_id_guid
if document_visibility is not None:
self.document_visibility = document_visibility
if email is not None:
self.email = email
if email_metadata is not None:
self.email_metadata = email_metadata
if email_notification is not None:
self.email_notification = email_notification
if embedded_recipient_start_url is not None:
self.embedded_recipient_start_url = embedded_recipient_start_url
if error_details is not None:
self.error_details = error_details
if excluded_documents is not None:
self.excluded_documents = excluded_documents
if fax_number is not None:
self.fax_number = fax_number
if fax_number_metadata is not None:
self.fax_number_metadata = fax_number_metadata
if host_email is not None:
self.host_email = host_email
if host_email_metadata is not None:
self.host_email_metadata = host_email_metadata
if host_name is not None:
self.host_name = host_name
if host_name_metadata is not None:
self.host_name_metadata = host_name_metadata
if id_check_configuration_name is not None:
self.id_check_configuration_name = id_check_configuration_name
if id_check_configuration_name_metadata is not None:
self.id_check_configuration_name_metadata = id_check_configuration_name_metadata
if id_check_information_input is not None:
self.id_check_information_input = id_check_information_input
if identity_verification is not None:
self.identity_verification = identity_verification
if inherit_email_notification_configuration is not None:
self.inherit_email_notification_configuration = inherit_email_notification_configuration
if in_person_signing_type is not None:
self.in_person_signing_type = in_person_signing_type
if in_person_signing_type_metadata is not None:
self.in_person_signing_type_metadata = in_person_signing_type_metadata
if locked_recipient_phone_auth_editable is not None:
self.locked_recipient_phone_auth_editable = locked_recipient_phone_auth_editable
if locked_recipient_sms_editable is not None:
self.locked_recipient_sms_editable = locked_recipient_sms_editable
if name is not None:
self.name = name
if name_metadata is not None:
self.name_metadata = name_metadata
if notary_host is not None:
self.notary_host = notary_host
if notary_id is not None:
self.notary_id = notary_id
if note is not None:
self.note = note
if note_metadata is not None:
self.note_metadata = note_metadata
if offline_attributes is not None:
self.offline_attributes = offline_attributes
if phone_authentication is not None:
self.phone_authentication = phone_authentication
if recipient_attachments is not None:
self.recipient_attachments = recipient_attachments
if recipient_authentication_status is not None:
self.recipient_authentication_status = recipient_authentication_status
if recipient_feature_metadata is not None:
self.recipient_feature_metadata = recipient_feature_metadata
if recipient_id is not None:
self.recipient_id = recipient_id
if recipient_id_guid is not None:
self.recipient_id_guid = recipient_id_guid
if recipient_signature_providers is not None:
self.recipient_signature_providers = recipient_signature_providers
if recipient_supplies_tabs is not None:
self.recipient_supplies_tabs = recipient_supplies_tabs
if recipient_type is not None:
self.recipient_type = recipient_type
if recipient_type_metadata is not None:
self.recipient_type_metadata = recipient_type_metadata
if require_id_lookup is not None:
self.require_id_lookup = require_id_lookup
if require_id_lookup_metadata is not None:
self.require_id_lookup_metadata = require_id_lookup_metadata
if require_signer_certificate is not None:
self.require_signer_certificate = require_signer_certificate
if require_sign_on_paper is not None:
self.require_sign_on_paper = require_sign_on_paper
if require_upload_signature is not None:
self.require_upload_signature = require_upload_signature
if role_name is not None:
self.role_name = role_name
if routing_order is not None:
self.routing_order = routing_order
if routing_order_metadata is not None:
self.routing_order_metadata = routing_order_metadata
if sent_date_time is not None:
self.sent_date_time = sent_date_time
if signature_info is not None:
self.signature_info = signature_info
if signed_date_time is not None:
self.signed_date_time = signed_date_time
if signer_email is not None:
self.signer_email = signer_email
if signer_email_metadata is not None:
self.signer_email_metadata = signer_email_metadata
if signer_first_name is not None:
self.signer_first_name = signer_first_name
if signer_first_name_metadata is not None:
self.signer_first_name_metadata = signer_first_name_metadata
if signer_last_name is not None:
self.signer_last_name = signer_last_name
if signer_last_name_metadata is not None:
self.signer_last_name_metadata = signer_last_name_metadata
if signer_name is not None:
self.signer_name = signer_name
if signer_name_metadata is not None:
self.signer_name_metadata = signer_name_metadata
if sign_in_each_location is not None:
self.sign_in_each_location = sign_in_each_location
if sign_in_each_location_metadata is not None:
self.sign_in_each_location_metadata = sign_in_each_location_metadata
if signing_group_id is not None:
self.signing_group_id = signing_group_id
if signing_group_id_metadata is not None:
self.signing_group_id_metadata = signing_group_id_metadata
if signing_group_name is not None:
self.signing_group_name = signing_group_name
if signing_group_users is not None:
self.signing_group_users = signing_group_users
if sms_authentication is not None:
self.sms_authentication = sms_authentication
if social_authentications is not None:
self.social_authentications = social_authentications
if status is not None:
self.status = status
if status_code is not None:
self.status_code = status_code
if suppress_emails is not None:
self.suppress_emails = suppress_emails
if tabs is not None:
self.tabs = tabs
if template_locked is not None:
self.template_locked = template_locked
if template_required is not None:
self.template_required = template_required
if total_tab_count is not None:
self.total_tab_count = total_tab_count
if user_id is not None:
self.user_id = user_id
    @property
    def access_code(self):
        """Gets the access_code of this InPersonSigner.  # noqa: E501

        If a value is provided, the recipient must enter the value as the access code to view and sign the envelope. Maximum Length: 50 characters and it must conform to the account's access code format setting. If blank, but the signer `accessCode` property is set in the envelope, then that value is used. If blank and the signer `accessCode` property is not set, then the access code is not required.  # noqa: E501

        :return: The access_code of this InPersonSigner.  # noqa: E501
        :rtype: str
        """
        return self._access_code

    @access_code.setter
    def access_code(self, access_code):
        """Sets the access_code of this InPersonSigner.

        If a value is provided, the recipient must enter the value as the access code to view and sign the envelope. Maximum Length: 50 characters and it must conform to the account's access code format setting. If blank, but the signer `accessCode` property is set in the envelope, then that value is used. If blank and the signer `accessCode` property is not set, then the access code is not required.  # noqa: E501

        :param access_code: The access_code of this InPersonSigner.  # noqa: E501
        :type: str
        """
        self._access_code = access_code

    @property
    def access_code_metadata(self):
        """Gets the access_code_metadata of this InPersonSigner.  # noqa: E501

        Metadata for the `accessCode` property; semantics are not documented
        in the upstream Swagger spec.

        :return: The access_code_metadata of this InPersonSigner.  # noqa: E501
        :rtype: PropertyMetadata
        """
        return self._access_code_metadata

    @access_code_metadata.setter
    def access_code_metadata(self, access_code_metadata):
        """Sets the access_code_metadata of this InPersonSigner.

        Metadata for the `accessCode` property; semantics are not documented
        in the upstream Swagger spec.

        :param access_code_metadata: The access_code_metadata of this InPersonSigner.  # noqa: E501
        :type: PropertyMetadata
        """
        self._access_code_metadata = access_code_metadata

    @property
    def add_access_code_to_email(self):
        """Gets the add_access_code_to_email of this InPersonSigner.  # noqa: E501

        This Optional attribute indicates that the access code will be added to the email sent to the recipient; this nullifies the Security measure of Access Code on the recipient.  # noqa: E501

        :return: The add_access_code_to_email of this InPersonSigner.  # noqa: E501
        :rtype: str
        """
        return self._add_access_code_to_email

    @add_access_code_to_email.setter
    def add_access_code_to_email(self, add_access_code_to_email):
        """Sets the add_access_code_to_email of this InPersonSigner.

        This Optional attribute indicates that the access code will be added to the email sent to the recipient; this nullifies the Security measure of Access Code on the recipient.  # noqa: E501

        :param add_access_code_to_email: The add_access_code_to_email of this InPersonSigner.  # noqa: E501
        :type: str
        """
        self._add_access_code_to_email = add_access_code_to_email

    @property
    def allow_system_override_for_locked_recipient(self):
        """Gets the allow_system_override_for_locked_recipient of this InPersonSigner.  # noqa: E501

        Not documented in the upstream Swagger spec; see the DocuSign
        eSignature API reference for `allowSystemOverrideForLockedRecipient`.

        :return: The allow_system_override_for_locked_recipient of this InPersonSigner.  # noqa: E501
        :rtype: str
        """
        return self._allow_system_override_for_locked_recipient

    @allow_system_override_for_locked_recipient.setter
    def allow_system_override_for_locked_recipient(self, allow_system_override_for_locked_recipient):
        """Sets the allow_system_override_for_locked_recipient of this InPersonSigner.

        Not documented in the upstream Swagger spec; see the DocuSign
        eSignature API reference for `allowSystemOverrideForLockedRecipient`.

        :param allow_system_override_for_locked_recipient: The allow_system_override_for_locked_recipient of this InPersonSigner.  # noqa: E501
        :type: str
        """
        self._allow_system_override_for_locked_recipient = allow_system_override_for_locked_recipient

    @property
    def auto_navigation(self):
        """Gets the auto_navigation of this InPersonSigner.  # noqa: E501

        Not documented in the upstream Swagger spec; see the DocuSign
        eSignature API reference for `autoNavigation`.

        :return: The auto_navigation of this InPersonSigner.  # noqa: E501
        :rtype: str
        """
        return self._auto_navigation

    @auto_navigation.setter
    def auto_navigation(self, auto_navigation):
        """Sets the auto_navigation of this InPersonSigner.

        Not documented in the upstream Swagger spec; see the DocuSign
        eSignature API reference for `autoNavigation`.

        :param auto_navigation: The auto_navigation of this InPersonSigner.  # noqa: E501
        :type: str
        """
        self._auto_navigation = auto_navigation
    @property
    def can_sign_offline(self):
        """Gets the can_sign_offline of this InPersonSigner.  # noqa: E501

        When set to **true**, specifies that the signer can perform the signing ceremony offline.  # noqa: E501

        :return: The can_sign_offline of this InPersonSigner.  # noqa: E501
        :rtype: str
        """
        return self._can_sign_offline

    @can_sign_offline.setter
    def can_sign_offline(self, can_sign_offline):
        """Sets the can_sign_offline of this InPersonSigner.

        When set to **true**, specifies that the signer can perform the signing ceremony offline.  # noqa: E501

        :param can_sign_offline: The can_sign_offline of this InPersonSigner.  # noqa: E501
        :type: str
        """
        self._can_sign_offline = can_sign_offline

    @property
    def client_user_id(self):
        """Gets the client_user_id of this InPersonSigner.  # noqa: E501

        Specifies whether the recipient is embedded or remote. If the `clientUserId` property is not null then the recipient is embedded. Note that if the `ClientUserId` property is set and either `SignerMustHaveAccount` or `SignerMustLoginToSign` property of the account settings is set to **true**, an error is generated on sending. Maximum length: 100 characters.  # noqa: E501

        :return: The client_user_id of this InPersonSigner.  # noqa: E501
        :rtype: str
        """
        return self._client_user_id

    @client_user_id.setter
    def client_user_id(self, client_user_id):
        """Sets the client_user_id of this InPersonSigner.

        Specifies whether the recipient is embedded or remote. If the `clientUserId` property is not null then the recipient is embedded. Note that if the `ClientUserId` property is set and either `SignerMustHaveAccount` or `SignerMustLoginToSign` property of the account settings is set to **true**, an error is generated on sending. Maximum length: 100 characters.  # noqa: E501

        :param client_user_id: The client_user_id of this InPersonSigner.  # noqa: E501
        :type: str
        """
        self._client_user_id = client_user_id

    @property
    def completed_count(self):
        """Gets the completed_count of this InPersonSigner.  # noqa: E501

        Not documented in the upstream Swagger spec; see the DocuSign
        eSignature API reference for `completedCount`.

        :return: The completed_count of this InPersonSigner.  # noqa: E501
        :rtype: str
        """
        return self._completed_count

    @completed_count.setter
    def completed_count(self, completed_count):
        """Sets the completed_count of this InPersonSigner.

        Not documented in the upstream Swagger spec; see the DocuSign
        eSignature API reference for `completedCount`.

        :param completed_count: The completed_count of this InPersonSigner.  # noqa: E501
        :type: str
        """
        self._completed_count = completed_count

    @property
    def creation_reason(self):
        """Gets the creation_reason of this InPersonSigner.  # noqa: E501

        Not documented in the upstream Swagger spec; see the DocuSign
        eSignature API reference for `creationReason`.

        :return: The creation_reason of this InPersonSigner.  # noqa: E501
        :rtype: str
        """
        return self._creation_reason

    @creation_reason.setter
    def creation_reason(self, creation_reason):
        """Sets the creation_reason of this InPersonSigner.

        Not documented in the upstream Swagger spec; see the DocuSign
        eSignature API reference for `creationReason`.

        :param creation_reason: The creation_reason of this InPersonSigner.  # noqa: E501
        :type: str
        """
        self._creation_reason = creation_reason

    @property
    def custom_fields(self):
        """Gets the custom_fields of this InPersonSigner.  # noqa: E501

        An optional array of strings that allows the sender to provide custom data about the recipient. This information is returned in the envelope status but otherwise not used by DocuSign. Each customField string can be a maximum of 100 characters.  # noqa: E501

        :return: The custom_fields of this InPersonSigner.  # noqa: E501
        :rtype: list[str]
        """
        return self._custom_fields

    @custom_fields.setter
    def custom_fields(self, custom_fields):
        """Sets the custom_fields of this InPersonSigner.

        An optional array of strings that allows the sender to provide custom data about the recipient. This information is returned in the envelope status but otherwise not used by DocuSign. Each customField string can be a maximum of 100 characters.  # noqa: E501

        :param custom_fields: The custom_fields of this InPersonSigner.  # noqa: E501
        :type: list[str]
        """
        self._custom_fields = custom_fields
    @property
    def declined_date_time(self):
        """Gets the declined_date_time of this InPersonSigner.  # noqa: E501

        The date and time the recipient declined the document.  # noqa: E501

        :return: The declined_date_time of this InPersonSigner.  # noqa: E501
        :rtype: str
        """
        return self._declined_date_time

    @declined_date_time.setter
    def declined_date_time(self, declined_date_time):
        """Sets the declined_date_time of this InPersonSigner.

        The date and time the recipient declined the document.  # noqa: E501

        :param declined_date_time: The declined_date_time of this InPersonSigner.  # noqa: E501
        :type: str
        """
        self._declined_date_time = declined_date_time

    @property
    def declined_reason(self):
        """Gets the declined_reason of this InPersonSigner.  # noqa: E501

        The reason the recipient declined the document.  # noqa: E501

        :return: The declined_reason of this InPersonSigner.  # noqa: E501
        :rtype: str
        """
        return self._declined_reason

    @declined_reason.setter
    def declined_reason(self, declined_reason):
        """Sets the declined_reason of this InPersonSigner.

        The reason the recipient declined the document.  # noqa: E501

        :param declined_reason: The declined_reason of this InPersonSigner.  # noqa: E501
        :type: str
        """
        self._declined_reason = declined_reason

    @property
    def default_recipient(self):
        """Gets the default_recipient of this InPersonSigner.  # noqa: E501

        Not documented in the upstream Swagger spec; see the DocuSign
        eSignature API reference for `defaultRecipient`.

        :return: The default_recipient of this InPersonSigner.  # noqa: E501
        :rtype: str
        """
        return self._default_recipient

    @default_recipient.setter
    def default_recipient(self, default_recipient):
        """Sets the default_recipient of this InPersonSigner.

        Not documented in the upstream Swagger spec; see the DocuSign
        eSignature API reference for `defaultRecipient`.

        :param default_recipient: The default_recipient of this InPersonSigner.  # noqa: E501
        :type: str
        """
        self._default_recipient = default_recipient

    @property
    def delivered_date_time(self):
        """Gets the delivered_date_time of this InPersonSigner.  # noqa: E501

        Reserved: For DocuSign use only.  # noqa: E501

        :return: The delivered_date_time of this InPersonSigner.  # noqa: E501
        :rtype: str
        """
        return self._delivered_date_time

    @delivered_date_time.setter
    def delivered_date_time(self, delivered_date_time):
        """Sets the delivered_date_time of this InPersonSigner.

        Reserved: For DocuSign use only.  # noqa: E501

        :param delivered_date_time: The delivered_date_time of this InPersonSigner.  # noqa: E501
        :type: str
        """
        self._delivered_date_time = delivered_date_time

    @property
    def delivery_method(self):
        """Gets the delivery_method of this InPersonSigner.  # noqa: E501

        Reserved: For DocuSign use only.  # noqa: E501

        :return: The delivery_method of this InPersonSigner.  # noqa: E501
        :rtype: str
        """
        return self._delivery_method

    @delivery_method.setter
    def delivery_method(self, delivery_method):
        """Sets the delivery_method of this InPersonSigner.

        Reserved: For DocuSign use only.  # noqa: E501

        :param delivery_method: The delivery_method of this InPersonSigner.  # noqa: E501
        :type: str
        """
        self._delivery_method = delivery_method
    @property
    def delivery_method_metadata(self):
        """Gets the delivery_method_metadata of this InPersonSigner.  # noqa: E501

        Metadata for the `deliveryMethod` property; semantics are not
        documented in the upstream Swagger spec.

        :return: The delivery_method_metadata of this InPersonSigner.  # noqa: E501
        :rtype: PropertyMetadata
        """
        return self._delivery_method_metadata

    @delivery_method_metadata.setter
    def delivery_method_metadata(self, delivery_method_metadata):
        """Sets the delivery_method_metadata of this InPersonSigner.

        Metadata for the `deliveryMethod` property; semantics are not
        documented in the upstream Swagger spec.

        :param delivery_method_metadata: The delivery_method_metadata of this InPersonSigner.  # noqa: E501
        :type: PropertyMetadata
        """
        self._delivery_method_metadata = delivery_method_metadata

    @property
    def designator_id(self):
        """Gets the designator_id of this InPersonSigner.  # noqa: E501

        Not documented in the upstream Swagger spec; see the DocuSign
        eSignature API reference for `designatorId`.

        :return: The designator_id of this InPersonSigner.  # noqa: E501
        :rtype: str
        """
        return self._designator_id

    @designator_id.setter
    def designator_id(self, designator_id):
        """Sets the designator_id of this InPersonSigner.

        Not documented in the upstream Swagger spec; see the DocuSign
        eSignature API reference for `designatorId`.

        :param designator_id: The designator_id of this InPersonSigner.  # noqa: E501
        :type: str
        """
        self._designator_id = designator_id

    @property
    def designator_id_guid(self):
        """Gets the designator_id_guid of this InPersonSigner.  # noqa: E501

        Not documented in the upstream Swagger spec; see the DocuSign
        eSignature API reference for `designatorIdGuid`.

        :return: The designator_id_guid of this InPersonSigner.  # noqa: E501
        :rtype: str
        """
        return self._designator_id_guid

    @designator_id_guid.setter
    def designator_id_guid(self, designator_id_guid):
        """Sets the designator_id_guid of this InPersonSigner.

        Not documented in the upstream Swagger spec; see the DocuSign
        eSignature API reference for `designatorIdGuid`.

        :param designator_id_guid: The designator_id_guid of this InPersonSigner.  # noqa: E501
        :type: str
        """
        self._designator_id_guid = designator_id_guid

    @property
    def document_visibility(self):
        """Gets the document_visibility of this InPersonSigner.  # noqa: E501

        Not documented in the upstream Swagger spec; see the DocuSign
        eSignature API reference for `documentVisibility`.

        :return: The document_visibility of this InPersonSigner.  # noqa: E501
        :rtype: list[DocumentVisibility]
        """
        return self._document_visibility

    @document_visibility.setter
    def document_visibility(self, document_visibility):
        """Sets the document_visibility of this InPersonSigner.

        Not documented in the upstream Swagger spec; see the DocuSign
        eSignature API reference for `documentVisibility`.

        :param document_visibility: The document_visibility of this InPersonSigner.  # noqa: E501
        :type: list[DocumentVisibility]
        """
        self._document_visibility = document_visibility

    @property
    def email(self):
        """Gets the email of this InPersonSigner.  # noqa: E501

        Not documented in the upstream Swagger spec; presumably the
        recipient's email address — see the DocuSign eSignature API
        reference for `email`.

        :return: The email of this InPersonSigner.  # noqa: E501
        :rtype: str
        """
        return self._email

    @email.setter
    def email(self, email):
        """Sets the email of this InPersonSigner.

        Not documented in the upstream Swagger spec; presumably the
        recipient's email address — see the DocuSign eSignature API
        reference for `email`.

        :param email: The email of this InPersonSigner.  # noqa: E501
        :type: str
        """
        self._email = email
@property
def email_metadata(self):
"""Gets the email_metadata of this InPersonSigner. # noqa: E501
:return: The email_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._email_metadata
@email_metadata.setter
def email_metadata(self, email_metadata):
"""Sets the email_metadata of this InPersonSigner.
:param email_metadata: The email_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._email_metadata = email_metadata
@property
def email_notification(self):
"""Gets the email_notification of this InPersonSigner. # noqa: E501
:return: The email_notification of this InPersonSigner. # noqa: E501
:rtype: RecipientEmailNotification
"""
return self._email_notification
@email_notification.setter
def email_notification(self, email_notification):
"""Sets the email_notification of this InPersonSigner.
:param email_notification: The email_notification of this InPersonSigner. # noqa: E501
:type: RecipientEmailNotification
"""
self._email_notification = email_notification
@property
def embedded_recipient_start_url(self):
"""Gets the embedded_recipient_start_url of this InPersonSigner. # noqa: E501
Specifies a sender provided valid URL string for redirecting an embedded recipient. When using this option, the embedded recipient still receives an email from DocuSign, just as a remote recipient would. When the document link in the email is clicked the recipient is redirected, through DocuSign, to the supplied URL to complete their actions. When routing to the URL, the sender's system (the server responding to the URL) must request a recipient token to launch a signing session. If set to `SIGN_AT_DOCUSIGN`, the recipient is directed to an embedded signing or viewing process directly at DocuSign. The signing or viewing action is initiated by the DocuSign system and the transaction activity and Certificate of Completion records will reflect this. In all other ways the process is identical to an embedded signing or viewing operation that is launched by any partner. It is important to remember that in a typical embedded workflow the authentication of an embedded recipient is the responsibility of the sending application, DocuSign expects that senders will follow their own process for establishing the recipient's identity. In this workflow the recipient goes through the sending application before the embedded signing or viewing process in initiated. However, when the sending application sets `EmbeddedRecipientStartURL=SIGN_AT_DOCUSIGN`, the recipient goes directly to the embedded signing or viewing process bypassing the sending application and any authentication steps the sending application would use. In this case, DocuSign recommends that you use one of the normal DocuSign authentication features (Access Code, Phone Authentication, SMS Authentication, etc.) to verify the identity of the recipient. If the `clientUserId` property is NOT set, and the `embeddedRecipientStartURL` is set, DocuSign will ignore the redirect URL and launch the standard signing process for the email recipient. Information can be appended to the embedded recipient start URL using merge fields. 
The available merge fields items are: envelopeId, recipientId, recipientName, recipientEmail, and customFields. The `customFields` property must be set fort the recipient or envelope. The merge fields are enclosed in double brackets. *Example*: `http://senderHost/[[mergeField1]]/ beginSigningSession? [[mergeField2]]&[[mergeField3]]` # noqa: E501
:return: The embedded_recipient_start_url of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._embedded_recipient_start_url
@embedded_recipient_start_url.setter
def embedded_recipient_start_url(self, embedded_recipient_start_url):
"""Sets the embedded_recipient_start_url of this InPersonSigner.
Specifies a sender provided valid URL string for redirecting an embedded recipient. When using this option, the embedded recipient still receives an email from DocuSign, just as a remote recipient would. When the document link in the email is clicked the recipient is redirected, through DocuSign, to the supplied URL to complete their actions. When routing to the URL, the sender's system (the server responding to the URL) must request a recipient token to launch a signing session. If set to `SIGN_AT_DOCUSIGN`, the recipient is directed to an embedded signing or viewing process directly at DocuSign. The signing or viewing action is initiated by the DocuSign system and the transaction activity and Certificate of Completion records will reflect this. In all other ways the process is identical to an embedded signing or viewing operation that is launched by any partner. It is important to remember that in a typical embedded workflow the authentication of an embedded recipient is the responsibility of the sending application, DocuSign expects that senders will follow their own process for establishing the recipient's identity. In this workflow the recipient goes through the sending application before the embedded signing or viewing process in initiated. However, when the sending application sets `EmbeddedRecipientStartURL=SIGN_AT_DOCUSIGN`, the recipient goes directly to the embedded signing or viewing process bypassing the sending application and any authentication steps the sending application would use. In this case, DocuSign recommends that you use one of the normal DocuSign authentication features (Access Code, Phone Authentication, SMS Authentication, etc.) to verify the identity of the recipient. If the `clientUserId` property is NOT set, and the `embeddedRecipientStartURL` is set, DocuSign will ignore the redirect URL and launch the standard signing process for the email recipient. Information can be appended to the embedded recipient start URL using merge fields. 
The available merge fields items are: envelopeId, recipientId, recipientName, recipientEmail, and customFields. The `customFields` property must be set fort the recipient or envelope. The merge fields are enclosed in double brackets. *Example*: `http://senderHost/[[mergeField1]]/ beginSigningSession? [[mergeField2]]&[[mergeField3]]` # noqa: E501
:param embedded_recipient_start_url: The embedded_recipient_start_url of this InPersonSigner. # noqa: E501
:type: str
"""
self._embedded_recipient_start_url = embedded_recipient_start_url
@property
def error_details(self):
"""Gets the error_details of this InPersonSigner. # noqa: E501
:return: The error_details of this InPersonSigner. # noqa: E501
:rtype: ErrorDetails
"""
return self._error_details
@error_details.setter
def error_details(self, error_details):
"""Sets the error_details of this InPersonSigner.
:param error_details: The error_details of this InPersonSigner. # noqa: E501
:type: ErrorDetails
"""
self._error_details = error_details
@property
def excluded_documents(self):
"""Gets the excluded_documents of this InPersonSigner. # noqa: E501
Specifies the documents that are not visible to this recipient. Document Visibility must be enabled for the account and the `enforceSignerVisibility` property must be set to **true** for the envelope to use this. When enforce signer visibility is enabled, documents with tabs can only be viewed by signers that have a tab on that document. Recipients that have an administrative role (Agent, Editor, or Intermediaries) or informational role (Certified Deliveries or Carbon Copies) can always see all the documents in an envelope, unless they are specifically excluded using this setting when an envelope is sent. Documents that do not have tabs are always visible to all recipients, unless they are specifically excluded using this setting when an envelope is sent. # noqa: E501
:return: The excluded_documents of this InPersonSigner. # noqa: E501
:rtype: list[str]
"""
return self._excluded_documents
@excluded_documents.setter
def excluded_documents(self, excluded_documents):
"""Sets the excluded_documents of this InPersonSigner.
Specifies the documents that are not visible to this recipient. Document Visibility must be enabled for the account and the `enforceSignerVisibility` property must be set to **true** for the envelope to use this. When enforce signer visibility is enabled, documents with tabs can only be viewed by signers that have a tab on that document. Recipients that have an administrative role (Agent, Editor, or Intermediaries) or informational role (Certified Deliveries or Carbon Copies) can always see all the documents in an envelope, unless they are specifically excluded using this setting when an envelope is sent. Documents that do not have tabs are always visible to all recipients, unless they are specifically excluded using this setting when an envelope is sent. # noqa: E501
:param excluded_documents: The excluded_documents of this InPersonSigner. # noqa: E501
:type: list[str]
"""
self._excluded_documents = excluded_documents
@property
def fax_number(self):
"""Gets the fax_number of this InPersonSigner. # noqa: E501
Reserved: # noqa: E501
:return: The fax_number of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._fax_number
@fax_number.setter
def fax_number(self, fax_number):
"""Sets the fax_number of this InPersonSigner.
Reserved: # noqa: E501
:param fax_number: The fax_number of this InPersonSigner. # noqa: E501
:type: str
"""
self._fax_number = fax_number
@property
def fax_number_metadata(self):
"""Gets the fax_number_metadata of this InPersonSigner. # noqa: E501
:return: The fax_number_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._fax_number_metadata
@fax_number_metadata.setter
def fax_number_metadata(self, fax_number_metadata):
"""Sets the fax_number_metadata of this InPersonSigner.
:param fax_number_metadata: The fax_number_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._fax_number_metadata = fax_number_metadata
@property
def host_email(self):
"""Gets the host_email of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The host_email of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._host_email
@host_email.setter
def host_email(self, host_email):
"""Sets the host_email of this InPersonSigner.
# noqa: E501
:param host_email: The host_email of this InPersonSigner. # noqa: E501
:type: str
"""
self._host_email = host_email
@property
def host_email_metadata(self):
"""Gets the host_email_metadata of this InPersonSigner. # noqa: E501
:return: The host_email_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._host_email_metadata
@host_email_metadata.setter
def host_email_metadata(self, host_email_metadata):
"""Sets the host_email_metadata of this InPersonSigner.
:param host_email_metadata: The host_email_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._host_email_metadata = host_email_metadata
@property
def host_name(self):
"""Gets the host_name of this InPersonSigner. # noqa: E501
Specifies the name of the signing host. It is a required element for In Person Signers recipient Type. Maximum Length: 100 characters. # noqa: E501
:return: The host_name of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._host_name
@host_name.setter
def host_name(self, host_name):
"""Sets the host_name of this InPersonSigner.
Specifies the name of the signing host. It is a required element for In Person Signers recipient Type. Maximum Length: 100 characters. # noqa: E501
:param host_name: The host_name of this InPersonSigner. # noqa: E501
:type: str
"""
self._host_name = host_name
@property
def host_name_metadata(self):
"""Gets the host_name_metadata of this InPersonSigner. # noqa: E501
:return: The host_name_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._host_name_metadata
@host_name_metadata.setter
def host_name_metadata(self, host_name_metadata):
"""Sets the host_name_metadata of this InPersonSigner.
:param host_name_metadata: The host_name_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._host_name_metadata = host_name_metadata
@property
def id_check_configuration_name(self):
"""Gets the id_check_configuration_name of this InPersonSigner. # noqa: E501
Specifies authentication check by name. The names used here must be the same as the authentication type names used by the account (these name can also be found in the web console sending interface in the Identify list for a recipient,) This overrides any default authentication setting. *Example*: Your account has ID Check and SMS Authentication available and in the web console Identify list these appear as 'ID Check $' and 'SMS Auth $'. To use ID check in an envelope, the idCheckConfigurationName should be 'ID Check '. If you wanted to use SMS, it would be 'SMS Auth $' and you would need to add you would need to add phone number information to the `smsAuthentication` node. # noqa: E501
:return: The id_check_configuration_name of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._id_check_configuration_name
@id_check_configuration_name.setter
def id_check_configuration_name(self, id_check_configuration_name):
"""Sets the id_check_configuration_name of this InPersonSigner.
Specifies authentication check by name. The names used here must be the same as the authentication type names used by the account (these name can also be found in the web console sending interface in the Identify list for a recipient,) This overrides any default authentication setting. *Example*: Your account has ID Check and SMS Authentication available and in the web console Identify list these appear as 'ID Check $' and 'SMS Auth $'. To use ID check in an envelope, the idCheckConfigurationName should be 'ID Check '. If you wanted to use SMS, it would be 'SMS Auth $' and you would need to add you would need to add phone number information to the `smsAuthentication` node. # noqa: E501
:param id_check_configuration_name: The id_check_configuration_name of this InPersonSigner. # noqa: E501
:type: str
"""
self._id_check_configuration_name = id_check_configuration_name
@property
def id_check_configuration_name_metadata(self):
"""Gets the id_check_configuration_name_metadata of this InPersonSigner. # noqa: E501
:return: The id_check_configuration_name_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._id_check_configuration_name_metadata
@id_check_configuration_name_metadata.setter
def id_check_configuration_name_metadata(self, id_check_configuration_name_metadata):
"""Sets the id_check_configuration_name_metadata of this InPersonSigner.
:param id_check_configuration_name_metadata: The id_check_configuration_name_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._id_check_configuration_name_metadata = id_check_configuration_name_metadata
@property
def id_check_information_input(self):
"""Gets the id_check_information_input of this InPersonSigner. # noqa: E501
:return: The id_check_information_input of this InPersonSigner. # noqa: E501
:rtype: IdCheckInformationInput
"""
return self._id_check_information_input
@id_check_information_input.setter
def id_check_information_input(self, id_check_information_input):
"""Sets the id_check_information_input of this InPersonSigner.
:param id_check_information_input: The id_check_information_input of this InPersonSigner. # noqa: E501
:type: IdCheckInformationInput
"""
self._id_check_information_input = id_check_information_input
@property
def identity_verification(self):
"""Gets the identity_verification of this InPersonSigner. # noqa: E501
:return: The identity_verification of this InPersonSigner. # noqa: E501
:rtype: RecipientIdentityVerification
"""
return self._identity_verification
@identity_verification.setter
def identity_verification(self, identity_verification):
"""Sets the identity_verification of this InPersonSigner.
:param identity_verification: The identity_verification of this InPersonSigner. # noqa: E501
:type: RecipientIdentityVerification
"""
self._identity_verification = identity_verification
@property
def inherit_email_notification_configuration(self):
"""Gets the inherit_email_notification_configuration of this InPersonSigner. # noqa: E501
When set to **true** and the envelope recipient creates a DocuSign account after signing, the Manage Account Email Notification settings are used as the default settings for the recipient's account. # noqa: E501
:return: The inherit_email_notification_configuration of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._inherit_email_notification_configuration
@inherit_email_notification_configuration.setter
def inherit_email_notification_configuration(self, inherit_email_notification_configuration):
"""Sets the inherit_email_notification_configuration of this InPersonSigner.
When set to **true** and the envelope recipient creates a DocuSign account after signing, the Manage Account Email Notification settings are used as the default settings for the recipient's account. # noqa: E501
:param inherit_email_notification_configuration: The inherit_email_notification_configuration of this InPersonSigner. # noqa: E501
:type: str
"""
self._inherit_email_notification_configuration = inherit_email_notification_configuration
@property
def in_person_signing_type(self):
"""Gets the in_person_signing_type of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The in_person_signing_type of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._in_person_signing_type
@in_person_signing_type.setter
def in_person_signing_type(self, in_person_signing_type):
"""Sets the in_person_signing_type of this InPersonSigner.
# noqa: E501
:param in_person_signing_type: The in_person_signing_type of this InPersonSigner. # noqa: E501
:type: str
"""
self._in_person_signing_type = in_person_signing_type
@property
def in_person_signing_type_metadata(self):
"""Gets the in_person_signing_type_metadata of this InPersonSigner. # noqa: E501
:return: The in_person_signing_type_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._in_person_signing_type_metadata
@in_person_signing_type_metadata.setter
def in_person_signing_type_metadata(self, in_person_signing_type_metadata):
"""Sets the in_person_signing_type_metadata of this InPersonSigner.
:param in_person_signing_type_metadata: The in_person_signing_type_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._in_person_signing_type_metadata = in_person_signing_type_metadata
@property
def locked_recipient_phone_auth_editable(self):
"""Gets the locked_recipient_phone_auth_editable of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The locked_recipient_phone_auth_editable of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._locked_recipient_phone_auth_editable
@locked_recipient_phone_auth_editable.setter
def locked_recipient_phone_auth_editable(self, locked_recipient_phone_auth_editable):
"""Sets the locked_recipient_phone_auth_editable of this InPersonSigner.
# noqa: E501
:param locked_recipient_phone_auth_editable: The locked_recipient_phone_auth_editable of this InPersonSigner. # noqa: E501
:type: str
"""
self._locked_recipient_phone_auth_editable = locked_recipient_phone_auth_editable
@property
def locked_recipient_sms_editable(self):
"""Gets the locked_recipient_sms_editable of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The locked_recipient_sms_editable of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._locked_recipient_sms_editable
@locked_recipient_sms_editable.setter
def locked_recipient_sms_editable(self, locked_recipient_sms_editable):
"""Sets the locked_recipient_sms_editable of this InPersonSigner.
# noqa: E501
:param locked_recipient_sms_editable: The locked_recipient_sms_editable of this InPersonSigner. # noqa: E501
:type: str
"""
self._locked_recipient_sms_editable = locked_recipient_sms_editable
@property
def name(self):
"""Gets the name of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The name of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this InPersonSigner.
# noqa: E501
:param name: The name of this InPersonSigner. # noqa: E501
:type: str
"""
self._name = name
@property
def name_metadata(self):
"""Gets the name_metadata of this InPersonSigner. # noqa: E501
:return: The name_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._name_metadata
@name_metadata.setter
def name_metadata(self, name_metadata):
"""Sets the name_metadata of this InPersonSigner.
:param name_metadata: The name_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._name_metadata = name_metadata
@property
def notary_host(self):
"""Gets the notary_host of this InPersonSigner. # noqa: E501
:return: The notary_host of this InPersonSigner. # noqa: E501
:rtype: NotaryHost
"""
return self._notary_host
@notary_host.setter
def notary_host(self, notary_host):
"""Sets the notary_host of this InPersonSigner.
:param notary_host: The notary_host of this InPersonSigner. # noqa: E501
:type: NotaryHost
"""
self._notary_host = notary_host
@property
def notary_id(self):
"""Gets the notary_id of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The notary_id of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._notary_id
@notary_id.setter
def notary_id(self, notary_id):
"""Sets the notary_id of this InPersonSigner.
# noqa: E501
:param notary_id: The notary_id of this InPersonSigner. # noqa: E501
:type: str
"""
self._notary_id = notary_id
@property
def note(self):
"""Gets the note of this InPersonSigner. # noqa: E501
Specifies a note that is unique to this recipient. This note is sent to the recipient via the signing email. The note displays in the signing UI near the upper left corner of the document on the signing screen. Maximum Length: 1000 characters. # noqa: E501
:return: The note of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._note
@note.setter
def note(self, note):
"""Sets the note of this InPersonSigner.
Specifies a note that is unique to this recipient. This note is sent to the recipient via the signing email. The note displays in the signing UI near the upper left corner of the document on the signing screen. Maximum Length: 1000 characters. # noqa: E501
:param note: The note of this InPersonSigner. # noqa: E501
:type: str
"""
self._note = note
@property
def note_metadata(self):
"""Gets the note_metadata of this InPersonSigner. # noqa: E501
:return: The note_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._note_metadata
@note_metadata.setter
def note_metadata(self, note_metadata):
"""Sets the note_metadata of this InPersonSigner.
:param note_metadata: The note_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._note_metadata = note_metadata
@property
def offline_attributes(self):
"""Gets the offline_attributes of this InPersonSigner. # noqa: E501
:return: The offline_attributes of this InPersonSigner. # noqa: E501
:rtype: OfflineAttributes
"""
return self._offline_attributes
@offline_attributes.setter
def offline_attributes(self, offline_attributes):
"""Sets the offline_attributes of this InPersonSigner.
:param offline_attributes: The offline_attributes of this InPersonSigner. # noqa: E501
:type: OfflineAttributes
"""
self._offline_attributes = offline_attributes
@property
def phone_authentication(self):
"""Gets the phone_authentication of this InPersonSigner. # noqa: E501
:return: The phone_authentication of this InPersonSigner. # noqa: E501
:rtype: RecipientPhoneAuthentication
"""
return self._phone_authentication
@phone_authentication.setter
def phone_authentication(self, phone_authentication):
"""Sets the phone_authentication of this InPersonSigner.
:param phone_authentication: The phone_authentication of this InPersonSigner. # noqa: E501
:type: RecipientPhoneAuthentication
"""
self._phone_authentication = phone_authentication
@property
def recipient_attachments(self):
"""Gets the recipient_attachments of this InPersonSigner. # noqa: E501
Reserved: # noqa: E501
:return: The recipient_attachments of this InPersonSigner. # noqa: E501
:rtype: list[RecipientAttachment]
"""
return self._recipient_attachments
@recipient_attachments.setter
def recipient_attachments(self, recipient_attachments):
"""Sets the recipient_attachments of this InPersonSigner.
Reserved: # noqa: E501
:param recipient_attachments: The recipient_attachments of this InPersonSigner. # noqa: E501
:type: list[RecipientAttachment]
"""
self._recipient_attachments = recipient_attachments
@property
def recipient_authentication_status(self):
"""Gets the recipient_authentication_status of this InPersonSigner. # noqa: E501
:return: The recipient_authentication_status of this InPersonSigner. # noqa: E501
:rtype: AuthenticationStatus
"""
return self._recipient_authentication_status
@recipient_authentication_status.setter
def recipient_authentication_status(self, recipient_authentication_status):
"""Sets the recipient_authentication_status of this InPersonSigner.
:param recipient_authentication_status: The recipient_authentication_status of this InPersonSigner. # noqa: E501
:type: AuthenticationStatus
"""
self._recipient_authentication_status = recipient_authentication_status
@property
def recipient_feature_metadata(self):
"""Gets the recipient_feature_metadata of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The recipient_feature_metadata of this InPersonSigner. # noqa: E501
:rtype: list[FeatureAvailableMetadata]
"""
return self._recipient_feature_metadata
@recipient_feature_metadata.setter
def recipient_feature_metadata(self, recipient_feature_metadata):
"""Sets the recipient_feature_metadata of this InPersonSigner.
# noqa: E501
:param recipient_feature_metadata: The recipient_feature_metadata of this InPersonSigner. # noqa: E501
:type: list[FeatureAvailableMetadata]
"""
self._recipient_feature_metadata = recipient_feature_metadata
@property
def recipient_id(self):
"""Gets the recipient_id of this InPersonSigner. # noqa: E501
Unique for the recipient. It is used by the tab element to indicate which recipient is to sign the Document. # noqa: E501
:return: The recipient_id of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._recipient_id
@recipient_id.setter
def recipient_id(self, recipient_id):
"""Sets the recipient_id of this InPersonSigner.
Unique for the recipient. It is used by the tab element to indicate which recipient is to sign the Document. # noqa: E501
:param recipient_id: The recipient_id of this InPersonSigner. # noqa: E501
:type: str
"""
self._recipient_id = recipient_id
@property
def recipient_id_guid(self):
"""Gets the recipient_id_guid of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The recipient_id_guid of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._recipient_id_guid
@recipient_id_guid.setter
def recipient_id_guid(self, recipient_id_guid):
"""Sets the recipient_id_guid of this InPersonSigner.
# noqa: E501
:param recipient_id_guid: The recipient_id_guid of this InPersonSigner. # noqa: E501
:type: str
"""
self._recipient_id_guid = recipient_id_guid
@property
def recipient_signature_providers(self):
"""Gets the recipient_signature_providers of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The recipient_signature_providers of this InPersonSigner. # noqa: E501
:rtype: list[RecipientSignatureProvider]
"""
return self._recipient_signature_providers
@recipient_signature_providers.setter
def recipient_signature_providers(self, recipient_signature_providers):
"""Sets the recipient_signature_providers of this InPersonSigner.
# noqa: E501
:param recipient_signature_providers: The recipient_signature_providers of this InPersonSigner. # noqa: E501
:type: list[RecipientSignatureProvider]
"""
self._recipient_signature_providers = recipient_signature_providers
@property
def recipient_supplies_tabs(self):
"""Gets the recipient_supplies_tabs of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The recipient_supplies_tabs of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._recipient_supplies_tabs
@recipient_supplies_tabs.setter
def recipient_supplies_tabs(self, recipient_supplies_tabs):
"""Sets the recipient_supplies_tabs of this InPersonSigner.
# noqa: E501
:param recipient_supplies_tabs: The recipient_supplies_tabs of this InPersonSigner. # noqa: E501
:type: str
"""
self._recipient_supplies_tabs = recipient_supplies_tabs
@property
def recipient_type(self):
"""Gets the recipient_type of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The recipient_type of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._recipient_type
@recipient_type.setter
def recipient_type(self, recipient_type):
"""Sets the recipient_type of this InPersonSigner.
# noqa: E501
:param recipient_type: The recipient_type of this InPersonSigner. # noqa: E501
:type: str
"""
self._recipient_type = recipient_type
@property
def recipient_type_metadata(self):
"""Gets the recipient_type_metadata of this InPersonSigner. # noqa: E501
:return: The recipient_type_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._recipient_type_metadata
@recipient_type_metadata.setter
def recipient_type_metadata(self, recipient_type_metadata):
"""Sets the recipient_type_metadata of this InPersonSigner.
:param recipient_type_metadata: The recipient_type_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._recipient_type_metadata = recipient_type_metadata
@property
def require_id_lookup(self):
"""Gets the require_id_lookup of this InPersonSigner. # noqa: E501
When set to **true**, the recipient is required to use the specified ID check method (including Phone and SMS authentication) to validate their identity. # noqa: E501
:return: The require_id_lookup of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._require_id_lookup
@require_id_lookup.setter
def require_id_lookup(self, require_id_lookup):
"""Sets the require_id_lookup of this InPersonSigner.
When set to **true**, the recipient is required to use the specified ID check method (including Phone and SMS authentication) to validate their identity. # noqa: E501
:param require_id_lookup: The require_id_lookup of this InPersonSigner. # noqa: E501
:type: str
"""
self._require_id_lookup = require_id_lookup
@property
def require_id_lookup_metadata(self):
"""Gets the require_id_lookup_metadata of this InPersonSigner. # noqa: E501
:return: The require_id_lookup_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._require_id_lookup_metadata
@require_id_lookup_metadata.setter
def require_id_lookup_metadata(self, require_id_lookup_metadata):
"""Sets the require_id_lookup_metadata of this InPersonSigner.
:param require_id_lookup_metadata: The require_id_lookup_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._require_id_lookup_metadata = require_id_lookup_metadata
@property
def require_signer_certificate(self):
"""Gets the require_signer_certificate of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The require_signer_certificate of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._require_signer_certificate
@require_signer_certificate.setter
def require_signer_certificate(self, require_signer_certificate):
"""Sets the require_signer_certificate of this InPersonSigner.
# noqa: E501
:param require_signer_certificate: The require_signer_certificate of this InPersonSigner. # noqa: E501
:type: str
"""
self._require_signer_certificate = require_signer_certificate
@property
def require_sign_on_paper(self):
"""Gets the require_sign_on_paper of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The require_sign_on_paper of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._require_sign_on_paper
@require_sign_on_paper.setter
def require_sign_on_paper(self, require_sign_on_paper):
"""Sets the require_sign_on_paper of this InPersonSigner.
# noqa: E501
:param require_sign_on_paper: The require_sign_on_paper of this InPersonSigner. # noqa: E501
:type: str
"""
self._require_sign_on_paper = require_sign_on_paper
@property
def require_upload_signature(self):
"""Gets the require_upload_signature of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The require_upload_signature of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._require_upload_signature
@require_upload_signature.setter
def require_upload_signature(self, require_upload_signature):
"""Sets the require_upload_signature of this InPersonSigner.
# noqa: E501
:param require_upload_signature: The require_upload_signature of this InPersonSigner. # noqa: E501
:type: str
"""
self._require_upload_signature = require_upload_signature
@property
def role_name(self):
"""Gets the role_name of this InPersonSigner. # noqa: E501
Optional element. Specifies the role name associated with the recipient.<br/><br/>This is required when working with template recipients. # noqa: E501
:return: The role_name of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._role_name
@role_name.setter
def role_name(self, role_name):
"""Sets the role_name of this InPersonSigner.
Optional element. Specifies the role name associated with the recipient.<br/><br/>This is required when working with template recipients. # noqa: E501
:param role_name: The role_name of this InPersonSigner. # noqa: E501
:type: str
"""
self._role_name = role_name
@property
def routing_order(self):
"""Gets the routing_order of this InPersonSigner. # noqa: E501
Specifies the routing order of the recipient in the envelope. # noqa: E501
:return: The routing_order of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._routing_order
@routing_order.setter
def routing_order(self, routing_order):
"""Sets the routing_order of this InPersonSigner.
Specifies the routing order of the recipient in the envelope. # noqa: E501
:param routing_order: The routing_order of this InPersonSigner. # noqa: E501
:type: str
"""
self._routing_order = routing_order
@property
def routing_order_metadata(self):
"""Gets the routing_order_metadata of this InPersonSigner. # noqa: E501
:return: The routing_order_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._routing_order_metadata
@routing_order_metadata.setter
def routing_order_metadata(self, routing_order_metadata):
"""Sets the routing_order_metadata of this InPersonSigner.
:param routing_order_metadata: The routing_order_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._routing_order_metadata = routing_order_metadata
@property
def sent_date_time(self):
"""Gets the sent_date_time of this InPersonSigner. # noqa: E501
The date and time the envelope was sent. # noqa: E501
:return: The sent_date_time of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._sent_date_time
@sent_date_time.setter
def sent_date_time(self, sent_date_time):
"""Sets the sent_date_time of this InPersonSigner.
The date and time the envelope was sent. # noqa: E501
:param sent_date_time: The sent_date_time of this InPersonSigner. # noqa: E501
:type: str
"""
self._sent_date_time = sent_date_time
@property
def signature_info(self):
"""Gets the signature_info of this InPersonSigner. # noqa: E501
:return: The signature_info of this InPersonSigner. # noqa: E501
:rtype: RecipientSignatureInformation
"""
return self._signature_info
@signature_info.setter
def signature_info(self, signature_info):
"""Sets the signature_info of this InPersonSigner.
:param signature_info: The signature_info of this InPersonSigner. # noqa: E501
:type: RecipientSignatureInformation
"""
self._signature_info = signature_info
@property
def signed_date_time(self):
"""Gets the signed_date_time of this InPersonSigner. # noqa: E501
Reserved: For DocuSign use only. # noqa: E501
:return: The signed_date_time of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._signed_date_time
@signed_date_time.setter
def signed_date_time(self, signed_date_time):
"""Sets the signed_date_time of this InPersonSigner.
Reserved: For DocuSign use only. # noqa: E501
:param signed_date_time: The signed_date_time of this InPersonSigner. # noqa: E501
:type: str
"""
self._signed_date_time = signed_date_time
@property
def signer_email(self):
"""Gets the signer_email of this InPersonSigner. # noqa: E501
The email address for an InPersonSigner recipient Type. Maximum Length: 100 characters. # noqa: E501
:return: The signer_email of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._signer_email
@signer_email.setter
def signer_email(self, signer_email):
"""Sets the signer_email of this InPersonSigner.
The email address for an InPersonSigner recipient Type. Maximum Length: 100 characters. # noqa: E501
:param signer_email: The signer_email of this InPersonSigner. # noqa: E501
:type: str
"""
self._signer_email = signer_email
@property
def signer_email_metadata(self):
"""Gets the signer_email_metadata of this InPersonSigner. # noqa: E501
:return: The signer_email_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._signer_email_metadata
@signer_email_metadata.setter
def signer_email_metadata(self, signer_email_metadata):
"""Sets the signer_email_metadata of this InPersonSigner.
:param signer_email_metadata: The signer_email_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._signer_email_metadata = signer_email_metadata
@property
def signer_first_name(self):
"""Gets the signer_first_name of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The signer_first_name of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._signer_first_name
@signer_first_name.setter
def signer_first_name(self, signer_first_name):
"""Sets the signer_first_name of this InPersonSigner.
# noqa: E501
:param signer_first_name: The signer_first_name of this InPersonSigner. # noqa: E501
:type: str
"""
self._signer_first_name = signer_first_name
@property
def signer_first_name_metadata(self):
"""Gets the signer_first_name_metadata of this InPersonSigner. # noqa: E501
:return: The signer_first_name_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._signer_first_name_metadata
@signer_first_name_metadata.setter
def signer_first_name_metadata(self, signer_first_name_metadata):
"""Sets the signer_first_name_metadata of this InPersonSigner.
:param signer_first_name_metadata: The signer_first_name_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._signer_first_name_metadata = signer_first_name_metadata
@property
def signer_last_name(self):
"""Gets the signer_last_name of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The signer_last_name of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._signer_last_name
@signer_last_name.setter
def signer_last_name(self, signer_last_name):
"""Sets the signer_last_name of this InPersonSigner.
# noqa: E501
:param signer_last_name: The signer_last_name of this InPersonSigner. # noqa: E501
:type: str
"""
self._signer_last_name = signer_last_name
@property
def signer_last_name_metadata(self):
"""Gets the signer_last_name_metadata of this InPersonSigner. # noqa: E501
:return: The signer_last_name_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._signer_last_name_metadata
@signer_last_name_metadata.setter
def signer_last_name_metadata(self, signer_last_name_metadata):
"""Sets the signer_last_name_metadata of this InPersonSigner.
:param signer_last_name_metadata: The signer_last_name_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._signer_last_name_metadata = signer_last_name_metadata
    @property
    def signer_name(self):
        """The in-person signer's full legal name.

        Required by DocuSign. Maximum length: 100 characters.

        :return: The signer_name of this InPersonSigner.
        :rtype: str
        """
        return self._signer_name
    @signer_name.setter
    def signer_name(self, signer_name):
        """Sets the signer_name of this InPersonSigner.

        Required. The full legal name of the signer for the envelope.
        Maximum length: 100 characters.

        :param signer_name: The signer_name of this InPersonSigner.
        :type: str
        """
        self._signer_name = signer_name
@property
def signer_name_metadata(self):
"""Gets the signer_name_metadata of this InPersonSigner. # noqa: E501
:return: The signer_name_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._signer_name_metadata
@signer_name_metadata.setter
def signer_name_metadata(self, signer_name_metadata):
"""Sets the signer_name_metadata of this InPersonSigner.
:param signer_name_metadata: The signer_name_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._signer_name_metadata = signer_name_metadata
@property
def sign_in_each_location(self):
"""Gets the sign_in_each_location of this InPersonSigner. # noqa: E501
When set to **true**, specifies that the signer must sign in all locations. # noqa: E501
:return: The sign_in_each_location of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._sign_in_each_location
@sign_in_each_location.setter
def sign_in_each_location(self, sign_in_each_location):
"""Sets the sign_in_each_location of this InPersonSigner.
When set to **true**, specifies that the signer must sign in all locations. # noqa: E501
:param sign_in_each_location: The sign_in_each_location of this InPersonSigner. # noqa: E501
:type: str
"""
self._sign_in_each_location = sign_in_each_location
@property
def sign_in_each_location_metadata(self):
"""Gets the sign_in_each_location_metadata of this InPersonSigner. # noqa: E501
:return: The sign_in_each_location_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._sign_in_each_location_metadata
@sign_in_each_location_metadata.setter
def sign_in_each_location_metadata(self, sign_in_each_location_metadata):
"""Sets the sign_in_each_location_metadata of this InPersonSigner.
:param sign_in_each_location_metadata: The sign_in_each_location_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._sign_in_each_location_metadata = sign_in_each_location_metadata
@property
def signing_group_id(self):
"""Gets the signing_group_id of this InPersonSigner. # noqa: E501
When set to **true** and the feature is enabled in the sender's account, the signing recipient is required to draw signatures and initials at each signature/initial tab ( instead of adopting a signature/initial style or only drawing a signature/initial once). # noqa: E501
:return: The signing_group_id of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._signing_group_id
@signing_group_id.setter
def signing_group_id(self, signing_group_id):
"""Sets the signing_group_id of this InPersonSigner.
When set to **true** and the feature is enabled in the sender's account, the signing recipient is required to draw signatures and initials at each signature/initial tab ( instead of adopting a signature/initial style or only drawing a signature/initial once). # noqa: E501
:param signing_group_id: The signing_group_id of this InPersonSigner. # noqa: E501
:type: str
"""
self._signing_group_id = signing_group_id
@property
def signing_group_id_metadata(self):
"""Gets the signing_group_id_metadata of this InPersonSigner. # noqa: E501
:return: The signing_group_id_metadata of this InPersonSigner. # noqa: E501
:rtype: PropertyMetadata
"""
return self._signing_group_id_metadata
@signing_group_id_metadata.setter
def signing_group_id_metadata(self, signing_group_id_metadata):
"""Sets the signing_group_id_metadata of this InPersonSigner.
:param signing_group_id_metadata: The signing_group_id_metadata of this InPersonSigner. # noqa: E501
:type: PropertyMetadata
"""
self._signing_group_id_metadata = signing_group_id_metadata
@property
def signing_group_name(self):
"""Gets the signing_group_name of this InPersonSigner. # noqa: E501
The display name for the signing group. Maximum Length: 100 characters. # noqa: E501
:return: The signing_group_name of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._signing_group_name
@signing_group_name.setter
def signing_group_name(self, signing_group_name):
"""Sets the signing_group_name of this InPersonSigner.
The display name for the signing group. Maximum Length: 100 characters. # noqa: E501
:param signing_group_name: The signing_group_name of this InPersonSigner. # noqa: E501
:type: str
"""
self._signing_group_name = signing_group_name
@property
def signing_group_users(self):
"""Gets the signing_group_users of this InPersonSigner. # noqa: E501
A complex type that contains information about users in the signing group. # noqa: E501
:return: The signing_group_users of this InPersonSigner. # noqa: E501
:rtype: list[UserInfo]
"""
return self._signing_group_users
@signing_group_users.setter
def signing_group_users(self, signing_group_users):
"""Sets the signing_group_users of this InPersonSigner.
A complex type that contains information about users in the signing group. # noqa: E501
:param signing_group_users: The signing_group_users of this InPersonSigner. # noqa: E501
:type: list[UserInfo]
"""
self._signing_group_users = signing_group_users
@property
def sms_authentication(self):
"""Gets the sms_authentication of this InPersonSigner. # noqa: E501
:return: The sms_authentication of this InPersonSigner. # noqa: E501
:rtype: RecipientSMSAuthentication
"""
return self._sms_authentication
@sms_authentication.setter
def sms_authentication(self, sms_authentication):
"""Sets the sms_authentication of this InPersonSigner.
:param sms_authentication: The sms_authentication of this InPersonSigner. # noqa: E501
:type: RecipientSMSAuthentication
"""
self._sms_authentication = sms_authentication
@property
def social_authentications(self):
"""Gets the social_authentications of this InPersonSigner. # noqa: E501
Lists the social ID type that can be used for recipient authentication. # noqa: E501
:return: The social_authentications of this InPersonSigner. # noqa: E501
:rtype: list[SocialAuthentication]
"""
return self._social_authentications
@social_authentications.setter
def social_authentications(self, social_authentications):
"""Sets the social_authentications of this InPersonSigner.
Lists the social ID type that can be used for recipient authentication. # noqa: E501
:param social_authentications: The social_authentications of this InPersonSigner. # noqa: E501
:type: list[SocialAuthentication]
"""
self._social_authentications = social_authentications
    @property
    def status(self):
        """The envelope status.

        Valid values are:

        * ``sent`` - the envelope has been sent to the recipients.
        * ``created`` - the envelope is saved as a draft and can be
          modified and sent later.

        :return: The status of this InPersonSigner.
        :rtype: str
        """
        return self._status
    @status.setter
    def status(self, status):
        """Sets the status of this InPersonSigner.

        Valid values are: ``sent`` (the envelope is sent to the recipients)
        and ``created`` (the envelope is saved as a draft and can be
        modified and sent later).

        :param status: The status of this InPersonSigner.
        :type: str
        """
        self._status = status
@property
def status_code(self):
"""Gets the status_code of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The status_code of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._status_code
@status_code.setter
def status_code(self, status_code):
"""Sets the status_code of this InPersonSigner.
# noqa: E501
:param status_code: The status_code of this InPersonSigner. # noqa: E501
:type: str
"""
self._status_code = status_code
@property
def suppress_emails(self):
"""Gets the suppress_emails of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The suppress_emails of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._suppress_emails
@suppress_emails.setter
def suppress_emails(self, suppress_emails):
"""Sets the suppress_emails of this InPersonSigner.
# noqa: E501
:param suppress_emails: The suppress_emails of this InPersonSigner. # noqa: E501
:type: str
"""
self._suppress_emails = suppress_emails
@property
def tabs(self):
"""Gets the tabs of this InPersonSigner. # noqa: E501
:return: The tabs of this InPersonSigner. # noqa: E501
:rtype: Tabs
"""
return self._tabs
@tabs.setter
def tabs(self, tabs):
"""Sets the tabs of this InPersonSigner.
:param tabs: The tabs of this InPersonSigner. # noqa: E501
:type: Tabs
"""
self._tabs = tabs
@property
def template_locked(self):
"""Gets the template_locked of this InPersonSigner. # noqa: E501
When set to **true**, the sender cannot change any attributes of the recipient. Used only when working with template recipients. # noqa: E501
:return: The template_locked of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._template_locked
@template_locked.setter
def template_locked(self, template_locked):
"""Sets the template_locked of this InPersonSigner.
When set to **true**, the sender cannot change any attributes of the recipient. Used only when working with template recipients. # noqa: E501
:param template_locked: The template_locked of this InPersonSigner. # noqa: E501
:type: str
"""
self._template_locked = template_locked
@property
def template_required(self):
"""Gets the template_required of this InPersonSigner. # noqa: E501
When set to **true**, the sender may not remove the recipient. Used only when working with template recipients. # noqa: E501
:return: The template_required of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._template_required
@template_required.setter
def template_required(self, template_required):
"""Sets the template_required of this InPersonSigner.
When set to **true**, the sender may not remove the recipient. Used only when working with template recipients. # noqa: E501
:param template_required: The template_required of this InPersonSigner. # noqa: E501
:type: str
"""
self._template_required = template_required
@property
def total_tab_count(self):
"""Gets the total_tab_count of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The total_tab_count of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._total_tab_count
@total_tab_count.setter
def total_tab_count(self, total_tab_count):
"""Sets the total_tab_count of this InPersonSigner.
# noqa: E501
:param total_tab_count: The total_tab_count of this InPersonSigner. # noqa: E501
:type: str
"""
self._total_tab_count = total_tab_count
@property
def user_id(self):
"""Gets the user_id of this InPersonSigner. # noqa: E501
# noqa: E501
:return: The user_id of this InPersonSigner. # noqa: E501
:rtype: str
"""
return self._user_id
@user_id.setter
def user_id(self, user_id):
"""Sets the user_id of this InPersonSigner.
# noqa: E501
:param user_id: The user_id of this InPersonSigner. # noqa: E501
:type: str
"""
self._user_id = user_id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(InPersonSigner, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, InPersonSigner):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| mit |
eeshangarg/oh-mainline | vendor/packages/Django/scripts/manage_translations.py | 41 | 6210 | #!/usr/bin/env python
#
# This python file contains utility scripts to manage Django translations.
# It has to be run inside the django git root directory.
#
# The following commands are available:
#
# * update_catalogs: check for new strings in core and contrib catalogs, and
# output how many strings are new/changed.
#
# * lang_stats: output statistics for each catalog/language combination
#
# * fetch: fetch translations from transifex.com
#
# Each command supports the --languages and --resources options to limit its
# operation to the specified language or resource. For example, to get stats
# for Spanish in contrib.admin, run:
#
# $ python scripts/manage_translations.py lang_stats --language=es --resources=admin
import os
from optparse import OptionParser
from subprocess import call, Popen, PIPE
from django.core.management import call_command
HAVE_JS = ['admin']
def _get_locale_dirs(include_core=True):
    """
    Return a list of (catalog name, absolute locale path) pairs for every
    contrib app that ships translations, optionally prepending the django
    core catalog.
    """
    contrib_dir = os.path.join(os.getcwd(), 'django', 'contrib')
    dirs = []
    for contrib_name in os.listdir(contrib_dir):
        locale_path = os.path.join(contrib_dir, contrib_name, 'locale')
        if not os.path.isdir(locale_path):
            continue
        dirs.append((contrib_name, locale_path))
        # Apps with javascript catalogs get an extra "<name>-js" entry
        # pointing at the same locale directory.
        if contrib_name in HAVE_JS:
            dirs.append(("%s-js" % contrib_name, locale_path))
    if include_core:
        dirs.insert(0, ('core', os.path.join(os.getcwd(), 'django', 'conf', 'locale')))
    return dirs
def _tx_resource_for_name(name):
""" Return the Transifex resource name """
if name == 'core':
return "django-core.core"
else:
return "django-core.contrib-%s" % name
def _check_diff(cat_name, base_path):
    """
    Print the approximate number of changed/added strings in the en catalog
    under base_path, based on the uncommitted git diff.
    """
    extension = 'js' if cat_name.endswith('-js') else ''
    po_path = '%(path)s/en/LC_MESSAGES/django%(ext)s.po' % {
        'path': base_path, 'ext': extension}
    cmd = ("git diff -U0 %s | egrep -v '^@@|^[-+]#|^..POT-Creation' | wc -l"
           % po_path)
    p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
    output, errors = p.communicate()
    # Subtract the 4 constant header lines a non-empty diff always contains.
    num_changes = int(output.strip()) - 4
    print("%d changed/added messages in '%s' catalog." % (num_changes, cat_name))
def update_catalogs(resources=None, languages=None):
    """
    Update the en/LC_MESSAGES/django.po (main and contrib) files with
    new/updated translatable strings, printing a summary of what changed.
    """
    contrib_dirs = _get_locale_dirs(include_core=False)

    # Main (core) catalog first.
    os.chdir(os.path.join(os.getcwd(), 'django'))
    print("Updating main en catalog")
    call_command('makemessages', locale='en')
    _check_diff('core', os.path.join(os.getcwd(), 'conf', 'locale'))

    # Contrib catalogs
    for name, locale_dir in contrib_dirs:
        if resources and name not in resources:
            continue
        # makemessages must run from the app directory (parent of 'locale').
        os.chdir(os.path.join(locale_dir, '..'))
        print("Updating en catalog in %s" % locale_dir)
        if name.endswith('-js'):
            call_command('makemessages', locale='en', domain='djangojs')
        else:
            call_command('makemessages', locale='en')
        _check_diff(name, locale_dir)
def lang_stats(resources=None, languages=None):
    """
    Print language statistics of committed translation files for each
    Django catalog, using msgfmt's verbose output.

    If resources is provided, it should be a list of translation resources
    to limit the output (e.g. ['core', 'gis']).
    """
    for name, locale_dir in _get_locale_dirs():
        if resources and name not in resources:
            continue
        print("\nShowing translations stats for '%s':" % name)
        langs = sorted(d for d in os.listdir(locale_dir) if not d.startswith('_'))
        for lang in langs:
            if languages and lang not in languages:
                continue
            # TODO: merge first with the latest en catalog
            p = Popen("msgfmt -vc -o /dev/null %(path)s/%(lang)s/LC_MESSAGES/django%(ext)s.po" % {
                'path': locale_dir, 'lang': lang, 'ext': 'js' if name.endswith('-js') else ''},
                stdout=PIPE, stderr=PIPE, shell=True)
            output, errors = p.communicate()
            if p.returncode == 0:
                # msgfmt writes its statistics to stderr
                print("%s: %s" % (lang, errors.strip()))
def fetch(resources=None, languages=None):
    """
    Fetch translations from Transifex, wrap long lines, generate mo files.

    :param resources: optional list of resource names to restrict the fetch.
    :param languages: optional list of language codes; when None, every
        language available on Transifex is pulled for each resource.
    """
    locale_dirs = _get_locale_dirs()
    for name, dir_ in locale_dirs:
        if resources and name not in resources:
            continue

        # Transifex pull
        if languages is None:
            call('tx pull -r %(res)s -a -f' % {'res': _tx_resource_for_name(name)}, shell=True)
            # BUGFIX: use a local variable instead of rebinding `languages`.
            # Rebinding the parameter made the language list discovered for
            # the FIRST resource leak into every subsequent resource, and
            # skipped their full '-a' pull entirely.
            target_langs = sorted(d for d in os.listdir(dir_) if not d.startswith('_'))
        else:
            for lang in languages:
                call('tx pull -r %(res)s -f -l %(lang)s' % {
                    'res': _tx_resource_for_name(name), 'lang': lang}, shell=True)
            target_langs = languages

        # msgcat to wrap lines and msgfmt for compilation of .mo file
        for lang in target_langs:
            po_path = '%(path)s/%(lang)s/LC_MESSAGES/django%(ext)s.po' % {
                'path': dir_, 'lang': lang, 'ext': 'js' if name.endswith('-js') else ''}
            call('msgcat -o %s %s' % (po_path, po_path), shell=True)
            mo_path = '%s.mo' % po_path[:-3]
            call('msgfmt -o %s %s' % (mo_path, po_path), shell=True)
if __name__ == "__main__":
    # Whitelist of commands this script can run.
    RUNABLE_SCRIPTS = ('update_catalogs', 'lang_stats', 'fetch')

    parser = OptionParser(usage="usage: %prog [options] cmd")
    parser.add_option("-r", "--resources", action='append',
        help="limit operation to the specified resources")
    parser.add_option("-l", "--languages", action='append',
        help="limit operation to the specified languages")
    options, args = parser.parse_args()

    if not args:
        parser.print_usage()
        exit(1)

    if args[0] in RUNABLE_SCRIPTS:
        # Dispatch through the module namespace instead of eval(): the
        # whitelist check is unchanged, but no code-evaluation primitive
        # is involved.
        globals()[args[0]](options.resources, options.languages)
    else:
        print("Available commands are: %s" % ", ".join(RUNABLE_SCRIPTS))
| agpl-3.0 |
Lekensteyn/buildbot | master/buildbot/test/unit/test_util_subscriptions.py | 10 | 2140 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from twisted.trial import unittest
from buildbot.util import subscription
class subscriptions(unittest.TestCase):

    def setUp(self):
        self.subpt = subscription.SubscriptionPoint('test_sub')

    def test_str(self):
        # the subscription point's name should appear in its str()
        self.assertIn('test_sub', str(self.subpt))

    def test_subscribe_unsubscribe(self):
        received = []

        def callback(*args, **kwargs):
            received.append((args, kwargs))

        # subscribing alone must not deliver anything
        sub = self.subpt.subscribe(callback)
        self.assertTrue(isinstance(sub, subscription.Subscription))
        self.assertEqual(received, [])

        # a delivery reaches the subscribed callback with args and kwargs
        self.subpt.deliver(1, 2, a=3, b=4)
        self.assertEqual(received, [((1, 2), dict(a=3, b=4))])
        received.pop()

        # after unsubscribing, deliveries are no longer received
        sub.unsubscribe()
        self.subpt.deliver(3, 4)
        self.assertEqual(received, [])

    def test_exception(self):
        def failing_callback(*args, **kwargs):
            raise RuntimeError('mah bucket!')

        # a raising subscriber must not propagate out of deliver()
        self.subpt.subscribe(failing_callback)
        try:
            self.subpt.deliver()
        except RuntimeError:
            self.fail("should not have seen exception here!")

        # log.err will cause Trial to complain about this error anyway,
        # unless we clean it up
        self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError)))
| gpl-2.0 |
ktsitsikas/odemis | src/odemis/gui/test/comp_slider_test.py | 1 | 179452 | #-*- coding: utf-8 -*-
"""
@author: Rinze de Laat
Copyright © 2012 Rinze de Laat, Delmic
This file is part of Odemis.
Odemis is free software: you can redistribute it and/or modify it under the
terms of the GNU General Public License version 2 as published by the Free
Software Foundation.
Odemis is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
Odemis. If not, see http://www.gnu.org/licenses/.
"""
#===============================================================================
# Test module for Odemis' gui.comp.text module
#===============================================================================
from __future__ import division
from odemis.gui import test
import odemis.gui.comp.slider as slidecomp
import odemis.gui.test as test
import odemis.model as model
import math
import odemis.gui.util.widgets as widgets
import unittest
import wx
# Run the GUI test suite in "manual" mode: keep the wx test frame open
# after the tests finish so a developer can inspect the widgets by hand.
test.goto_manual() # Keep the test frame open after the tests are run
# Alternative run modes, left disabled:
# test.goto_inspect()
# logging.getLogger().setLevel(logging.DEBUG)
DATA = [
[],
[0.1, 0.2, 0.3, 0.5, 0.6, 0.7, 0.9, 1.0],
[0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 
0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 
0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 
0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 
0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 
0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 
0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 
0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 
0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3],
[0.0, 0.03182561363494727, 0.06361898390404276, 0.0953479001080363, 0.12698021684778552, 0.15848388659160487, 0.18982699214346238, 0.2209777789791301, 0.2519046874175275, 0.28257638459466383, 0.31296179620778664, 0.3430301379975745, 0.37275094693647975, 0.4020941120916224, 0.4310299051309676, 0.459529010441881, 0.487562554831546, 0.5151021367791545, 0.5421198552102349, 0.5685883377639631, 0.5944807685248221, 0.6197709151905099, 0.6444331556485751, 0.6684425039348527, 0.6917746355474012, 0.7144059120902945, 0.736313405222303, 0.7574749198861987, 0.7778690167951506, 0.7974750341534307, 0.8162731085894214, 0.8342441952797205, 0.8513700872439518, 0.8676334337907361, 0.8830177580961318, 0.8975074738967401, 0.9110879012805561, 0.9237452815595754, 0.9354667912090815, 0.9462405548594964, 0.9560556573276295, 0.9649021546751365, 0.972771084282985, 0.9796544739317193, 0.9855453498783258, 0.990437743921515, 0.9943266994482632, 0.9972082764554879, 0.999079555541768, 0.9999386408650661, 0.9997846620634563, 0.9986177751369107, 0.9964391622892512, 0.9932510307304285, 0.9890566104403391, 0.9838601508964477, 0.9776669167685306, 0.9704831825848994, 0.9623162263755122, 0.9531743222984094, 0.9430667322569474, 0.9320036965163202, 0.919996423328878, 0.9070570775787515, 0.8931987684572888, 0.8784355361817884, 0.8627823377709855, 0.8462550318917048, 0.8288703627920264, 0.8106459433372488, 0.7916002371658313, 0.7717525399833982, 0.75112296001375, 0.7297323976266944, 0.7076025241633322, 0.6847557599802512, 0.6612152517348749, 0.6370048489349738, 0.6121490797761028, 0.5866731262914412, 0.560602798839214, 0.5339645099535378, 0.5067852475851887, 0.47909254775939986, 0.45091446667839175, 0.42227955229689373, 0.393216815399462, 0.3637557002088908, 0.33392605455549723, 0.30375809963749767, 0.27328239940311877, 0.2425298295854577, 0.21153154642146482, 0.1803189550867425, 0.1489236778781338, 0.11737752217634358, 0.08571244822104433, 0.05396053673111876, 0.022153956402837488, 0.009675068681091057, 
0.0414942916985869, 0.07327147575829991, 0.10497442655965462, 0.1365710250097407, 0.16802925976401129, 0.1993172596578159, 0.23040332599591795, 0.2612559646672744, 0.29184391805254806, 0.322136196692024, 0.3521021106818439, 0.3817113007667575, 0.41093376909788015, 0.43973990962430304, 0.4681005380877601, 0.4959869215899699, 0.5233708077026882, 0.5502244530909871, 0.5765206516207552, 0.6022327619219493, 0.6273347343796659, 0.651801137525691, 0.6756071838037883, 0.6987287546826255, 0.7211424250908917, 0.742825487149853, 0.7637559731793018, 0.7839126779535895],
[0.0, 0.03182561363494727, 0.06361898390404276, 0.0953479001080363, 0.12698021684778552, 0.15848388659160487, 0.18982699214346238, 0.2209777789791301, 0.2519046874175275, 0.28257638459466383, 0.31296179620778664, 0.3430301379975745, 0.37275094693647975, 0.4020941120916224, 0.4310299051309676, 0.459529010441881, 0.487562554831546, 0.5151021367791545, 0.5421198552102349, 0.5685883377639631, 0.5944807685248221, 0.6197709151905099, 0.6444331556485751, 0.6684425039348527, 0.6917746355474012, 0.7144059120902945, 0.736313405222303, 0.7574749198861987, 0.7778690167951506, 0.7974750341534307, 0.8162731085894214, 0.8342441952797205, 0.8513700872439518, 0.8676334337907361, 0.8830177580961318, 0.8975074738967401, 0.9110879012805561, 0.9237452815595754, 0.9354667912090815, 0.9462405548594964, 0.9560556573276295, 0.9649021546751365, 0.972771084282985, 0.9796544739317193, 0.9855453498783258, 0.990437743921515, 0.9943266994482632, 0.9972082764554879, 0.999079555541768, 0.9999386408650661, 0.9997846620634563, 0.9986177751369107, 0.9964391622892512, 0.9932510307304285, 0.9890566104403391, 0.9838601508964477, 0.9776669167685306, 0.9704831825848994, 0.9623162263755122, 0.9531743222984094, 0.9430667322569474, 0.9320036965163202, 0.919996423328878, 0.9070570775787515, 0.8931987684572888, 0.8784355361817884, 0.8627823377709855, 0.8462550318917048, 0.8288703627920264, 0.8106459433372488, 0.7916002371658313, 0.7717525399833982, 0.75112296001375, 0.7297323976266944, 0.7076025241633322, 0.6847557599802512, 0.6612152517348749, 0.6370048489349738, 0.6121490797761028, 0.5866731262914412, 0.560602798839214, 0.5339645099535378, 0.5067852475851887, 0.47909254775939986, 0.45091446667839175, 0.42227955229689373, 0.393216815399462, 0.3637557002088908, 0.33392605455549723, 0.30375809963749767, 0.27328239940311877, 0.2425298295854577, 0.21153154642146482, 0.1803189550867425, 0.1489236778781338, 0.11737752217634358, 0.08571244822104433, 0.05396053673111876, 0.022153956402837488, 0.009675068681091057, 
0.0414942916985869, 0.07327147575829991, 0.10497442655965462, 0.1365710250097407, 0.16802925976401129, 0.1993172596578159, 0.23040332599591795, 0.2612559646672744, 0.29184391805254806, 0.322136196692024, 0.3521021106818439, 0.3817113007667575, 0.41093376909788015, 0.43973990962430304, 0.4681005380877601, 0.4959869215899699, 0.5233708077026882, 0.5502244530909871, 0.5765206516207552, 0.6022327619219493, 0.6273347343796659, 0.651801137525691, 0.6756071838037883, 0.6987287546826255, 0.7211424250908917, 0.742825487149853, 0.7637559731793018, 0.7839126779535895, 0.8032751801852016, 0.8218238632140968, 0.8395399348818634, 0.8564054465705497, 0.8724033113868781, 0.8875173214734277, 0.9017321644292412, 0.915033438823219, 0.9274076687845899, 0.9388423176556677, 0.9493258006930703, 0.9588474968045277, 0.9673977593093892, 0.9749679257119288, 0.981550326477549, 0.9871382928029862, 0.9917261633726514, 0.9953092900942581, 0.9978840428079259, 0.9994478129639919, 0.9999990162658001, 0.9995370942747949, 0.9980625149762887, 0.9955767723053351, 0.9920823846331827, 0.9875828922158475, 0.9820828536073872, 0.9755878410415086, 0.9681044347861931, 0.9596402164770522, 0.9502037614361747, 0.9398046299842422, 0.9284533577547156, 0.9161614450199101, 0.9029413450397645, 0.8888064514451182, 0.8737710846682711, 0.8578504774345758, 0.8410607593297651, 0.8234189404586443, 0.8049428942117046, 0.7856513391571252, 0.7655638200764951, 0.7447006881634859, 0.7230830804055194, 0.7007328981693293, 0.6776727850121139, 0.6539261037407516, 0.6295169127423244, 0.6044699416099384, 0.5788105660885181, 0.5525647823659761, 0.5257591807357896, 0.49842091865767385, 0.4705776932436496, 0.44225771319736584, 0.41348967023512506, 0.3843027100175495, 0.3547264026213435, 0.32479071258107595, 0.29452596853132074, 0.2639628324799168, 0.23313226874348783, 0.2020655125766724, 0.17079403852687225, 0.13934952854655822, 0.10776383989544888, 0.07606897286508914, 0.04429703835850661, 0.012480225357817844, 0.01934923168727843, 
0.05115908551707235, 0.08291710873238416, 0.11459112644499225, 0.1461490488748609, 0.1775589038611339, 0.20878886925397827, 0.23980730515443469, 0.2705827859696336, 0.3010841322508921, 0.3312804422824277, 0.3611411233887021, 0.3906359229286669, 0.41973495894550417, 0.44840875044082895, 0.4766282472426579, 0.5043648594369051, 0.5315904863325774, 0.5582775449313184, 0.5843989978724747, 0.609928380825352, 0.6348398293009263, 0.6591081048558398, 0.6827086206621275, 0.7056174664167802, 0.7278114325659043, 0.7492680338189254, 0.7699655319290326, 0.7898829577167641, 0.8090001323144352, 0.8272976876098822, 0.8447570858688016, 0.8613606385158188, 0.8770915240552496, 0.8919338051133966, 0.905872444585125, 0.9188933208683463, 0.9309832421709858, 0.9421299598759351, 0.9523221809504453, 0.9615495793873985, 0.9698028066668546, 0.9770735012272838, 0.9833542969368844, 0.9886388305564042, 0.9929217481859058, 0.9961987106889432, 0.9984663980886549, 0.99972251293132, 0.9999657826139688, 0.9991959606736905, 0.9974138270373308, 0.9946211872313283, 0.9908208705524871, 0.986016727201543, 0.9802136243824232, 0.9734174413711545, 0.9656350635594129],
[0.0, 0.03182561363494727, 0.06361898390404276, 0.0953479001080363, 0.12698021684778552, 0.15848388659160487, 0.18982699214346238, 0.2209777789791301, 0.2519046874175275, 0.28257638459466383, 0.31296179620778664, 0.3430301379975745, 0.37275094693647975, 0.4020941120916224, 0.4310299051309676, 0.459529010441881, 0.487562554831546, 0.5151021367791545, 0.5421198552102349, 0.5685883377639631, 0.5944807685248221, 0.6197709151905099, 0.6444331556485751, 0.6684425039348527, 0.6917746355474012, 0.7144059120902945, 0.736313405222303, 0.7574749198861987, 0.7778690167951506, 0.7974750341534307, 0.8162731085894214, 0.8342441952797205, 0.8513700872439518, 0.8676334337907361, 0.8830177580961318, 0.8975074738967401, 0.9110879012805561, 0.9237452815595754, 0.9354667912090815, 0.9462405548594964, 0.9560556573276295, 0.9649021546751365, 0.972771084282985, 0.9796544739317193, 0.9855453498783258, 0.990437743921515, 0.9943266994482632, 0.9972082764554879, 0.999079555541768, 0.9999386408650661, 0.9997846620634563, 0.9986177751369107, 0.9964391622892512, 0.9932510307304285, 0.9890566104403391, 0.9838601508964477, 0.9776669167685306, 0.9704831825848994, 0.9623162263755122, 0.9531743222984094, 0.9430667322569474, 0.9320036965163202, 0.919996423328878, 0.9070570775787515, 0.8931987684572888, 0.8784355361817884, 0.8627823377709855, 0.8462550318917048, 0.8288703627920264, 0.8106459433372488, 0.7916002371658313, 0.7717525399833982, 0.75112296001375, 0.7297323976266944, 0.7076025241633322, 0.6847557599802512, 0.6612152517348749, 0.6370048489349738, 0.6121490797761028, 0.5866731262914412, 0.560602798839214, 0.5339645099535378, 0.5067852475851887, 0.47909254775939986, 0.45091446667839175, 0.42227955229689373, 0.393216815399462, 0.3637557002088908, 0.33392605455549723, 0.30375809963749767, 0.27328239940311877, 0.2425298295854577, 0.21153154642146482, 0.1803189550867425, 0.1489236778781338, 0.11737752217634358, 0.08571244822104433, 0.05396053673111876, 0.022153956402837488, 0.009675068681091057, 
0.0414942916985869, 0.07327147575829991, 0.10497442655965462, 0.1365710250097407, 0.16802925976401129, 0.1993172596578159, 0.23040332599591795, 0.2612559646672744, 0.29184391805254806, 0.322136196692024, 0.3521021106818439, 0.3817113007667575, 0.41093376909788015, 0.43973990962430304, 0.4681005380877601, 0.4959869215899699, 0.5233708077026882, 0.5502244530909871, 0.5765206516207552, 0.6022327619219493, 0.6273347343796659, 0.651801137525691, 0.6756071838037883, 0.6987287546826255, 0.7211424250908917, 0.742825487149853, 0.7637559731793018, 0.7839126779535895, 0.8032751801852016, 0.8218238632140968, 0.8395399348818634, 0.8564054465705497, 0.8724033113868781, 0.8875173214734277, 0.9017321644292412, 0.915033438823219, 0.9274076687845899, 0.9388423176556677, 0.9493258006930703, 0.9588474968045277, 0.9673977593093892, 0.9749679257119288, 0.981550326477549, 0.9871382928029862, 0.9917261633726514, 0.9953092900942581, 0.9978840428079259, 0.9994478129639919, 0.9999990162658001, 0.9995370942747949, 0.9980625149762887, 0.9955767723053351, 0.9920823846331827, 0.9875828922158475, 0.9820828536073872, 0.9755878410415086, 0.9681044347861931, 0.9596402164770522, 0.9502037614361747, 0.9398046299842422, 0.9284533577547156, 0.9161614450199101, 0.9029413450397645, 0.8888064514451182, 0.8737710846682711, 0.8578504774345758, 0.8410607593297651, 0.8234189404586443, 0.8049428942117046, 0.7856513391571252, 0.7655638200764951, 0.7447006881634859, 0.7230830804055194, 0.7007328981693293, 0.6776727850121139, 0.6539261037407516, 0.6295169127423244, 0.6044699416099384, 0.5788105660885181, 0.5525647823659761, 0.5257591807357896, 0.49842091865767385, 0.4705776932436496, 0.44225771319736584, 0.41348967023512506, 0.3843027100175495, 0.3547264026213435, 0.32479071258107595, 0.29452596853132074, 0.2639628324799168, 0.23313226874348783, 0.2020655125766724, 0.17079403852687225, 0.13934952854655822, 0.10776383989544888, 0.07606897286508914, 0.04429703835850661, 0.012480225357817844, 0.01934923168727843, 
0.05115908551707235, 0.08291710873238416, 0.11459112644499225, 0.1461490488748609, 0.1775589038611339, 0.20878886925397827, 0.23980730515443469, 0.2705827859696336, 0.3010841322508921, 0.3312804422824277, 0.3611411233887021, 0.3906359229286669, 0.41973495894550417, 0.44840875044082895, 0.4766282472426579, 0.5043648594369051, 0.5315904863325774, 0.5582775449313184, 0.5843989978724747, 0.609928380825352, 0.6348398293009263, 0.6591081048558398, 0.6827086206621275, 0.7056174664167802, 0.7278114325659043, 0.7492680338189254, 0.7699655319290326, 0.7898829577167641, 0.8090001323144352, 0.8272976876098822, 0.8447570858688016, 0.8613606385158188, 0.8770915240552496, 0.8919338051133966, 0.905872444585125, 0.9188933208683463, 0.9309832421709858, 0.9421299598759351, 0.9523221809504453, 0.9615495793873985, 0.9698028066668546, 0.9770735012272838, 0.9833542969368844, 0.9886388305564042, 0.9929217481859058, 0.9961987106889432, 0.9984663980886549, 0.99972251293132, 0.9999657826139688, 0.9991959606736905, 0.9974138270373308, 0.9946211872313283, 0.9908208705524871, 0.986016727201543, 0.9802136243824232, 0.9734174413711545, 0.9656350635594129, 0.9568743754787543, 0.9471442528125871, 0.936454553403984, 0.9248161072684422, 0.9122407056217109, 0.8987410889337976, 0.884330934021267, 0.8690248401908961, 0.8528383144487337, 0.835787755789545, 0.8178904385825557, 0.79916449507034, 0.7796288969985652, 0.7593034363952178, 0.738208705518782, 0.7163660759956753, 0.6937976771680996, 0.6705263736742183, 0.646575742283391, 0.6219700480099303, 0.5967342195295723, 0.5708938239235881, 0.5444750407760963, 0.5175046356508392, 0.49000993297428647, 0.46201878835254223, 0.4335595603500918, 0.4046610817590037, 0.37535263038766403, 0.34566389939866043, 0.3156249672258603, 0.2852662671011519, 0.25461855622174695, 0.2237128845892493, 0.19258056355208322, 0.16125313408314515, 0.12976233482480778, 0.09814006993367762, 0.06641837675764817, 0.034629393378022226, 0.002805326049578726, 0.02902158342843789, 
0.06081909037732138, 0.09255497990684253, 0.12419709955299955, 0.15571339185252295, 0.1870719268211412, 0.21824093430267696, 0.24918883615623422, 0.27988427824884154, 0.31029616222114825, 0.3403936769939883, 0.3701463299839013, 0.3995239779959603, 0.4284968577626417, 0.4570356160977725, 0.4851113396350133, 0.512695584120756, 0.5397604032317357, 0.5662783768881914, 0.5922226390338686, 0.6175669048547257, 0.6422854974087748, 0.6663533736400601, 0.6897461497504397, 0.7124401259034531, 0.7344123102352458, 0.755640442148237, 0.7761030148639082, 0.7957792972118911, 0.8146493546332603, 0.8326940693767585, 0.8498951598674921, 0.8662351992284798, 0.8816976329362729, 0.8962667955927832, 0.9099279267963079, 0.9226671860956801, 0.934471667012397, 0.9453294101165076, 0.9552294151430288, 0.9641616521366028, 0.9721170716131069, 0.9790876137279269, 0.9850662164415938, 0.9900468226745243, 0.9940243864436064, 0.9969948779744174, 0.9989552877838952, 0.9999036297293236, 0.999838943020546, 0.9987612931933679, 0.99667177204316, 0.9935724965187321, 0.989466606577596, 0.9843582620047928, 0.9782526381985048, 0.9711559209267238, 0.9630753000602871, 0.9540189622886287, 0.9439960828256319, 0.9330168161139766, 0.9210922855374075, 0.9082345721513418, 0.894456702443232, 0.8797726351350927, 0.8641972470415502, 0.8477463179977504, 0.8304365148723946, 0.8122853746820926, 0.7933112868241559, 0.7735334744458118, 0.7529719749687289, 0.7316476197885782, 0.7095820131701996, 0.6867975103597489, 0.6633171949360165, 0.6391648554238433, 0.6143649611933408, 0.5889426376693314, 0.5629236408761136, 0.5363343313433654, 0.5092016473995928, 0.48155307788020324, 0.4534166342778441, 0.4248208223632178, 0.39579461330514504, 0.366367414319106, 0.33656903887401857, 0.3064296764874311, 0.27597986213972164, 0.2452504453383167, 0.21427255886323762, 0.1830775872256642, 0.15169713487146322, 0.12016299416188636, 0.0885071131639038, 0.05676156328277063, 0.024958506769644805, 0.006869835862835028, 0.03869121848399617, 
0.07047340201454169, 0.10218418708879855, 0.13379144667674858, 0.16526315863276547, 0.19656743813809044, 0.22767257000418756, 0.2585470408042244, 0.28915957080016025, 0.3194791456330693, 0.3494750477446014, 0.3791168874977547, 0.4083746339654055, 0.4372186453554372, 0.46561969904161693, 0.4935490211698053, 0.5209783158095106, 0.5478797936212304, 0.5742262000105683, 0.5999908427405786, 0.6251476189743727, 0.6496710417205888, 0.673536265654938, 0.696719112291647, 0.7191960944793238, 0.7409444401964062, 0.7619421156220948, 0.7821678474594006, 0.8016011444876746, 0.8202223183228065, 0.8380125033640432, 0.8549536759072242, 0.8710286724050736, 0.8862212068560356, 0.9005158873040542, 0.9138982314325681, 0.9263546812369255, 0.9378726167603577, 0.9484403688795848, 0.9580472311271144, 0.9666834705382441, 0.9743403375117846, 0.9810100746745096, 0.9866859247403567, 0.9913621373564085, 0.9950339749287288, 0.9976977174221424, 0.9993506661291007, 0.9999911464038138, 0.9996185093588774, 0.9982331325226782, 0.9958364194569096, 0.9924307983345872, 0.9880197194800016, 0.9826076518731053, 0.9762000786218712, 0.9688034914072119, 0.9604253839060872, 0.9510742441994616, 0.9407595461728087, 0.929491739917867, 0.9172822411453783, 0.9041434196195298, 0.8900885866258228, 0.8751319814850556, 0.8592887571270982, 0.8425749647390574, 0.8250075375033982, 0.8066042734424922, 0.7873838173869694, 0.767365642086156, 0.7465700284797164, 0.7250180451505006, 0.7027315269794109, 0.6797330530239043, 0.6560459236425639, 0.6316941368888878, 0.6067023641982314, 0.5810959253925294, 0.5549007630281124, 0.5281434161126299, 0.5008509932176782, 0.4730511450143952, 0.4447720362598403, 0.4160423172625321, 0.3868910948560771, 0.35734790291026525, 0.3274426724095314, 0.29720570112908823, 0.26666762293945534, 0.23585937677047286, 0.2048121752662684, 0.17355747316290007, 0.1421269354207381, 0.11055240514386265, 0.0788658713189711, 0.0470994364065051, 0.015285283816796887, 0.016544354695787987, 0.04835723168768662, 
0.08012111669687871, 0.11180382889640425, 0.1433732696976125, 0.17479745527008414, 0.20604454894528915, 0.23708289347115938, 0.2678810430848726, 0.29840779537138673, 0.32863222287543353, 0.35852370443491804, 0.38805195620404803, 0.4171870623346678, 0.4458995052848136, 0.47416019572370105, 0.5019405020028819, 0.5292122791637417],
[0.0, 0.03182561363494727, 0.06361898390404276, 0.0953479001080363, 0.12698021684778552, 0.15848388659160487, 0.18982699214346238, 0.2209777789791301, 0.2519046874175275, 0.28257638459466383, 0.31296179620778664, 0.3430301379975745, 0.37275094693647975, 0.4020941120916224, 0.4310299051309676, 0.459529010441881, 0.487562554831546, 0.5151021367791545, 0.5421198552102349, 0.5685883377639631, 0.5944807685248221, 0.6197709151905099, 0.6444331556485751, 0.6684425039348527, 0.6917746355474012, 0.7144059120902945, 0.736313405222303, 0.7574749198861987, 0.7778690167951506, 0.7974750341534307, 0.8162731085894214, 0.8342441952797205, 0.8513700872439518, 0.8676334337907361, 0.8830177580961318, 0.8975074738967401, 0.9110879012805561, 0.9237452815595754, 0.9354667912090815, 0.9462405548594964, 0.9560556573276295, 0.9649021546751365, 0.972771084282985, 0.9796544739317193, 0.9855453498783258, 0.990437743921515, 0.9943266994482632, 0.9972082764554879, 0.999079555541768, 0.9999386408650661, 0.9997846620634563, 0.9986177751369107, 0.9964391622892512, 0.9932510307304285, 0.9890566104403391, 0.9838601508964477, 0.9776669167685306, 0.9704831825848994, 0.9623162263755122, 0.9531743222984094, 0.9430667322569474, 0.9320036965163202, 0.919996423328878, 0.9070570775787515, 0.8931987684572888, 0.8784355361817884, 0.8627823377709855, 0.8462550318917048, 0.8288703627920264, 0.8106459433372488, 0.7916002371658313, 0.7717525399833982, 0.75112296001375, 0.7297323976266944, 0.7076025241633322, 0.6847557599802512, 0.6612152517348749, 0.6370048489349738, 0.6121490797761028, 0.5866731262914412, 0.560602798839214, 0.5339645099535378, 0.5067852475851887, 0.47909254775939986, 0.45091446667839175, 0.42227955229689373, 0.393216815399462, 0.3637557002088908, 0.33392605455549723, 0.30375809963749767, 0.27328239940311877, 0.2425298295854577, 0.21153154642146482, 0.1803189550867425, 0.1489236778781338, 0.11737752217634358, 0.08571244822104433, 0.05396053673111876, 0.022153956402837488, 0.009675068681091057, 
0.0414942916985869, 0.07327147575829991, 0.10497442655965462, 0.1365710250097407, 0.16802925976401129, 0.1993172596578159, 0.23040332599591795, 0.2612559646672744, 0.29184391805254806, 0.322136196692024, 0.3521021106818439, 0.3817113007667575, 0.41093376909788015, 0.43973990962430304, 0.4681005380877601, 0.4959869215899699, 0.5233708077026882, 0.5502244530909871, 0.5765206516207552, 0.6022327619219493, 0.6273347343796659, 0.651801137525691, 0.6756071838037883, 0.6987287546826255, 0.7211424250908917, 0.742825487149853, 0.7637559731793018, 0.7839126779535895, 0.8032751801852016, 0.8218238632140968, 0.8395399348818634, 0.8564054465705497, 0.8724033113868781, 0.8875173214734277, 0.9017321644292412, 0.915033438823219, 0.9274076687845899, 0.9388423176556677, 0.9493258006930703, 0.9588474968045277, 0.9673977593093892, 0.9749679257119288, 0.981550326477549, 0.9871382928029862, 0.9917261633726514, 0.9953092900942581, 0.9978840428079259, 0.9994478129639919, 0.9999990162658001, 0.9995370942747949, 0.9980625149762887, 0.9955767723053351, 0.9920823846331827, 0.9875828922158475, 0.9820828536073872, 0.9755878410415086, 0.9681044347861931, 0.9596402164770522, 0.9502037614361747, 0.9398046299842422, 0.9284533577547156, 0.9161614450199101, 0.9029413450397645, 0.8888064514451182, 0.8737710846682711, 0.8578504774345758, 0.8410607593297651, 0.8234189404586443, 0.8049428942117046, 0.7856513391571252, 0.7655638200764951, 0.7447006881634859, 0.7230830804055194, 0.7007328981693293, 0.6776727850121139, 0.6539261037407516, 0.6295169127423244, 0.6044699416099384, 0.5788105660885181, 0.5525647823659761, 0.5257591807357896, 0.49842091865767385, 0.4705776932436496, 0.44225771319736584, 0.41348967023512506, 0.3843027100175495, 0.3547264026213435, 0.32479071258107595, 0.29452596853132074, 0.2639628324799168, 0.23313226874348783, 0.2020655125766724, 0.17079403852687225, 0.13934952854655822, 0.10776383989544888, 0.07606897286508914, 0.04429703835850661, 0.012480225357817844, 0.01934923168727843, 
0.05115908551707235, 0.08291710873238416, 0.11459112644499225, 0.1461490488748609, 0.1775589038611339, 0.20878886925397827, 0.23980730515443469, 0.2705827859696336, 0.3010841322508921, 0.3312804422824277, 0.3611411233887021, 0.3906359229286669, 0.41973495894550417, 0.44840875044082895, 0.4766282472426579, 0.5043648594369051, 0.5315904863325774, 0.5582775449313184, 0.5843989978724747, 0.609928380825352, 0.6348398293009263, 0.6591081048558398, 0.6827086206621275, 0.7056174664167802, 0.7278114325659043, 0.7492680338189254, 0.7699655319290326, 0.7898829577167641, 0.8090001323144352, 0.8272976876098822, 0.8447570858688016, 0.8613606385158188, 0.8770915240552496, 0.8919338051133966, 0.905872444585125, 0.9188933208683463, 0.9309832421709858, 0.9421299598759351, 0.9523221809504453, 0.9615495793873985, 0.9698028066668546, 0.9770735012272838, 0.9833542969368844, 0.9886388305564042, 0.9929217481859058, 0.9961987106889432, 0.9984663980886549, 0.99972251293132, 0.9999657826139688, 0.9991959606736905, 0.9974138270373308, 0.9946211872313283, 0.9908208705524871, 0.986016727201543, 0.9802136243824232, 0.9734174413711545, 0.9656350635594129, 0.9568743754787543, 0.9471442528125871, 0.936454553403984, 0.9248161072684422, 0.9122407056217109, 0.8987410889337976, 0.884330934021267, 0.8690248401908961, 0.8528383144487337, 0.835787755789545, 0.8178904385825557, 0.79916449507034, 0.7796288969985652, 0.7593034363952178, 0.738208705518782, 0.7163660759956753, 0.6937976771680996, 0.6705263736742183, 0.646575742283391, 0.6219700480099303, 0.5967342195295723, 0.5708938239235881, 0.5444750407760963, 0.5175046356508392, 0.49000993297428647, 0.46201878835254223, 0.4335595603500918, 0.4046610817590037, 0.37535263038766403, 0.34566389939866043, 0.3156249672258603, 0.2852662671011519, 0.25461855622174695, 0.2237128845892493, 0.19258056355208322, 0.16125313408314515, 0.12976233482480778, 0.09814006993367762, 0.06641837675764817, 0.034629393378022226, 0.002805326049578726, 0.02902158342843789, 
0.06081909037732138, 0.09255497990684253, 0.12419709955299955, 0.15571339185252295, 0.1870719268211412, 0.21824093430267696, 0.24918883615623422, 0.27988427824884154, 0.31029616222114825, 0.3403936769939883, 0.3701463299839013, 0.3995239779959603, 0.4284968577626417, 0.4570356160977725, 0.4851113396350133, 0.512695584120756, 0.5397604032317357, 0.5662783768881914, 0.5922226390338686, 0.6175669048547257, 0.6422854974087748, 0.6663533736400601, 0.6897461497504397, 0.7124401259034531, 0.7344123102352458, 0.755640442148237, 0.7761030148639082, 0.7957792972118911, 0.8146493546332603, 0.8326940693767585, 0.8498951598674921, 0.8662351992284798, 0.8816976329362729, 0.8962667955927832, 0.9099279267963079, 0.9226671860956801, 0.934471667012397, 0.9453294101165076, 0.9552294151430288, 0.9641616521366028, 0.9721170716131069, 0.9790876137279269, 0.9850662164415938, 0.9900468226745243, 0.9940243864436064, 0.9969948779744174, 0.9989552877838952, 0.9999036297293236, 0.999838943020546, 0.9987612931933679, 0.99667177204316, 0.9935724965187321, 0.989466606577596, 0.9843582620047928, 0.9782526381985048, 0.9711559209267238, 0.9630753000602871, 0.9540189622886287, 0.9439960828256319, 0.9330168161139766, 0.9210922855374075, 0.9082345721513418, 0.894456702443232, 0.8797726351350927, 0.8641972470415502, 0.8477463179977504, 0.8304365148723946, 0.8122853746820926, 0.7933112868241559, 0.7735334744458118, 0.7529719749687289, 0.7316476197885782, 0.7095820131701996, 0.6867975103597489, 0.6633171949360165, 0.6391648554238433, 0.6143649611933408, 0.5889426376693314, 0.5629236408761136, 0.5363343313433654, 0.5092016473995928, 0.48155307788020324, 0.4534166342778441, 0.4248208223632178, 0.39579461330514504, 0.366367414319106, 0.33656903887401857, 0.3064296764874311, 0.27597986213972164, 0.2452504453383167, 0.21427255886323762, 0.1830775872256642, 0.15169713487146322, 0.12016299416188636, 0.0885071131639038, 0.05676156328277063, 0.024958506769644805, 0.006869835862835028, 0.03869121848399617, 
0.07047340201454169, 0.10218418708879855, 0.13379144667674858, 0.16526315863276547, 0.19656743813809044, 0.22767257000418756, 0.2585470408042244, 0.28915957080016025, 0.3194791456330693, 0.3494750477446014, 0.3791168874977547, 0.4083746339654055, 0.4372186453554372, 0.46561969904161693, 0.4935490211698053, 0.5209783158095106, 0.5478797936212304, 0.5742262000105683, 0.5999908427405786, 0.6251476189743727, 0.6496710417205888, 0.673536265654938, 0.696719112291647, 0.7191960944793238, 0.7409444401964062, 0.7619421156220948, 0.7821678474594006, 0.8016011444876746, 0.8202223183228065, 0.8380125033640432, 0.8549536759072242, 0.8710286724050736, 0.8862212068560356, 0.9005158873040542, 0.9138982314325681, 0.9263546812369255, 0.9378726167603577, 0.9484403688795848, 0.9580472311271144, 0.9666834705382441, 0.9743403375117846, 0.9810100746745096, 0.9866859247403567, 0.9913621373564085, 0.9950339749287288, 0.9976977174221424, 0.9993506661291007, 0.9999911464038138, 0.9996185093588774, 0.9982331325226782, 0.9958364194569096, 0.9924307983345872, 0.9880197194800016, 0.9826076518731053, 0.9762000786218712, 0.9688034914072119, 0.9604253839060872, 0.9510742441994616, 0.9407595461728087, 0.929491739917867, 0.9172822411453783, 0.9041434196195298, 0.8900885866258228, 0.8751319814850556, 0.8592887571270982, 0.8425749647390574, 0.8250075375033982, 0.8066042734424922, 0.7873838173869694, 0.767365642086156, 0.7465700284797164, 0.7250180451505006, 0.7027315269794109, 0.6797330530239043, 0.6560459236425639, 0.6316941368888878, 0.6067023641982314, 0.5810959253925294, 0.5549007630281124, 0.5281434161126299, 0.5008509932176782, 0.4730511450143952, 0.4447720362598403, 0.4160423172625321, 0.3868910948560771, 0.35734790291026525, 0.3274426724095314, 0.29720570112908823, 0.26666762293945534, 0.23585937677047286, 0.2048121752662684, 0.17355747316290007, 0.1421269354207381, 0.11055240514386265, 0.0788658713189711, 0.0470994364065051, 0.015285283816796887, 0.016544354695787987, 0.04835723168768662, 
0.08012111669687871, 0.11180382889640425, 0.1433732696976125, 0.17479745527008414, 0.20604454894528915, 0.23708289347115938, 0.2678810430848726, 0.29840779537138673, 0.32863222287543353, 0.35852370443491804, 0.38805195620404803, 0.4171870623346678, 0.4458995052848136, 0.47416019572370105, 0.5019405020028819, 0.5292122791637417, 0.5559478974518723, 0.5821202703095243, 0.6077028818177006, 0.6326698135601236, 0.6569957708818835, 0.6806561085160959, 0.7036268555526913, 0.7258847397239533, 0.7474072109822828, 0.7681724643462361, 0.7881594619917183, 0.8073479545659717, 0.8257185017027101, 0.8432524917176795, 0.8599321604646386, 0.8757406093326764, 0.8906618223666529, 0.9046806824933692, 0.9177829868370845, 0.9299554611088169, 0.9411857730548713, 0.9514625449509786, 0.960775365129358, 0.969114798527059, 0.9764723962448676, 0.9828407041071073, 0.9882132702136649, 0.9925846514765746, 0.9959504191345585, 0.9983071632399182, 0.9996524961132426, 0.9999850547624298, 0.9993045022635696, 0.99761152810229, 0.9949078474752208, 0.9911961995522842, 0.9864803447015673, 0.9807650606795977, 0.9740561377908693, 0.9663603730215338, 0.9576855631531981, 0.9480404968637941, 0.9374349458235453, 0.9258796547950235, 0.9133863307473511, 0.899967630995566, 0.8856371503771543, 0.870409407478774, 0.8542998299270816, 0.8373247387586102, 0.8195013318844868, 0.8008476666667813, 0.7813826416241246, 0.7611259772851082, 0.7400981962089151, 0.7183206021933607, 0.6958152586914604, 0.6726049664583746, 0.6487132404513455, 0.6241642860060966, 0.5989829743137527, 0.5731948172231893, 0.5468259413943175, 0.5199030618284571, 0.49245345480269037, 0.46450493023552736, 0.4360858035119556, 0.4072248667963885, 0.37795135986254463, 0.34829494046988996, 0.31828565431655914, 0.2879539045992787, 0.2573304212110994, 0.22644622960810967, 0.19533261937675822, 0.164021112533527, 0.132543431589158, 0.10093146740975421, 0.06921724690728025, 0.03743290059228397, 0.005610630021606897, 0.026217324825849056, 0.058018718212281065, 
0.08976133131001496, 0.12141300484321815, 0.15294167166933229, 0.1843153892671277, 0.21550237209850093, 0.2464710238112635, 0.2771899692502092, 0.3076280862441328, 0.3377545371365094, 0.36753880002792505, 0.39695069969863894, 0.4259604381798651, 0.45453862494290437, 0.4826563066754355, 0.5102849966148999, 0.5373967034091794, 0.5639639594753628, 0.5899598488278962, 0.6153580343478532, 0.6401327844657815, 0.6642589992310247, 0.6877122357411336, 0.7104687329056321, 0.7325054355189815, 0.753800017618434, 0.7743309051030438, 0.7940772975909496, 0.8130191894928044, 0.8311373902799462, 0.8484135439268417, 0.8648301475080523, 0.8803705689309027, 0.8950190637859037, 0.9087607912978131, 0.9215818293612278, 0.9334691886454288, 0.9444108257542105, 0.9543956554273707, 0.9634135617714694, 0.9714554085085146, 0.9785130482321621, 0.9845793306620646, 0.9896481098880145, 0.9937142505965215, 0.9967736332735376, 0.9988231583780435, 0.9998607494822743, 0.999885355375405, 0.9988969511285583, 0.9968965381200613, 0.9938861430209224, 0.9898688157415588, 0.9848486263418499, 0.9788306609076554, 0.9718210163979646, 0.9638267944679116, 0.9548560942738967, 0.9449180042681203, 0.9340225929908365, 0.9221808988696422, 0.909404919036166, 0.8957075971714498, 0.8811028103923679, 0.8656053551923616, 0.8492309324507104, 0.8319961315255721, 0.8139184134468558, 0.7950160932259984, 0.7753083213005517, 0.7548150641323557, 0.7335570839790085, 0.7115559178590624, 0.6888338557323094, 0.6654139179172461, 0.6413198317685654, 0.6165760076383702, 0.5912075141453859, 0.5652400527772915, 0.5386999318518753, 0.5116140398633663, 0.48400981824101763, 0.45591523354745206, 0.4273587491450109, 0.3983692963587831, 0.36897624516549704, 0.33920937443805066, 0.3090988417757297, 0.27867515295076056, 0.24796913100212095, 0.21701188500788499, 0.1858347785678256, 0.15446939802810308, 0.12294752048031762, 0.0913010815673131, 0.05956214312831302, 0.027762860716256565, 0.004064548979862151, 0.03588784077455812, 0.06767477365428987, 
0.09939314344170269, 0.13101081542249907, 0.16249575690199078, 0.19381606965823578, 0.22494002225899012, 0.2558360822096455, 0.28647294789961825, 0.3168195803148581, 0.34684523448426163, 0.37651949062823514, 0.40581228497776217, 0.4346939402327889, 0.4631351956290998, 0.49110723658314126, 0.5185817238848561, 0.5455308224088724, 0.5719272293149923, 0.5977442017094377, 0.6229555837387558, 0.6475358330890221, 0.6714600468634208, 0.6947039868120171, 0.7172441038881836, 0.7390575621067379, 0.7601222616796974, 0.7804168614061473, 0.799920800293566, 0.8186143183887232, 0.83647847679699, 0.8534951768698457, 0.8696471785410888, 0.8849181177931948, 0.8992925232361463, 0.9127558317818903, 0.9252944033985979, 0.9368955349297329, 0.9475474729639511, 0.9572394257427989, 0.9659615740941173, 0.9737050813801125, 0.9804621024499809, 0.9862257915880368, 0.9909903094492913, 0.9947508289754416, 0.9975035402852946, 0.9992456545346544, 0.999975406741776, 0.9996920575755135, 0.9983958941043564, 0.9960882295055925, 0.9927714017348945, 0.9884487711576745, 0.983124717144609, 0.9768046336347866, 0.9694949236709611, 0.9612029929124668, 0.9519372421323449, 0.941707058706301, 0.9305228071021124, 0.9183958183791047, 0.9053383787083679, 0.891363716925304, 0.8764859911271503, 0.8607202743290435, 0.8440825391931405, 0.8265896418463081, 0.8082593048027256, 0.7891100990087442, 0.7691614250281803, 0.7484334933870793, 0.7269473040979173, 0.7047246253839213, 0.6817879716251161, 0.6581605805484221, 0.6338663896848852, 0.6089300121179558, 0.5833767115473083, 0.5572323766935303, 0.5305234950695884, 0.5032771261456119, 0.4755208739342563, 0.4472828590243307, 0.41859169009109926, 0.38947643491209, 0.3599665909177427, 0.3300920553068124, 0.29988309475670877, 0.26937031475953893, 0.23858462861488938, 0.2075572261107254, 0.17631954192422467, 0.14490322377445133, 0.11334010035924803, 0.08166214910872069, 0.04990146378807391, 0.018090221982584424, 0.01373934750238135, 0.045554997293188876, 0.07732449411862237, 
0.1090156514662486, 0.14059636219140817, 0.1720346310458326, 0.20329860709284336, 0.2343566159763977, 0.26517719201120077, 0.2957291100614085, 0.3259814171756577, 0.35590346394628725, 0.38546493556108274, 0.4146358825159994, 0.4433867509577822, 0.47168841262577427, 0.49951219436249816, 0.5268299071632073, 0.5536138747348984, 0.5798369615358825, 0.6054726002675366, 0.6304948187903087, 0.6548782664367957, 0.678598239695162, 0.7016307072369088, 0.7239523342636636, 0.7455405061482571, 0.7663733513462142, 0.7864297635543827, 0.8056894230942778, 0.8241328174984979, 0.841741261279301, 0.8584969148593785, 0.874382802645593, 0.8893828302273934, 0.9034818006824971, 0.9166654299732787, 0.9289203614183161, 0.9402341792243841, 0.9505954210652305, 0.9599935896943562, 0.9684191635800478, 0.9758636065519, 0.9823193764490258, 0.9877799327612248, 0.9922397432553423, 0.9956942895801197, 0.9981400718438582, 0.9995746121602475, 0.9999964571587798, 0.9994051794571972, 0.9978013780944835, 0.9951866779239622, 0.9915637279671154, 0.9869361987297899, 0.9813087784835103, 0.9746871685156714, 0.9670780773534084, 0.9584892139670188, 0.9489292799597948, 0.9384079607522036, 0.9269359157693364, 0.9145247676415569, 0.9011870904293183, 0.8869363968840434, 0.8717871247580041, 0.8557546221770611, 0.8388551320910612, 0.821105775817693, 0.8025245356964159, 0.7831302368700856, 0.7629425282127129, 0.7419818624226582, 0.7202694753014826, 0.697827364239383, 0.6746782659290651, 0.6508456333306094, 0.6263536119106431, 0.6012270151799544, 0.5754912995542529, 0.5491725385636145, 0.5222973964367102, 0.49489310108655626, 0.4669874165252219, 0.4386086147353554, 0.40978544702711983, 0.38054711490946275, 0.35092324050530926, 0.3209438365406205, 0.29063927593768873, 0.2600402610435581, 0.22917779252464607, 0.19808313795916332, 0.16678780015911882, 0.13532348525396828, 0.10372207056832905, 0.07201557232619978, 0.04023611321449273, 0.008415889838705929, 0.023412859896329224, 0.0552178894474884, 0.0869669763031998, 
0.11862795462890709, 0.15016874785511136, 0.18155740117501173, 0.2127621139187321, 0.2437512717714402, 0.2744934788026307, 0.3049575892741573, 0.33511273919482315, 0.3649283775894742, 0.39437429745101926, 0.4234206663439324, 0.45203805662826835, 0.4801974752736007, 0.5078703932325976, 0.5350287743445721, 0.5616451037396432, 0.5876924157147658, 0.6131443210534142, 0.637975033761167, 0.6621593971901971, 0.685672909526123, 0.7084917486114346, 0.7305927960803645, 0.7519536607806925, 0.7725527014588297, 0.792369048685123, 0.8113826259972386, 0.829574170240147, 0.8469252510821257, 0.8634182896870269, 0.8790365765238435, 0.8937642882955863, 0.9075865039702774, 0.9204892198978353, 0.9324593639975509, 0.9434848090017457, 0.9535543847422342, 0.962657889467111, 0.9707861001764111, 0.9779307819661806, 0.9840846963714664, 0.9892416086998028, 0.9933962943477376, 0.9965445440940156, 0.9986831683640538, 0.9998100004613804, 0.9999238987627741, 0.9990247478748713, 0.9971134587510749, 0.9941919677686426, 0.9902632347668937, 0.9853312400485164, 0.9794009803470184, 0.9724784637644082, 0.9645707036842233, 0.9556857116660933, 0.9458324893290126, 0.9350210192315657, 0.9232622547583383, 0.9105681090227473, 0.8969514427975623, 0.8824260514853097, 0.8670066511417909, 0.8507088635668647, 0.8335492004775801, 0.8155450467797347, 0.7967146429547576, 0.7770770665798027, 0.756652212999761, 0.7354607751707486, 0.7135242226955459, 0.690864780072161, 0.6675054041776253, 0.6434697610097602, 0.6187822017105404, 0.5934677378953204, 0.5675520163128889, 0.5410612928620975, 0.5140224059912979, 0.4864627495076138, 0.4584102448235634, 0.429893312669121, 0.4009408442979534, 0.37158217221690915, 0.34184704046849373, 0.31176557449640874, 0.2813682506246493, 0.2506858651811659, 0.21974950329727133, 0.18859050741448694, 0.1572404455307017, 0.12573107921777932, 0.09409433144310266, 0.062362254227551796, 0.030566996172769433, 0.0012592301094218514, 0.03308418063248835, 0.06487561270238727, 0.09660131758351254, 
0.12822915313020097, 0.1597270763507744, 0.19106317587116167, 0.2222057042651226, 0.25312311021842443, 0.2837840704942974, 0.314157521667819, 0.3442126915971106, 0.37391913059937587, 0.40324674230029867, 0.43216581412546096, 0.46064704740292345, 0.48866158704650275, 0.5161810507895926, 0.5431775579400093, 0.569623757626626, 0.5954928565092957, 0.62075864592385, 0.6453955284348086, 0.6693785437688045, 0.6926833941025126, 0.7152864686793825, 0.7371648677303332, 0.7582964256741244, 0.778659733573861, 0.7982341608269653, 0.8169998760665946, 0.8349378672532948, 0.8520299609366144, 0.8682588406670975, 0.8836080645400478, 0.8980620818532331, 0.9116062488617221],
[0.0, 0.03182561363494727, 0.06361898390404276, 0.0953479001080363, 0.12698021684778552, 0.15848388659160487, 0.18982699214346238, 0.2209777789791301, 0.2519046874175275, 0.28257638459466383, 0.31296179620778664, 0.3430301379975745, 0.37275094693647975, 0.4020941120916224, 0.4310299051309676, 0.459529010441881, 0.487562554831546, 0.5151021367791545, 0.5421198552102349, 0.5685883377639631, 0.5944807685248221, 0.6197709151905099, 0.6444331556485751, 0.6684425039348527, 0.6917746355474012, 0.7144059120902945, 0.736313405222303, 0.7574749198861987, 0.7778690167951506, 0.7974750341534307, 0.8162731085894214, 0.8342441952797205, 0.8513700872439518, 0.8676334337907361, 0.8830177580961318, 0.8975074738967401, 0.9110879012805561, 0.9237452815595754, 0.9354667912090815, 0.9462405548594964, 0.9560556573276295, 0.9649021546751365, 0.972771084282985, 0.9796544739317193, 0.9855453498783258, 0.990437743921515, 0.9943266994482632, 0.9972082764554879, 0.999079555541768, 0.9999386408650661, 0.9997846620634563, 0.9986177751369107, 0.9964391622892512, 0.9932510307304285, 0.9890566104403391, 0.9838601508964477, 0.9776669167685306, 0.9704831825848994, 0.9623162263755122, 0.9531743222984094, 0.9430667322569474, 0.9320036965163202, 0.919996423328878, 0.9070570775787515, 0.8931987684572888, 0.8784355361817884, 0.8627823377709855, 0.8462550318917048, 0.8288703627920264, 0.8106459433372488, 0.7916002371658313, 0.7717525399833982, 0.75112296001375, 0.7297323976266944, 0.7076025241633322, 0.6847557599802512, 0.6612152517348749, 0.6370048489349738, 0.6121490797761028, 0.5866731262914412, 0.560602798839214, 0.5339645099535378, 0.5067852475851887, 0.47909254775939986, 0.45091446667839175, 0.42227955229689373, 0.393216815399462, 0.3637557002088908, 0.33392605455549723, 0.30375809963749767, 0.27328239940311877, 0.2425298295854577, 0.21153154642146482, 0.1803189550867425, 0.1489236778781338, 0.11737752217634358, 0.08571244822104433, 0.05396053673111876, 0.022153956402837488, 0.009675068681091057, 
0.0414942916985869, 0.07327147575829991, 0.10497442655965462, 0.1365710250097407, 0.16802925976401129, 0.1993172596578159, 0.23040332599591795, 0.2612559646672744, 0.29184391805254806, 0.322136196692024, 0.3521021106818439, 0.3817113007667575, 0.41093376909788015, 0.43973990962430304, 0.4681005380877601, 0.4959869215899699, 0.5233708077026882, 0.5502244530909871, 0.5765206516207552, 0.6022327619219493, 0.6273347343796659, 0.651801137525691, 0.6756071838037883, 0.6987287546826255, 0.7211424250908917, 0.742825487149853, 0.7637559731793018, 0.7839126779535895, 0.8032751801852016, 0.8218238632140968, 0.8395399348818634, 0.8564054465705497, 0.8724033113868781, 0.8875173214734277, 0.9017321644292412, 0.915033438823219, 0.9274076687845899, 0.9388423176556677, 0.9493258006930703, 0.9588474968045277, 0.9673977593093892, 0.9749679257119288, 0.981550326477549, 0.9871382928029862, 0.9917261633726514, 0.9953092900942581, 0.9978840428079259, 0.9994478129639919, 0.9999990162658001, 0.9995370942747949, 0.9980625149762887, 0.9955767723053351, 0.9920823846331827, 0.9875828922158475, 0.9820828536073872, 0.9755878410415086, 0.9681044347861931, 0.9596402164770522, 0.9502037614361747, 0.9398046299842422, 0.9284533577547156, 0.9161614450199101, 0.9029413450397645, 0.8888064514451182, 0.8737710846682711, 0.8578504774345758, 0.8410607593297651, 0.8234189404586443, 0.8049428942117046, 0.7856513391571252, 0.7655638200764951, 0.7447006881634859, 0.7230830804055194, 0.7007328981693293, 0.6776727850121139, 0.6539261037407516, 0.6295169127423244, 0.6044699416099384, 0.5788105660885181, 0.5525647823659761, 0.5257591807357896, 0.49842091865767385, 0.4705776932436496, 0.44225771319736584, 0.41348967023512506, 0.3843027100175495, 0.3547264026213435, 0.32479071258107595, 0.29452596853132074, 0.2639628324799168, 0.23313226874348783, 0.2020655125766724, 0.17079403852687225, 0.13934952854655822, 0.10776383989544888, 0.07606897286508914, 0.04429703835850661, 0.012480225357817844, 0.01934923168727843, 
0.05115908551707235, 0.08291710873238416, 0.11459112644499225, 0.1461490488748609, 0.1775589038611339, 0.20878886925397827, 0.23980730515443469, 0.2705827859696336, 0.3010841322508921, 0.3312804422824277, 0.3611411233887021, 0.3906359229286669, 0.41973495894550417, 0.44840875044082895, 0.4766282472426579, 0.5043648594369051, 0.5315904863325774, 0.5582775449313184, 0.5843989978724747, 0.609928380825352, 0.6348398293009263, 0.6591081048558398, 0.6827086206621275, 0.7056174664167802, 0.7278114325659043, 0.7492680338189254, 0.7699655319290326, 0.7898829577167641, 0.8090001323144352, 0.8272976876098822, 0.8447570858688016, 0.8613606385158188, 0.8770915240552496, 0.8919338051133966, 0.905872444585125, 0.9188933208683463, 0.9309832421709858, 0.9421299598759351, 0.9523221809504453, 0.9615495793873985, 0.9698028066668546, 0.9770735012272838, 0.9833542969368844, 0.9886388305564042, 0.9929217481859058, 0.9961987106889432, 0.9984663980886549, 0.99972251293132, 0.9999657826139688, 0.9991959606736905, 0.9974138270373308, 0.9946211872313283, 0.9908208705524871, 0.986016727201543, 0.9802136243824232, 0.9734174413711545, 0.9656350635594129, 0.9568743754787543, 0.9471442528125871, 0.936454553403984, 0.9248161072684422, 0.9122407056217109, 0.8987410889337976, 0.884330934021267, 0.8690248401908961, 0.8528383144487337, 0.835787755789545, 0.8178904385825557, 0.79916449507034, 0.7796288969985652, 0.7593034363952178, 0.738208705518782, 0.7163660759956753, 0.6937976771680996, 0.6705263736742183, 0.646575742283391, 0.6219700480099303, 0.5967342195295723, 0.5708938239235881, 0.5444750407760963, 0.5175046356508392, 0.49000993297428647, 0.46201878835254223, 0.4335595603500918, 0.4046610817590037, 0.37535263038766403, 0.34566389939866043, 0.3156249672258603, 0.2852662671011519, 0.25461855622174695, 0.2237128845892493, 0.19258056355208322, 0.16125313408314515, 0.12976233482480778, 0.09814006993367762, 0.06641837675764817, 0.034629393378022226, 0.002805326049578726, 0.02902158342843789, 
0.06081909037732138, 0.09255497990684253, 0.12419709955299955, 0.15571339185252295, 0.1870719268211412, 0.21824093430267696, 0.24918883615623422, 0.27988427824884154, 0.31029616222114825, 0.3403936769939883, 0.3701463299839013, 0.3995239779959603, 0.4284968577626417, 0.4570356160977725, 0.4851113396350133, 0.512695584120756, 0.5397604032317357, 0.5662783768881914, 0.5922226390338686, 0.6175669048547257, 0.6422854974087748, 0.6663533736400601, 0.6897461497504397, 0.7124401259034531, 0.7344123102352458, 0.755640442148237, 0.7761030148639082, 0.7957792972118911, 0.8146493546332603, 0.8326940693767585, 0.8498951598674921, 0.8662351992284798, 0.8816976329362729, 0.8962667955927832, 0.9099279267963079, 0.9226671860956801, 0.934471667012397, 0.9453294101165076, 0.9552294151430288, 0.9641616521366028, 0.9721170716131069, 0.9790876137279269, 0.9850662164415938, 0.9900468226745243, 0.9940243864436064, 0.9969948779744174, 0.9989552877838952, 0.9999036297293236, 0.999838943020546, 0.9987612931933679, 0.99667177204316, 0.9935724965187321, 0.989466606577596, 0.9843582620047928, 0.9782526381985048, 0.9711559209267238, 0.9630753000602871, 0.9540189622886287, 0.9439960828256319, 0.9330168161139766, 0.9210922855374075, 0.9082345721513418, 0.894456702443232, 0.8797726351350927, 0.8641972470415502, 0.8477463179977504, 0.8304365148723946, 0.8122853746820926, 0.7933112868241559, 0.7735334744458118, 0.7529719749687289, 0.7316476197885782, 0.7095820131701996, 0.6867975103597489, 0.6633171949360165, 0.6391648554238433, 0.6143649611933408, 0.5889426376693314, 0.5629236408761136, 0.5363343313433654, 0.5092016473995928, 0.48155307788020324, 0.4534166342778441, 0.4248208223632178, 0.39579461330514504, 0.366367414319106, 0.33656903887401857, 0.3064296764874311, 0.27597986213972164, 0.2452504453383167, 0.21427255886323762, 0.1830775872256642, 0.15169713487146322, 0.12016299416188636, 0.0885071131639038, 0.05676156328277063, 0.024958506769644805, 0.006869835862835028, 0.03869121848399617, 
0.07047340201454169, 0.10218418708879855, 0.13379144667674858, 0.16526315863276547, 0.19656743813809044, 0.22767257000418756, 0.2585470408042244, 0.28915957080016025, 0.3194791456330693, 0.3494750477446014, 0.3791168874977547, 0.4083746339654055, 0.4372186453554372, 0.46561969904161693, 0.4935490211698053, 0.5209783158095106, 0.5478797936212304, 0.5742262000105683, 0.5999908427405786, 0.6251476189743727, 0.6496710417205888, 0.673536265654938, 0.696719112291647, 0.7191960944793238, 0.7409444401964062, 0.7619421156220948, 0.7821678474594006, 0.8016011444876746, 0.8202223183228065, 0.8380125033640432, 0.8549536759072242, 0.8710286724050736, 0.8862212068560356, 0.9005158873040542, 0.9138982314325681, 0.9263546812369255, 0.9378726167603577, 0.9484403688795848, 0.9580472311271144, 0.9666834705382441, 0.9743403375117846, 0.9810100746745096, 0.9866859247403567, 0.9913621373564085, 0.9950339749287288, 0.9976977174221424, 0.9993506661291007, 0.9999911464038138, 0.9996185093588774, 0.9982331325226782, 0.9958364194569096, 0.9924307983345872, 0.9880197194800016, 0.9826076518731053, 0.9762000786218712, 0.9688034914072119, 0.9604253839060872, 0.9510742441994616, 0.9407595461728087, 0.929491739917867, 0.9172822411453783, 0.9041434196195298, 0.8900885866258228, 0.8751319814850556, 0.8592887571270982, 0.8425749647390574, 0.8250075375033982, 0.8066042734424922, 0.7873838173869694, 0.767365642086156, 0.7465700284797164, 0.7250180451505006, 0.7027315269794109, 0.6797330530239043, 0.6560459236425639, 0.6316941368888878, 0.6067023641982314, 0.5810959253925294, 0.5549007630281124, 0.5281434161126299, 0.5008509932176782, 0.4730511450143952, 0.4447720362598403, 0.4160423172625321, 0.3868910948560771, 0.35734790291026525, 0.3274426724095314, 0.29720570112908823, 0.26666762293945534, 0.23585937677047286, 0.2048121752662684, 0.17355747316290007, 0.1421269354207381, 0.11055240514386265, 0.0788658713189711, 0.0470994364065051, 0.015285283816796887, 0.016544354695787987, 0.04835723168768662, 
0.08012111669687871, 0.11180382889640425, 0.1433732696976125, 0.17479745527008414, 0.20604454894528915, 0.23708289347115938, 0.2678810430848726, 0.29840779537138673, 0.32863222287543353, 0.35852370443491804, 0.38805195620404803, 0.4171870623346678, 0.4458995052848136, 0.47416019572370105, 0.5019405020028819, 0.5292122791637417, 0.5559478974518723, 0.5821202703095243, 0.6077028818177006, 0.6326698135601236, 0.6569957708818835, 0.6806561085160959, 0.7036268555526913, 0.7258847397239533, 0.7474072109822828, 0.7681724643462361, 0.7881594619917183, 0.8073479545659717, 0.8257185017027101, 0.8432524917176795, 0.8599321604646386, 0.8757406093326764, 0.8906618223666529, 0.9046806824933692, 0.9177829868370845, 0.9299554611088169, 0.9411857730548713, 0.9514625449509786, 0.960775365129358, 0.969114798527059, 0.9764723962448676, 0.9828407041071073, 0.9882132702136649, 0.9925846514765746, 0.9959504191345585, 0.9983071632399182, 0.9996524961132426, 0.9999850547624298, 0.9993045022635696, 0.99761152810229, 0.9949078474752208, 0.9911961995522842, 0.9864803447015673, 0.9807650606795977, 0.9740561377908693, 0.9663603730215338, 0.9576855631531981, 0.9480404968637941, 0.9374349458235453, 0.9258796547950235, 0.9133863307473511, 0.899967630995566, 0.8856371503771543, 0.870409407478774, 0.8542998299270816, 0.8373247387586102, 0.8195013318844868, 0.8008476666667813, 0.7813826416241246, 0.7611259772851082, 0.7400981962089151, 0.7183206021933607, 0.6958152586914604, 0.6726049664583746, 0.6487132404513455, 0.6241642860060966, 0.5989829743137527, 0.5731948172231893, 0.5468259413943175, 0.5199030618284571, 0.49245345480269037, 0.46450493023552736, 0.4360858035119556, 0.4072248667963885, 0.37795135986254463, 0.34829494046988996, 0.31828565431655914, 0.2879539045992787, 0.2573304212110994, 0.22644622960810967, 0.19533261937675822, 0.164021112533527, 0.132543431589158, 0.10093146740975421, 0.06921724690728025, 0.03743290059228397, 0.005610630021606897, 0.026217324825849056, 0.058018718212281065, 
0.08976133131001496, 0.12141300484321815, 0.15294167166933229, 0.1843153892671277, 0.21550237209850093, 0.2464710238112635, 0.2771899692502092, 0.3076280862441328, 0.3377545371365094, 0.36753880002792505, 0.39695069969863894, 0.4259604381798651, 0.45453862494290437, 0.4826563066754355, 0.5102849966148999, 0.5373967034091794, 0.5639639594753628, 0.5899598488278962, 0.6153580343478532, 0.6401327844657815, 0.6642589992310247, 0.6877122357411336, 0.7104687329056321, 0.7325054355189815, 0.753800017618434, 0.7743309051030438, 0.7940772975909496, 0.8130191894928044, 0.8311373902799462, 0.8484135439268417, 0.8648301475080523, 0.8803705689309027, 0.8950190637859037, 0.9087607912978131, 0.9215818293612278, 0.9334691886454288, 0.9444108257542105, 0.9543956554273707, 0.9634135617714694, 0.9714554085085146, 0.9785130482321621, 0.9845793306620646, 0.9896481098880145, 0.9937142505965215, 0.9967736332735376, 0.9988231583780435, 0.9998607494822743, 0.999885355375405, 0.9988969511285583, 0.9968965381200613, 0.9938861430209224, 0.9898688157415588, 0.9848486263418499, 0.9788306609076554, 0.9718210163979646, 0.9638267944679116, 0.9548560942738967, 0.9449180042681203, 0.9340225929908365, 0.9221808988696422, 0.909404919036166, 0.8957075971714498, 0.8811028103923679, 0.8656053551923616, 0.8492309324507104, 0.8319961315255721, 0.8139184134468558, 0.7950160932259984, 0.7753083213005517, 0.7548150641323557, 0.7335570839790085, 0.7115559178590624, 0.6888338557323094, 0.6654139179172461, 0.6413198317685654, 0.6165760076383702, 0.5912075141453859, 0.5652400527772915, 0.5386999318518753, 0.5116140398633663, 0.48400981824101763, 0.45591523354745206, 0.4273587491450109, 0.3983692963587831, 0.36897624516549704, 0.33920937443805066, 0.3090988417757297, 0.27867515295076056, 0.24796913100212095, 0.21701188500788499, 0.1858347785678256, 0.15446939802810308, 0.12294752048031762, 0.0913010815673131, 0.05956214312831302, 0.027762860716256565, 0.004064548979862151, 0.03588784077455812, 0.06767477365428987, 
0.09939314344170269, 0.13101081542249907, 0.16249575690199078, 0.19381606965823578, 0.22494002225899012, 0.2558360822096455, 0.28647294789961825, 0.3168195803148581, 0.34684523448426163, 0.37651949062823514, 0.40581228497776217, 0.4346939402327889, 0.4631351956290998, 0.49110723658314126, 0.5185817238848561, 0.5455308224088724, 0.5719272293149923, 0.5977442017094377, 0.6229555837387558, 0.6475358330890221, 0.6714600468634208, 0.6947039868120171, 0.7172441038881836, 0.7390575621067379, 0.7601222616796974, 0.7804168614061473, 0.799920800293566, 0.8186143183887232, 0.83647847679699, 0.8534951768698457, 0.8696471785410888, 0.8849181177931948, 0.8992925232361463, 0.9127558317818903, 0.9252944033985979, 0.9368955349297329, 0.9475474729639511, 0.9572394257427989, 0.9659615740941173, 0.9737050813801125, 0.9804621024499809, 0.9862257915880368, 0.9909903094492913, 0.9947508289754416, 0.9975035402852946, 0.9992456545346544, 0.999975406741776, 0.9996920575755135, 0.9983958941043564, 0.9960882295055925, 0.9927714017348945, 0.9884487711576745, 0.983124717144609, 0.9768046336347866, 0.9694949236709611, 0.9612029929124668, 0.9519372421323449, 0.941707058706301, 0.9305228071021124, 0.9183958183791047, 0.9053383787083679, 0.891363716925304, 0.8764859911271503, 0.8607202743290435, 0.8440825391931405, 0.8265896418463081, 0.8082593048027256, 0.7891100990087442, 0.7691614250281803, 0.7484334933870793, 0.7269473040979173, 0.7047246253839213, 0.6817879716251161, 0.6581605805484221, 0.6338663896848852, 0.6089300121179558, 0.5833767115473083, 0.5572323766935303, 0.5305234950695884, 0.5032771261456119, 0.4755208739342563, 0.4472828590243307, 0.41859169009109926, 0.38947643491209, 0.3599665909177427, 0.3300920553068124, 0.29988309475670877, 0.26937031475953893, 0.23858462861488938, 0.2075572261107254, 0.17631954192422467, 0.14490322377445133, 0.11334010035924803, 0.08166214910872069, 0.04990146378807391, 0.018090221982584424, 0.01373934750238135, 0.045554997293188876, 0.07732449411862237, 
0.1090156514662486, 0.14059636219140817, 0.1720346310458326, 0.20329860709284336, 0.2343566159763977, 0.26517719201120077, 0.2957291100614085, 0.3259814171756577, 0.35590346394628725, 0.38546493556108274, 0.4146358825159994, 0.4433867509577822, 0.47168841262577427, 0.49951219436249816, 0.5268299071632073, 0.5536138747348984, 0.5798369615358825, 0.6054726002675366, 0.6304948187903087, 0.6548782664367957, 0.678598239695162, 0.7016307072369088, 0.7239523342636636, 0.7455405061482571, 0.7663733513462142, 0.7864297635543827, 0.8056894230942778, 0.8241328174984979, 0.841741261279301, 0.8584969148593785, 0.874382802645593, 0.8893828302273934, 0.9034818006824971, 0.9166654299732787, 0.9289203614183161, 0.9402341792243841, 0.9505954210652305, 0.9599935896943562, 0.9684191635800478, 0.9758636065519, 0.9823193764490258, 0.9877799327612248, 0.9922397432553423, 0.9956942895801197, 0.9981400718438582, 0.9995746121602475, 0.9999964571587798, 0.9994051794571972, 0.9978013780944835, 0.9951866779239622, 0.9915637279671154, 0.9869361987297899, 0.9813087784835103, 0.9746871685156714, 0.9670780773534084, 0.9584892139670188, 0.9489292799597948, 0.9384079607522036, 0.9269359157693364, 0.9145247676415569, 0.9011870904293183, 0.8869363968840434, 0.8717871247580041, 0.8557546221770611, 0.8388551320910612, 0.821105775817693, 0.8025245356964159, 0.7831302368700856, 0.7629425282127129, 0.7419818624226582, 0.7202694753014826, 0.697827364239383, 0.6746782659290651, 0.6508456333306094, 0.6263536119106431, 0.6012270151799544, 0.5754912995542529, 0.5491725385636145, 0.5222973964367102, 0.49489310108655626, 0.4669874165252219, 0.4386086147353554, 0.40978544702711983, 0.38054711490946275, 0.35092324050530926, 0.3209438365406205, 0.29063927593768873, 0.2600402610435581, 0.22917779252464607, 0.19808313795916332, 0.16678780015911882, 0.13532348525396828, 0.10372207056832905, 0.07201557232619978, 0.04023611321449273, 0.008415889838705929, 0.023412859896329224, 0.0552178894474884, 0.0869669763031998, 
0.11862795462890709, 0.15016874785511136, 0.18155740117501173, 0.2127621139187321, 0.2437512717714402, 0.2744934788026307, 0.3049575892741573, 0.33511273919482315, 0.3649283775894742, 0.39437429745101926, 0.4234206663439324, 0.45203805662826835, 0.4801974752736007, 0.5078703932325976, 0.5350287743445721, 0.5616451037396432, 0.5876924157147658, 0.6131443210534142, 0.637975033761167, 0.6621593971901971, 0.685672909526123, 0.7084917486114346, 0.7305927960803645, 0.7519536607806925, 0.7725527014588297, 0.792369048685123, 0.8113826259972386, 0.829574170240147, 0.8469252510821257, 0.8634182896870269, 0.8790365765238435, 0.8937642882955863, 0.9075865039702774, 0.9204892198978353, 0.9324593639975509, 0.9434848090017457, 0.9535543847422342, 0.962657889467111, 0.9707861001764111, 0.9779307819661806, 0.9840846963714664, 0.9892416086998028, 0.9933962943477376, 0.9965445440940156, 0.9986831683640538, 0.9998100004613804, 0.9999238987627741, 0.9990247478748713, 0.9971134587510749, 0.9941919677686426, 0.9902632347668937, 0.9853312400485164, 0.9794009803470184, 0.9724784637644082, 0.9645707036842233, 0.9556857116660933, 0.9458324893290126, 0.9350210192315657, 0.9232622547583383, 0.9105681090227473, 0.8969514427975623, 0.8824260514853097, 0.8670066511417909, 0.8507088635668647, 0.8335492004775801, 0.8155450467797347, 0.7967146429547576, 0.7770770665798027, 0.756652212999761, 0.7354607751707486, 0.7135242226955459, 0.690864780072161, 0.6675054041776253, 0.6434697610097602, 0.6187822017105404, 0.5934677378953204, 0.5675520163128889, 0.5410612928620975, 0.5140224059912979, 0.4864627495076138, 0.4584102448235634, 0.429893312669121, 0.4009408442979534, 0.37158217221690915, 0.34184704046849373, 0.31176557449640874, 0.2813682506246493, 0.2506858651811659, 0.21974950329727133, 0.18859050741448694, 0.1572404455307017, 0.12573107921777932, 0.09409433144310266, 0.062362254227551796, 0.030566996172769433, 0.0012592301094218514, 0.03308418063248835, 0.06487561270238727, 0.09660131758351254, 
0.12822915313020097, 0.1597270763507744, 0.19106317587116167, 0.2222057042651226, 0.25312311021842443, 0.2837840704942974, 0.314157521667819, 0.3442126915971106, 0.37391913059937587, 0.40324674230029867, 0.43216581412546096, 0.46064704740292345, 0.48866158704650275, 0.5161810507895926, 0.5431775579400093, 0.569623757626626, 0.5954928565092957, 0.62075864592385, 0.6453955284348086, 0.6693785437688045, 0.6926833941025126, 0.7152864686793825, 0.7371648677303332, 0.7582964256741244, 0.778659733573861, 0.7982341608269653, 0.8169998760665946, 0.8349378672532948, 0.8520299609366144, 0.8682588406670975, 0.8836080645400478, 0.8980620818532331, 0.9116062488617221, 0.924226843613856, 0.9359110798532992, 0.9466471199731434, 0.956424087008903, 0.9652320756582404, 0.9730621623162979, 0.9799064141164318, 0.9857578969672148, 0.9906106825775369, 0.9944598544627183, 0.9973015129255314, 0.9991327790070805, 0.9999517974035559, 0.9997577383458883, 0.9985507984404108, 0.9963322004696714, 0.9931041921536008, 0.9888700438722852, 0.9836340453526624, 0.9774015013224858, 0.9701787261359598, 0.96197303737651, 0.9527927484431417, 0.9426471601279205, 0.931546551193086, 0.9195021679573816, 0.9065262129021063, 0.892631832308452, 0.8778331029386764, 0.8621450177745585, 0.8455834708276022, 0.8281652410364171, 0.8099079752675205, 0.7908301704368408, 0.7709511547699887, 0.750291068220349, 0.7288708420647526, 0.7067121776974344, 0.683837524643809, 0.6602700578162526, 0.6360336540349735, 0.61115286783781, 0.5856529066033606, 0.5595596050127382, 0.5328993988757513, 0.5056992983481258, 0.4779868605667881, 0.4497901617309873, 0.4211377686575961, 0.39205870983929303, 0.3625824460350464, 0.3327388404226182, 0.30255812834343315, 0.2720708866703402, 0.24130800282935783, 0.21030064350685182, 0.1790802230737255, 0.1476783717586738, 0.11612690360280861, 0.08445778422798975, 0.05270309845162702, 0.020895017780679825, 0.010934232182099721, 0.04275240438679342, 0.0745272630066723, 0.10622661609711757, 
0.13781834821014738, 0.1692704529314425, 0.20055106530684386, 0.23162849412560352, 0.2624712540275695, 0.29304809740185933, 0.32332804604459486, 0.3532804225437561, 0.38287488135929315, 0.412081439566951, 0.44087050723477833, 0.4692129174014848, 0.4970799556262172, 0.5244433890799408, 0.5512754951488477, 0.5775490895208899, 0.6032375537268845, 0.6283148621084028, 0.6527556081850658, 0.6765350303944838, 0.6996290371788629, 0.7220142313928114, 0.7436679340075709, 0.7645682070877586, 0.7846938760172554, 0.8040245509517849, 0.8225406474763733, 0.8402234064468496, 0.8570549129952384, 0.8730181146797603, 0.8880968387611247, 0.9022758085875503, 0.9155406590719546, 0.927877951245583, 0.9392751858733912, 0.9497208161173557, 0.9592042592348636, 0.9677159073003762, 0.9752471369394755, 0.9817903180654202, 0.9873388216093949, 0.991887026236591, 0.995430324041333, 0.9979651252154623, 0.9994888616852708, 0.9999999897132833, 0.9994979914622567, 0.9979833755198172, 0.9954576763831935, 0.9919234529045806, 0.9873842856986974, 0.981844773515174, 0.9753105285794315, 0.9677881709067975, 0.9592853215955908, 0.94981059510598, 0.9393735905324591, 0.9279848818787473, 0.9156560073449795, 0.9023994576380682, 0.8882286633170285, 0.8731579811861334, 0.8572026797496429, 0.8403789237429019, 0.8227037577554123, 0.8041950889624988, 0.7848716689831022, 0.7647530748820106, 0.7438596893358056, 0.7222126799826646, 0.6998339779768512, 0.6767462557696968, 0.6529729041395231, 0.6285380084938601, 0.6034663244678724, 0.5777832528437565, 0.5515148138165746, 0.5246876206324881, 0.49732885262619037, 0.4694662276847837, 0.4411279741660956, 0.41234280229976983, 0.38313987510015785, 0.35354877882054125, 0.3235994929785016, 0.2933223599828626, 0.2627480543930383, 0.23190755184180378, 0.20083209765308085, 0.1695531751864511, 0.13810247394057845, 0.10651185744772411, 0.07481333099194315, 0.04303900918373213, 0.011221083423851835, 0.020608210710648493, 0.05241662612509912, 0.08417193687761758, 0.11584197082804344, 
0.14739464223240756, 0.17879798424980037, 0.21002018132884018, 0.24102960144086544, 0.27179482812713274, 0.3022846923276782, 0.3324683039595318, 0.3623150832122316, 0.391794791529061, 0.42087756224251205, 0.4495339308330176, 0.47773486478019167, 0.5054517929764588, 0.5326566346732128, 0.5593218279301236, 0.5854203575388836, 0.6109257823930412, 0.6358122622761472, 0.6600545840411797, 0.6836281871546358, 0.7065091885794746, 0.728674406971618, 0.7501013861655966, 0.7707684179254948, 0.7906545639381067, 0.8097396770261108, 0.8280044215596963, 0.8454302930460144, 0.8619996368765409, 0.8776956662134389, 0.892502478996754, 0.9064050740551886, 0.9193893663041945, 0.9314422010159501, 0.9425513671467434, 0.9527056097083131, 0.9618946411705688, 0.9701091518841654, 0.9773408195123416, 0.9835823174625057, 0.9888273223090014, 0.9930705201995288, 0.996307612238753, 0.9985353188436292, 0.99975138306603, 0.9999545728793218, 0.9991446824265635, 0.9973225322290639, 0.9944899683550916, 0.9906498605495748, 0.9858060993266838, 0.9799635920282544, 0.9731282578520285, 0.9653070218547543, 0.9565078079362379, 0.9467395308114269, 0.9360120869786818, 0.9243363446933632, 0.9117241329569319, 0.8981882295326693, 0.88374234800018, 0.8684011238618221, 0.8521800997150826, 0.8350957095059703, 0.8171652618793381, 0.798406922643062, 0.7788396963637731, 0.7584834071128184, 0.7373586783819979, 0.7154869121893515, 0.6928902673961963, 0.6695916372574305, 0.6456146262277563, 0.620983526047396, 0.5957232911314696, 0.569859513288054, 0.5434183957904328, 0.516426726829854, 0.48891185237574375, 0.46090164847076903, 0.4324244929888666, 0.40350923688491047, 0.37418517496502596, 0.34448201620726465, 0.31442985366263076, 0.28405913396706, 0.25340062649511264, 0.22248539218669044, 0.19134475207842297, 0.16001025557148152, 0.1285136484680303, 0.0968868408087616, 0.06516187454396676, 0.03337089107100799, 0.001546098670995367, 0.030280260122226944, 0.062075941187860625, 0.09380873148554097, 0.12544648169117237, 
0.15695713876827508, 0.18830877844178015, 0.21946963754130963, 0.25040814618130763, 0.2810929597453074, 0.31149299064201375, 0.341577439800918, 0.3713158278756666, 0.4006780261235082, 0.4296342869294749, 0.4581552739444996, 0.48621209180682995, 0.5137763154167034, 0.5408200187345259, 0.567315803073496, 0.5932368248579519, 0.618556822819268, 0.6432501446018551, 0.6672917727522525, 0.6906573500649336, 0.7133232042592547, 0.7352663719624535, 0.7564646219744658, 0.776896477790909, 0.7965412393615083, 0.8153790040618755, 0.8333906868573533, 0.8505580396385805, 0.8668636697091416, 0.8822910574065421, 0.8968245728387279, 0.9104494917191335, 0.9231520102842555, 0.9349192592785901, 0.9457393169928241, 0.9556012213420386, 0.9644949809716707, 0.972411585380026, 0.9793430140470585, 0.9852822445601602, 0.9902232597287592, 0.9941610536804909, 0.9970916369327818, 0.9990120404346948, 0.9999203185749556, 0.9998155511531025, 0.9986978443117646, 0.9965683304291272, 0.9934291669716879, 0.9892835343084725, 0.9841356324889187, 0.9779906769876967, 0.9708548934207699, 0.9627355112380669, 0.9536407563991334, 0.9435798430391888, 0.9325629641340561, 0.9206012811733818, 0.9077069128526426, 0.8938929227953644, 0.8791733063180394, 0.863562976251095, 0.8470777478303023, 0.8297343226739657, 0.8115502718620644, 0.7925440181345167, 0.7727348172266422, 0.7521427383606536, 0.7307886439130099, 0.7086941682781739, 0.6858816959502677, 0.6623743388447398, 0.6381959128830591, 0.6133709138642102, 0.5879244926473406, 0.5618824296707465, 0.535271108833067, 0.50811749076304, 0.4804490855049923, 0.4522939246476661, 0.423680532924716, 0.39463789931553367, 0.3651954476757308, 0.3353830069270963, 0.30523078083711236, 0.27476931741870303, 0.24402947798127966, 0.21304240586431009, 0.1818394948851964, 0.15045235753334532, 0.1189127929427659, 0.08725275467551004, 0.05550431834865749, 0.023699649137707328, 0.008129030810828136, 0.03994947502454842, 0.07172944537487118, 0.10343674473835622, 0.13503924961644356, 
0.1665049426806425, 0.19780194521008654, 0.22889854938872486, 0.25976325042936504, 0.29036477849195946, 0.3206721303639302, 0.3506546008703253, 0.38028181398206506, 0.40952375359065574, 0.43835079391831877, 0.4667337295326637, 0.494643804935441, 0.5220527436955134, 0.5489327770964688, 0.5752566722698012, 0.6009977597852694, 0.6261299606703866, 0.6506278128317353, 0.67446649685125, 0.6976218611314391, 0.7200704463640186, 0.7417895092971232, 0.7627570457771081, 0.7829518130415496, 0.8023533512408177, 0.820942004166507, 0.8386989391656487, 0.855606166220583, 0.8716465561750947, 0.8868038580884241, 0.9010627156995326, 0.9144086829849128, 0.9268282387942475, 0.938308800549048, 0.9488387369903798, 0.9584073799628081, 0.9670050352225812, 0.9746229922591314, 0.981253533119911, 0.9868899402296596, 0.9915265031961586, 0.9951585245955725, 0.9977823247315378, 0.9993952453631582, 0.9999956523981395, 0.9995829375483299, 0.9981575189459942, 0.9957208407201914, 0.9922753715336909, 0.9878246020819066, 0.982373041556376, 0.9759262130763873, 0.968490648093357, 0.9600738797736486, 0.9506844353665151, 0.9403318275649308, 0.9290265448680266, 0.9167800409549073, 0.9036047230806437, 0.8895139395061494, 0.8745219659746969, 0.8586439912488044, 0.8418961017220904, 0.8242952651217312, 0.8058593133179955, 0.7866069242583307, 0.7665576030442324, 0.7457316621700996, 0.7241502009441392, 0.701835084112089, 0.6788089197054541, 0.6550950361367452, 0.6307174585648311, 0.6057008845544287, 0.5800706590543292, 0.5538527487197968, 0.5270737156050526, 0.4997606902525391, 0.4719413442062918, 0.44364386197715205, 0.41489691248827915, 0.38572962002994826, 0.3561715347529414, 0.3262526027305255, 0.2960031356192716, 0.2654537799495591, 0.23463548607674992, 0.2035794768245491, 0.17231721585238272, 0.1408803757787096, 0.10930080609267302, 0.07761050088651769, 0.045841566441576026, 0.014026188700529435, 0.017803399341037692, 0.04961495029069065, 0.08137623502984674, 0.11305507536596386, 0.14461937663307076, 
0.1760371602077453, 0.2072765959074844, 0.2383060342387267, 0.2690940384617446, 0.29960941644005307, 0.3298212522420029, 0.3596989374624836, 0.3892122022331234, 0.4183311458895086, 0.4470262672642929, 0.4752684945756323, 0.5030292148805585, 0.5302803030635288, 0.5569941503306822, 0.583143692181054, 0.60870243582635, 0.6336444870314515, 0.6579445763485643, 0.6815780847183758, 0.7045210684122388, 0.7267502832902144, 0.7482432083503111, 0.7689780685451225, 0.7889338568426721, 0.8080903555092047, 0.8264281565923157, 0.8439286815836324, 0.8605742002412032, 0.8763478485524819, 0.8912336458196799, 0.9052165108502459, 0.918282277236013, 0.9304177077055694, 0.9416105075352695, 0.9518493370053511, 0.9611238228885093, 0.9694245689592722, 0.9767431655135733, 0.9830721978888403, 0.9884052539759898, 0.9927369307156951, 0.9960628395723732, 0.9983796109803266, 0.9996848977575342, 0.9999773774836469, 0.9992567538397652, 0.9975237569086474, 0.9947801424350436, 0.9910286900469066, 0.9862732004392726, 0.9805184915236829, 0.97377039354703, 0.9660357431847746, 0.9573223766145401, 0.9476391215770706, 0.936995788432608, 0.9254031602217704, 0.9128729817409621, 0.8994179476434169, 0.8850516895778983, 0.8697887623781368, 0.8536446293169363, 0.8366356464399144, 0.8187790459947842, 0.8000929189729015, 0.780596196780792, 0.7603086320602707, 0.7392507786765053, 0.7174439708943635, 0.6949103017640886, 0.6716726007382796, 0.6477544105427576, 0.6231799633247954, 0.5979741561029237, 0.5721625255430919, 0.5457712220867824, 0.5188269834573458, 0.4913571075712869, 0.46338942488204055, 0.4349522701841819, 0.4060744539067391, 0.37678523292457156, 0.34711428091744106, 0.31709165830686586, 0.2867477818010913, 0.2561133935791364, 0.22521953014505666, 0.19409749088408698, 0.1627788063523913, 0.13129520633260602, 0.099678587687605, 0.06796098204492845, 0.03617452334467756, 0.004351415283817458, 0.02747610131026304, 0.05927578114376713, 0.09101540712507326, 0.122662823004675, 0.15418596595354422, 
0.1855528990468471, 0.21673184362003994, 0.24769121146469117, 0.2783996368313461, 0.3088260082069509, 0.338939499834772, 0.36870960294476723, 0.39810615666284926, 0.42709937856762, 0.45565989486374514, 0.48375877014133745, 0.5113675366911424, 0.5384582233459428, 0.5650033838189013, 0.5909761245100785, 0.6163501317530711, 0.6410996984740666, 0.6651997502363782, 0.6886258706439828, 0.7113543260784302, 0.7333620897440111, 0.7546268649967786, 0.7751271079338844, 0.7948420492202646, 0.8137517151306154, 0.8318369477852736, 0.8490794245595821, 0.8654616766470331, 0.8809671067573522, 0.8955800059316623, 0.9092855694576509, 0.9220699118685949, 0.933920081011104, 0.9448240711672833, 0.9547708352180502, 0.9637502958352474, 0.9717533556912568, 0.9787719066757455, 0.9847988381101955, 0.9898280439519248, 0.9938544289802821, 0.9968739139587426, 0.998883439767694, 0.9998809705037096, 0.9998654955421769, 0.9988370305611858, 0.9967966175256467, 0.9937463236316467, 0.9896892392121206, 0.9846294746059536, 0.9785721559936844, 0.9715234202040438, 0.9634904084965678, 0.9544812593266027, 0.9445051001000144, 0.9335720379259852, 0.9216931493772306, 0.9088804692680233, 0.8951469784614235, 0.8805065907180164, 0.8649741385995063, 0.8485653584414775, 0.8312968744104859, 0.8131861816616869, 0.7942516286140161, 0.7745123983609496, 0.7539884892355951, 0.7327006945498408, 0.7106705815281318, 0.6879204694571305, 0.6644734070734692, 0.6403531492124492, 0.6155841327414232, 0.5901914518021477, 0.5642008323872308, 0.537638606276487, 0.5105316843595014, 0.4829075293714826, 0.45479412807007913, 0.4262199628812354, 0.3972139830429104, 0.3678055752758167, 0.3380245340110009, 0.3079010312043026, 0.2774655857683325, 0.24674903265299994, 0.21578249160579455, 0.1845973356435311, 0.1532251592675628, 0.12169774645453373, 0.09004703845521052, 0.05830510143393321, 0.026504093981583512, 0.005323765465150157, 0.037146231265139266, 0.06893106324167927, 0.10064605934588158, 0.13225908828143706, 0.1637381220576345, 
0.19505126843778844, 0.22616680325008848, 0.25705320252821956, 0.2876791744490792, 0.3180136910353675, 0.3480260195908661, 0.37768575383650094, 0.4069628447157627, 0.43582763083821585, 0.46425086853019343, 0.4922037614623581, 0.5196579898240078, 0.5465857390146446, 0.5729597278236415, 0.5987532360695722, 0.623940131671147, 0.6484948971222764, 0.67239265534455, 0.6956091948908486, 0.718120994474618, 0.7399052467998716, 0.7609398816678776, 0.7812035883370717, 0.8006758371135004, 0.8193369001500095, 0.8371678714330556, 0.854150685936861, 0.870268137925583, 0.8855038983848892, 0.8998425315653253, 0.9132695106206578, 0.9257712323254148, 0.9373350308566795, 0.9479491906261509, 0.9576029581495238, 0.9662865529411325, 0.9739911774228042, 0.980709025836926, 0.9864332921546599, 0.991158176971314, 0.9948788933818646, 0.9975916718307001, 0.9992937639306575, 0.9999834452474827, 0.9996600170469024, 0.9983238070025292, 0.9959761688638873, 0.992619481084893, 0.9882571444141819, 0.9828935784497166, 0.9765342171611818, 0.9691855033846849, 0.9608548822953432, 0.9515507938643915, 0.941282664308421, 0.9300608965394238, 0.9178968596253432, 0.9048028772717628, 0.8907922153364419, 0.8758790683893137, 0.860078545331612, 0.8434066540886376, 0.8258802853916962, 0.8075171956656773, 0.7883359890395378, 0.768356098497979, 0.7475977661933602, 0.7260820229378687, 0.7038306668966391, 0.680866241503443, 0.6572120126213735, 0.6328919449715734, 0.6079306778539294, 0.5823535001843819, 0.5561863248740382, 0.5294556625761321, 0.5021885948273603, 0.4744127466109017, 0.4461562583688027, 0.4174477574921366, 0.38831632931787874, 0.35879148766176877, 0.3289031449170674, 0.2986815817495642, 0.2681574164194129, 0.2373615737609801, 0.20632525385205544, 0.17507990040427363, 0.14365716890664312, 0.11208889455451682, 0.08040705999656073, 0.04864376293227098, 0.01683118359292918, 0.014998447860993358, 0.04681288399308682, 0.07857989276171433, 0.11026729017516904, 0.1418429728980797, 0.17327495077616506, 
0.20453137924632112, 0.23558059159914307, 0.2663911310613292, 0.2969317826653512, 0.32717160487418534, 0.35707996092895644, 0.3866265498878642, 0.41578143732488376, 0.44451508565707976, 0.4727983840699284, 0.5006026780102677, 0.5278997982169403, 0.5546620892598384, 0.5808624375583343, 0.6064742988507856, 0.631471725087189, 0.6558293907178501, 0.6795226183513794, 0.7025274037559738, 0.7248204401787516, 0.7463791419584509, 0.767181667407527, 0.7872069409405602, 0.8064346744264764, 0.824845387743003, 0.8424204285124667, 0.859141990999022, 0.8749931341481204, 0.8899577987499159, 0.9040208237092842, 0.9171679614059359, 0.9293858921290362, 0.9406622375717668, 0.9509855733721083, 0.9603454406871711, 0.96873235678931, 0.976137824673333, 0.9825543416650442, 0.9879754070223907, 0.9923955285215414, 0.9958102280212049, 0.9982160459995463, 0.999610545059124, 0.9999923123962802, 0.9993609612324893, 0.9977171312062132, 0.9950624877248674, 0.991399720277551, 0.9867325397102574, 0.9810656744663171, 0.974404865795889, 0.9667568619393434, 0.9581294112904505, 0.948531254546276, 0.9379721158517436, 0.9264626929478607, 0.9140146463335506, 0.9006405874520853, 0.8863540659141169, 0.8711695557702006, 0.8551024408467595, 0.838168999160311, 0.8203863864258022, 0.801772618675694, 0.7823465540074317, 0.7621278734778355, 0.7411370611636929, 0.7193953834087873, 0.6969248672784334, 0.6737482782432609, 0.6498890971149267, 0.6253714962570663, 0.6002203150956684, 0.5744610349535847, 0.5481197532347128, 0.5212231569840663, 0.49379849585040975, 0.4658735544789049, 0.43747662436179297, 0.40863647517551555, 0.3793823256334119, 0.34974381388344583, 0.31975096748105813, 0.2894341729674418, 0.258824145084117, 0.22795189565506035, 0.19684870216779013, 0.1655460760853006, 0.13407573092101197, 0.10246955010894862, 0.07075955470180778, 0.038977870929560926, 0.007156697651568132, 0.02467172626495408, 0.056475154606998, 0.08822136648520822, 0.11987819897774012, 0.15141357971534744, 0.18279555937477088, 
0.21399234404739503, 0.2449723274505146, 0.2757041229485106, 0.30615659535143475, 0.3362988924589099, 0.36610047631732523, 0.39553115415859935, 0.4245611089892947, 0.4531609297989841, 0.4813016413573441, 0.5089547335696845, 0.5360921903612939, 0.5626865180612798, 0.5887107732570928, 0.6141385900916246, 0.6389442069751682, 0.6631024926851264, 0.6865889718271386, 0.709379849631734, 0.7314520360614588, 0.7527831692039703, 0.7733516379274932, 0.793136603775639, 0.812118022079366, 0.8302766622647758, 0.8475941273361236, 0.8640528725142747, 0.8796362230117974, 0.894328390926622, 0.9081144912371911, 0.9209805568828475, 0.9329135529142414, 0.9439013896993863, 0.9539329351719683, 0.9629980261095433, 0.9710874784301702, 0.9781930964970333, 0.9843076814216665, 0.9894250383573321, 0.9935399827751886, 0.9966483457168652, 0.9987469780181482, 0.9998337534994822, 0.9999075711200558, 0.9989683560932968, 0.9970170599626409, 0.9940556596374959, 0.9900871553903848, 0.9851155678172917, 0.9791459337642848, 0.9721843012245629, 0.9642377232110713, 0.9553142506109031, 0.9454229240287447, 0.9345737646275939, 0.9227777639760615, 0.9100468729125126, 0.8963939894373746, 0.8818329456458264, 0.8663784937141287, 0.8500462909538259, 0.8328528839489031, 0.8148156917919949, 0.795952988436667, 0.7762838841835792, 0.7558283063193451, 0.7346069789276557, 0.7126414018931935, 0.6899538291195203, 0.6665672459830472, 0.6425053460459751, 0.6177925070517104, 0.5924537662271144, 0.5665147949166638, 0.5400018725741146, 0.5129418601381087, 0.48536217281862604, 0.4572907523219511, 0.4287560385421779, 0.3997869407479863, 0.3704128082939399, 0.3406634008858646, 0.31056885843048815, 0.280159670499948, 0.24946664544197772, 0.21852087916717353, 0.18735372364488212, 0.15599675513973774, 0.12448174222089778, 0.09284061357644895, 0.06110542566565615, 0.029308330241692798, 0.002518458222131506, 0.03434269516972916, 0.06613213862999943, 0.09785458188203833, 0.12947788608463542, 0.1609700128369364, 0.19229905663741045, 
0.2234332772081737, 0.25434113165185696, 0.28499130640857256, 0.31535274898049115, 0.34539469939197076, 0.37508672135325594, 0.4043987330963041, 0.4333010378514343, 0.46176435393386667, 0.4897598444097866, 0.5172591463118191, 0.5442343993742591, 0.5706582742590642, 0.5965040002439136, 0.6217453923443531, 0.6463568778424575, 0.6703135221952409, 0.6935910542965126, 0.7161658910665417, 0.7380151613447116, 0.7591167290609075, 0.7794492156621224, 0.7989920217716505, 0.8177253480588483, 0.8356302152983737, 0.8526884835985125, 0.8688828707791938, 0.8841969698810302, 0.8986152657876189, 0.9121231509443274, 0.9247069401575885, 0.9363538844597241, 0.9470521840252805, 0.9567910001257096, 0.965560466110392, 0.9733516974027941, 0.9801568005016663, 0.9859688809781566, 0.9907820504607358, 0.9945914326008699, 0.9973931680133636, 0.9991844181864147, 0.9999633683573815, 0.9997292293513679, 0.998482238380756, 0.9962236588048824, 0.99295577885009, 0.9886819092914728, 0.9834063800986381, 0.9771345360489022, 0.9698727313123552, 0.9616283230142885, 0.9524096637814825, 0.9422260932799632, 0.9310879287527261, 0.9190064545670701, 0.9059939107821081, 0.8920634807480415, 0.8772292777497681, 0.8615063307083155, 0.8449105689546853, 0.8274588060914041, 0.8091687229582258, 0.7900588497192094, 0.7701485470893212, 0.7494579867195964, 0.7280081307606706],
[0.0, 0.0078125, 0.015625, 0.0234375, 0.03125, 0.0390625, 0.046875, 0.0546875, 0.0625, 0.0703125, 0.078125, 0.0859375, 0.09375, 0.1015625, 0.109375, 0.1171875, 0.125, 0.1328125, 0.140625, 0.1484375, 0.15625, 0.1640625, 0.171875, 0.1796875, 0.1875, 0.1953125, 0.203125, 0.2109375, 0.21875, 0.2265625, 0.234375, 0.2421875, 0.25, 0.2578125, 0.265625, 0.2734375, 0.28125, 0.2890625, 0.296875, 0.3046875, 0.3125, 0.3203125, 0.328125, 0.3359375, 0.34375, 0.3515625, 0.359375, 0.3671875, 0.375, 0.3828125, 0.390625, 0.3984375, 0.40625, 0.4140625, 0.421875, 0.4296875, 0.4375, 0.4453125, 0.453125, 0.4609375, 0.46875, 0.4765625, 0.484375, 0.4921875, 0.5, 0.5078125, 0.515625, 0.5234375, 0.53125, 0.5390625, 0.546875, 0.5546875, 0.5625, 0.5703125, 0.578125, 0.5859375, 0.59375, 0.6015625, 0.609375, 0.6171875, 0.625, 0.6328125, 0.640625, 0.6484375, 0.65625, 0.6640625, 0.671875, 0.6796875, 0.6875, 0.6953125, 0.703125, 0.7109375, 0.71875, 0.7265625, 0.734375, 0.7421875, 0.75, 0.7578125, 0.765625, 0.7734375, 0.78125, 0.7890625, 0.796875, 0.8046875, 0.8125, 0.8203125, 0.828125, 0.8359375, 0.84375, 0.8515625, 0.859375, 0.8671875, 0.875, 0.8828125, 0.890625, 0.8984375, 0.90625, 0.9140625, 0.921875, 0.9296875, 0.9375, 0.9453125, 0.953125, 0.9609375, 0.96875, 0.9765625, 0.984375, 0.9921875],
[0.0, 0.00390625, 0.0078125, 0.01171875, 0.015625, 0.01953125, 0.0234375, 0.02734375, 0.03125, 0.03515625, 0.0390625, 0.04296875, 0.046875, 0.05078125, 0.0546875, 0.05859375, 0.0625, 0.06640625, 0.0703125, 0.07421875, 0.078125, 0.08203125, 0.0859375, 0.08984375, 0.09375, 0.09765625, 0.1015625, 0.10546875, 0.109375, 0.11328125, 0.1171875, 0.12109375, 0.125, 0.12890625, 0.1328125, 0.13671875, 0.140625, 0.14453125, 0.1484375, 0.15234375, 0.15625, 0.16015625, 0.1640625, 0.16796875, 0.171875, 0.17578125, 0.1796875, 0.18359375, 0.1875, 0.19140625, 0.1953125, 0.19921875, 0.203125, 0.20703125, 0.2109375, 0.21484375, 0.21875, 0.22265625, 0.2265625, 0.23046875, 0.234375, 0.23828125, 0.2421875, 0.24609375, 0.25, 0.25390625, 0.2578125, 0.26171875, 0.265625, 0.26953125, 0.2734375, 0.27734375, 0.28125, 0.28515625, 0.2890625, 0.29296875, 0.296875, 0.30078125, 0.3046875, 0.30859375, 0.3125, 0.31640625, 0.3203125, 0.32421875, 0.328125, 0.33203125, 0.3359375, 0.33984375, 0.34375, 0.34765625, 0.3515625, 0.35546875, 0.359375, 0.36328125, 0.3671875, 0.37109375, 0.375, 0.37890625, 0.3828125, 0.38671875, 0.390625, 0.39453125, 0.3984375, 0.40234375, 0.40625, 0.41015625, 0.4140625, 0.41796875, 0.421875, 0.42578125, 0.4296875, 0.43359375, 0.4375, 0.44140625, 0.4453125, 0.44921875, 0.453125, 0.45703125, 0.4609375, 0.46484375, 0.46875, 0.47265625, 0.4765625, 0.48046875, 0.484375, 0.48828125, 0.4921875, 0.49609375, 0.5, 0.50390625, 0.5078125, 0.51171875, 0.515625, 0.51953125, 0.5234375, 0.52734375, 0.53125, 0.53515625, 0.5390625, 0.54296875, 0.546875, 0.55078125, 0.5546875, 0.55859375, 0.5625, 0.56640625, 0.5703125, 0.57421875, 0.578125, 0.58203125, 0.5859375, 0.58984375, 0.59375, 0.59765625, 0.6015625, 0.60546875, 0.609375, 0.61328125, 0.6171875, 0.62109375, 0.625, 0.62890625, 0.6328125, 0.63671875, 0.640625, 0.64453125, 0.6484375, 0.65234375, 0.65625, 0.66015625, 0.6640625, 0.66796875, 0.671875, 0.67578125, 0.6796875, 0.68359375, 0.6875, 0.69140625, 0.6953125, 0.69921875, 0.703125, 
0.70703125, 0.7109375, 0.71484375, 0.71875, 0.72265625, 0.7265625, 0.73046875, 0.734375, 0.73828125, 0.7421875, 0.74609375, 0.75, 0.75390625, 0.7578125, 0.76171875, 0.765625, 0.76953125, 0.7734375, 0.77734375, 0.78125, 0.78515625, 0.7890625, 0.79296875, 0.796875, 0.80078125, 0.8046875, 0.80859375, 0.8125, 0.81640625, 0.8203125, 0.82421875, 0.828125, 0.83203125, 0.8359375, 0.83984375, 0.84375, 0.84765625, 0.8515625, 0.85546875, 0.859375, 0.86328125, 0.8671875, 0.87109375, 0.875, 0.87890625, 0.8828125, 0.88671875, 0.890625, 0.89453125, 0.8984375, 0.90234375, 0.90625, 0.91015625, 0.9140625, 0.91796875, 0.921875, 0.92578125, 0.9296875, 0.93359375, 0.9375, 0.94140625, 0.9453125, 0.94921875, 0.953125, 0.95703125, 0.9609375, 0.96484375, 0.96875, 0.97265625, 0.9765625, 0.98046875, 0.984375, 0.98828125, 0.9921875, 0.99609375],
[0.0, 0.001953125, 0.00390625, 0.005859375, 0.0078125, 0.009765625, 0.01171875, 0.013671875, 0.015625, 0.017578125, 0.01953125, 0.021484375, 0.0234375, 0.025390625, 0.02734375, 0.029296875, 0.03125, 0.033203125, 0.03515625, 0.037109375, 0.0390625, 0.041015625, 0.04296875, 0.044921875, 0.046875, 0.048828125, 0.05078125, 0.052734375, 0.0546875, 0.056640625, 0.05859375, 0.060546875, 0.0625, 0.064453125, 0.06640625, 0.068359375, 0.0703125, 0.072265625, 0.07421875, 0.076171875, 0.078125, 0.080078125, 0.08203125, 0.083984375, 0.0859375, 0.087890625, 0.08984375, 0.091796875, 0.09375, 0.095703125, 0.09765625, 0.099609375, 0.1015625, 0.103515625, 0.10546875, 0.107421875, 0.109375, 0.111328125, 0.11328125, 0.115234375, 0.1171875, 0.119140625, 0.12109375, 0.123046875, 0.125, 0.126953125, 0.12890625, 0.130859375, 0.1328125, 0.134765625, 0.13671875, 0.138671875, 0.140625, 0.142578125, 0.14453125, 0.146484375, 0.1484375, 0.150390625, 0.15234375, 0.154296875, 0.15625, 0.158203125, 0.16015625, 0.162109375, 0.1640625, 0.166015625, 0.16796875, 0.169921875, 0.171875, 0.173828125, 0.17578125, 0.177734375, 0.1796875, 0.181640625, 0.18359375, 0.185546875, 0.1875, 0.189453125, 0.19140625, 0.193359375, 0.1953125, 0.197265625, 0.19921875, 0.201171875, 0.203125, 0.205078125, 0.20703125, 0.208984375, 0.2109375, 0.212890625, 0.21484375, 0.216796875, 0.21875, 0.220703125, 0.22265625, 0.224609375, 0.2265625, 0.228515625, 0.23046875, 0.232421875, 0.234375, 0.236328125, 0.23828125, 0.240234375, 0.2421875, 0.244140625, 0.24609375, 0.248046875, 0.25, 0.251953125, 0.25390625, 0.255859375, 0.2578125, 0.259765625, 0.26171875, 0.263671875, 0.265625, 0.267578125, 0.26953125, 0.271484375, 0.2734375, 0.275390625, 0.27734375, 0.279296875, 0.28125, 0.283203125, 0.28515625, 0.287109375, 0.2890625, 0.291015625, 0.29296875, 0.294921875, 0.296875, 0.298828125, 0.30078125, 0.302734375, 0.3046875, 0.306640625, 0.30859375, 0.310546875, 0.3125, 0.314453125, 0.31640625, 0.318359375, 0.3203125, 0.322265625, 
0.32421875, 0.326171875, 0.328125, 0.330078125, 0.33203125, 0.333984375, 0.3359375, 0.337890625, 0.33984375, 0.341796875, 0.34375, 0.345703125, 0.34765625, 0.349609375, 0.3515625, 0.353515625, 0.35546875, 0.357421875, 0.359375, 0.361328125, 0.36328125, 0.365234375, 0.3671875, 0.369140625, 0.37109375, 0.373046875, 0.375, 0.376953125, 0.37890625, 0.380859375, 0.3828125, 0.384765625, 0.38671875, 0.388671875, 0.390625, 0.392578125, 0.39453125, 0.396484375, 0.3984375, 0.400390625, 0.40234375, 0.404296875, 0.40625, 0.408203125, 0.41015625, 0.412109375, 0.4140625, 0.416015625, 0.41796875, 0.419921875, 0.421875, 0.423828125, 0.42578125, 0.427734375, 0.4296875, 0.431640625, 0.43359375, 0.435546875, 0.4375, 0.439453125, 0.44140625, 0.443359375, 0.4453125, 0.447265625, 0.44921875, 0.451171875, 0.453125, 0.455078125, 0.45703125, 0.458984375, 0.4609375, 0.462890625, 0.46484375, 0.466796875, 0.46875, 0.470703125, 0.47265625, 0.474609375, 0.4765625, 0.478515625, 0.48046875, 0.482421875, 0.484375, 0.486328125, 0.48828125, 0.490234375, 0.4921875, 0.494140625, 0.49609375, 0.498046875, 0.5, 0.501953125, 0.50390625, 0.505859375, 0.5078125, 0.509765625, 0.51171875, 0.513671875, 0.515625, 0.517578125, 0.51953125, 0.521484375, 0.5234375, 0.525390625, 0.52734375, 0.529296875, 0.53125, 0.533203125, 0.53515625, 0.537109375, 0.5390625, 0.541015625, 0.54296875, 0.544921875, 0.546875, 0.548828125, 0.55078125, 0.552734375, 0.5546875, 0.556640625, 0.55859375, 0.560546875, 0.5625, 0.564453125, 0.56640625, 0.568359375, 0.5703125, 0.572265625, 0.57421875, 0.576171875, 0.578125, 0.580078125, 0.58203125, 0.583984375, 0.5859375, 0.587890625, 0.58984375, 0.591796875, 0.59375, 0.595703125, 0.59765625, 0.599609375, 0.6015625, 0.603515625, 0.60546875, 0.607421875, 0.609375, 0.611328125, 0.61328125, 0.615234375, 0.6171875, 0.619140625, 0.62109375, 0.623046875, 0.625, 0.626953125, 0.62890625, 0.630859375, 0.6328125, 0.634765625, 0.63671875, 0.638671875, 0.640625, 0.642578125, 0.64453125, 0.646484375, 
0.6484375, 0.650390625, 0.65234375, 0.654296875, 0.65625, 0.658203125, 0.66015625, 0.662109375, 0.6640625, 0.666015625, 0.66796875, 0.669921875, 0.671875, 0.673828125, 0.67578125, 0.677734375, 0.6796875, 0.681640625, 0.68359375, 0.685546875, 0.6875, 0.689453125, 0.69140625, 0.693359375, 0.6953125, 0.697265625, 0.69921875, 0.701171875, 0.703125, 0.705078125, 0.70703125, 0.708984375, 0.7109375, 0.712890625, 0.71484375, 0.716796875, 0.71875, 0.720703125, 0.72265625, 0.724609375, 0.7265625, 0.728515625, 0.73046875, 0.732421875, 0.734375, 0.736328125, 0.73828125, 0.740234375, 0.7421875, 0.744140625, 0.74609375, 0.748046875, 0.75, 0.751953125, 0.75390625, 0.755859375, 0.7578125, 0.759765625, 0.76171875, 0.763671875, 0.765625, 0.767578125, 0.76953125, 0.771484375, 0.7734375, 0.775390625, 0.77734375, 0.779296875, 0.78125, 0.783203125, 0.78515625, 0.787109375, 0.7890625, 0.791015625, 0.79296875, 0.794921875, 0.796875, 0.798828125, 0.80078125, 0.802734375, 0.8046875, 0.806640625, 0.80859375, 0.810546875, 0.8125, 0.814453125, 0.81640625, 0.818359375, 0.8203125, 0.822265625, 0.82421875, 0.826171875, 0.828125, 0.830078125, 0.83203125, 0.833984375, 0.8359375, 0.837890625, 0.83984375, 0.841796875, 0.84375, 0.845703125, 0.84765625, 0.849609375, 0.8515625, 0.853515625, 0.85546875, 0.857421875, 0.859375, 0.861328125, 0.86328125, 0.865234375, 0.8671875, 0.869140625, 0.87109375, 0.873046875, 0.875, 0.876953125, 0.87890625, 0.880859375, 0.8828125, 0.884765625, 0.88671875, 0.888671875, 0.890625, 0.892578125, 0.89453125, 0.896484375, 0.8984375, 0.900390625, 0.90234375, 0.904296875, 0.90625, 0.908203125, 0.91015625, 0.912109375, 0.9140625, 0.916015625, 0.91796875, 0.919921875, 0.921875, 0.923828125, 0.92578125, 0.927734375, 0.9296875, 0.931640625, 0.93359375, 0.935546875, 0.9375, 0.939453125, 0.94140625, 0.943359375, 0.9453125, 0.947265625, 0.94921875, 0.951171875, 0.953125, 0.955078125, 0.95703125, 0.958984375, 0.9609375, 0.962890625, 0.96484375, 0.966796875, 0.96875, 0.970703125, 
0.97265625, 0.974609375, 0.9765625, 0.978515625, 0.98046875, 0.982421875, 0.984375, 0.986328125, 0.98828125, 0.990234375, 0.9921875, 0.994140625, 0.99609375, 0.998046875],
[0.0, 0.0009765625, 0.001953125, 0.0029296875, 0.00390625, 0.0048828125, 0.005859375, 0.0068359375, 0.0078125, 0.0087890625, 0.009765625, 0.0107421875, 0.01171875, 0.0126953125, 0.013671875, 0.0146484375, 0.015625, 0.0166015625, 0.017578125, 0.0185546875, 0.01953125, 0.0205078125, 0.021484375, 0.0224609375, 0.0234375, 0.0244140625, 0.025390625, 0.0263671875, 0.02734375, 0.0283203125, 0.029296875, 0.0302734375, 0.03125, 0.0322265625, 0.033203125, 0.0341796875, 0.03515625, 0.0361328125, 0.037109375, 0.0380859375, 0.0390625, 0.0400390625, 0.041015625, 0.0419921875, 0.04296875, 0.0439453125, 0.044921875, 0.0458984375, 0.046875, 0.0478515625, 0.048828125, 0.0498046875, 0.05078125, 0.0517578125, 0.052734375, 0.0537109375, 0.0546875, 0.0556640625, 0.056640625, 0.0576171875, 0.05859375, 0.0595703125, 0.060546875, 0.0615234375, 0.0625, 0.0634765625, 0.064453125, 0.0654296875, 0.06640625, 0.0673828125, 0.068359375, 0.0693359375, 0.0703125, 0.0712890625, 0.072265625, 0.0732421875, 0.07421875, 0.0751953125, 0.076171875, 0.0771484375, 0.078125, 0.0791015625, 0.080078125, 0.0810546875, 0.08203125, 0.0830078125, 0.083984375, 0.0849609375, 0.0859375, 0.0869140625, 0.087890625, 0.0888671875, 0.08984375, 0.0908203125, 0.091796875, 0.0927734375, 0.09375, 0.0947265625, 0.095703125, 0.0966796875, 0.09765625, 0.0986328125, 0.099609375, 0.1005859375, 0.1015625, 0.1025390625, 0.103515625, 0.1044921875, 0.10546875, 0.1064453125, 0.107421875, 0.1083984375, 0.109375, 0.1103515625, 0.111328125, 0.1123046875, 0.11328125, 0.1142578125, 0.115234375, 0.1162109375, 0.1171875, 0.1181640625, 0.119140625, 0.1201171875, 0.12109375, 0.1220703125, 0.123046875, 0.1240234375, 0.125, 0.1259765625, 0.126953125, 0.1279296875, 0.12890625, 0.1298828125, 0.130859375, 0.1318359375, 0.1328125, 0.1337890625, 0.134765625, 0.1357421875, 0.13671875, 0.1376953125, 0.138671875, 0.1396484375, 0.140625, 0.1416015625, 0.142578125, 0.1435546875, 0.14453125, 0.1455078125, 0.146484375, 0.1474609375, 0.1484375, 0.1494140625, 
0.150390625, 0.1513671875, 0.15234375, 0.1533203125, 0.154296875, 0.1552734375, 0.15625, 0.1572265625, 0.158203125, 0.1591796875, 0.16015625, 0.1611328125, 0.162109375, 0.1630859375, 0.1640625, 0.1650390625, 0.166015625, 0.1669921875, 0.16796875, 0.1689453125, 0.169921875, 0.1708984375, 0.171875, 0.1728515625, 0.173828125, 0.1748046875, 0.17578125, 0.1767578125, 0.177734375, 0.1787109375, 0.1796875, 0.1806640625, 0.181640625, 0.1826171875, 0.18359375, 0.1845703125, 0.185546875, 0.1865234375, 0.1875, 0.1884765625, 0.189453125, 0.1904296875, 0.19140625, 0.1923828125, 0.193359375, 0.1943359375, 0.1953125, 0.1962890625, 0.197265625, 0.1982421875, 0.19921875, 0.2001953125, 0.201171875, 0.2021484375, 0.203125, 0.2041015625, 0.205078125, 0.2060546875, 0.20703125, 0.2080078125, 0.208984375, 0.2099609375, 0.2109375, 0.2119140625, 0.212890625, 0.2138671875, 0.21484375, 0.2158203125, 0.216796875, 0.2177734375, 0.21875, 0.2197265625, 0.220703125, 0.2216796875, 0.22265625, 0.2236328125, 0.224609375, 0.2255859375, 0.2265625, 0.2275390625, 0.228515625, 0.2294921875, 0.23046875, 0.2314453125, 0.232421875, 0.2333984375, 0.234375, 0.2353515625, 0.236328125, 0.2373046875, 0.23828125, 0.2392578125, 0.240234375, 0.2412109375, 0.2421875, 0.2431640625, 0.244140625, 0.2451171875, 0.24609375, 0.2470703125, 0.248046875, 0.2490234375, 0.25, 0.2509765625, 0.251953125, 0.2529296875, 0.25390625, 0.2548828125, 0.255859375, 0.2568359375, 0.2578125, 0.2587890625, 0.259765625, 0.2607421875, 0.26171875, 0.2626953125, 0.263671875, 0.2646484375, 0.265625, 0.2666015625, 0.267578125, 0.2685546875, 0.26953125, 0.2705078125, 0.271484375, 0.2724609375, 0.2734375, 0.2744140625, 0.275390625, 0.2763671875, 0.27734375, 0.2783203125, 0.279296875, 0.2802734375, 0.28125, 0.2822265625, 0.283203125, 0.2841796875, 0.28515625, 0.2861328125, 0.287109375, 0.2880859375, 0.2890625, 0.2900390625, 0.291015625, 0.2919921875, 0.29296875, 0.2939453125, 0.294921875, 0.2958984375, 0.296875, 0.2978515625, 0.298828125, 
0.2998046875, 0.30078125, 0.3017578125, 0.302734375, 0.3037109375, 0.3046875, 0.3056640625, 0.306640625, 0.3076171875, 0.30859375, 0.3095703125, 0.310546875, 0.3115234375, 0.3125, 0.3134765625, 0.314453125, 0.3154296875, 0.31640625, 0.3173828125, 0.318359375, 0.3193359375, 0.3203125, 0.3212890625, 0.322265625, 0.3232421875, 0.32421875, 0.3251953125, 0.326171875, 0.3271484375, 0.328125, 0.3291015625, 0.330078125, 0.3310546875, 0.33203125, 0.3330078125, 0.333984375, 0.3349609375, 0.3359375, 0.3369140625, 0.337890625, 0.3388671875, 0.33984375, 0.3408203125, 0.341796875, 0.3427734375, 0.34375, 0.3447265625, 0.345703125, 0.3466796875, 0.34765625, 0.3486328125, 0.349609375, 0.3505859375, 0.3515625, 0.3525390625, 0.353515625, 0.3544921875, 0.35546875, 0.3564453125, 0.357421875, 0.3583984375, 0.359375, 0.3603515625, 0.361328125, 0.3623046875, 0.36328125, 0.3642578125, 0.365234375, 0.3662109375, 0.3671875, 0.3681640625, 0.369140625, 0.3701171875, 0.37109375, 0.3720703125, 0.373046875, 0.3740234375, 0.375, 0.3759765625, 0.376953125, 0.3779296875, 0.37890625, 0.3798828125, 0.380859375, 0.3818359375, 0.3828125, 0.3837890625, 0.384765625, 0.3857421875, 0.38671875, 0.3876953125, 0.388671875, 0.3896484375, 0.390625, 0.3916015625, 0.392578125, 0.3935546875, 0.39453125, 0.3955078125, 0.396484375, 0.3974609375, 0.3984375, 0.3994140625, 0.400390625, 0.4013671875, 0.40234375, 0.4033203125, 0.404296875, 0.4052734375, 0.40625, 0.4072265625, 0.408203125, 0.4091796875, 0.41015625, 0.4111328125, 0.412109375, 0.4130859375, 0.4140625, 0.4150390625, 0.416015625, 0.4169921875, 0.41796875, 0.4189453125, 0.419921875, 0.4208984375, 0.421875, 0.4228515625, 0.423828125, 0.4248046875, 0.42578125, 0.4267578125, 0.427734375, 0.4287109375, 0.4296875, 0.4306640625, 0.431640625, 0.4326171875, 0.43359375, 0.4345703125, 0.435546875, 0.4365234375, 0.4375, 0.4384765625, 0.439453125, 0.4404296875, 0.44140625, 0.4423828125, 0.443359375, 0.4443359375, 0.4453125, 0.4462890625, 0.447265625, 0.4482421875, 
0.44921875, 0.4501953125, 0.451171875, 0.4521484375, 0.453125, 0.4541015625, 0.455078125, 0.4560546875, 0.45703125, 0.4580078125, 0.458984375, 0.4599609375, 0.4609375, 0.4619140625, 0.462890625, 0.4638671875, 0.46484375, 0.4658203125, 0.466796875, 0.4677734375, 0.46875, 0.4697265625, 0.470703125, 0.4716796875, 0.47265625, 0.4736328125, 0.474609375, 0.4755859375, 0.4765625, 0.4775390625, 0.478515625, 0.4794921875, 0.48046875, 0.4814453125, 0.482421875, 0.4833984375, 0.484375, 0.4853515625, 0.486328125, 0.4873046875, 0.48828125, 0.4892578125, 0.490234375, 0.4912109375, 0.4921875, 0.4931640625, 0.494140625, 0.4951171875, 0.49609375, 0.4970703125, 0.498046875, 0.4990234375, 0.5, 0.5009765625, 0.501953125, 0.5029296875, 0.50390625, 0.5048828125, 0.505859375, 0.5068359375, 0.5078125, 0.5087890625, 0.509765625, 0.5107421875, 0.51171875, 0.5126953125, 0.513671875, 0.5146484375, 0.515625, 0.5166015625, 0.517578125, 0.5185546875, 0.51953125, 0.5205078125, 0.521484375, 0.5224609375, 0.5234375, 0.5244140625, 0.525390625, 0.5263671875, 0.52734375, 0.5283203125, 0.529296875, 0.5302734375, 0.53125, 0.5322265625, 0.533203125, 0.5341796875, 0.53515625, 0.5361328125, 0.537109375, 0.5380859375, 0.5390625, 0.5400390625, 0.541015625, 0.5419921875, 0.54296875, 0.5439453125, 0.544921875, 0.5458984375, 0.546875, 0.5478515625, 0.548828125, 0.5498046875, 0.55078125, 0.5517578125, 0.552734375, 0.5537109375, 0.5546875, 0.5556640625, 0.556640625, 0.5576171875, 0.55859375, 0.5595703125, 0.560546875, 0.5615234375, 0.5625, 0.5634765625, 0.564453125, 0.5654296875, 0.56640625, 0.5673828125, 0.568359375, 0.5693359375, 0.5703125, 0.5712890625, 0.572265625, 0.5732421875, 0.57421875, 0.5751953125, 0.576171875, 0.5771484375, 0.578125, 0.5791015625, 0.580078125, 0.5810546875, 0.58203125, 0.5830078125, 0.583984375, 0.5849609375, 0.5859375, 0.5869140625, 0.587890625, 0.5888671875, 0.58984375, 0.5908203125, 0.591796875, 0.5927734375, 0.59375, 0.5947265625, 0.595703125, 0.5966796875, 0.59765625, 
0.5986328125, 0.599609375, 0.6005859375, 0.6015625, 0.6025390625, 0.603515625, 0.6044921875, 0.60546875, 0.6064453125, 0.607421875, 0.6083984375, 0.609375, 0.6103515625, 0.611328125, 0.6123046875, 0.61328125, 0.6142578125, 0.615234375, 0.6162109375, 0.6171875, 0.6181640625, 0.619140625, 0.6201171875, 0.62109375, 0.6220703125, 0.623046875, 0.6240234375, 0.625, 0.6259765625, 0.626953125, 0.6279296875, 0.62890625, 0.6298828125, 0.630859375, 0.6318359375, 0.6328125, 0.6337890625, 0.634765625, 0.6357421875, 0.63671875, 0.6376953125, 0.638671875, 0.6396484375, 0.640625, 0.6416015625, 0.642578125, 0.6435546875, 0.64453125, 0.6455078125, 0.646484375, 0.6474609375, 0.6484375, 0.6494140625, 0.650390625, 0.6513671875, 0.65234375, 0.6533203125, 0.654296875, 0.6552734375, 0.65625, 0.6572265625, 0.658203125, 0.6591796875, 0.66015625, 0.6611328125, 0.662109375, 0.6630859375, 0.6640625, 0.6650390625, 0.666015625, 0.6669921875, 0.66796875, 0.6689453125, 0.669921875, 0.6708984375, 0.671875, 0.6728515625, 0.673828125, 0.6748046875, 0.67578125, 0.6767578125, 0.677734375, 0.6787109375, 0.6796875, 0.6806640625, 0.681640625, 0.6826171875, 0.68359375, 0.6845703125, 0.685546875, 0.6865234375, 0.6875, 0.6884765625, 0.689453125, 0.6904296875, 0.69140625, 0.6923828125, 0.693359375, 0.6943359375, 0.6953125, 0.6962890625, 0.697265625, 0.6982421875, 0.69921875, 0.7001953125, 0.701171875, 0.7021484375, 0.703125, 0.7041015625, 0.705078125, 0.7060546875, 0.70703125, 0.7080078125, 0.708984375, 0.7099609375, 0.7109375, 0.7119140625, 0.712890625, 0.7138671875, 0.71484375, 0.7158203125, 0.716796875, 0.7177734375, 0.71875, 0.7197265625, 0.720703125, 0.7216796875, 0.72265625, 0.7236328125, 0.724609375, 0.7255859375, 0.7265625, 0.7275390625, 0.728515625, 0.7294921875, 0.73046875, 0.7314453125, 0.732421875, 0.7333984375, 0.734375, 0.7353515625, 0.736328125, 0.7373046875, 0.73828125, 0.7392578125, 0.740234375, 0.7412109375, 0.7421875, 0.7431640625, 0.744140625, 0.7451171875, 0.74609375, 0.7470703125, 
0.748046875, 0.7490234375, 0.75, 0.7509765625, 0.751953125, 0.7529296875, 0.75390625, 0.7548828125, 0.755859375, 0.7568359375, 0.7578125, 0.7587890625, 0.759765625, 0.7607421875, 0.76171875, 0.7626953125, 0.763671875, 0.7646484375, 0.765625, 0.7666015625, 0.767578125, 0.7685546875, 0.76953125, 0.7705078125, 0.771484375, 0.7724609375, 0.7734375, 0.7744140625, 0.775390625, 0.7763671875, 0.77734375, 0.7783203125, 0.779296875, 0.7802734375, 0.78125, 0.7822265625, 0.783203125, 0.7841796875, 0.78515625, 0.7861328125, 0.787109375, 0.7880859375, 0.7890625, 0.7900390625, 0.791015625, 0.7919921875, 0.79296875, 0.7939453125, 0.794921875, 0.7958984375, 0.796875, 0.7978515625, 0.798828125, 0.7998046875, 0.80078125, 0.8017578125, 0.802734375, 0.8037109375, 0.8046875, 0.8056640625, 0.806640625, 0.8076171875, 0.80859375, 0.8095703125, 0.810546875, 0.8115234375, 0.8125, 0.8134765625, 0.814453125, 0.8154296875, 0.81640625, 0.8173828125, 0.818359375, 0.8193359375, 0.8203125, 0.8212890625, 0.822265625, 0.8232421875, 0.82421875, 0.8251953125, 0.826171875, 0.8271484375, 0.828125, 0.8291015625, 0.830078125, 0.8310546875, 0.83203125, 0.8330078125, 0.833984375, 0.8349609375, 0.8359375, 0.8369140625, 0.837890625, 0.8388671875, 0.83984375, 0.8408203125, 0.841796875, 0.8427734375, 0.84375, 0.8447265625, 0.845703125, 0.8466796875, 0.84765625, 0.8486328125, 0.849609375, 0.8505859375, 0.8515625, 0.8525390625, 0.853515625, 0.8544921875, 0.85546875, 0.8564453125, 0.857421875, 0.8583984375, 0.859375, 0.8603515625, 0.861328125, 0.8623046875, 0.86328125, 0.8642578125, 0.865234375, 0.8662109375, 0.8671875, 0.8681640625, 0.869140625, 0.8701171875, 0.87109375, 0.8720703125, 0.873046875, 0.8740234375, 0.875, 0.8759765625, 0.876953125, 0.8779296875, 0.87890625, 0.8798828125, 0.880859375, 0.8818359375, 0.8828125, 0.8837890625, 0.884765625, 0.8857421875, 0.88671875, 0.8876953125, 0.888671875, 0.8896484375, 0.890625, 0.8916015625, 0.892578125, 0.8935546875, 0.89453125, 0.8955078125, 0.896484375, 
0.8974609375, 0.8984375, 0.8994140625, 0.900390625, 0.9013671875, 0.90234375, 0.9033203125, 0.904296875, 0.9052734375, 0.90625, 0.9072265625, 0.908203125, 0.9091796875, 0.91015625, 0.9111328125, 0.912109375, 0.9130859375, 0.9140625, 0.9150390625, 0.916015625, 0.9169921875, 0.91796875, 0.9189453125, 0.919921875, 0.9208984375, 0.921875, 0.9228515625, 0.923828125, 0.9248046875, 0.92578125, 0.9267578125, 0.927734375, 0.9287109375, 0.9296875, 0.9306640625, 0.931640625, 0.9326171875, 0.93359375, 0.9345703125, 0.935546875, 0.9365234375, 0.9375, 0.9384765625, 0.939453125, 0.9404296875, 0.94140625, 0.9423828125, 0.943359375, 0.9443359375, 0.9453125, 0.9462890625, 0.947265625, 0.9482421875, 0.94921875, 0.9501953125, 0.951171875, 0.9521484375, 0.953125, 0.9541015625, 0.955078125, 0.9560546875, 0.95703125, 0.9580078125, 0.958984375, 0.9599609375, 0.9609375, 0.9619140625, 0.962890625, 0.9638671875, 0.96484375, 0.9658203125, 0.966796875, 0.9677734375, 0.96875, 0.9697265625, 0.970703125, 0.9716796875, 0.97265625, 0.9736328125, 0.974609375, 0.9755859375, 0.9765625, 0.9775390625, 0.978515625, 0.9794921875, 0.98046875, 0.9814453125, 0.982421875, 0.9833984375, 0.984375, 0.9853515625, 0.986328125, 0.9873046875, 0.98828125, 0.9892578125, 0.990234375, 0.9912109375, 0.9921875, 0.9931640625, 0.994140625, 0.9951171875, 0.99609375, 0.9970703125, 0.998046875, 0.9990234375],
[0.0, 0.00048828125, 0.0009765625, 0.00146484375, 0.001953125, 0.00244140625, 0.0029296875, 0.00341796875, 0.00390625, 0.00439453125, 0.0048828125, 0.00537109375, 0.005859375, 0.00634765625, 0.0068359375, 0.00732421875, 0.0078125, 0.00830078125, 0.0087890625, 0.00927734375, 0.009765625, 0.01025390625, 0.0107421875, 0.01123046875, 0.01171875, 0.01220703125, 0.0126953125, 0.01318359375, 0.013671875, 0.01416015625, 0.0146484375, 0.01513671875, 0.015625, 0.01611328125, 0.0166015625, 0.01708984375, 0.017578125, 0.01806640625, 0.0185546875, 0.01904296875, 0.01953125, 0.02001953125, 0.0205078125, 0.02099609375, 0.021484375, 0.02197265625, 0.0224609375, 0.02294921875, 0.0234375, 0.02392578125, 0.0244140625, 0.02490234375, 0.025390625, 0.02587890625, 0.0263671875, 0.02685546875, 0.02734375, 0.02783203125, 0.0283203125, 0.02880859375, 0.029296875, 0.02978515625, 0.0302734375, 0.03076171875, 0.03125, 0.03173828125, 0.0322265625, 0.03271484375, 0.033203125, 0.03369140625, 0.0341796875, 0.03466796875, 0.03515625, 0.03564453125, 0.0361328125, 0.03662109375, 0.037109375, 0.03759765625, 0.0380859375, 0.03857421875, 0.0390625, 0.03955078125, 0.0400390625, 0.04052734375, 0.041015625, 0.04150390625, 0.0419921875, 0.04248046875, 0.04296875, 0.04345703125, 0.0439453125, 0.04443359375, 0.044921875, 0.04541015625, 0.0458984375, 0.04638671875, 0.046875, 0.04736328125, 0.0478515625, 0.04833984375, 0.048828125, 0.04931640625, 0.0498046875, 0.05029296875, 0.05078125, 0.05126953125, 0.0517578125, 0.05224609375, 0.052734375, 0.05322265625, 0.0537109375, 0.05419921875, 0.0546875, 0.05517578125, 0.0556640625, 0.05615234375, 0.056640625, 0.05712890625, 0.0576171875, 0.05810546875, 0.05859375, 0.05908203125, 0.0595703125, 0.06005859375, 0.060546875, 0.06103515625, 0.0615234375, 0.06201171875, 0.0625, 0.06298828125, 0.0634765625, 0.06396484375, 0.064453125, 0.06494140625, 0.0654296875, 0.06591796875, 0.06640625, 0.06689453125, 0.0673828125, 0.06787109375, 0.068359375, 0.06884765625, 0.0693359375, 
0.06982421875, 0.0703125, 0.07080078125, 0.0712890625, 0.07177734375, 0.072265625, 0.07275390625, 0.0732421875, 0.07373046875, 0.07421875, 0.07470703125, 0.0751953125, 0.07568359375, 0.076171875, 0.07666015625, 0.0771484375, 0.07763671875, 0.078125, 0.07861328125, 0.0791015625, 0.07958984375, 0.080078125, 0.08056640625, 0.0810546875, 0.08154296875, 0.08203125, 0.08251953125, 0.0830078125, 0.08349609375, 0.083984375, 0.08447265625, 0.0849609375, 0.08544921875, 0.0859375, 0.08642578125, 0.0869140625, 0.08740234375, 0.087890625, 0.08837890625, 0.0888671875, 0.08935546875, 0.08984375, 0.09033203125, 0.0908203125, 0.09130859375, 0.091796875, 0.09228515625, 0.0927734375, 0.09326171875, 0.09375, 0.09423828125, 0.0947265625, 0.09521484375, 0.095703125, 0.09619140625, 0.0966796875, 0.09716796875, 0.09765625, 0.09814453125, 0.0986328125, 0.09912109375, 0.099609375, 0.10009765625, 0.1005859375, 0.10107421875, 0.1015625, 0.10205078125, 0.1025390625, 0.10302734375, 0.103515625, 0.10400390625, 0.1044921875, 0.10498046875, 0.10546875, 0.10595703125, 0.1064453125, 0.10693359375, 0.107421875, 0.10791015625, 0.1083984375, 0.10888671875, 0.109375, 0.10986328125, 0.1103515625, 0.11083984375, 0.111328125, 0.11181640625, 0.1123046875, 0.11279296875, 0.11328125, 0.11376953125, 0.1142578125, 0.11474609375, 0.115234375, 0.11572265625, 0.1162109375, 0.11669921875, 0.1171875, 0.11767578125, 0.1181640625, 0.11865234375, 0.119140625, 0.11962890625, 0.1201171875, 0.12060546875, 0.12109375, 0.12158203125, 0.1220703125, 0.12255859375, 0.123046875, 0.12353515625, 0.1240234375, 0.12451171875, 0.125, 0.12548828125, 0.1259765625, 0.12646484375, 0.126953125, 0.12744140625, 0.1279296875, 0.12841796875, 0.12890625, 0.12939453125, 0.1298828125, 0.13037109375, 0.130859375, 0.13134765625, 0.1318359375, 0.13232421875, 0.1328125, 0.13330078125, 0.1337890625, 0.13427734375, 0.134765625, 0.13525390625, 0.1357421875, 0.13623046875, 0.13671875, 0.13720703125, 0.1376953125, 0.13818359375, 0.138671875, 
0.13916015625, 0.1396484375, 0.14013671875, 0.140625, 0.14111328125, 0.1416015625, 0.14208984375, 0.142578125, 0.14306640625, 0.1435546875, 0.14404296875, 0.14453125, 0.14501953125, 0.1455078125, 0.14599609375, 0.146484375, 0.14697265625, 0.1474609375, 0.14794921875, 0.1484375, 0.14892578125, 0.1494140625, 0.14990234375, 0.150390625, 0.15087890625, 0.1513671875, 0.15185546875, 0.15234375, 0.15283203125, 0.1533203125, 0.15380859375, 0.154296875, 0.15478515625, 0.1552734375, 0.15576171875, 0.15625, 0.15673828125, 0.1572265625, 0.15771484375, 0.158203125, 0.15869140625, 0.1591796875, 0.15966796875, 0.16015625, 0.16064453125, 0.1611328125, 0.16162109375, 0.162109375, 0.16259765625, 0.1630859375, 0.16357421875, 0.1640625, 0.16455078125, 0.1650390625, 0.16552734375, 0.166015625, 0.16650390625, 0.1669921875, 0.16748046875, 0.16796875, 0.16845703125, 0.1689453125, 0.16943359375, 0.169921875, 0.17041015625, 0.1708984375, 0.17138671875, 0.171875, 0.17236328125, 0.1728515625, 0.17333984375, 0.173828125, 0.17431640625, 0.1748046875, 0.17529296875, 0.17578125, 0.17626953125, 0.1767578125, 0.17724609375, 0.177734375, 0.17822265625, 0.1787109375, 0.17919921875, 0.1796875, 0.18017578125, 0.1806640625, 0.18115234375, 0.181640625, 0.18212890625, 0.1826171875, 0.18310546875, 0.18359375, 0.18408203125, 0.1845703125, 0.18505859375, 0.185546875, 0.18603515625, 0.1865234375, 0.18701171875, 0.1875, 0.18798828125, 0.1884765625, 0.18896484375, 0.189453125, 0.18994140625, 0.1904296875, 0.19091796875, 0.19140625, 0.19189453125, 0.1923828125, 0.19287109375, 0.193359375, 0.19384765625, 0.1943359375, 0.19482421875, 0.1953125, 0.19580078125, 0.1962890625, 0.19677734375, 0.197265625, 0.19775390625, 0.1982421875, 0.19873046875, 0.19921875, 0.19970703125, 0.2001953125, 0.20068359375, 0.201171875, 0.20166015625, 0.2021484375, 0.20263671875, 0.203125, 0.20361328125, 0.2041015625, 0.20458984375, 0.205078125, 0.20556640625, 0.2060546875, 0.20654296875, 0.20703125, 0.20751953125, 0.2080078125, 
0.20849609375, 0.208984375, 0.20947265625, 0.2099609375, 0.21044921875, 0.2109375, 0.21142578125, 0.2119140625, 0.21240234375, 0.212890625, 0.21337890625, 0.2138671875, 0.21435546875, 0.21484375, 0.21533203125, 0.2158203125, 0.21630859375, 0.216796875, 0.21728515625, 0.2177734375, 0.21826171875, 0.21875, 0.21923828125, 0.2197265625, 0.22021484375, 0.220703125, 0.22119140625, 0.2216796875, 0.22216796875, 0.22265625, 0.22314453125, 0.2236328125, 0.22412109375, 0.224609375, 0.22509765625, 0.2255859375, 0.22607421875, 0.2265625, 0.22705078125, 0.2275390625, 0.22802734375, 0.228515625, 0.22900390625, 0.2294921875, 0.22998046875, 0.23046875, 0.23095703125, 0.2314453125, 0.23193359375, 0.232421875, 0.23291015625, 0.2333984375, 0.23388671875, 0.234375, 0.23486328125, 0.2353515625, 0.23583984375, 0.236328125, 0.23681640625, 0.2373046875, 0.23779296875, 0.23828125, 0.23876953125, 0.2392578125, 0.23974609375, 0.240234375, 0.24072265625, 0.2412109375, 0.24169921875, 0.2421875, 0.24267578125, 0.2431640625, 0.24365234375, 0.244140625, 0.24462890625, 0.2451171875, 0.24560546875, 0.24609375, 0.24658203125, 0.2470703125, 0.24755859375, 0.248046875, 0.24853515625, 0.2490234375, 0.24951171875, 0.25, 0.25048828125, 0.2509765625, 0.25146484375, 0.251953125, 0.25244140625, 0.2529296875, 0.25341796875, 0.25390625, 0.25439453125, 0.2548828125, 0.25537109375, 0.255859375, 0.25634765625, 0.2568359375, 0.25732421875, 0.2578125, 0.25830078125, 0.2587890625, 0.25927734375, 0.259765625, 0.26025390625, 0.2607421875, 0.26123046875, 0.26171875, 0.26220703125, 0.2626953125, 0.26318359375, 0.263671875, 0.26416015625, 0.2646484375, 0.26513671875, 0.265625, 0.26611328125, 0.2666015625, 0.26708984375, 0.267578125, 0.26806640625, 0.2685546875, 0.26904296875, 0.26953125, 0.27001953125, 0.2705078125, 0.27099609375, 0.271484375, 0.27197265625, 0.2724609375, 0.27294921875, 0.2734375, 0.27392578125, 0.2744140625, 0.27490234375, 0.275390625, 0.27587890625, 0.2763671875, 0.27685546875, 0.27734375, 
0.27783203125, 0.2783203125, 0.27880859375, 0.279296875, 0.27978515625, 0.2802734375, 0.28076171875, 0.28125, 0.28173828125, 0.2822265625, 0.28271484375, 0.283203125, 0.28369140625, 0.2841796875, 0.28466796875, 0.28515625, 0.28564453125, 0.2861328125, 0.28662109375, 0.287109375, 0.28759765625, 0.2880859375, 0.28857421875, 0.2890625, 0.28955078125, 0.2900390625, 0.29052734375, 0.291015625, 0.29150390625, 0.2919921875, 0.29248046875, 0.29296875, 0.29345703125, 0.2939453125, 0.29443359375, 0.294921875, 0.29541015625, 0.2958984375, 0.29638671875, 0.296875, 0.29736328125, 0.2978515625, 0.29833984375, 0.298828125, 0.29931640625, 0.2998046875, 0.30029296875, 0.30078125, 0.30126953125, 0.3017578125, 0.30224609375, 0.302734375, 0.30322265625, 0.3037109375, 0.30419921875, 0.3046875, 0.30517578125, 0.3056640625, 0.30615234375, 0.306640625, 0.30712890625, 0.3076171875, 0.30810546875, 0.30859375, 0.30908203125, 0.3095703125, 0.31005859375, 0.310546875, 0.31103515625, 0.3115234375, 0.31201171875, 0.3125, 0.31298828125, 0.3134765625, 0.31396484375, 0.314453125, 0.31494140625, 0.3154296875, 0.31591796875, 0.31640625, 0.31689453125, 0.3173828125, 0.31787109375, 0.318359375, 0.31884765625, 0.3193359375, 0.31982421875, 0.3203125, 0.32080078125, 0.3212890625, 0.32177734375, 0.322265625, 0.32275390625, 0.3232421875, 0.32373046875, 0.32421875, 0.32470703125, 0.3251953125, 0.32568359375, 0.326171875, 0.32666015625, 0.3271484375, 0.32763671875, 0.328125, 0.32861328125, 0.3291015625, 0.32958984375, 0.330078125, 0.33056640625, 0.3310546875, 0.33154296875, 0.33203125, 0.33251953125, 0.3330078125, 0.33349609375, 0.333984375, 0.33447265625, 0.3349609375, 0.33544921875, 0.3359375, 0.33642578125, 0.3369140625, 0.33740234375, 0.337890625, 0.33837890625, 0.3388671875, 0.33935546875, 0.33984375, 0.34033203125, 0.3408203125, 0.34130859375, 0.341796875, 0.34228515625, 0.3427734375, 0.34326171875, 0.34375, 0.34423828125, 0.3447265625, 0.34521484375, 0.345703125, 0.34619140625, 0.3466796875, 
0.34716796875, 0.34765625, 0.34814453125, 0.3486328125, 0.34912109375, 0.349609375, 0.35009765625, 0.3505859375, 0.35107421875, 0.3515625, 0.35205078125, 0.3525390625, 0.35302734375, 0.353515625, 0.35400390625, 0.3544921875, 0.35498046875, 0.35546875, 0.35595703125, 0.3564453125, 0.35693359375, 0.357421875, 0.35791015625, 0.3583984375, 0.35888671875, 0.359375, 0.35986328125, 0.3603515625, 0.36083984375, 0.361328125, 0.36181640625, 0.3623046875, 0.36279296875, 0.36328125, 0.36376953125, 0.3642578125, 0.36474609375, 0.365234375, 0.36572265625, 0.3662109375, 0.36669921875, 0.3671875, 0.36767578125, 0.3681640625, 0.36865234375, 0.369140625, 0.36962890625, 0.3701171875, 0.37060546875, 0.37109375, 0.37158203125, 0.3720703125, 0.37255859375, 0.373046875, 0.37353515625, 0.3740234375, 0.37451171875, 0.375, 0.37548828125, 0.3759765625, 0.37646484375, 0.376953125, 0.37744140625, 0.3779296875, 0.37841796875, 0.37890625, 0.37939453125, 0.3798828125, 0.38037109375, 0.380859375, 0.38134765625, 0.3818359375, 0.38232421875, 0.3828125, 0.38330078125, 0.3837890625, 0.38427734375, 0.384765625, 0.38525390625, 0.3857421875, 0.38623046875, 0.38671875, 0.38720703125, 0.3876953125, 0.38818359375, 0.388671875, 0.38916015625, 0.3896484375, 0.39013671875, 0.390625, 0.39111328125, 0.3916015625, 0.39208984375, 0.392578125, 0.39306640625, 0.3935546875, 0.39404296875, 0.39453125, 0.39501953125, 0.3955078125, 0.39599609375, 0.396484375, 0.39697265625, 0.3974609375, 0.39794921875, 0.3984375, 0.39892578125, 0.3994140625, 0.39990234375, 0.400390625, 0.40087890625, 0.4013671875, 0.40185546875, 0.40234375, 0.40283203125, 0.4033203125, 0.40380859375, 0.404296875, 0.40478515625, 0.4052734375, 0.40576171875, 0.40625, 0.40673828125, 0.4072265625, 0.40771484375, 0.408203125, 0.40869140625, 0.4091796875, 0.40966796875, 0.41015625, 0.41064453125, 0.4111328125, 0.41162109375, 0.412109375, 0.41259765625, 0.4130859375, 0.41357421875, 0.4140625, 0.41455078125, 0.4150390625, 0.41552734375, 0.416015625, 
0.41650390625, 0.4169921875, 0.41748046875, 0.41796875, 0.41845703125, 0.4189453125, 0.41943359375, 0.419921875, 0.42041015625, 0.4208984375, 0.42138671875, 0.421875, 0.42236328125, 0.4228515625, 0.42333984375, 0.423828125, 0.42431640625, 0.4248046875, 0.42529296875, 0.42578125, 0.42626953125, 0.4267578125, 0.42724609375, 0.427734375, 0.42822265625, 0.4287109375, 0.42919921875, 0.4296875, 0.43017578125, 0.4306640625, 0.43115234375, 0.431640625, 0.43212890625, 0.4326171875, 0.43310546875, 0.43359375, 0.43408203125, 0.4345703125, 0.43505859375, 0.435546875, 0.43603515625, 0.4365234375, 0.43701171875, 0.4375, 0.43798828125, 0.4384765625, 0.43896484375, 0.439453125, 0.43994140625, 0.4404296875, 0.44091796875, 0.44140625, 0.44189453125, 0.4423828125, 0.44287109375, 0.443359375, 0.44384765625, 0.4443359375, 0.44482421875, 0.4453125, 0.44580078125, 0.4462890625, 0.44677734375, 0.447265625, 0.44775390625, 0.4482421875, 0.44873046875, 0.44921875, 0.44970703125, 0.4501953125, 0.45068359375, 0.451171875, 0.45166015625, 0.4521484375, 0.45263671875, 0.453125, 0.45361328125, 0.4541015625, 0.45458984375, 0.455078125, 0.45556640625, 0.4560546875, 0.45654296875, 0.45703125, 0.45751953125, 0.4580078125, 0.45849609375, 0.458984375, 0.45947265625, 0.4599609375, 0.46044921875, 0.4609375, 0.46142578125, 0.4619140625, 0.46240234375, 0.462890625, 0.46337890625, 0.4638671875, 0.46435546875, 0.46484375, 0.46533203125, 0.4658203125, 0.46630859375, 0.466796875, 0.46728515625, 0.4677734375, 0.46826171875, 0.46875, 0.46923828125, 0.4697265625, 0.47021484375, 0.470703125, 0.47119140625, 0.4716796875, 0.47216796875, 0.47265625, 0.47314453125, 0.4736328125, 0.47412109375, 0.474609375, 0.47509765625, 0.4755859375, 0.47607421875, 0.4765625, 0.47705078125, 0.4775390625, 0.47802734375, 0.478515625, 0.47900390625, 0.4794921875, 0.47998046875, 0.48046875, 0.48095703125, 0.4814453125, 0.48193359375, 0.482421875, 0.48291015625, 0.4833984375, 0.48388671875, 0.484375, 0.48486328125, 0.4853515625, 
0.48583984375, 0.486328125, 0.48681640625, 0.4873046875, 0.48779296875, 0.48828125, 0.48876953125, 0.4892578125, 0.48974609375, 0.490234375, 0.49072265625, 0.4912109375, 0.49169921875, 0.4921875, 0.49267578125, 0.4931640625, 0.49365234375, 0.494140625, 0.49462890625, 0.4951171875, 0.49560546875, 0.49609375, 0.49658203125, 0.4970703125, 0.49755859375, 0.498046875, 0.49853515625, 0.4990234375, 0.49951171875, 0.5, 0.50048828125, 0.5009765625, 0.50146484375, 0.501953125, 0.50244140625, 0.5029296875, 0.50341796875, 0.50390625, 0.50439453125, 0.5048828125, 0.50537109375, 0.505859375, 0.50634765625, 0.5068359375, 0.50732421875, 0.5078125, 0.50830078125, 0.5087890625, 0.50927734375, 0.509765625, 0.51025390625, 0.5107421875, 0.51123046875, 0.51171875, 0.51220703125, 0.5126953125, 0.51318359375, 0.513671875, 0.51416015625, 0.5146484375, 0.51513671875, 0.515625, 0.51611328125, 0.5166015625, 0.51708984375, 0.517578125, 0.51806640625, 0.5185546875, 0.51904296875, 0.51953125, 0.52001953125, 0.5205078125, 0.52099609375, 0.521484375, 0.52197265625, 0.5224609375, 0.52294921875, 0.5234375, 0.52392578125, 0.5244140625, 0.52490234375, 0.525390625, 0.52587890625, 0.5263671875, 0.52685546875, 0.52734375, 0.52783203125, 0.5283203125, 0.52880859375, 0.529296875, 0.52978515625, 0.5302734375, 0.53076171875, 0.53125, 0.53173828125, 0.5322265625, 0.53271484375, 0.533203125, 0.53369140625, 0.5341796875, 0.53466796875, 0.53515625, 0.53564453125, 0.5361328125, 0.53662109375, 0.537109375, 0.53759765625, 0.5380859375, 0.53857421875, 0.5390625, 0.53955078125, 0.5400390625, 0.54052734375, 0.541015625, 0.54150390625, 0.5419921875, 0.54248046875, 0.54296875, 0.54345703125, 0.5439453125, 0.54443359375, 0.544921875, 0.54541015625, 0.5458984375, 0.54638671875, 0.546875, 0.54736328125, 0.5478515625, 0.54833984375, 0.548828125, 0.54931640625, 0.5498046875, 0.55029296875, 0.55078125, 0.55126953125, 0.5517578125, 0.55224609375, 0.552734375, 0.55322265625, 0.5537109375, 0.55419921875, 0.5546875, 
0.55517578125, 0.5556640625, 0.55615234375, 0.556640625, 0.55712890625, 0.5576171875, 0.55810546875, 0.55859375, 0.55908203125, 0.5595703125, 0.56005859375, 0.560546875, 0.56103515625, 0.5615234375, 0.56201171875, 0.5625, 0.56298828125, 0.5634765625, 0.56396484375, 0.564453125, 0.56494140625, 0.5654296875, 0.56591796875, 0.56640625, 0.56689453125, 0.5673828125, 0.56787109375, 0.568359375, 0.56884765625, 0.5693359375, 0.56982421875, 0.5703125, 0.57080078125, 0.5712890625, 0.57177734375, 0.572265625, 0.57275390625, 0.5732421875, 0.57373046875, 0.57421875, 0.57470703125, 0.5751953125, 0.57568359375, 0.576171875, 0.57666015625, 0.5771484375, 0.57763671875, 0.578125, 0.57861328125, 0.5791015625, 0.57958984375, 0.580078125, 0.58056640625, 0.5810546875, 0.58154296875, 0.58203125, 0.58251953125, 0.5830078125, 0.58349609375, 0.583984375, 0.58447265625, 0.5849609375, 0.58544921875, 0.5859375, 0.58642578125, 0.5869140625, 0.58740234375, 0.587890625, 0.58837890625, 0.5888671875, 0.58935546875, 0.58984375, 0.59033203125, 0.5908203125, 0.59130859375, 0.591796875, 0.59228515625, 0.5927734375, 0.59326171875, 0.59375, 0.59423828125, 0.5947265625, 0.59521484375, 0.595703125, 0.59619140625, 0.5966796875, 0.59716796875, 0.59765625, 0.59814453125, 0.5986328125, 0.59912109375, 0.599609375, 0.60009765625, 0.6005859375, 0.60107421875, 0.6015625, 0.60205078125, 0.6025390625, 0.60302734375, 0.603515625, 0.60400390625, 0.6044921875, 0.60498046875, 0.60546875, 0.60595703125, 0.6064453125, 0.60693359375, 0.607421875, 0.60791015625, 0.6083984375, 0.60888671875, 0.609375, 0.60986328125, 0.6103515625, 0.61083984375, 0.611328125, 0.61181640625, 0.6123046875, 0.61279296875, 0.61328125, 0.61376953125, 0.6142578125, 0.61474609375, 0.615234375, 0.61572265625, 0.6162109375, 0.61669921875, 0.6171875, 0.61767578125, 0.6181640625, 0.61865234375, 0.619140625, 0.61962890625, 0.6201171875, 0.62060546875, 0.62109375, 0.62158203125, 0.6220703125, 0.62255859375, 0.623046875, 0.62353515625, 0.6240234375, 
0.62451171875, 0.625, 0.62548828125, 0.6259765625, 0.62646484375, 0.626953125, 0.62744140625, 0.6279296875, 0.62841796875, 0.62890625, 0.62939453125, 0.6298828125, 0.63037109375, 0.630859375, 0.63134765625, 0.6318359375, 0.63232421875, 0.6328125, 0.63330078125, 0.6337890625, 0.63427734375, 0.634765625, 0.63525390625, 0.6357421875, 0.63623046875, 0.63671875, 0.63720703125, 0.6376953125, 0.63818359375, 0.638671875, 0.63916015625, 0.6396484375, 0.64013671875, 0.640625, 0.64111328125, 0.6416015625, 0.64208984375, 0.642578125, 0.64306640625, 0.6435546875, 0.64404296875, 0.64453125, 0.64501953125, 0.6455078125, 0.64599609375, 0.646484375, 0.64697265625, 0.6474609375, 0.64794921875, 0.6484375, 0.64892578125, 0.6494140625, 0.64990234375, 0.650390625, 0.65087890625, 0.6513671875, 0.65185546875, 0.65234375, 0.65283203125, 0.6533203125, 0.65380859375, 0.654296875, 0.65478515625, 0.6552734375, 0.65576171875, 0.65625, 0.65673828125, 0.6572265625, 0.65771484375, 0.658203125, 0.65869140625, 0.6591796875, 0.65966796875, 0.66015625, 0.66064453125, 0.6611328125, 0.66162109375, 0.662109375, 0.66259765625, 0.6630859375, 0.66357421875, 0.6640625, 0.66455078125, 0.6650390625, 0.66552734375, 0.666015625, 0.66650390625, 0.6669921875, 0.66748046875, 0.66796875, 0.66845703125, 0.6689453125, 0.66943359375, 0.669921875, 0.67041015625, 0.6708984375, 0.67138671875, 0.671875, 0.67236328125, 0.6728515625, 0.67333984375, 0.673828125, 0.67431640625, 0.6748046875, 0.67529296875, 0.67578125, 0.67626953125, 0.6767578125, 0.67724609375, 0.677734375, 0.67822265625, 0.6787109375, 0.67919921875, 0.6796875, 0.68017578125, 0.6806640625, 0.68115234375, 0.681640625, 0.68212890625, 0.6826171875, 0.68310546875, 0.68359375, 0.68408203125, 0.6845703125, 0.68505859375, 0.685546875, 0.68603515625, 0.6865234375, 0.68701171875, 0.6875, 0.68798828125, 0.6884765625, 0.68896484375, 0.689453125, 0.68994140625, 0.6904296875, 0.69091796875, 0.69140625, 0.69189453125, 0.6923828125, 0.69287109375, 0.693359375, 0.69384765625, 
0.6943359375, 0.69482421875, 0.6953125, 0.69580078125, 0.6962890625, 0.69677734375, 0.697265625, 0.69775390625, 0.6982421875, 0.69873046875, 0.69921875, 0.69970703125, 0.7001953125, 0.70068359375, 0.701171875, 0.70166015625, 0.7021484375, 0.70263671875, 0.703125, 0.70361328125, 0.7041015625, 0.70458984375, 0.705078125, 0.70556640625, 0.7060546875, 0.70654296875, 0.70703125, 0.70751953125, 0.7080078125, 0.70849609375, 0.708984375, 0.70947265625, 0.7099609375, 0.71044921875, 0.7109375, 0.71142578125, 0.7119140625, 0.71240234375, 0.712890625, 0.71337890625, 0.7138671875, 0.71435546875, 0.71484375, 0.71533203125, 0.7158203125, 0.71630859375, 0.716796875, 0.71728515625, 0.7177734375, 0.71826171875, 0.71875, 0.71923828125, 0.7197265625, 0.72021484375, 0.720703125, 0.72119140625, 0.7216796875, 0.72216796875, 0.72265625, 0.72314453125, 0.7236328125, 0.72412109375, 0.724609375, 0.72509765625, 0.7255859375, 0.72607421875, 0.7265625, 0.72705078125, 0.7275390625, 0.72802734375, 0.728515625, 0.72900390625, 0.7294921875, 0.72998046875, 0.73046875, 0.73095703125, 0.7314453125, 0.73193359375, 0.732421875, 0.73291015625, 0.7333984375, 0.73388671875, 0.734375, 0.73486328125, 0.7353515625, 0.73583984375, 0.736328125, 0.73681640625, 0.7373046875, 0.73779296875, 0.73828125, 0.73876953125, 0.7392578125, 0.73974609375, 0.740234375, 0.74072265625, 0.7412109375, 0.74169921875, 0.7421875, 0.74267578125, 0.7431640625, 0.74365234375, 0.744140625, 0.74462890625, 0.7451171875, 0.74560546875, 0.74609375, 0.74658203125, 0.7470703125, 0.74755859375, 0.748046875, 0.74853515625, 0.7490234375, 0.74951171875, 0.75, 0.75048828125, 0.7509765625, 0.75146484375, 0.751953125, 0.75244140625, 0.7529296875, 0.75341796875, 0.75390625, 0.75439453125, 0.7548828125, 0.75537109375, 0.755859375, 0.75634765625, 0.7568359375, 0.75732421875, 0.7578125, 0.75830078125, 0.7587890625, 0.75927734375, 0.759765625, 0.76025390625, 0.7607421875, 0.76123046875, 0.76171875, 0.76220703125, 0.7626953125, 0.76318359375, 
0.763671875, 0.76416015625, 0.7646484375, 0.76513671875, 0.765625, 0.76611328125, 0.7666015625, 0.76708984375, 0.767578125, 0.76806640625, 0.7685546875, 0.76904296875, 0.76953125, 0.77001953125, 0.7705078125, 0.77099609375, 0.771484375, 0.77197265625, 0.7724609375, 0.77294921875, 0.7734375, 0.77392578125, 0.7744140625, 0.77490234375, 0.775390625, 0.77587890625, 0.7763671875, 0.77685546875, 0.77734375, 0.77783203125, 0.7783203125, 0.77880859375, 0.779296875, 0.77978515625, 0.7802734375, 0.78076171875, 0.78125, 0.78173828125, 0.7822265625, 0.78271484375, 0.783203125, 0.78369140625, 0.7841796875, 0.78466796875, 0.78515625, 0.78564453125, 0.7861328125, 0.78662109375, 0.787109375, 0.78759765625, 0.7880859375, 0.78857421875, 0.7890625, 0.78955078125, 0.7900390625, 0.79052734375, 0.791015625, 0.79150390625, 0.7919921875, 0.79248046875, 0.79296875, 0.79345703125, 0.7939453125, 0.79443359375, 0.794921875, 0.79541015625, 0.7958984375, 0.79638671875, 0.796875, 0.79736328125, 0.7978515625, 0.79833984375, 0.798828125, 0.79931640625, 0.7998046875, 0.80029296875, 0.80078125, 0.80126953125, 0.8017578125, 0.80224609375, 0.802734375, 0.80322265625, 0.8037109375, 0.80419921875, 0.8046875, 0.80517578125, 0.8056640625, 0.80615234375, 0.806640625, 0.80712890625, 0.8076171875, 0.80810546875, 0.80859375, 0.80908203125, 0.8095703125, 0.81005859375, 0.810546875, 0.81103515625, 0.8115234375, 0.81201171875, 0.8125, 0.81298828125, 0.8134765625, 0.81396484375, 0.814453125, 0.81494140625, 0.8154296875, 0.81591796875, 0.81640625, 0.81689453125, 0.8173828125, 0.81787109375, 0.818359375, 0.81884765625, 0.8193359375, 0.81982421875, 0.8203125, 0.82080078125, 0.8212890625, 0.82177734375, 0.822265625, 0.82275390625, 0.8232421875, 0.82373046875, 0.82421875, 0.82470703125, 0.8251953125, 0.82568359375, 0.826171875, 0.82666015625, 0.8271484375, 0.82763671875, 0.828125, 0.82861328125, 0.8291015625, 0.82958984375, 0.830078125, 0.83056640625, 0.8310546875, 0.83154296875, 0.83203125, 0.83251953125, 
0.8330078125, 0.83349609375, 0.833984375, 0.83447265625, 0.8349609375, 0.83544921875, 0.8359375, 0.83642578125, 0.8369140625, 0.83740234375, 0.837890625, 0.83837890625, 0.8388671875, 0.83935546875, 0.83984375, 0.84033203125, 0.8408203125, 0.84130859375, 0.841796875, 0.84228515625, 0.8427734375, 0.84326171875, 0.84375, 0.84423828125, 0.8447265625, 0.84521484375, 0.845703125, 0.84619140625, 0.8466796875, 0.84716796875, 0.84765625, 0.84814453125, 0.8486328125, 0.84912109375, 0.849609375, 0.85009765625, 0.8505859375, 0.85107421875, 0.8515625, 0.85205078125, 0.8525390625, 0.85302734375, 0.853515625, 0.85400390625, 0.8544921875, 0.85498046875, 0.85546875, 0.85595703125, 0.8564453125, 0.85693359375, 0.857421875, 0.85791015625, 0.8583984375, 0.85888671875, 0.859375, 0.85986328125, 0.8603515625, 0.86083984375, 0.861328125, 0.86181640625, 0.8623046875, 0.86279296875, 0.86328125, 0.86376953125, 0.8642578125, 0.86474609375, 0.865234375, 0.86572265625, 0.8662109375, 0.86669921875, 0.8671875, 0.86767578125, 0.8681640625, 0.86865234375, 0.869140625, 0.86962890625, 0.8701171875, 0.87060546875, 0.87109375, 0.87158203125, 0.8720703125, 0.87255859375, 0.873046875, 0.87353515625, 0.8740234375, 0.87451171875, 0.875, 0.87548828125, 0.8759765625, 0.87646484375, 0.876953125, 0.87744140625, 0.8779296875, 0.87841796875, 0.87890625, 0.87939453125, 0.8798828125, 0.88037109375, 0.880859375, 0.88134765625, 0.8818359375, 0.88232421875, 0.8828125, 0.88330078125, 0.8837890625, 0.88427734375, 0.884765625, 0.88525390625, 0.8857421875, 0.88623046875, 0.88671875, 0.88720703125, 0.8876953125, 0.88818359375, 0.888671875, 0.88916015625, 0.8896484375, 0.89013671875, 0.890625, 0.89111328125, 0.8916015625, 0.89208984375, 0.892578125, 0.89306640625, 0.8935546875, 0.89404296875, 0.89453125, 0.89501953125, 0.8955078125, 0.89599609375, 0.896484375, 0.89697265625, 0.8974609375, 0.89794921875, 0.8984375, 0.89892578125, 0.8994140625, 0.89990234375, 0.900390625, 0.90087890625, 0.9013671875, 0.90185546875, 
0.90234375, 0.90283203125, 0.9033203125, 0.90380859375, 0.904296875, 0.90478515625, 0.9052734375, 0.90576171875, 0.90625, 0.90673828125, 0.9072265625, 0.90771484375, 0.908203125, 0.90869140625, 0.9091796875, 0.90966796875, 0.91015625, 0.91064453125, 0.9111328125, 0.91162109375, 0.912109375, 0.91259765625, 0.9130859375, 0.91357421875, 0.9140625, 0.91455078125, 0.9150390625, 0.91552734375, 0.916015625, 0.91650390625, 0.9169921875, 0.91748046875, 0.91796875, 0.91845703125, 0.9189453125, 0.91943359375, 0.919921875, 0.92041015625, 0.9208984375, 0.92138671875, 0.921875, 0.92236328125, 0.9228515625, 0.92333984375, 0.923828125, 0.92431640625, 0.9248046875, 0.92529296875, 0.92578125, 0.92626953125, 0.9267578125, 0.92724609375, 0.927734375, 0.92822265625, 0.9287109375, 0.92919921875, 0.9296875, 0.93017578125, 0.9306640625, 0.93115234375, 0.931640625, 0.93212890625, 0.9326171875, 0.93310546875, 0.93359375, 0.93408203125, 0.9345703125, 0.93505859375, 0.935546875, 0.93603515625, 0.9365234375, 0.93701171875, 0.9375, 0.93798828125, 0.9384765625, 0.93896484375, 0.939453125, 0.93994140625, 0.9404296875, 0.94091796875, 0.94140625, 0.94189453125, 0.9423828125, 0.94287109375, 0.943359375, 0.94384765625, 0.9443359375, 0.94482421875, 0.9453125, 0.94580078125, 0.9462890625, 0.94677734375, 0.947265625, 0.94775390625, 0.9482421875, 0.94873046875, 0.94921875, 0.94970703125, 0.9501953125, 0.95068359375, 0.951171875, 0.95166015625, 0.9521484375, 0.95263671875, 0.953125, 0.95361328125, 0.9541015625, 0.95458984375, 0.955078125, 0.95556640625, 0.9560546875, 0.95654296875, 0.95703125, 0.95751953125, 0.9580078125, 0.95849609375, 0.958984375, 0.95947265625, 0.9599609375, 0.96044921875, 0.9609375, 0.96142578125, 0.9619140625, 0.96240234375, 0.962890625, 0.96337890625, 0.9638671875, 0.96435546875, 0.96484375, 0.96533203125, 0.9658203125, 0.96630859375, 0.966796875, 0.96728515625, 0.9677734375, 0.96826171875, 0.96875, 0.96923828125, 0.9697265625, 0.97021484375, 0.970703125, 0.97119140625, 
0.9716796875, 0.97216796875, 0.97265625, 0.97314453125, 0.9736328125, 0.97412109375, 0.974609375, 0.97509765625, 0.9755859375, 0.97607421875, 0.9765625, 0.97705078125, 0.9775390625, 0.97802734375, 0.978515625, 0.97900390625, 0.9794921875, 0.97998046875, 0.98046875, 0.98095703125, 0.9814453125, 0.98193359375, 0.982421875, 0.98291015625, 0.9833984375, 0.98388671875, 0.984375, 0.98486328125, 0.9853515625, 0.98583984375, 0.986328125, 0.98681640625, 0.9873046875, 0.98779296875, 0.98828125, 0.98876953125, 0.9892578125, 0.98974609375, 0.990234375, 0.99072265625, 0.9912109375, 0.99169921875, 0.9921875, 0.99267578125, 0.9931640625, 0.99365234375, 0.994140625, 0.99462890625, 0.9951171875, 0.99560546875, 0.99609375, 0.99658203125, 0.9970703125, 0.99755859375, 0.998046875, 0.99853515625, 0.9990234375, 0.99951171875],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
]
def gen_test_data():
    """ Help function to generate test data

    Prints Python list literals (flat zeros, rectified sine bumps and linear
    ramps) at several sizes, suitable for pasting into the DATA constant
    above, then aborts the interpreter.
    """
    # Family 1: constant-zero data at each size.
    functions = [lambda x: 0.0]
    sizes = (128, 256, 512, 1024, 2048)

    for f in functions:
        for s in sizes:
            print "%s," % [f(i) for i in xrange(s)]
        print ""

    # Family 2: absolute sine values (slow oscillation over the index range).
    for s in sizes:
        print "%s," % [abs(math.sin(i / (10 * math.pi))) for i in xrange(s)]
    print ""

    # Family 3: linear ramp from 0.0 up to (s - 1) / s.
    for s in sizes:
        print "%s," % [i / float(s) for i in xrange(s)]
    print ""

    # This helper is only meant to (re)generate the DATA literal, never to
    # run as part of the test suite, so stop the process here.
    import sys
    sys.exit()
class SliderTestCase(test.GuiTestCase):
    """GUI test cases for the slider widgets: Slider, NumberSlider,
    UnitFloatSlider, VisualRangeSlider and BandwidthSlider.

    NOTE(review): these tests interleave widget calls with test.gui_loop()
    on purpose — the ordering matters (see test_numberslider_bug below).
    """

    # XRC frame used by GuiTestCase to host the widgets under test.
    frame_class = test.test_gui.xrcbutton_frame

    def test_slider(self):
        """Basic Slider value handling and non-linear scale construction."""
        slider = slidecomp.Slider(self.panel, size=(-1, 20))
        self.add_control(slider, flags=wx.EXPAND)
        test.gui_loop()

        # A fresh slider reports 0.0; values are always returned as floats.
        self.assertEqual(slider.GetValue(), 0.0)

        slider.SetValue(-3)
        self.assertEqual(slider.GetValue(), -3.0)
        test.gui_loop()

        # Non-numeric input is rejected and the previous value is kept.
        self.assertRaises(TypeError, slider.SetValue, "44")
        self.assertEqual(slider.GetValue(), -3.0)

        slider.SetValue(44)
        self.assertEqual(slider.GetValue(), 44.0)
        test.gui_loop()

        slider.SetValue(0.5)
        test.gui_loop()

        # Sliders with logarithmic and cubic scales must construct cleanly.
        slider = slidecomp.Slider(self.panel, value=0.5, min_val=0.01,
                                  max_val=1.0, scale="log", size=(-1, 20))
        self.add_control(slider, flags=wx.EXPAND)
        test.gui_loop()

        slider = slidecomp.Slider(self.panel, value=0.5, min_val=0.01,
                                  max_val=1.0, scale="cubic", size=(-1, 20))
        self.add_control(slider, flags=wx.EXPAND)
        test.gui_loop()

    def test_numberslider(self):
        """Smoke test: a NumberSlider can be created and assigned a value."""
        slider = slidecomp.NumberSlider(self.panel, size=(-1, 18), accuracy=2)
        self.add_control(slider, flags=wx.EXPAND)
        test.gui_loop()

        slider.SetValue(1000)

    def test_numberslider_bug(self):
        """ This test was deviced to investigate a bug in the NumblerSlider code
        The bug has been resolved
        """
        start_val = 0.5
        next_val = 0.1
        reset_val = 0.9

        slider = slidecomp.UnitFloatSlider(self.panel, size=(-1, 18), accuracy=2, unit='s')
        self.add_control(slider, flags=wx.EXPAND)

        va = model.FloatVA(start_val)
        con = widgets.VigilantAttributeConnector(va, slider, events=wx.EVT_SLIDER)

        # Test I: the value of the VA is assigned to the slider when the
        # VigilantAttributeConnector is created.
        #
        # It works correctly if the gui_loop is added, otherwise GetValue
        # is called before SetValue and things go wrong
        test.gui_loop(100)
        self.assertEqual(va.value, start_val)
        self.assertEqual(slider.GetValue(), start_val)
        # END Test I

        # Test II: We assign a value to the VA ourselves and expect it to be
        # reflected in the slider.
        # The value is correctly shown in the slider, but again GetValue
        # is called before SetValue and things go wrong, unless we add gui_loop
        va.value = next_val
        test.gui_loop(100)
        self.assertEqual(va.value, next_val)
        self.assertEqual(slider.GetValue(), next_val)
        # END Test II

        # Test III
        # We create a function that resets the VA's value when it's changed.
        # No worries about infinite recursion, since the VA won't change if
        # the same value is assigned twice.
        #
        # If we click on the slider, changing it's value, without gui_loop
        # present in the listening function, SetValue and GetValue get called
        # in the right order, with the right values.
        # However, even though the VA value gets set to 0.9, the SetValue method
        # does not get triggered, unless we add gui_loop and all is well.
        def p(v):
            # test.gui_loop()
            va.value = reset_val
        # Keep a reference so the VA's (presumably weak) subscription stays
        # alive for the duration of the test — TODO confirm.
        self.p = p
        va.subscribe(self.p, init=False)
        # test.gui_loop(5000)
        # va.value = reset_val
        # END Test III

    def test_visualrangeslider(self):
        """Exercise VisualRangeSlider: range/value setting, content rendering
        and the value <-> pixel mapping."""
        global DATA
        vrs = slidecomp.VisualRangeSlider(self.panel, size=(-1, 40))
        vrs.SetForegroundColour('#A020F0')
        self.add_control(vrs, flags=wx.EXPAND)
        # print "%s %s" % (vrs.__class__.__name__, vrs.GetSize())
        test.gui_loop()

        vrs.SetRange(0, 100)
        vrs.SetValue((25, 75))

        # Render every canned dataset once.
        for d in DATA:
            vrs.SetContent(d)
            test.gui_loop()

        vrs.SetContent([])
        vrs.SetValue((0, 0))  # will be set to 1 px wide
        vrs.SetContent([])
        vrs.SetValue((1, 1))
        self.assertEqual(vrs.GetValue(), (1, 1))

        # it should be fine to put a range outside of the value
        vrs.SetRange(0.2, 0.8)
        test.gui_loop()
        vrs.SetValue((0.4, 0.45))
        test.gui_loop()
        vrs.SetContent(DATA[7])
        test.gui_loop()
        vrs.SetContent(DATA[8])
        test.gui_loop()

        vrs.SetRange(0, 1)
        test.gui_loop()
        vrs.SetValue((0, 1))
        test.gui_loop()
        # Full range maps onto the full widget width in pixels.
        self.assertEqual(vrs.pixel_value, (0, vrs.GetSize()[0]))
        self.assertEqual(vrs.GetValue(), (0, 1))

        vrs.SetValue((0, 0.5))
        test.gui_loop()
        # Half the range maps onto half the widget width.
        self.assertAlmostEqual(vrs.pixel_value, (0, vrs.GetSize()[0] / 2.0))
        self.assertEqual(vrs.GetValue(), (0, 0.5))

    def test_bandwidthslider(self):
        """Exercise BandwidthSlider: same checks as VisualRangeSlider plus
        set_center_value() and disabling the widget."""
        global DATA
        bws = slidecomp.BandwidthSlider(self.panel, size=(-1, 40))
        bws.SetForegroundColour('#A020F0')
        self.add_control(bws, flags=wx.EXPAND)
        # print "%s %s" % (bws.__class__.__name__, bws.GetSize())
        test.gui_loop()
        # print bws.min_value, bws.max_value

        bws.SetRange(0, 100)
        bws.SetValue((25, 75))

        # Render every canned dataset once.
        for d in DATA:
            bws.SetContent(d)
            test.gui_loop()

        bws.SetContent([])
        bws.SetValue((0, 0))  # will be set to 1 px wide
        bws.SetContent([])
        bws.SetValue((1, 1))
        self.assertEqual(bws.GetValue(), (1, 1))

        # it should be fine to put a range outside of the value
        bws.SetRange(0.2, 0.8)
        test.gui_loop()
        bws.SetValue((0.5, 0.5))
        bws.set_center_value(0.2)
        test.gui_loop()
        bws.SetContent(DATA[7])
        test.gui_loop()
        bws.SetContent(DATA[8])
        test.gui_loop()

        bws.SetRange(0, 1)
        test.gui_loop()
        bws.SetValue((0, 1))
        test.gui_loop()
        # Full range maps onto the full widget width in pixels.
        self.assertEqual(bws.pixel_value, (0, bws.GetSize()[0]))
        self.assertEqual(bws.GetValue(), (0, 1))

        bws.SetValue((0, 0.5))
        test.gui_loop()
        self.assertAlmostEqual(bws.pixel_value, (0, bws.GetSize()[0] / 2.0))
        self.assertEqual(bws.GetValue(), (0, 0.5))

        self.app.test_frame.SetSize((500, 400))
        bws.Disable()
# Allow running this module directly as a test script.
if __name__ == "__main__":
    unittest.main()
| gpl-2.0 |
kutenai/django | tests/db_functions/test_datetime.py | 9 | 35755 | from __future__ import unicode_literals
from datetime import datetime
from unittest import skipIf
from django.conf import settings
from django.db import connection
from django.db.models import DateField, DateTimeField, IntegerField
from django.db.models.functions import (
Extract, ExtractDay, ExtractHour, ExtractMinute, ExtractMonth,
ExtractSecond, ExtractWeekDay, ExtractYear, Trunc, TruncDate, TruncDay,
TruncHour, TruncMinute, TruncMonth, TruncSecond, TruncYear,
)
from django.test import TestCase, override_settings
from django.utils import timezone
from .models import DTModel
# pytz is an optional dependency: leave the name bound to None when it is
# missing so tests can detect its absence (e.g. via skipIf).
try:
    import pytz
except ImportError:
    pytz = None
def microsecond_support(value):
    """Drop the microseconds from *value* when the current database
    connection cannot store sub-second precision; otherwise return it
    unchanged."""
    if connection.features.supports_microsecond_precision:
        return value
    return value.replace(microsecond=0)
def truncate_to(value, kind, tzinfo=None):
    """Truncate *value* (a date or datetime) to the given *kind*
    ('year', 'month', 'day', 'hour', 'minute' or 'second'), optionally
    interpreting it in timezone *tzinfo* first."""
    # Convert to the target timezone before truncation.
    if tzinfo is not None:
        value = value.astimezone(tzinfo)

    # date objects carry no time-of-day fields, so only zero them out on
    # datetimes; on plain dates 'day' truncation is a no-op.
    zero_time = {}
    if isinstance(value, datetime):
        zero_time = {'hour': 0, 'minute': 0, 'second': 0, 'microsecond': 0}

    if kind == 'second':
        value = value.replace(microsecond=0)
    elif kind == 'minute':
        value = value.replace(second=0, microsecond=0)
    elif kind == 'hour':
        value = value.replace(minute=0, second=0, microsecond=0)
    elif kind == 'day':
        value = value.replace(**zero_time)
    elif kind == 'month':
        value = value.replace(day=1, **zero_time)
    else:
        # Otherwise, truncate to year.
        value = value.replace(month=1, day=1, **zero_time)

    if tzinfo is not None:
        # If there was a daylight saving transition, then reset the timezone.
        value = timezone.make_aware(value.replace(tzinfo=None), tzinfo)
    return value
@override_settings(USE_TZ=False)
class DateFunctionTests(TestCase):
    def create_model(self, start_datetime, end_datetime):
        """Create and return a DTModel whose every temporal field is derived
        from the two datetime arguments (dates, times and duration included)."""
        return DTModel.objects.create(
            name=start_datetime.isoformat(),
            start_datetime=start_datetime, end_datetime=end_datetime,
            start_date=start_datetime.date(), end_date=end_datetime.date(),
            start_time=start_datetime.time(), end_time=end_datetime.time(),
            duration=(end_datetime - start_datetime),
        )
    def test_extract_year_exact_lookup(self):
        """
        Extract year uses a BETWEEN filter to compare the year to allow indexes
        to be used.
        """
        start_datetime = datetime(2015, 6, 15, 14, 10)
        end_datetime = datetime(2016, 6, 15, 14, 10)
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        # Two rows, one per year, with the datetimes swapped.
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)

        qs = DTModel.objects.filter(start_datetime__year__exact=2015)
        self.assertEqual(qs.count(), 1)
        # The generated SQL must use BETWEEN, never EXTRACT.
        query_string = str(qs.query).lower()
        self.assertEqual(query_string.count(' between '), 1)
        self.assertEqual(query_string.count('extract'), 0)

        # exact is implied and should be the same
        qs = DTModel.objects.filter(start_datetime__year=2015)
        self.assertEqual(qs.count(), 1)
        query_string = str(qs.query).lower()
        self.assertEqual(query_string.count(' between '), 1)
        self.assertEqual(query_string.count('extract'), 0)

        # date and datetime fields should behave the same
        qs = DTModel.objects.filter(start_date__year=2015)
        self.assertEqual(qs.count(), 1)
        query_string = str(qs.query).lower()
        self.assertEqual(query_string.count(' between '), 1)
        self.assertEqual(query_string.count('extract'), 0)
def test_extract_year_greaterthan_lookup(self):
start_datetime = datetime(2015, 6, 15, 14, 10)
end_datetime = datetime(2016, 6, 15, 14, 10)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime, is_dst=False)
end_datetime = timezone.make_aware(end_datetime, is_dst=False)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
qs = DTModel.objects.filter(start_datetime__year__gt=2015)
self.assertEqual(qs.count(), 1)
self.assertEqual(str(qs.query).lower().count('extract'), 0)
qs = DTModel.objects.filter(start_datetime__year__gte=2015)
self.assertEqual(qs.count(), 2)
self.assertEqual(str(qs.query).lower().count('extract'), 0)
def test_extract_year_lessthan_lookup(self):
start_datetime = datetime(2015, 6, 15, 14, 10)
end_datetime = datetime(2016, 6, 15, 14, 10)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime, is_dst=False)
end_datetime = timezone.make_aware(end_datetime, is_dst=False)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
qs = DTModel.objects.filter(start_datetime__year__lt=2016)
self.assertEqual(qs.count(), 1)
self.assertEqual(str(qs.query).count('extract'), 0)
qs = DTModel.objects.filter(start_datetime__year__lte=2016)
self.assertEqual(qs.count(), 2)
self.assertEqual(str(qs.query).count('extract'), 0)
    def test_extract_func(self):
        """The generic Extract function validates its arguments and extracts
        every supported component from datetime/date/time fields."""
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)

        # Invalid usage must raise with explicit messages.
        with self.assertRaisesMessage(ValueError, 'lookup_name must be provided'):
            Extract('start_datetime')

        msg = 'Extract input expression must be DateField, DateTimeField, or TimeField.'
        with self.assertRaisesMessage(ValueError, msg):
            list(DTModel.objects.annotate(extracted=Extract('name', 'hour')))

        with self.assertRaisesMessage(
                ValueError, "Cannot extract time component 'second' from DateField 'start_date'."):
            list(DTModel.objects.annotate(extracted=Extract('start_date', 'second')))

        # Each component extracted in SQL must equal the Python attribute.
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=Extract('start_datetime', 'year')).order_by('start_datetime'),
            [(start_datetime, start_datetime.year), (end_datetime, end_datetime.year)],
            lambda m: (m.start_datetime, m.extracted)
        )
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=Extract('start_datetime', 'month')).order_by('start_datetime'),
            [(start_datetime, start_datetime.month), (end_datetime, end_datetime.month)],
            lambda m: (m.start_datetime, m.extracted)
        )
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=Extract('start_datetime', 'day')).order_by('start_datetime'),
            [(start_datetime, start_datetime.day), (end_datetime, end_datetime.day)],
            lambda m: (m.start_datetime, m.extracted)
        )
        # week_day: isoweekday() is Mon=1..Sun=7, so (iso % 7) + 1 gives
        # Sun=1..Sat=7, the convention used by the week_day lookup.
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=Extract('start_datetime', 'week_day')).order_by('start_datetime'),
            [
                (start_datetime, (start_datetime.isoweekday() % 7) + 1),
                (end_datetime, (end_datetime.isoweekday() % 7) + 1)
            ],
            lambda m: (m.start_datetime, m.extracted)
        )
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=Extract('start_datetime', 'hour')).order_by('start_datetime'),
            [(start_datetime, start_datetime.hour), (end_datetime, end_datetime.hour)],
            lambda m: (m.start_datetime, m.extracted)
        )
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=Extract('start_datetime', 'minute')).order_by('start_datetime'),
            [(start_datetime, start_datetime.minute), (end_datetime, end_datetime.minute)],
            lambda m: (m.start_datetime, m.extracted)
        )
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=Extract('start_datetime', 'second')).order_by('start_datetime'),
            [(start_datetime, start_datetime.second), (end_datetime, end_datetime.second)],
            lambda m: (m.start_datetime, m.extracted)
        )

        # Extract can also appear on the right-hand side of a filter.
        self.assertEqual(DTModel.objects.filter(start_datetime__year=Extract('start_datetime', 'year')).count(), 2)
        self.assertEqual(DTModel.objects.filter(start_datetime__hour=Extract('start_datetime', 'hour')).count(), 2)
        self.assertEqual(DTModel.objects.filter(start_date__month=Extract('start_date', 'month')).count(), 2)
        self.assertEqual(DTModel.objects.filter(start_time__hour=Extract('start_time', 'hour')).count(), 2)
    def test_extract_year_func(self):
        """ExtractYear works on DateTimeField and DateField, and in filters."""
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=ExtractYear('start_datetime')).order_by('start_datetime'),
            [(start_datetime, start_datetime.year), (end_datetime, end_datetime.year)],
            lambda m: (m.start_datetime, m.extracted)
        )
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=ExtractYear('start_date')).order_by('start_datetime'),
            [(start_datetime, start_datetime.year), (end_datetime, end_datetime.year)],
            lambda m: (m.start_datetime, m.extracted)
        )
        # Both rows match when comparing the column against its own extract.
        self.assertEqual(DTModel.objects.filter(start_datetime__year=ExtractYear('start_datetime')).count(), 2)
    def test_extract_month_func(self):
        """ExtractMonth works on DateTimeField and DateField, and in filters."""
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=ExtractMonth('start_datetime')).order_by('start_datetime'),
            [(start_datetime, start_datetime.month), (end_datetime, end_datetime.month)],
            lambda m: (m.start_datetime, m.extracted)
        )
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=ExtractMonth('start_date')).order_by('start_datetime'),
            [(start_datetime, start_datetime.month), (end_datetime, end_datetime.month)],
            lambda m: (m.start_datetime, m.extracted)
        )
        # Both rows match when comparing the column against its own extract.
        self.assertEqual(DTModel.objects.filter(start_datetime__month=ExtractMonth('start_datetime')).count(), 2)
    def test_extract_day_func(self):
        """ExtractDay works on DateTimeField and DateField, and in filters."""
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=ExtractDay('start_datetime')).order_by('start_datetime'),
            [(start_datetime, start_datetime.day), (end_datetime, end_datetime.day)],
            lambda m: (m.start_datetime, m.extracted)
        )
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=ExtractDay('start_date')).order_by('start_datetime'),
            [(start_datetime, start_datetime.day), (end_datetime, end_datetime.day)],
            lambda m: (m.start_datetime, m.extracted)
        )
        # Both rows match when comparing the column against its own extract.
        self.assertEqual(DTModel.objects.filter(start_datetime__day=ExtractDay('start_datetime')).count(), 2)
    def test_extract_weekday_func(self):
        """ExtractWeekDay works on DateTimeField and DateField, and in filters."""
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)
        # isoweekday() is Mon=1..Sun=7; (iso % 7) + 1 converts that to the
        # week_day convention of Sun=1..Sat=7.
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=ExtractWeekDay('start_datetime')).order_by('start_datetime'),
            [
                (start_datetime, (start_datetime.isoweekday() % 7) + 1),
                (end_datetime, (end_datetime.isoweekday() % 7) + 1),
            ],
            lambda m: (m.start_datetime, m.extracted)
        )
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=ExtractWeekDay('start_date')).order_by('start_datetime'),
            [
                (start_datetime, (start_datetime.isoweekday() % 7) + 1),
                (end_datetime, (end_datetime.isoweekday() % 7) + 1),
            ],
            lambda m: (m.start_datetime, m.extracted)
        )
        # Both rows match when comparing the column against its own extract.
        self.assertEqual(DTModel.objects.filter(start_datetime__week_day=ExtractWeekDay('start_datetime')).count(), 2)
    def test_extract_hour_func(self):
        """ExtractHour works on DateTimeField and TimeField, and in filters."""
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=ExtractHour('start_datetime')).order_by('start_datetime'),
            [(start_datetime, start_datetime.hour), (end_datetime, end_datetime.hour)],
            lambda m: (m.start_datetime, m.extracted)
        )
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=ExtractHour('start_time')).order_by('start_datetime'),
            [(start_datetime, start_datetime.hour), (end_datetime, end_datetime.hour)],
            lambda m: (m.start_datetime, m.extracted)
        )
        # Both rows match when comparing the column against its own extract.
        self.assertEqual(DTModel.objects.filter(start_datetime__hour=ExtractHour('start_datetime')).count(), 2)
    def test_extract_minute_func(self):
        """ExtractMinute works on DateTimeField and TimeField, and in filters."""
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=ExtractMinute('start_datetime')).order_by('start_datetime'),
            [(start_datetime, start_datetime.minute), (end_datetime, end_datetime.minute)],
            lambda m: (m.start_datetime, m.extracted)
        )
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=ExtractMinute('start_time')).order_by('start_datetime'),
            [(start_datetime, start_datetime.minute), (end_datetime, end_datetime.minute)],
            lambda m: (m.start_datetime, m.extracted)
        )
        # Both rows match when comparing the column against its own extract.
        self.assertEqual(DTModel.objects.filter(start_datetime__minute=ExtractMinute('start_datetime')).count(), 2)
    def test_extract_second_func(self):
        """ExtractSecond works on DateTimeField and TimeField, and in filters."""
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=ExtractSecond('start_datetime')).order_by('start_datetime'),
            [(start_datetime, start_datetime.second), (end_datetime, end_datetime.second)],
            lambda m: (m.start_datetime, m.extracted)
        )
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=ExtractSecond('start_time')).order_by('start_datetime'),
            [(start_datetime, start_datetime.second), (end_datetime, end_datetime.second)],
            lambda m: (m.start_datetime, m.extracted)
        )
        # Both rows match when comparing the column against its own extract.
        self.assertEqual(DTModel.objects.filter(start_datetime__second=ExtractSecond('start_datetime')).count(), 2)
    def test_trunc_func(self):
        """The generic Trunc function validates its arguments and truncates
        datetimes and dates to every supported kind."""
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)

        # Invalid output fields and field/kind combinations must raise.
        with self.assertRaisesMessage(ValueError, 'output_field must be either DateField or DateTimeField'):
            list(DTModel.objects.annotate(truncated=Trunc('start_datetime', 'year', output_field=IntegerField())))

        with self.assertRaisesMessage(AssertionError, "'name' isn't a DateField or DateTimeField."):
            list(DTModel.objects.annotate(truncated=Trunc('name', 'year', output_field=DateTimeField())))

        with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"):
            list(DTModel.objects.annotate(truncated=Trunc('start_date', 'second')))

        with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"):
            list(DTModel.objects.annotate(truncated=Trunc('start_date', 'month', output_field=DateTimeField())))

        def test_datetime_kind(kind):
            # SQL-side truncation of the datetime column must match the
            # Python-side truncate_to() helper.
            self.assertQuerysetEqual(
                DTModel.objects.annotate(
                    truncated=Trunc('start_datetime', kind, output_field=DateTimeField())
                ).order_by('start_datetime'),
                [
                    (start_datetime, truncate_to(start_datetime, kind)),
                    (end_datetime, truncate_to(end_datetime, kind))
                ],
                lambda m: (m.start_datetime, m.truncated)
            )

        def test_date_kind(kind):
            # Same check for the date column, with date-typed expectations.
            self.assertQuerysetEqual(
                DTModel.objects.annotate(
                    truncated=Trunc('start_date', kind, output_field=DateField())
                ).order_by('start_datetime'),
                [
                    (start_datetime, truncate_to(start_datetime.date(), kind)),
                    (end_datetime, truncate_to(end_datetime.date(), kind))
                ],
                lambda m: (m.start_datetime, m.truncated)
            )

        test_date_kind('year')
        test_date_kind('month')
        test_date_kind('day')
        test_datetime_kind('year')
        test_datetime_kind('month')
        test_datetime_kind('day')
        test_datetime_kind('hour')
        test_datetime_kind('minute')
        test_datetime_kind('second')

        # Trunc can also appear on the right-hand side of a filter.
        qs = DTModel.objects.filter(start_datetime__date=Trunc('start_datetime', 'day', output_field=DateField()))
        self.assertEqual(qs.count(), 2)
    def test_trunc_year_func(self):
        """TruncYear works on DateTimeField and DateField, and in filters."""
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        # The second row is pre-truncated so the final filter matches it.
        end_datetime = truncate_to(microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123)), 'year')
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=TruncYear('start_datetime')).order_by('start_datetime'),
            [
                (start_datetime, truncate_to(start_datetime, 'year')),
                (end_datetime, truncate_to(end_datetime, 'year')),
            ],
            lambda m: (m.start_datetime, m.extracted)
        )
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=TruncYear('start_date')).order_by('start_datetime'),
            [
                (start_datetime, truncate_to(start_datetime.date(), 'year')),
                (end_datetime, truncate_to(end_datetime.date(), 'year')),
            ],
            lambda m: (m.start_datetime, m.extracted)
        )
        # Only the pre-truncated row equals its own truncation.
        self.assertEqual(DTModel.objects.filter(start_datetime=TruncYear('start_datetime')).count(), 1)
    def test_trunc_month_func(self):
        """TruncMonth works on DateTimeField and DateField, and in filters."""
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        # The second row is pre-truncated so the final filter matches it.
        end_datetime = truncate_to(microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123)), 'month')
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=TruncMonth('start_datetime')).order_by('start_datetime'),
            [
                (start_datetime, truncate_to(start_datetime, 'month')),
                (end_datetime, truncate_to(end_datetime, 'month')),
            ],
            lambda m: (m.start_datetime, m.extracted)
        )
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=TruncMonth('start_date')).order_by('start_datetime'),
            [
                (start_datetime, truncate_to(start_datetime.date(), 'month')),
                (end_datetime, truncate_to(end_datetime.date(), 'month')),
            ],
            lambda m: (m.start_datetime, m.extracted)
        )
        # Only the pre-truncated row equals its own truncation.
        self.assertEqual(DTModel.objects.filter(start_datetime=TruncMonth('start_datetime')).count(), 1)
    def test_trunc_date_func(self):
        """TruncDate reduces a DateTimeField to its date part."""
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=TruncDate('start_datetime')).order_by('start_datetime'),
            [
                (start_datetime, start_datetime.date()),
                (end_datetime, end_datetime.date()),
            ],
            lambda m: (m.start_datetime, m.extracted)
        )
        # Every row's __date lookup equals its own TruncDate.
        self.assertEqual(DTModel.objects.filter(start_datetime__date=TruncDate('start_datetime')).count(), 2)
    def test_trunc_day_func(self):
        """TruncDay truncates a DateTimeField to midnight of the same day."""
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        # The second row is pre-truncated so the final filter matches it.
        end_datetime = truncate_to(microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123)), 'day')
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=TruncDay('start_datetime')).order_by('start_datetime'),
            [
                (start_datetime, truncate_to(start_datetime, 'day')),
                (end_datetime, truncate_to(end_datetime, 'day')),
            ],
            lambda m: (m.start_datetime, m.extracted)
        )
        # Only the pre-truncated row equals its own truncation.
        self.assertEqual(DTModel.objects.filter(start_datetime=TruncDay('start_datetime')).count(), 1)
    def test_trunc_hour_func(self):
        """TruncHour truncates a DateTimeField and rejects DateField input."""
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        # The second row is pre-truncated so the final filter matches it.
        end_datetime = truncate_to(microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123)), 'hour')
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=TruncHour('start_datetime')).order_by('start_datetime'),
            [
                (start_datetime, truncate_to(start_datetime, 'hour')),
                (end_datetime, truncate_to(end_datetime, 'hour')),
            ],
            lambda m: (m.start_datetime, m.extracted)
        )
        # Only the pre-truncated row equals its own truncation.
        self.assertEqual(DTModel.objects.filter(start_datetime=TruncHour('start_datetime')).count(), 1)

        # A time-level truncation of a DateField must raise, whatever the
        # requested output field.
        with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"):
            list(DTModel.objects.annotate(truncated=TruncHour('start_date')))

        with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"):
            list(DTModel.objects.annotate(truncated=TruncHour('start_date', output_field=DateField())))
    def test_trunc_minute_func(self):
        """TruncMinute zeroes seconds and microseconds."""
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        # Pre-truncated so that exactly one stored value is minute-aligned.
        end_datetime = truncate_to(microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123)), 'minute')
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=TruncMinute('start_datetime')).order_by('start_datetime'),
            [
                (start_datetime, truncate_to(start_datetime, 'minute')),
                (end_datetime, truncate_to(end_datetime, 'minute')),
            ],
            lambda m: (m.start_datetime, m.extracted)
        )
        self.assertEqual(DTModel.objects.filter(start_datetime=TruncMinute('start_datetime')).count(), 1)

        # A DateField cannot be truncated to a time-based kind.
        with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"):
            list(DTModel.objects.annotate(truncated=TruncMinute('start_date')))
        with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"):
            list(DTModel.objects.annotate(truncated=TruncMinute('start_date', output_field=DateField())))
    def test_trunc_second_func(self):
        """TruncSecond drops only the microsecond component."""
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        end_datetime = truncate_to(microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123)), 'second')
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=TruncSecond('start_datetime')).order_by('start_datetime'),
            [
                (start_datetime, truncate_to(start_datetime, 'second')),
                (end_datetime, truncate_to(end_datetime, 'second'))
            ],
            lambda m: (m.start_datetime, m.extracted)
        )
        # On backends without microsecond precision, both stored values are
        # effectively second-aligned, so two rows match instead of one.
        result = 1 if connection.features.supports_microsecond_precision else 2
        self.assertEqual(DTModel.objects.filter(start_datetime=TruncSecond('start_datetime')).count(), result)

        # A DateField cannot be truncated to a time-based kind.
        with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"):
            list(DTModel.objects.annotate(truncated=TruncSecond('start_date')))
        with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"):
            list(DTModel.objects.annotate(truncated=TruncSecond('start_date', output_field=DateField())))
@skipIf(pytz is None, "this test requires pytz")
@override_settings(USE_TZ=True, TIME_ZONE='UTC')
class DateFunctionWithTimeZoneTests(DateFunctionTests):
    """Re-runs every inherited date-function test with USE_TZ enabled and
    adds timezone-specific checks for Extract and Trunc."""

    def test_extract_func_with_timezone(self):
        # 23:30 UTC on June 15 is already June 16 in Melbourne (UTC+10),
        # so UTC and Melbourne extractions must disagree by one day.
        start_datetime = microsecond_support(datetime(2015, 6, 15, 23, 30, 1, 321))
        end_datetime = microsecond_support(datetime(2015, 6, 16, 13, 11, 27, 123))
        start_datetime = timezone.make_aware(start_datetime, is_dst=False)
        end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        melb = pytz.timezone('Australia/Melbourne')

        qs = DTModel.objects.annotate(
            day=Extract('start_datetime', 'day'),
            day_melb=Extract('start_datetime', 'day', tzinfo=melb),
            weekday=ExtractWeekDay('start_datetime'),
            weekday_melb=ExtractWeekDay('start_datetime', tzinfo=melb),
            hour=ExtractHour('start_datetime'),
            hour_melb=ExtractHour('start_datetime', tzinfo=melb),
        ).order_by('start_datetime')

        # With UTC active, the plain annotations use UTC while the
        # tzinfo=melb annotations use Melbourne.
        utc_model = qs.get()
        self.assertEqual(utc_model.day, 15)
        self.assertEqual(utc_model.day_melb, 16)
        self.assertEqual(utc_model.weekday, 2)
        self.assertEqual(utc_model.weekday_melb, 3)
        self.assertEqual(utc_model.hour, 23)
        self.assertEqual(utc_model.hour_melb, 9)

        # Overriding the active timezone flips the plain annotations to
        # Melbourne values; the explicit tzinfo annotations are unchanged.
        with timezone.override(melb):
            melb_model = qs.get()

        self.assertEqual(melb_model.day, 16)
        self.assertEqual(melb_model.day_melb, 16)
        self.assertEqual(melb_model.weekday, 3)
        self.assertEqual(melb_model.weekday_melb, 3)
        self.assertEqual(melb_model.hour, 9)
        self.assertEqual(melb_model.hour_melb, 9)

    def test_extract_func_explicit_timezone_priority(self):
        # An explicit tzinfo argument must win over the active timezone.
        start_datetime = microsecond_support(datetime(2015, 6, 15, 23, 30, 1, 321))
        end_datetime = microsecond_support(datetime(2015, 6, 16, 13, 11, 27, 123))
        start_datetime = timezone.make_aware(start_datetime, is_dst=False)
        end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)

        melb = pytz.timezone('Australia/Melbourne')
        with timezone.override(melb):
            model = DTModel.objects.annotate(
                day_melb=Extract('start_datetime', 'day'),
                day_utc=Extract('start_datetime', 'day', tzinfo=timezone.utc),
            ).order_by('start_datetime').get()
            self.assertEqual(model.day_melb, 16)
            self.assertEqual(model.day_utc, 15)

    def test_trunc_timezone_applied_before_truncation(self):
        start_datetime = microsecond_support(datetime(2016, 1, 1, 1, 30, 50, 321))
        end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
        start_datetime = timezone.make_aware(start_datetime, is_dst=False)
        end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)

        melb = pytz.timezone('Australia/Melbourne')
        pacific = pytz.timezone('US/Pacific')

        model = DTModel.objects.annotate(
            melb_year=TruncYear('start_datetime', tzinfo=melb),
            pacific_year=TruncYear('start_datetime', tzinfo=pacific),
        ).order_by('start_datetime').get()

        self.assertEqual(model.start_datetime, start_datetime)
        self.assertEqual(model.melb_year, truncate_to(start_datetime, 'year', melb))
        self.assertEqual(model.pacific_year, truncate_to(start_datetime, 'year', pacific))
        # 2016-01-01 01:30 UTC is still 2015 in US/Pacific, which proves the
        # timezone conversion happens before the truncation.
        self.assertEqual(model.start_datetime.year, 2016)
        self.assertEqual(model.melb_year.year, 2016)
        self.assertEqual(model.pacific_year.year, 2015)

    def test_trunc_func_with_timezone(self):
        """
        If the truncated datetime transitions to a different offset (daylight
        saving) then the returned value will have that new timezone/offset.
        """
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
        start_datetime = timezone.make_aware(start_datetime, is_dst=False)
        end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)

        melb = pytz.timezone('Australia/Melbourne')

        # NOTE(review): the tzinfo parameter of both helpers is unused; the
        # closed-over `melb` is referenced directly in the query and in the
        # expected values.
        def test_datetime_kind(kind, tzinfo=melb):
            # Expected values are converted to Melbourne time first, then
            # truncated, mirroring what the database is asked to do.
            self.assertQuerysetEqual(
                DTModel.objects.annotate(
                    truncated=Trunc('start_datetime', kind, output_field=DateTimeField(), tzinfo=melb)
                ).order_by('start_datetime'),
                [
                    (start_datetime, truncate_to(start_datetime.astimezone(melb), kind, melb)),
                    (end_datetime, truncate_to(end_datetime.astimezone(melb), kind, melb))
                ],
                lambda m: (m.start_datetime, m.truncated)
            )

        def test_date_kind(kind, tzinfo=melb):
            # DateField truncation has no time component, so the expected
            # values are not timezone-converted.
            self.assertQuerysetEqual(
                DTModel.objects.annotate(
                    truncated=Trunc('start_date', kind, output_field=DateField(), tzinfo=melb)
                ).order_by('start_datetime'),
                [
                    (start_datetime, truncate_to(start_datetime.date(), kind)),
                    (end_datetime, truncate_to(end_datetime.date(), kind))
                ],
                lambda m: (m.start_datetime, m.truncated)
            )

        test_date_kind('year')
        test_date_kind('month')
        test_date_kind('day')
        test_datetime_kind('year')
        test_datetime_kind('month')
        test_datetime_kind('day')
        test_datetime_kind('hour')
        test_datetime_kind('minute')
        test_datetime_kind('second')

        qs = DTModel.objects.filter(start_datetime__date=Trunc('start_datetime', 'day', output_field=DateField()))
        self.assertEqual(qs.count(), 2)
| bsd-3-clause |
ravibhure/ansible | lib/ansible/modules/net_tools/ldap/ldap_entry.py | 49 | 9945 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Peter Sagerson <psagers@ignorare.net>
# (c) 2016, Jiri Tyr <jiri.tyr@gmail.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: ldap_entry
short_description: Add or remove LDAP entries.
description:
- Add or remove LDAP entries. This module only asserts the existence or
non-existence of an LDAP entry, not its attributes. To assert the
attribute values of an entry, see M(ldap_attr).
notes:
- The default authentication settings will attempt to use a SASL EXTERNAL
bind over a UNIX domain socket. This works well with the default Ubuntu
install for example, which includes a cn=peercred,cn=external,cn=auth ACL
rule allowing root to modify the server configuration. If you need to use
a simple bind to access your server, pass the credentials in I(bind_dn)
and I(bind_pw).
version_added: '2.3'
author:
- Jiri Tyr (@jtyr)
requirements:
- python-ldap
options:
bind_dn:
required: false
default: null
description:
- A DN to bind with. If this is omitted, we'll try a SASL bind with
the EXTERNAL mechanism. If this is blank, we'll use an anonymous
bind.
bind_pw:
required: false
default: null
description:
- The password to use with I(bind_dn).
dn:
required: true
description:
- The DN of the entry to add or remove.
attributes:
required: false
default: null
description:
- If I(state=present), attributes necessary to create an entry. Existing
entries are never modified. To assert specific attribute values on an
existing entry, use M(ldap_attr) module instead.
objectClass:
required: false
default: null
description:
- If I(state=present), value or list of values to use when creating
the entry. It can either be a string or an actual list of
strings.
params:
required: false
default: null
description:
- List of options which allows to overwrite any of the task or the
I(attributes) options. To remove an option, set the value of the option
to C(null).
server_uri:
required: false
default: ldapi:///
description:
- A URI to the LDAP server. The default value lets the underlying
LDAP client library look for a UNIX domain socket in its default
location.
start_tls:
required: false
choices: ['yes', 'no']
default: 'no'
description:
- If true, we'll use the START_TLS LDAP extension.
state:
required: false
choices: [present, absent]
default: present
description:
- The target state of the entry.
validate_certs:
required: false
choices: ['yes', 'no']
default: 'yes'
description:
- If C(no), SSL certificates will not be validated. This should only be
used on sites using self-signed certificates.
version_added: "2.4"
"""
EXAMPLES = """
- name: Make sure we have a parent entry for users
ldap_entry:
dn: ou=users,dc=example,dc=com
objectClass: organizationalUnit
- name: Make sure we have an admin user
ldap_entry:
dn: cn=admin,dc=example,dc=com
objectClass:
- simpleSecurityObject
- organizationalRole
attributes:
description: An LDAP administrator
userPassword: "{SSHA}tabyipcHzhwESzRaGA7oQ/SDoBZQOGND"
- name: Get rid of an old entry
ldap_entry:
dn: ou=stuff,dc=example,dc=com
state: absent
server_uri: ldap://localhost/
bind_dn: cn=admin,dc=example,dc=com
bind_pw: password
#
# The same as in the previous example but with the authentication details
# stored in the ldap_auth variable:
#
# ldap_auth:
# server_uri: ldap://localhost/
# bind_dn: cn=admin,dc=example,dc=com
# bind_pw: password
- name: Get rid of an old entry
ldap_entry:
dn: ou=stuff,dc=example,dc=com
state: absent
params: "{{ ldap_auth }}"
"""
RETURN = """
# Default return values
"""
import traceback
try:
import ldap
import ldap.modlist
import ldap.sasl
HAS_LDAP = True
except ImportError:
HAS_LDAP = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_native
class LdapEntry(object):
    """Holds the module parameters plus a bound LDAP connection, and builds
    the add/delete actions that assert the entry's presence or absence."""

    def __init__(self, module):
        # Shortcuts
        self.module = module
        self.bind_dn = self.module.params['bind_dn']
        self.bind_pw = self.module.params['bind_pw']
        self.dn = self.module.params['dn']
        self.server_uri = self.module.params['server_uri']
        self.start_tls = self.module.params['start_tls']
        self.state = self.module.params['state']
        self.verify_cert = self.module.params['validate_certs']

        # Add the objectClass into the list of attributes
        self.module.params['attributes']['objectClass'] = (
            self.module.params['objectClass'])

        # Attributes are only needed when creating the entry.
        if self.state == 'present':
            self.attrs = self._load_attrs()

        # Establish connection
        self.connection = self._connect_to_ldap()

    def _load_attrs(self):
        """Normalize every attribute value to a list, as python-ldap expects.

        Scalars are coerced to str; list values are kept as-is (their
        elements are deliberately not coerced, matching prior behavior).
        """
        attrs = {}

        # Dict keys are unique, so each name is seen exactly once; the old
        # "if name not in attrs" guard was dead code and has been removed.
        for name, value in self.module.params['attributes'].items():
            if isinstance(value, list):
                attrs[name] = value
            else:
                attrs[name] = [str(value)]

        return attrs

    def add(self):
        """ If self.dn does not exist, returns a callable that will add it. """
        def _add():
            self.connection.add_s(self.dn, modlist)

        if not self._is_entry_present():
            modlist = ldap.modlist.addModlist(self.attrs)
            action = _add
        else:
            action = None

        return action

    def delete(self):
        """ If self.dn exists, returns a callable that will delete it. """
        def _delete():
            self.connection.delete_s(self.dn)

        if self._is_entry_present():
            action = _delete
        else:
            action = None

        return action

    def _is_entry_present(self):
        """Return True if a base-scope search on self.dn finds the entry."""
        try:
            self.connection.search_s(self.dn, ldap.SCOPE_BASE)
        except ldap.NO_SUCH_OBJECT:
            is_present = False
        else:
            is_present = True

        return is_present

    def _connect_to_ldap(self):
        """Open the connection, optionally start TLS, then bind.

        Uses a simple bind when bind_dn is given, otherwise a SASL EXTERNAL
        bind (e.g. over a UNIX domain socket). Fails the module on error.
        """
        if not self.verify_cert:
            ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)

        connection = ldap.initialize(self.server_uri)

        if self.start_tls:
            try:
                connection.start_tls_s()
            except ldap.LDAPError as e:
                self.module.fail_json(msg="Cannot start TLS.", details=to_native(e),
                                      exception=traceback.format_exc())

        try:
            if self.bind_dn is not None:
                connection.simple_bind_s(self.bind_dn, self.bind_pw)
            else:
                connection.sasl_interactive_bind_s('', ldap.sasl.external())
        except ldap.LDAPError as e:
            self.module.fail_json(
                msg="Cannot bind to the server.", details=to_native(e),
                exception=traceback.format_exc())

        return connection
def main():
    """Module entry point: validate parameters and enforce the requested
    entry state (present/absent), honoring check mode."""
    module = AnsibleModule(
        argument_spec={
            'attributes': dict(default={}, type='dict'),
            'bind_dn': dict(),
            'bind_pw': dict(default='', no_log=True),
            'dn': dict(required=True),
            'objectClass': dict(type='raw'),
            'params': dict(type='dict'),
            'server_uri': dict(default='ldapi:///'),
            'start_tls': dict(default=False, type='bool'),
            'state': dict(default='present', choices=['present', 'absent']),
            'validate_certs': dict(default=True, type='bool'),
        },
        supports_check_mode=True,
    )

    if not HAS_LDAP:
        module.fail_json(
            msg="Missing required 'ldap' module (pip install python-ldap).")

    state = module.params['state']

    # Check if objectClass is present when needed
    if state == 'present' and module.params['objectClass'] is None:
        module.fail_json(msg="At least one objectClass must be provided.")

    # Check if objectClass is of the correct type
    if (
            module.params['objectClass'] is not None and not (
                isinstance(module.params['objectClass'], string_types) or
                isinstance(module.params['objectClass'], list))):
        module.fail_json(msg="objectClass must be either a string or a list.")

    # Update module parameters with user's parameters if defined; keys not
    # in the argument spec are treated as extra entry attributes.
    if 'params' in module.params and isinstance(module.params['params'], dict):
        for key, val in module.params['params'].items():
            if key in module.argument_spec:
                module.params[key] = val
            else:
                module.params['attributes'][key] = val

        # Remove the consumed params option.
        module.params.pop('params', None)

    # BUGFIX: this local was previously named `ldap`, shadowing the imported
    # python-ldap module for the rest of this function.
    ldap_entry = LdapEntry(module)

    # Get the action function
    if state == 'present':
        action = ldap_entry.add()
    elif state == 'absent':
        action = ldap_entry.delete()

    # Perform the action (no-op in check mode or when already converged).
    if action is not None and not module.check_mode:
        try:
            action()
        except Exception as e:
            module.fail_json(msg="Entry action failed.", details=to_native(e), exception=traceback.format_exc())

    module.exit_json(changed=(action is not None))


if __name__ == '__main__':
    main()
| gpl-3.0 |
Yukarumya/Yukarum-Redfoxes | testing/mozharness/configs/marionette/windows_config.py | 1 | 1876 | # This is a template config file for marionette production on Windows.
import os
import sys
# Production marionette-test configuration for Windows builders.
config = {
    # marionette options
    "marionette_address": "localhost:2828",
    "test_manifest": "unit-tests.ini",
    "virtualenv_python_dll": 'c:/mozilla-build/python27/python27.dll',
    "virtualenv_path": 'venv',
    # Paths to the external tools invoked by the harness scripts.
    "exes": {
        'python': 'c:/mozilla-build/python27/python',
        'virtualenv': ['c:/mozilla-build/python27/python', 'c:/mozilla-build/buildbotve/virtualenv.py'],
        'hg': 'c:/mozilla-build/hg/hg',
        'mozinstall': ['%s/build/venv/scripts/python' % os.getcwd(),
                       '%s/build/venv/scripts/mozinstall-script.py' % os.getcwd()],
        'tooltool.py': [sys.executable, 'C:/mozilla-build/tooltool.py'],
    },
    # Internal pip mirrors; pip_index disables querying the public index.
    "find_links": [
        "http://pypi.pvt.build.mozilla.org/pub",
        "http://pypi.pub.build.mozilla.org/pub",
    ],
    "pip_index": False,

    "buildbot_json_path": "buildprops.json",

    # Actions executed in order when no subset is requested.
    "default_actions": [
        'clobber',
        'read-buildbot-config',
        'download-and-extract',
        'create-virtualenv',
        'install',
        'run-tests',
    ],
    "default_blob_upload_servers": [
        "https://blobupload.elasticbeanstalk.com",
    ],
    "blob_uploader_auth_file" : os.path.join(os.getcwd(), "oauth.txt"),
    "download_minidump_stackwalk": True,
    "download_symbols": "ondemand",
    # Command-line template for the desktop marionette suite; %(...)s
    # placeholders are filled in by the harness at run time.
    "suite_definitions": {
        "marionette_desktop": {
            "options": [
                "-vv",
                "--log-raw=%(raw_log_file)s",
                "--log-errorsummary=%(error_summary_file)s",
                "--log-html=%(html_report_file)s",
                "--binary=%(binary)s",
                "--address=%(address)s",
                "--symbols-path=%(symbols_path)s"
            ],
            "run_filename": "",
            "testsdir": ""
        },
    },
}
| mpl-2.0 |
mugurrus/superdesk-core | tests/io/feed_parsers/dpa_test.py | 4 | 2118 | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import os
from superdesk.io.feed_parsers.dpa_iptc7901 import DPAIPTC7901FeedParser
from superdesk.tests import TestCase
def fixture(filename):
    """Return the normalized path of *filename* inside the ``fixtures``
    directory that sits next to this test module's parent directory."""
    here = os.path.dirname(os.path.realpath(__file__))
    return os.path.normpath(os.path.join(here, '..', 'fixtures', filename))
class DPAIptcTestCase(TestCase):
    """Feeds IPTC 7901 fixture files through the DPA parser and checks the
    resulting item fields."""

    parser = DPAIPTC7901FeedParser()

    def open(self, filename):
        # Parse the named fixture with a minimal provider definition.
        provider = {'name': 'Test'}
        return self.parser.parse(fixture(filename), provider)

    def test_open_iptc7901_file(self):
        with self.app.app_context():
            item = self.open('IPTC7901.txt')
            self.assertEqual('text', item['type'])
            self.assertEqual('062', item['ingest_provider_sequence'])
            self.assertEqual('i', item['anpa_category'][0]['qcode'])
            self.assertEqual(211, item['word_count'])
            self.assertEqual('Germany Social Democrats: Coalition talks with Merkel could fail =', item['headline'])
            self.assertRegex(item['body_html'], '^<p></p><p>Negotiations')
            self.assertEqual('Germany-politics', item['slugline'])
            self.assertEqual(4, item['priority'])
            self.assertEqual([{'qcode': 'i'}], item['anpa_category'])
            self.assertTrue(item['ednote'].find('## Editorial contacts'))
            # Dateline is derived from the takeline by the parser.
            self.assertEqual(item['dateline']['source'], 'dpa')
            self.assertEqual(item['dateline']['located']['city'], 'Berlin')

    def test_open_dpa_copyright(self):
        with self.app.app_context():
            item = self.open('dpa_copyright.txt')
            self.assertEqual('text', item['type'])
            self.assertEqual('rs', item['anpa_category'][0]['qcode'])
            self.assertEqual('(Achtung)', item['headline'])
            self.assertEqual('Impressum', item['slugline'])
pquentin/django | tests/generic_relations/tests.py | 8 | 25089 | from __future__ import unicode_literals
from django import forms
from django.contrib.contenttypes.forms import generic_inlineformset_factory
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import FieldError
from django.db.models import Q
from django.test import TestCase
from django.utils import six
from .models import (
AllowsNullGFK, Animal, Comparison, ConcreteRelatedModel,
ForConcreteModelModel, ForProxyModelModel, Gecko, ManualPK, Mineral,
ProxyRelatedModel, Rock, TaggedItem, ValuableTaggedItem, Vegetable,
)
class GenericRelationsTests(TestCase):
    def setUp(self):
        # Two animals, two vegetables and one mineral. Mineral deliberately
        # declares no GenericRelation (several tests rely on that).
        self.lion = Animal.objects.create(
            common_name="Lion", latin_name="Panthera leo")
        self.platypus = Animal.objects.create(
            common_name="Platypus", latin_name="Ornithorhynchus anatinus")
        Vegetable.objects.create(name="Eggplant", is_yucky=True)
        self.bacon = Vegetable.objects.create(name="Bacon", is_yucky=False)
        self.quartz = Mineral.objects.create(name="Quartz", hardness=7)

        # Tagging stuff.
        self.bacon.tags.create(tag="fatty")
        self.bacon.tags.create(tag="salty")
        self.lion.tags.create(tag="yellow")
        self.lion.tags.create(tag="hairy")

        # Original list of tags:
        # Key function used with assertQuerysetEqual to compare TaggedItems
        # by (tag, related model class, object pk).
        self.comp_func = lambda obj: (
            obj.tag, obj.content_type.model_class(), obj.object_id
        )
    def test_generic_update_or_create_when_created(self):
        """
        Should be able to use update_or_create from the generic related manager
        to create a tag. Refs #23611.
        """
        count = self.bacon.tags.count()
        # No existing 'stinky' tag, so update_or_create must create one.
        tag, created = self.bacon.tags.update_or_create(tag='stinky')
        self.assertTrue(created)
        self.assertEqual(count + 1, self.bacon.tags.count())
def test_generic_update_or_create_when_updated(self):
"""
Should be able to use update_or_create from the generic related manager
to update a tag. Refs #23611.
"""
count = self.bacon.tags.count()
tag = self.bacon.tags.create(tag='stinky')
self.assertEqual(count + 1, self.bacon.tags.count())
tag, created = self.bacon.tags.update_or_create(defaults={'tag': 'juicy'}, id=tag.id)
self.assertFalse(created)
self.assertEqual(count + 1, self.bacon.tags.count())
self.assertEqual(tag.tag, 'juicy')
def test_generic_get_or_create_when_created(self):
"""
Should be able to use get_or_create from the generic related manager
to create a tag. Refs #23611.
"""
count = self.bacon.tags.count()
tag, created = self.bacon.tags.get_or_create(tag='stinky')
self.assertTrue(created)
self.assertEqual(count + 1, self.bacon.tags.count())
    def test_generic_get_or_create_when_exists(self):
        """
        Should be able to use get_or_create from the generic related manager
        to get a tag. Refs #23611.
        """
        count = self.bacon.tags.count()
        tag = self.bacon.tags.create(tag="stinky")
        self.assertEqual(count + 1, self.bacon.tags.count())
        # Lookup by pk hits the existing row; defaults must NOT be applied.
        tag, created = self.bacon.tags.get_or_create(id=tag.id, defaults={'tag': 'juicy'})
        self.assertFalse(created)
        self.assertEqual(count + 1, self.bacon.tags.count())

        # shouldn't had changed the tag
        self.assertEqual(tag.tag, 'stinky')
def test_generic_relations_m2m_mimic(self):
"""
Objects with declared GenericRelations can be tagged directly -- the
API mimics the many-to-many API.
"""
self.assertQuerysetEqual(self.lion.tags.all(), [
"<TaggedItem: hairy>",
"<TaggedItem: yellow>"
])
self.assertQuerysetEqual(self.bacon.tags.all(), [
"<TaggedItem: fatty>",
"<TaggedItem: salty>"
])
def test_access_content_object(self):
"""
Test accessing the content object like a foreign key.
"""
tagged_item = TaggedItem.objects.get(tag="salty")
self.assertEqual(tagged_item.content_object, self.bacon)
def test_query_content_object(self):
qs = TaggedItem.objects.filter(
animal__isnull=False).order_by('animal__common_name', 'tag')
self.assertQuerysetEqual(
qs, ["<TaggedItem: hairy>", "<TaggedItem: yellow>"]
)
mpk = ManualPK.objects.create(id=1)
mpk.tags.create(tag='mpk')
qs = TaggedItem.objects.filter(
Q(animal__isnull=False) | Q(manualpk__id=1)).order_by('tag')
self.assertQuerysetEqual(
qs, ["hairy", "mpk", "yellow"], lambda x: x.tag)
def test_exclude_generic_relations(self):
"""
Test lookups over an object without GenericRelations.
"""
# Recall that the Mineral class doesn't have an explicit GenericRelation
# defined. That's OK, because you can create TaggedItems explicitly.
# However, excluding GenericRelations means your lookups have to be a
# bit more explicit.
TaggedItem.objects.create(content_object=self.quartz, tag="shiny")
TaggedItem.objects.create(content_object=self.quartz, tag="clearish")
ctype = ContentType.objects.get_for_model(self.quartz)
q = TaggedItem.objects.filter(
content_type__pk=ctype.id, object_id=self.quartz.id
)
self.assertQuerysetEqual(q, [
"<TaggedItem: clearish>",
"<TaggedItem: shiny>"
])
def test_access_via_content_type(self):
"""
Test lookups through content type.
"""
self.lion.delete()
self.platypus.tags.create(tag="fatty")
ctype = ContentType.objects.get_for_model(self.platypus)
self.assertQuerysetEqual(
Animal.objects.filter(tags__content_type=ctype),
["<Animal: Platypus>"])
def test_set_foreign_key(self):
"""
You can set a generic foreign key in the way you'd expect.
"""
tag1 = TaggedItem.objects.create(content_object=self.quartz, tag="shiny")
tag1.content_object = self.platypus
tag1.save()
self.assertQuerysetEqual(
self.platypus.tags.all(),
["<TaggedItem: shiny>"])
def test_queries_across_generic_relations(self):
"""
Queries across generic relations respect the content types. Even though
there are two TaggedItems with a tag of "fatty", this query only pulls
out the one with the content type related to Animals.
"""
self.assertQuerysetEqual(Animal.objects.order_by('common_name'), [
"<Animal: Lion>",
"<Animal: Platypus>"
])
def test_queries_content_type_restriction(self):
"""
Create another fatty tagged instance with different PK to ensure there
is a content type restriction in the generated queries below.
"""
mpk = ManualPK.objects.create(id=self.lion.pk)
mpk.tags.create(tag="fatty")
self.platypus.tags.create(tag="fatty")
self.assertQuerysetEqual(
Animal.objects.filter(tags__tag='fatty'), ["<Animal: Platypus>"])
self.assertQuerysetEqual(
Animal.objects.exclude(tags__tag='fatty'), ["<Animal: Lion>"])
def test_object_deletion_with_generic_relation(self):
"""
If you delete an object with an explicit Generic relation, the related
objects are deleted when the source object is deleted.
"""
self.assertQuerysetEqual(TaggedItem.objects.all(), [
('fatty', Vegetable, self.bacon.pk),
('hairy', Animal, self.lion.pk),
('salty', Vegetable, self.bacon.pk),
('yellow', Animal, self.lion.pk)
],
self.comp_func
)
self.lion.delete()
self.assertQuerysetEqual(TaggedItem.objects.all(), [
('fatty', Vegetable, self.bacon.pk),
('salty', Vegetable, self.bacon.pk),
],
self.comp_func
)
def test_object_deletion_without_generic_relation(self):
"""
If Generic Relation is not explicitly defined, any related objects
remain after deletion of the source object.
"""
TaggedItem.objects.create(content_object=self.quartz, tag="clearish")
quartz_pk = self.quartz.pk
self.quartz.delete()
self.assertQuerysetEqual(TaggedItem.objects.all(), [
('clearish', Mineral, quartz_pk),
('fatty', Vegetable, self.bacon.pk),
('hairy', Animal, self.lion.pk),
('salty', Vegetable, self.bacon.pk),
('yellow', Animal, self.lion.pk),
],
self.comp_func
)
def test_tag_deletion_related_objects_unaffected(self):
"""
If you delete a tag, the objects using the tag are unaffected (other
than losing a tag).
"""
ctype = ContentType.objects.get_for_model(self.lion)
tag = TaggedItem.objects.get(
content_type__pk=ctype.id, object_id=self.lion.id, tag="hairy")
tag.delete()
self.assertQuerysetEqual(self.lion.tags.all(), ["<TaggedItem: yellow>"])
self.assertQuerysetEqual(TaggedItem.objects.all(), [
('fatty', Vegetable, self.bacon.pk),
('salty', Vegetable, self.bacon.pk),
('yellow', Animal, self.lion.pk)
],
self.comp_func
)
    def test_assign_with_queryset(self):
        # Ensure that querysets used in reverse GFK assignments are pre-evaluated
        # so their value isn't affected by the clearing operation in
        # ManyRelatedObjectsDescriptor.__set__. Refs #19816.
        bacon = Vegetable.objects.create(name="Bacon", is_yucky=False)
        bacon.tags.create(tag="fatty")
        bacon.tags.create(tag="salty")
        self.assertEqual(2, bacon.tags.count())

        # Assigning a queryset over the relation it filters must not let the
        # implicit clear empty the queryset before it is applied.
        qs = bacon.tags.filter(tag="fatty")
        bacon.tags = qs

        self.assertEqual(1, bacon.tags.count())
        self.assertEqual(1, qs.count())
    def test_generic_relation_related_name_default(self):
        # Test that GenericRelation by default isn't usable from
        # the reverse side (no implicit reverse filter name).
        with self.assertRaises(FieldError):
            TaggedItem.objects.filter(vegetable__isnull=True)
def test_multiple_gfk(self):
# Simple tests for multiple GenericForeignKeys
# only uses one model, since the above tests should be sufficient.
tiger = Animal.objects.create(common_name="tiger")
cheetah = Animal.objects.create(common_name="cheetah")
bear = Animal.objects.create(common_name="bear")
# Create directly
Comparison.objects.create(
first_obj=cheetah, other_obj=tiger, comparative="faster"
)
Comparison.objects.create(
first_obj=tiger, other_obj=cheetah, comparative="cooler"
)
# Create using GenericRelation
tiger.comparisons.create(other_obj=bear, comparative="cooler")
tiger.comparisons.create(other_obj=cheetah, comparative="stronger")
self.assertQuerysetEqual(cheetah.comparisons.all(), [
"<Comparison: cheetah is faster than tiger>"
])
# Filtering works
self.assertQuerysetEqual(tiger.comparisons.filter(comparative="cooler"), [
"<Comparison: tiger is cooler than cheetah>",
"<Comparison: tiger is cooler than bear>",
], ordered=False)
# Filtering and deleting works
subjective = ["cooler"]
tiger.comparisons.filter(comparative__in=subjective).delete()
self.assertQuerysetEqual(Comparison.objects.all(), [
"<Comparison: cheetah is faster than tiger>",
"<Comparison: tiger is stronger than cheetah>"
], ordered=False)
# If we delete cheetah, Comparisons with cheetah as 'first_obj' will be
# deleted since Animal has an explicit GenericRelation to Comparison
# through first_obj. Comparisons with cheetah as 'other_obj' will not
# be deleted.
cheetah.delete()
self.assertQuerysetEqual(Comparison.objects.all(), [
"<Comparison: tiger is stronger than None>"
])
    def test_gfk_subclasses(self):
        # GenericForeignKey should work with subclasses (see #8309).
        # ValuableTaggedItem subclasses TaggedItem and inherits its GFK.
        quartz = Mineral.objects.create(name="Quartz", hardness=7)
        valuedtag = ValuableTaggedItem.objects.create(
            content_object=quartz, tag="shiny", value=10
        )
        self.assertEqual(valuedtag.content_object, quartz)
def test_generic_inline_formsets(self):
GenericFormSet = generic_inlineformset_factory(TaggedItem, extra=1)
formset = GenericFormSet()
self.assertHTMLEqual(''.join(form.as_p() for form in formset.forms), """<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-tag">Tag:</label> <input id="id_generic_relations-taggeditem-content_type-object_id-0-tag" type="text" name="generic_relations-taggeditem-content_type-object_id-0-tag" maxlength="50" /></p>
<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-DELETE">Delete:</label> <input type="checkbox" name="generic_relations-taggeditem-content_type-object_id-0-DELETE" id="id_generic_relations-taggeditem-content_type-object_id-0-DELETE" /><input type="hidden" name="generic_relations-taggeditem-content_type-object_id-0-id" id="id_generic_relations-taggeditem-content_type-object_id-0-id" /></p>""")
formset = GenericFormSet(instance=Animal())
self.assertHTMLEqual(''.join(form.as_p() for form in formset.forms), """<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-tag">Tag:</label> <input id="id_generic_relations-taggeditem-content_type-object_id-0-tag" type="text" name="generic_relations-taggeditem-content_type-object_id-0-tag" maxlength="50" /></p>
<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-DELETE">Delete:</label> <input type="checkbox" name="generic_relations-taggeditem-content_type-object_id-0-DELETE" id="id_generic_relations-taggeditem-content_type-object_id-0-DELETE" /><input type="hidden" name="generic_relations-taggeditem-content_type-object_id-0-id" id="id_generic_relations-taggeditem-content_type-object_id-0-id" /></p>""")
platypus = Animal.objects.create(
common_name="Platypus", latin_name="Ornithorhynchus anatinus"
)
platypus.tags.create(tag="shiny")
GenericFormSet = generic_inlineformset_factory(TaggedItem, extra=1)
formset = GenericFormSet(instance=platypus)
tagged_item_id = TaggedItem.objects.get(
tag='shiny', object_id=platypus.id
).id
self.assertHTMLEqual(''.join(form.as_p() for form in formset.forms), """<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-tag">Tag:</label> <input id="id_generic_relations-taggeditem-content_type-object_id-0-tag" type="text" name="generic_relations-taggeditem-content_type-object_id-0-tag" value="shiny" maxlength="50" /></p>
<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-DELETE">Delete:</label> <input type="checkbox" name="generic_relations-taggeditem-content_type-object_id-0-DELETE" id="id_generic_relations-taggeditem-content_type-object_id-0-DELETE" /><input type="hidden" name="generic_relations-taggeditem-content_type-object_id-0-id" value="%s" id="id_generic_relations-taggeditem-content_type-object_id-0-id" /></p><p><label for="id_generic_relations-taggeditem-content_type-object_id-1-tag">Tag:</label> <input id="id_generic_relations-taggeditem-content_type-object_id-1-tag" type="text" name="generic_relations-taggeditem-content_type-object_id-1-tag" maxlength="50" /></p>
<p><label for="id_generic_relations-taggeditem-content_type-object_id-1-DELETE">Delete:</label> <input type="checkbox" name="generic_relations-taggeditem-content_type-object_id-1-DELETE" id="id_generic_relations-taggeditem-content_type-object_id-1-DELETE" /><input type="hidden" name="generic_relations-taggeditem-content_type-object_id-1-id" id="id_generic_relations-taggeditem-content_type-object_id-1-id" /></p>""" % tagged_item_id)
lion = Animal.objects.create(common_name="Lion", latin_name="Panthera leo")
formset = GenericFormSet(instance=lion, prefix='x')
self.assertHTMLEqual(''.join(form.as_p() for form in formset.forms), """<p><label for="id_x-0-tag">Tag:</label> <input id="id_x-0-tag" type="text" name="x-0-tag" maxlength="50" /></p>
<p><label for="id_x-0-DELETE">Delete:</label> <input type="checkbox" name="x-0-DELETE" id="id_x-0-DELETE" /><input type="hidden" name="x-0-id" id="id_x-0-id" /></p>""")
    def test_gfk_manager(self):
        """GenericForeignKey resolution must bypass the default manager."""
        # GenericForeignKey should not use the default manager (which may filter objects) #16048
        tailless = Gecko.objects.create(has_tail=False)
        tag = TaggedItem.objects.create(content_object=tailless, tag="lizard")
        # Must resolve even though Gecko's default manager would hide this row.
        self.assertEqual(tag.content_object, tailless)
    def test_subclasses_with_gen_rel(self):
        """
        Test that concrete model subclasses with generic relations work
        correctly (ticket 11263).
        """
        granite = Rock.objects.create(name='granite', hardness=5)
        TaggedItem.objects.create(content_object=granite, tag="countertop")
        # The reverse generic relation declared on the parent model must be
        # queryable through the concrete subclass.
        self.assertEqual(Rock.objects.filter(tags__tag="countertop").count(), 1)
    def test_generic_inline_formsets_initial(self):
        """
        Test for #17927 Initial values support for BaseGenericInlineFormSet.
        """
        quartz = Mineral.objects.create(name="Quartz", hardness=7)
        GenericFormSet = generic_inlineformset_factory(TaggedItem, extra=1)
        ctype = ContentType.objects.get_for_model(quartz)
        # Initial data must include the generic FK pair (content_type, object_id).
        initial_data = [{
            'tag': 'lizard',
            'content_type': ctype.pk,
            'object_id': quartz.pk,
        }]
        formset = GenericFormSet(initial=initial_data)
        self.assertEqual(formset.forms[0].initial, initial_data[0])
    def test_get_or_create(self):
        """get_or_create accepts the virtual content_object field in defaults."""
        # get_or_create should work with virtual fields (content_object)
        quartz = Mineral.objects.create(name="Quartz", hardness=7)
        tag, created = TaggedItem.objects.get_or_create(tag="shiny",
            defaults={'content_object': quartz})
        self.assertTrue(created)
        self.assertEqual(tag.tag, "shiny")
        self.assertEqual(tag.content_object.id, quartz.id)
    def test_update_or_create_defaults(self):
        """update_or_create accepts content_object in defaults for both paths."""
        # update_or_create should work with virtual fields (content_object)
        quartz = Mineral.objects.create(name="Quartz", hardness=7)
        diamond = Mineral.objects.create(name="Diamond", hardness=7)
        # First call creates the row pointing at quartz...
        tag, created = TaggedItem.objects.update_or_create(tag="shiny",
            defaults={'content_object': quartz})
        self.assertTrue(created)
        self.assertEqual(tag.content_object.id, quartz.id)
        # ...second call updates the same row to point at diamond.
        tag, created = TaggedItem.objects.update_or_create(tag="shiny",
            defaults={'content_object': diamond})
        self.assertFalse(created)
        self.assertEqual(tag.content_object.id, diamond.id)
    def test_query_content_type(self):
        """Filtering directly on a GenericForeignKey raises FieldError."""
        msg = "Field 'content_object' does not generate an automatic reverse relation"
        with self.assertRaisesMessage(FieldError, msg):
            TaggedItem.objects.get(content_object='')
class CustomWidget(forms.TextInput):
    # Marker subclass: lets tests assert that a Meta-declared widget is used.
    pass
class TaggedItemForm(forms.ModelForm):
    """ModelForm for TaggedItem that overrides the 'tag' widget via Meta."""
    class Meta:
        model = TaggedItem
        fields = '__all__'
        widgets = {'tag': CustomWidget}
class GenericInlineFormsetTest(TestCase):
    """Regression tests for generic_inlineformset_factory behavior."""
    def test_generic_inlineformset_factory(self):
        """
        Regression for #14572: Using base forms with widgets
        defined in Meta should not raise errors.
        """
        Formset = generic_inlineformset_factory(TaggedItem, TaggedItemForm)
        form = Formset().forms[0]
        self.assertIsInstance(form['tag'].field.widget, CustomWidget)
    def test_save_new_uses_form_save(self):
        """
        Regression for #16260: save_new should call form.save()
        """
        class SaveTestForm(forms.ModelForm):
            def save(self, *args, **kwargs):
                self.instance.saved_by = "custom method"
                return super(SaveTestForm, self).save(*args, **kwargs)
        Formset = generic_inlineformset_factory(
            ForProxyModelModel, fields='__all__', form=SaveTestForm)
        instance = ProxyRelatedModel.objects.create()
        data = {
            'form-TOTAL_FORMS': '1',
            'form-INITIAL_FORMS': '0',
            'form-MAX_NUM_FORMS': '',
            'form-0-title': 'foo',
        }
        formset = Formset(data, instance=instance, prefix='form')
        self.assertTrue(formset.is_valid())
        # The attribute set in the overridden save() proves it was called.
        new_obj = formset.save()[0]
        self.assertEqual(new_obj.saved_by, "custom method")
    def test_save_new_for_proxy(self):
        # With for_concrete_model=False the saved object's GFK should point
        # at the proxy instance itself.
        Formset = generic_inlineformset_factory(ForProxyModelModel,
            fields='__all__', for_concrete_model=False)
        instance = ProxyRelatedModel.objects.create()
        data = {
            'form-TOTAL_FORMS': '1',
            'form-INITIAL_FORMS': '0',
            'form-MAX_NUM_FORMS': '',
            'form-0-title': 'foo',
        }
        formset = Formset(data, instance=instance, prefix='form')
        self.assertTrue(formset.is_valid())
        new_obj, = formset.save()
        self.assertEqual(new_obj.obj, instance)
    def test_save_new_for_concrete(self):
        # With for_concrete_model=True the GFK resolves to the concrete
        # parent model, never the proxy class.
        Formset = generic_inlineformset_factory(ForProxyModelModel,
            fields='__all__', for_concrete_model=True)
        instance = ProxyRelatedModel.objects.create()
        data = {
            'form-TOTAL_FORMS': '1',
            'form-INITIAL_FORMS': '0',
            'form-MAX_NUM_FORMS': '',
            'form-0-title': 'foo',
        }
        formset = Formset(data, instance=instance, prefix='form')
        self.assertTrue(formset.is_valid())
        new_obj, = formset.save()
        self.assertNotIsInstance(new_obj.obj, ProxyRelatedModel)
class ProxyRelatedModelTest(TestCase):
    """Generic relations to/from proxy models (for_concrete_model flag)."""
    def test_default_behavior(self):
        """
        The default for for_concrete_model should be True
        """
        base = ForConcreteModelModel()
        base.obj = rel = ProxyRelatedModel.objects.create()
        base.save()
        base = ForConcreteModelModel.objects.get(pk=base.pk)
        # Assigning a proxy instance stores (and returns) the concrete model.
        rel = ConcreteRelatedModel.objects.get(pk=rel.pk)
        self.assertEqual(base.obj, rel)
    def test_works_normally(self):
        """
        When for_concrete_model is False, we should still be able to get
        an instance of the concrete class.
        """
        base = ForProxyModelModel()
        base.obj = rel = ConcreteRelatedModel.objects.create()
        base.save()
        base = ForProxyModelModel.objects.get(pk=base.pk)
        self.assertEqual(base.obj, rel)
    def test_proxy_is_returned(self):
        """
        Instances of the proxy should be returned when
        for_concrete_model is False.
        """
        base = ForProxyModelModel()
        base.obj = ProxyRelatedModel.objects.create()
        base.save()
        base = ForProxyModelModel.objects.get(pk=base.pk)
        self.assertIsInstance(base.obj, ProxyRelatedModel)
    def test_query(self):
        # Reverse lookup through the generic relation by related object id.
        base = ForProxyModelModel()
        base.obj = rel = ConcreteRelatedModel.objects.create()
        base.save()
        self.assertEqual(rel, ConcreteRelatedModel.objects.get(bases__id=base.id))
    def test_query_proxy(self):
        # Same reverse lookup, but issued via the proxy manager.
        base = ForProxyModelModel()
        base.obj = rel = ProxyRelatedModel.objects.create()
        base.save()
        self.assertEqual(rel, ProxyRelatedModel.objects.get(bases__id=base.id))
    def test_generic_relation(self):
        base = ForProxyModelModel()
        base.obj = ProxyRelatedModel.objects.create()
        base.save()
        base = ForProxyModelModel.objects.get(pk=base.pk)
        rel = ProxyRelatedModel.objects.get(pk=base.obj.pk)
        self.assertEqual(base, rel.bases.get())
    def test_generic_relation_set(self):
        base = ForProxyModelModel()
        base.obj = ConcreteRelatedModel.objects.create()
        base.save()
        newrel = ConcreteRelatedModel.objects.create()
        # Direct assignment to the reverse generic relation.
        newrel.bases = [base]
        newrel = ConcreteRelatedModel.objects.get(pk=newrel.pk)
        self.assertEqual(base, newrel.bases.get())
class TestInitWithNoneArgument(TestCase):
    """Passing content_object=None at model construction time."""
    def test_none_not_allowed(self):
        # TaggedItem requires a content_type, initializing with None should
        # raise a ValueError.
        with six.assertRaisesRegex(self, ValueError,
                            'Cannot assign None: "TaggedItem.content_type" does not allow null values'):
            TaggedItem(content_object=None)
    def test_none_allowed(self):
        # AllowsNullGFK doesn't require a content_type, so None argument should
        # also be allowed.
        AllowsNullGFK(content_object=None)
| bsd-3-clause |
saradbowman/osf.io | osf/models/action.py | 5 | 1878 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from include import IncludeManager
from osf.models.base import BaseModel, ObjectIDMixin
from osf.utils.workflows import DefaultStates, DefaultTriggers, ReviewStates, ReviewTriggers
from osf.utils import permissions
class BaseAction(ObjectIDMixin, BaseModel):
    """Abstract audit-trail entry for a workflow state transition.

    Records who triggered a transition (``creator``), which ``trigger``
    fired, and the ``from_state``/``to_state`` pair.  Concrete subclasses
    must supply the ``target`` object the action applies to.
    """
    class Meta:
        abstract = True
    objects = IncludeManager()
    creator = models.ForeignKey('OSFUser', related_name='+', on_delete=models.CASCADE)
    trigger = models.CharField(max_length=31, choices=DefaultTriggers.choices())
    from_state = models.CharField(max_length=31, choices=DefaultStates.choices())
    to_state = models.CharField(max_length=31, choices=DefaultStates.choices())
    comment = models.TextField(blank=True)
    # Soft-delete flag -- rows are flagged rather than removed, keeping the trail.
    is_deleted = models.BooleanField(default=False)
    # NOTE(review): presumably marks system-generated (non-human) actions -- confirm with callers.
    auto = models.BooleanField(default=False)
    @property
    def target(self):
        # Subclasses override this with a real ForeignKey field.
        raise NotImplementedError()
class ReviewAction(BaseAction):
    """Action taken on a Preprint; narrows the base fields to the review state machine."""
    target = models.ForeignKey('Preprint', related_name='actions', on_delete=models.CASCADE)
    trigger = models.CharField(max_length=31, choices=ReviewTriggers.choices())
    from_state = models.CharField(max_length=31, choices=ReviewStates.choices())
    to_state = models.CharField(max_length=31, choices=ReviewStates.choices())
class NodeRequestAction(BaseAction):
    """Action taken on a NodeRequest."""
    target = models.ForeignKey('NodeRequest', related_name='actions', on_delete=models.CASCADE)
    # Contributor permission level associated with this action (read by default).
    permissions = models.CharField(
        max_length=5,
        choices=[(permission, permission.title()) for permission in permissions.API_CONTRIBUTOR_PERMISSIONS],
        default=permissions.READ
    )
    visible = models.BooleanField(default=True)
class PreprintRequestAction(BaseAction):
    """Action taken on a PreprintRequest."""
    target = models.ForeignKey('PreprintRequest', related_name='actions', on_delete=models.CASCADE)
| apache-2.0 |
umitproject/tease-o-matic | django/conf/locale/id/formats.py | 355 | 1818 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j N Y'
DATETIME_FORMAT = "j N Y, G.i.s"
TIME_FORMAT = 'G.i.s'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'd-m-Y'
SHORT_DATETIME_FORMAT = 'd-m-Y G.i.s'
FIRST_DAY_OF_WEEK = 1  # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%d-%m-%y', '%d/%m/%y', # '25-10-09', '25/10/09'
    '%d-%m-%Y', '%d/%m/%Y', # '25-10-2009', '25/10/2009'
    '%d %b %Y', # '25 Oct 2006'
    '%d %B %Y', # '25 October 2006'
)
TIME_INPUT_FORMATS = (
    '%H.%M.%S', # '14.30.59'
    '%H.%M', # '14.30'
)
DATETIME_INPUT_FORMATS = (
    '%d-%m-%Y %H.%M.%S', # '25-10-2009 14.30.59'
    '%d-%m-%Y %H.%M', # '25-10-2009 14.30'
    '%d-%m-%Y', # '25-10-2009'
    '%d-%m-%y %H.%M.%S', # '25-10-09 14.30.59'
    '%d-%m-%y %H.%M', # '25-10-09 14.30'
    '%d-%m-%y', # '25-10-09'
    '%m/%d/%y %H.%M.%S', # '10/25/06 14.30.59'
    '%m/%d/%y %H.%M', # '10/25/06 14.30'
    '%m/%d/%y', # '10/25/06'
    '%m/%d/%Y %H.%M.%S', # '10/25/2009 14.30.59'
    '%m/%d/%Y %H.%M', # '10/25/2009 14.30'
    '%m/%d/%Y', # '10/25/2009'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| bsd-3-clause |
2014c2g5/2014c2 | exts/wsgi/static/Brython2.1.0-20140419-113919/Lib/calendar.py | 828 | 22940 | """Calendar printing functions
Note when comparing these calendars to the ones printed by cal(1): By
default, these calendars have Monday as the first day of the week, and
Sunday as the last (the European convention). Use setfirstweekday() to
set the first day of the week (0=Monday, 6=Sunday)."""
import sys
import datetime
import locale as _locale
__all__ = ["IllegalMonthError", "IllegalWeekdayError", "setfirstweekday",
           "firstweekday", "isleap", "leapdays", "weekday", "monthrange",
           "monthcalendar", "prmonth", "month", "prcal", "calendar",
           "timegm", "month_name", "month_abbr", "day_name", "day_abbr"]
# Exception raised for bad input (with string parameter for details)
# Kept as a plain alias of ValueError for backwards compatibility.
error = ValueError
# Exceptions raised for bad input
class IllegalMonthError(ValueError):
    """Raised when a month number falls outside the valid 1-12 range."""

    def __init__(self, month):
        # Keep the offending value available to callers.
        self.month = month

    def __str__(self):
        return "bad month number {!r}; must be 1-12".format(self.month)
class IllegalWeekdayError(ValueError):
    """Raised when a weekday number falls outside the valid 0-6 range."""

    def __init__(self, weekday):
        # Keep the offending value available to callers.
        self.weekday = weekday

    def __str__(self):
        return "bad weekday number {!r}; must be 0 (Monday) to 6 (Sunday)".format(self.weekday)
# Constants for months referenced later
January = 1
February = 2
# Number of days per month (except for February in leap years).
# Index 0 is a placeholder so mdays[month] works with 1-based month numbers.
mdays = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
# This module used to have hard-coded lists of day and month names, as
# English strings. The classes following emulate a read-only version of
# that, but supply localized names. Note that the values are computed
# fresh on each call, in case the user changes locale between calls.
class _localized_month:
    """Read-only, 1-based sequence of month names rendered with strftime.

    Index 0 maps to the empty string so month numbers 1-12 can be used
    directly.  Names are produced fresh on every access, so a locale
    change between calls is picked up automatically.
    """

    # Bound strftime methods for the first day of each month of 2001.
    _months = [datetime.date(2001, m + 1, 1).strftime for m in range(12)]
    _months.insert(0, lambda x: "")

    def __init__(self, format):
        self.format = format

    def __getitem__(self, i):
        entry = self._months[i]
        if not isinstance(i, slice):
            return entry(self.format)
        return [render(self.format) for render in entry]

    def __len__(self):
        return 13
class _localized_day:
    """Read-only sequence of weekday names (0=Monday) rendered with strftime."""

    # January 1, 2001, was a Monday, so the following week covers Mon-Sun.
    _days = [datetime.date(2001, 1, d + 1).strftime for d in range(7)]

    def __init__(self, format):
        self.format = format

    def __getitem__(self, i):
        entry = self._days[i]
        if not isinstance(i, slice):
            return entry(self.format)
        return [render(self.format) for render in entry]

    def __len__(self):
        return 7
# Full and abbreviated names of weekdays
day_name = _localized_day('%A')
day_abbr = _localized_day('%a')
# Full and abbreviated names of months (1-based arrays!!!)
month_name = _localized_month('%B')
month_abbr = _localized_month('%b')
# Constants for weekdays (0 = Monday ... 6 = Sunday, matching date.weekday()).
(MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY) = range(7)
def isleap(year):
    """Return True for leap years, False for non-leap years."""
    # Gregorian rule: every 400th year is a leap year, other centuries
    # are not, and every remaining multiple of 4 is.
    if year % 400 == 0:
        return True
    if year % 100 == 0:
        return False
    return year % 4 == 0
def leapdays(y1, y2):
    """Return number of leap years in range [y1, y2).

    Assume y1 <= y2."""
    def _count(y):
        # Leap years in [1, y]: every 4th, minus centuries, plus every 400th.
        return y // 4 - y // 100 + y // 400
    return _count(y2 - 1) - _count(y1 - 1)
def weekday(year, month, day):
    """Return weekday (0-6 ~ Mon-Sun) for year (1970-...), month (1-12),
    day (1-31)."""
    # datetime.date already uses the 0=Monday convention this module needs.
    the_date = datetime.date(year, month, day)
    return the_date.weekday()
def monthrange(year, month):
    """Return weekday (0-6 ~ Mon-Sun) and number of days (28-31) for
    year, month."""
    if not 1 <= month <= 12:
        raise IllegalMonthError(month)
    first_weekday = weekday(year, month, 1)
    length = mdays[month]
    # February gains a day in leap years.
    if month == February and isleap(year):
        length += 1
    return first_weekday, length
class Calendar(object):
    """
    Base calendar class. This class doesn't do any formatting. It simply
    provides data to subclasses.
    """
    def __init__(self, firstweekday=0):
        # This assignment goes through the property below and therefore
        # stores the value in self._firstweekday.
        self.firstweekday = firstweekday # 0 = Monday, 6 = Sunday
    def getfirstweekday(self):
        # Normalise so that any int set by the caller maps into 0-6.
        return self._firstweekday % 7
    def setfirstweekday(self, firstweekday):
        self._firstweekday = firstweekday
    firstweekday = property(getfirstweekday, setfirstweekday)
    def iterweekdays(self):
        """
        Return a iterator for one week of weekday numbers starting with the
        configured first one.
        """
        for i in range(self.firstweekday, self.firstweekday + 7):
            yield i%7
    def itermonthdates(self, year, month):
        """
        Return an iterator for one month. The iterator will yield datetime.date
        values and will always iterate through complete weeks, so it will yield
        dates outside the specified month.
        """
        date = datetime.date(year, month, 1)
        # Go back to the beginning of the week
        days = (date.weekday() - self.firstweekday) % 7
        date -= datetime.timedelta(days=days)
        oneday = datetime.timedelta(days=1)
        while True:
            yield date
            try:
                date += oneday
            except OverflowError:
                # Adding one day could fail after datetime.MAXYEAR
                break
            # Stop at the first configured-week boundary after the month ends.
            if date.month != month and date.weekday() == self.firstweekday:
                break
    def itermonthdays2(self, year, month):
        """
        Like itermonthdates(), but will yield (day number, weekday number)
        tuples. For days outside the specified month the day number is 0.
        """
        for date in self.itermonthdates(year, month):
            if date.month != month:
                yield (0, date.weekday())
            else:
                yield (date.day, date.weekday())
    def itermonthdays(self, year, month):
        """
        Like itermonthdates(), but will yield day numbers. For days outside
        the specified month the day number is 0.
        """
        for date in self.itermonthdates(year, month):
            if date.month != month:
                yield 0
            else:
                yield date.day
    def monthdatescalendar(self, year, month):
        """
        Return a matrix (list of lists) representing a month's calendar.
        Each row represents a week; week entries are datetime.date values.
        """
        dates = list(self.itermonthdates(year, month))
        return [ dates[i:i+7] for i in range(0, len(dates), 7) ]
    def monthdays2calendar(self, year, month):
        """
        Return a matrix representing a month's calendar.
        Each row represents a week; week entries are
        (day number, weekday number) tuples. Day numbers outside this month
        are zero.
        """
        days = list(self.itermonthdays2(year, month))
        return [ days[i:i+7] for i in range(0, len(days), 7) ]
    def monthdayscalendar(self, year, month):
        """
        Return a matrix representing a month's calendar.
        Each row represents a week; days outside this month are zero.
        """
        days = list(self.itermonthdays(year, month))
        return [ days[i:i+7] for i in range(0, len(days), 7) ]
    def yeardatescalendar(self, year, width=3):
        """
        Return the data for the specified year ready for formatting. The return
        value is a list of month rows. Each month row contains up to width months.
        Each month contains between 4 and 6 weeks and each week contains 1-7
        days. Days are datetime.date objects.
        """
        months = [
            self.monthdatescalendar(year, i)
            for i in range(January, January+12)
        ]
        return [months[i:i+width] for i in range(0, len(months), width) ]
    def yeardays2calendar(self, year, width=3):
        """
        Return the data for the specified year ready for formatting (similar to
        yeardatescalendar()). Entries in the week lists are
        (day number, weekday number) tuples. Day numbers outside this month are
        zero.
        """
        months = [
            self.monthdays2calendar(year, i)
            for i in range(January, January+12)
        ]
        return [months[i:i+width] for i in range(0, len(months), width) ]
    def yeardayscalendar(self, year, width=3):
        """
        Return the data for the specified year ready for formatting (similar to
        yeardatescalendar()). Entries in the week lists are day numbers.
        Day numbers outside this month are zero.
        """
        months = [
            self.monthdayscalendar(year, i)
            for i in range(January, January+12)
        ]
        return [months[i:i+width] for i in range(0, len(months), width) ]
class TextCalendar(Calendar):
    """
    Subclass of Calendar that outputs a calendar as a simple plain text
    similar to the UNIX program cal.
    """
    def prweek(self, theweek, width):
        """
        Print a single week (no newline).
        """
        # end=' ' keeps the cursor on the line; note it also emits a
        # trailing space (artifact of the Python 2 "print x," conversion).
        print(self.formatweek(theweek, width), end=' ')
    def formatday(self, day, weekday, width):
        """
        Returns a formatted day.
        """
        if day == 0:
            s = ''
        else:
            s = '%2i' % day # right-align single-digit days
        return s.center(width)
    def formatweek(self, theweek, width):
        """
        Returns a single week in a string (no newline).
        """
        return ' '.join(self.formatday(d, wd, width) for (d, wd) in theweek)
    def formatweekday(self, day, width):
        """
        Returns a formatted week day name.
        """
        # Full names only fit when the column is at least 9 chars wide.
        if width >= 9:
            names = day_name
        else:
            names = day_abbr
        return names[day][:width].center(width)
    def formatweekheader(self, width):
        """
        Return a header for a week.
        """
        return ' '.join(self.formatweekday(i, width) for i in self.iterweekdays())
    def formatmonthname(self, theyear, themonth, width, withyear=True):
        """
        Return a formatted month name.
        """
        s = month_name[themonth]
        if withyear:
            s = "%s %r" % (s, theyear)
        return s.center(width)
    def prmonth(self, theyear, themonth, w=0, l=0):
        """
        Print a month's calendar.
        """
        print(self.formatmonth(theyear, themonth, w, l), end=' ')
    def formatmonth(self, theyear, themonth, w=0, l=0):
        """
        Return a month's calendar string (multi-line).

        w is the date column width, l the number of newlines per week row.
        """
        w = max(2, w)
        l = max(1, l)
        s = self.formatmonthname(theyear, themonth, 7 * (w + 1) - 1)
        s = s.rstrip()
        s += '\n' * l
        s += self.formatweekheader(w).rstrip()
        s += '\n' * l
        for week in self.monthdays2calendar(theyear, themonth):
            s += self.formatweek(week, w).rstrip()
            s += '\n' * l
        return s
    def formatyear(self, theyear, w=2, l=1, c=6, m=3):
        """
        Returns a year's calendar as a multi-line string.

        w/l as in formatmonth(); c is the spacing between month columns
        and m the number of months per row.
        """
        w = max(2, w)
        l = max(1, l)
        c = max(2, c)
        colwidth = (w + 1) * 7 - 1
        v = []
        a = v.append
        a(repr(theyear).center(colwidth*m+c*(m-1)).rstrip())
        a('\n'*l)
        header = self.formatweekheader(w)
        for (i, row) in enumerate(self.yeardays2calendar(theyear, m)):
            # months in this row
            months = range(m*i+1, min(m*(i+1)+1, 13))
            a('\n'*l)
            names = (self.formatmonthname(theyear, k, colwidth, False)
                     for k in months)
            a(formatstring(names, colwidth, c).rstrip())
            a('\n'*l)
            headers = (header for k in months)
            a(formatstring(headers, colwidth, c).rstrip())
            a('\n'*l)
            # max number of weeks for this row
            height = max(len(cal) for cal in row)
            for j in range(height):
                weeks = []
                for cal in row:
                    # Months with fewer weeks get blank filler rows.
                    if j >= len(cal):
                        weeks.append('')
                    else:
                        weeks.append(self.formatweek(cal[j], w))
                a(formatstring(weeks, colwidth, c).rstrip())
                a('\n' * l)
        return ''.join(v)
    def pryear(self, theyear, w=0, l=0, c=6, m=3):
        """Print a year's calendar."""
        print(self.formatyear(theyear, w, l, c, m))
class HTMLCalendar(Calendar):
    """
    This calendar returns complete HTML pages.
    """
    # CSS classes for the day <td>s
    cssclasses = ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]
    def formatday(self, day, weekday):
        """
        Return a day as a table cell.
        """
        if day == 0:
            return '<td class="noday"> </td>' # day outside month
        else:
            return '<td class="%s">%d</td>' % (self.cssclasses[weekday], day)
    def formatweek(self, theweek):
        """
        Return a complete week as a table row.
        """
        s = ''.join(self.formatday(d, wd) for (d, wd) in theweek)
        return '<tr>%s</tr>' % s
    def formatweekday(self, day):
        """
        Return a weekday name as a table header.
        """
        return '<th class="%s">%s</th>' % (self.cssclasses[day], day_abbr[day])
    def formatweekheader(self):
        """
        Return a header for a week as a table row.
        """
        s = ''.join(self.formatweekday(i) for i in self.iterweekdays())
        return '<tr>%s</tr>' % s
    def formatmonthname(self, theyear, themonth, withyear=True):
        """
        Return a month name as a table row.
        """
        if withyear:
            s = '%s %s' % (month_name[themonth], theyear)
        else:
            s = '%s' % month_name[themonth]
        return '<tr><th colspan="7" class="month">%s</th></tr>' % s
    def formatmonth(self, theyear, themonth, withyear=True):
        """
        Return a formatted month as a table.
        """
        v = []
        a = v.append
        a('<table border="0" cellpadding="0" cellspacing="0" class="month">')
        a('\n')
        a(self.formatmonthname(theyear, themonth, withyear=withyear))
        a('\n')
        a(self.formatweekheader())
        a('\n')
        for week in self.monthdays2calendar(theyear, themonth):
            a(self.formatweek(week))
            a('\n')
        a('</table>')
        a('\n')
        return ''.join(v)
    def formatyear(self, theyear, width=3):
        """
        Return a formatted year as a table of tables.

        width is the number of months per table row.
        """
        v = []
        a = v.append
        width = max(width, 1)
        a('<table border="0" cellpadding="0" cellspacing="0" class="year">')
        a('\n')
        a('<tr><th colspan="%d" class="year">%s</th></tr>' % (width, theyear))
        for i in range(January, January+12, width):
            # months in this row
            months = range(i, min(i+width, 13))
            a('<tr>')
            for m in months:
                a('<td>')
                a(self.formatmonth(theyear, m, withyear=False))
                a('</td>')
            a('</tr>')
        a('</table>')
        return ''.join(v)
    def formatyearpage(self, theyear, width=3, css='calendar.css', encoding=None):
        """
        Return a formatted year as a complete HTML page.

        Returns bytes encoded with *encoding* (default: the interpreter's
        default encoding), using xmlcharrefreplace for unencodable names.
        """
        if encoding is None:
            encoding = sys.getdefaultencoding()
        v = []
        a = v.append
        a('<?xml version="1.0" encoding="%s"?>\n' % encoding)
        a('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n')
        a('<html>\n')
        a('<head>\n')
        a('<meta http-equiv="Content-Type" content="text/html; charset=%s" />\n' % encoding)
        if css is not None:
            a('<link rel="stylesheet" type="text/css" href="%s" />\n' % css)
        a('<title>Calendar for %d</title>\n' % theyear)
        a('</head>\n')
        a('<body>\n')
        a(self.formatyear(theyear, width))
        a('</body>\n')
        a('</html>\n')
        return ''.join(v).encode(encoding, "xmlcharrefreplace")
class different_locale:
    """Context manager that temporarily switches LC_TIME to another locale.

    The previous LC_TIME locale is captured on entry and restored on exit.
    """
    def __init__(self, locale):
        self.locale = locale
    def __enter__(self):
        self.oldlocale = _locale.getlocale(_locale.LC_TIME)
        _locale.setlocale(_locale.LC_TIME, self.locale)
    def __exit__(self, *args):
        _locale.setlocale(_locale.LC_TIME, self.oldlocale)
class LocaleTextCalendar(TextCalendar):
    """
    This class can be passed a locale name in the constructor and will return
    month and weekday names in the specified locale. If this locale includes
    an encoding all strings containing month and weekday names will be returned
    as unicode.
    """
    def __init__(self, firstweekday=0, locale=None):
        TextCalendar.__init__(self, firstweekday)
        if locale is None:
            locale = _locale.getdefaultlocale()
        self.locale = locale
    def formatweekday(self, day, width):
        # Same as TextCalendar.formatweekday, but rendered under self.locale.
        with different_locale(self.locale):
            if width >= 9:
                names = day_name
            else:
                names = day_abbr
            name = names[day]
            return name[:width].center(width)
    def formatmonthname(self, theyear, themonth, width, withyear=True):
        # Same as TextCalendar.formatmonthname, but rendered under self.locale.
        with different_locale(self.locale):
            s = month_name[themonth]
            if withyear:
                s = "%s %r" % (s, theyear)
            return s.center(width)
class LocaleHTMLCalendar(HTMLCalendar):
    """
    This class can be passed a locale name in the constructor and will return
    month and weekday names in the specified locale. If this locale includes
    an encoding all strings containing month and weekday names will be returned
    as unicode.
    """
    def __init__(self, firstweekday=0, locale=None):
        HTMLCalendar.__init__(self, firstweekday)
        if locale is None:
            locale = _locale.getdefaultlocale()
        self.locale = locale
    def formatweekday(self, day):
        # Same as HTMLCalendar.formatweekday, but rendered under self.locale.
        with different_locale(self.locale):
            s = day_abbr[day]
            return '<th class="%s">%s</th>' % (self.cssclasses[day], s)
    def formatmonthname(self, theyear, themonth, withyear=True):
        # Same as HTMLCalendar.formatmonthname, but rendered under self.locale.
        with different_locale(self.locale):
            s = month_name[themonth]
            if withyear:
                s = '%s %s' % (s, theyear)
            return '<tr><th colspan="7" class="month">%s</th></tr>' % s
# Support for old module level interface
# All module-level functions delegate to this shared TextCalendar instance.
c = TextCalendar()
firstweekday = c.getfirstweekday
def setfirstweekday(firstweekday):
    """Set the module-level first weekday (0=Monday ... 6=Sunday)."""
    # Validate here; the Calendar property itself accepts any int.
    if not MONDAY <= firstweekday <= SUNDAY:
        raise IllegalWeekdayError(firstweekday)
    c.firstweekday = firstweekday
monthcalendar = c.monthdayscalendar
prweek = c.prweek
week = c.formatweek
weekheader = c.formatweekheader
prmonth = c.prmonth
month = c.formatmonth
calendar = c.formatyear
prcal = c.pryear
# Spacing of month columns for multi-column year calendar
_colwidth = 7*3 - 1 # Amount printed by prweek()
_spacing = 6 # Number of spaces between columns
def format(cols, colwidth=_colwidth, spacing=_spacing):
"""Prints multi-column formatting for year calendars"""
print(formatstring(cols, colwidth, spacing))
def formatstring(cols, colwidth=_colwidth, spacing=_spacing):
"""Returns a string formatted from n strings, centered within n columns."""
spacing *= ' '
return spacing.join(c.center(colwidth) for c in cols)
EPOCH = 1970
_EPOCH_ORD = datetime.date(EPOCH, 1, 1).toordinal()


def timegm(tuple):
    """Unrelated but handy function to calculate Unix timestamp from GMT."""
    year, month, day, hour, minute, second = tuple[:6]
    # Whole days since the Unix epoch, then fold in the time of day.
    day_count = datetime.date(year, month, 1).toordinal() - _EPOCH_ORD + (day - 1)
    return ((day_count * 24 + hour) * 60 + minute) * 60 + second
def main(args):
    """Command-line entry point: print a text or HTML calendar.

    args is a sys.argv-style list; optional positional arguments are
    [year [month]] (month is text-output only).
    """
    import optparse
    parser = optparse.OptionParser(usage="usage: %prog [options] [year [month]]")
    parser.add_option(
        "-w", "--width",
        dest="width", type="int", default=2,
        help="width of date column (default 2, text only)"
    )
    parser.add_option(
        "-l", "--lines",
        dest="lines", type="int", default=1,
        help="number of lines for each week (default 1, text only)"
    )
    parser.add_option(
        "-s", "--spacing",
        dest="spacing", type="int", default=6,
        help="spacing between months (default 6, text only)"
    )
    parser.add_option(
        "-m", "--months",
        dest="months", type="int", default=3,
        help="months per row (default 3, text only)"
    )
    parser.add_option(
        "-c", "--css",
        dest="css", default="calendar.css",
        help="CSS to use for page (html only)"
    )
    parser.add_option(
        "-L", "--locale",
        dest="locale", default=None,
        help="locale to be used from month and weekday names"
    )
    parser.add_option(
        "-e", "--encoding",
        dest="encoding", default=None,
        help="Encoding to use for output."
    )
    parser.add_option(
        "-t", "--type",
        dest="type", default="text",
        choices=("text", "html"),
        help="output type (text or html)"
    )
    (options, args) = parser.parse_args(args)
    if options.locale and not options.encoding:
        # parser.error() prints the message and exits (status 2), so the
        # sys.exit(1) below is effectively unreachable.
        parser.error("if --locale is specified --encoding is required")
        sys.exit(1)
    locale = options.locale, options.encoding
    if options.type == "html":
        if options.locale:
            cal = LocaleHTMLCalendar(locale=locale)
        else:
            cal = HTMLCalendar()
        encoding = options.encoding
        if encoding is None:
            encoding = sys.getdefaultencoding()
        optdict = dict(encoding=encoding, css=options.css)
        # formatyearpage() returns bytes, so write via the binary buffer.
        write = sys.stdout.buffer.write
        if len(args) == 1:
            write(cal.formatyearpage(datetime.date.today().year, **optdict))
        elif len(args) == 2:
            write(cal.formatyearpage(int(args[1]), **optdict))
        else:
            parser.error("incorrect number of arguments")
            sys.exit(1)
    else:
        if options.locale:
            cal = LocaleTextCalendar(locale=locale)
        else:
            cal = TextCalendar()
        optdict = dict(w=options.width, l=options.lines)
        # Column spacing/months-per-row only apply to whole-year output.
        if len(args) != 3:
            optdict["c"] = options.spacing
            optdict["m"] = options.months
        if len(args) == 1:
            result = cal.formatyear(datetime.date.today().year, **optdict)
        elif len(args) == 2:
            result = cal.formatyear(int(args[1]), **optdict)
        elif len(args) == 3:
            result = cal.formatmonth(int(args[1]), int(args[2]), **optdict)
        else:
            parser.error("incorrect number of arguments")
            sys.exit(1)
        write = sys.stdout.write
        if options.encoding:
            result = result.encode(options.encoding)
            write = sys.stdout.buffer.write
        write(result)
if __name__ == "__main__":
    main(sys.argv)
| gpl-2.0 |
willingc/oh-mainline | vendor/packages/Django/tests/modeltests/update_only_fields/tests.py | 102 | 9781 | from __future__ import absolute_import
from django.db.models.signals import pre_save, post_save
from django.test import TestCase
from .models import Person, Employee, ProxyEmployee, Profile, Account
class UpdateOnlyFieldsTests(TestCase):
    def test_update_fields_basic(self):
        """save(update_fields=...) persists only the listed fields."""
        s = Person.objects.create(name='Sara', gender='F')
        self.assertEqual(s.gender, 'F')
        s.gender = 'M'
        s.name = 'Ian'
        s.save(update_fields=['name'])
        s = Person.objects.get(pk=s.pk)
        # The gender change was in-memory only and must not have been saved.
        self.assertEqual(s.gender, 'F')
        self.assertEqual(s.name, 'Ian')
    def test_update_fields_deferred(self):
        """Saving a deferred instance updates assigned fields in one query."""
        s = Person.objects.create(name='Sara', gender='F', pid=22)
        self.assertEqual(s.gender, 'F')
        s1 = Person.objects.defer("gender", "pid").get(pk=s.pk)
        s1.name = "Emily"
        s1.gender = "M"
        # A single UPDATE: assigned fields (even deferred ones) are included.
        with self.assertNumQueries(1):
            s1.save()
        s2 = Person.objects.get(pk=s1.pk)
        self.assertEqual(s2.name, "Emily")
        self.assertEqual(s2.gender, "M")
    def test_update_fields_only_1(self):
        """Saving an only()-loaded instance updates assigned fields in one query."""
        s = Person.objects.create(name='Sara', gender='F')
        self.assertEqual(s.gender, 'F')
        s1 = Person.objects.only('name').get(pk=s.pk)
        s1.name = "Emily"
        s1.gender = "M"
        with self.assertNumQueries(1):
            s1.save()
        s2 = Person.objects.get(pk=s1.pk)
        self.assertEqual(s2.name, "Emily")
        self.assertEqual(s2.gender, "M")
    def test_update_fields_only_2(self):
        """Explicit update_fields overrides the automatic deferred-field set."""
        s = Person.objects.create(name='Sara', gender='F', pid=22)
        self.assertEqual(s.gender, 'F')
        s1 = Person.objects.only('name').get(pk=s.pk)
        s1.name = "Emily"
        s1.gender = "M"
        # Two queries: fetching the deferred 'pid' value plus the UPDATE.
        with self.assertNumQueries(2):
            s1.save(update_fields=['pid'])
        s2 = Person.objects.get(pk=s1.pk)
        # Neither the name nor the gender change was persisted.
        self.assertEqual(s2.name, "Sara")
        self.assertEqual(s2.gender, "F")
    def test_update_fields_only_repeated(self):
        """Deferred-field bookkeeping lives on the instance, not the class."""
        s = Person.objects.create(name='Sara', gender='F')
        self.assertEqual(s.gender, 'F')
        s1 = Person.objects.only('name').get(pk=s.pk)
        s1.gender = 'M'
        with self.assertNumQueries(1):
            s1.save()
        # Test that the deferred class does not remember that gender was
        # set, instead the instance should remember this.
        s1 = Person.objects.only('name').get(pk=s.pk)
        with self.assertNumQueries(1):
            s1.save()
    def test_update_fields_inheritance_defer(self):
        """Deferred loading and save() cooperate across inherited models."""
        profile_boss = Profile.objects.create(name='Boss', salary=3000)
        e1 = Employee.objects.create(name='Sara', gender='F',
            employee_num=1, profile=profile_boss)
        e1 = Employee.objects.only('name').get(pk=e1.pk)
        e1.name = 'Linda'
        # One UPDATE hitting only the parent table that owns 'name'.
        with self.assertNumQueries(1):
            e1.save()
        self.assertEqual(Employee.objects.get(pk=e1.pk).name,
                         'Linda')
    def test_update_fields_fk_defer(self):
        """Assigning either the FK object or its raw id saves in one query."""
        profile_boss = Profile.objects.create(name='Boss', salary=3000)
        profile_receptionist = Profile.objects.create(name='Receptionist', salary=1000)
        e1 = Employee.objects.create(name='Sara', gender='F',
            employee_num=1, profile=profile_boss)
        e1 = Employee.objects.only('profile').get(pk=e1.pk)
        e1.profile = profile_receptionist
        with self.assertNumQueries(1):
            e1.save()
        self.assertEqual(Employee.objects.get(pk=e1.pk).profile, profile_receptionist)
        # Setting the underlying *_id attribute must behave the same way.
        e1.profile_id = profile_boss.pk
        with self.assertNumQueries(1):
            e1.save()
        self.assertEqual(Employee.objects.get(pk=e1.pk).profile, profile_boss)
def test_select_related_only_interaction(self):
    """An instance fetched via select_related()+only() saves the field
    values it loaded, overwriting newer DB state only for those loaded
    fields and leaving the rest intact."""
    profile_boss = Profile.objects.create(name='Boss', salary=3000)
    e1 = Employee.objects.create(name='Sara', gender='F',
        employee_num=1, profile=profile_boss)
    e1 = Employee.objects.only('profile__salary').select_related('profile').get(pk=e1.pk)
    # Change the row behind e1's back.
    profile_boss.name = 'Clerk'
    profile_boss.salary = 1000
    profile_boss.save()
    # The loaded salary of 3000 gets saved, the name of 'Clerk' isn't
    # overwritten.
    with self.assertNumQueries(1):
        e1.profile.save()
    reloaded_profile = Profile.objects.get(pk=profile_boss.pk)
    self.assertEqual(reloaded_profile.name, profile_boss.name)
    self.assertEqual(reloaded_profile.salary, 3000)
def test_update_fields_m2m(self):
    """Many-to-many fields are not stored on the model's row, so naming
    one in update_fields must raise ValueError."""
    profile_boss = Profile.objects.create(name='Boss', salary=3000)
    e1 = Employee.objects.create(name='Sara', gender='F',
        employee_num=1, profile=profile_boss)
    a1 = Account.objects.create(num=1)
    a2 = Account.objects.create(num=2)
    e1.accounts = [a1, a2]
    with self.assertRaises(ValueError):
        e1.save(update_fields=['accounts'])
def test_update_fields_inheritance(self):
    """update_fields routes the UPDATE to the correct table of a
    multi-table inheritance chain; both the FK field name ('profile')
    and its attname ('profile_id') are accepted."""
    profile_boss = Profile.objects.create(name='Boss', salary=3000)
    profile_receptionist = Profile.objects.create(name='Receptionist', salary=1000)
    e1 = Employee.objects.create(name='Sara', gender='F',
        employee_num=1, profile=profile_boss)
    e1.name = 'Ian'
    e1.gender = 'M'
    e1.save(update_fields=['name'])
    e2 = Employee.objects.get(pk=e1.pk)
    # Only 'name' was persisted; 'gender' kept its original value.
    self.assertEqual(e2.name, 'Ian')
    self.assertEqual(e2.gender, 'F')
    self.assertEqual(e2.profile, profile_boss)
    e2.profile = profile_receptionist
    e2.name = 'Sara'
    e2.save(update_fields=['profile'])
    e3 = Employee.objects.get(pk=e1.pk)
    self.assertEqual(e3.name, 'Ian')
    self.assertEqual(e3.profile, profile_receptionist)
    # Saving by attname hits only the child table: a single query.
    with self.assertNumQueries(1):
        e3.profile = profile_boss
        e3.save(update_fields=['profile_id'])
    e4 = Employee.objects.get(pk=e3.pk)
    self.assertEqual(e4.profile, profile_boss)
    self.assertEqual(e4.profile_id, profile_boss.pk)
def test_update_fields_inheritance_with_proxy_model(self):
    """update_fields behaves identically when the inherited model is
    accessed through a proxy model."""
    profile_boss = Profile.objects.create(name='Boss', salary=3000)
    profile_receptionist = Profile.objects.create(name='Receptionist', salary=1000)
    e1 = ProxyEmployee.objects.create(name='Sara', gender='F',
        employee_num=1, profile=profile_boss)
    e1.name = 'Ian'
    e1.gender = 'M'
    e1.save(update_fields=['name'])
    e2 = ProxyEmployee.objects.get(pk=e1.pk)
    # Only 'name' was persisted.
    self.assertEqual(e2.name, 'Ian')
    self.assertEqual(e2.gender, 'F')
    self.assertEqual(e2.profile, profile_boss)
    e2.profile = profile_receptionist
    e2.name = 'Sara'
    e2.save(update_fields=['profile'])
    e3 = ProxyEmployee.objects.get(pk=e1.pk)
    self.assertEqual(e3.name, 'Ian')
    self.assertEqual(e3.profile, profile_receptionist)
def test_update_fields_signals(self):
    """pre_save/post_save receivers receive the update_fields kwarg
    passed to save(), containing exactly the requested field names."""
    p = Person.objects.create(name='Sara', gender='F')
    pre_save_data = []
    def pre_save_receiver(**kwargs):
        pre_save_data.append(kwargs['update_fields'])
    pre_save.connect(pre_save_receiver)
    post_save_data = []
    def post_save_receiver(**kwargs):
        post_save_data.append(kwargs['update_fields'])
    post_save.connect(post_save_receiver)
    # Disconnect in a finally block so a failing assertion cannot leak
    # the receivers into other tests sharing the global signal registry.
    try:
        p.save(update_fields=['name'])
        self.assertEqual(len(pre_save_data), 1)
        self.assertEqual(len(pre_save_data[0]), 1)
        self.assertTrue('name' in pre_save_data[0])
        self.assertEqual(len(post_save_data), 1)
        self.assertEqual(len(post_save_data[0]), 1)
        self.assertTrue('name' in post_save_data[0])
    finally:
        pre_save.disconnect(pre_save_receiver)
        post_save.disconnect(post_save_receiver)
def test_update_fields_incorrect_params(self):
    """Unknown field names and a bare string (instead of an iterable of
    names) for update_fields raise ValueError."""
    s = Person.objects.create(name='Sara', gender='F')
    with self.assertRaises(ValueError):
        s.save(update_fields=['first_name'])
    # A string is iterable, but must still be rejected explicitly.
    with self.assertRaises(ValueError):
        s.save(update_fields="name")
def test_empty_update_fields(self):
    """save(update_fields=[]) is a complete no-op: no queries are run
    and no save signals fire."""
    s = Person.objects.create(name='Sara', gender='F')
    pre_save_data = []
    def pre_save_receiver(**kwargs):
        pre_save_data.append(kwargs['update_fields'])
    pre_save.connect(pre_save_receiver)
    post_save_data = []
    def post_save_receiver(**kwargs):
        post_save_data.append(kwargs['update_fields'])
    post_save.connect(post_save_receiver)
    # Disconnect in a finally block so a failing assertion cannot leak
    # the receivers into other tests sharing the global signal registry.
    try:
        # Save is skipped.
        with self.assertNumQueries(0):
            s.save(update_fields=[])
        # Signals were skipped, too...
        self.assertEqual(len(pre_save_data), 0)
        self.assertEqual(len(post_save_data), 0)
    finally:
        pre_save.disconnect(pre_save_receiver)
        post_save.disconnect(post_save_receiver)
def test_num_queries_inheritance(self):
    """With multi-table inheritance, restricting update_fields to one
    table's fields issues a single UPDATE; naming fields from both
    tables issues one UPDATE per table."""
    s = Employee.objects.create(name='Sara', gender='F')
    s.employee_num = 1
    s.name = 'Emily'
    with self.assertNumQueries(1):
        s.save(update_fields=['employee_num'])
    s = Employee.objects.get(pk=s.pk)
    self.assertEqual(s.employee_num, 1)
    self.assertEqual(s.name, 'Sara')
    s.employee_num = 2
    s.name = 'Emily'
    with self.assertNumQueries(1):
        s.save(update_fields=['name'])
    s = Employee.objects.get(pk=s.pk)
    self.assertEqual(s.name, 'Emily')
    self.assertEqual(s.employee_num, 1)
    # A little sanity check that we actually did updates...
    self.assertEqual(Employee.objects.count(), 1)
    self.assertEqual(Person.objects.count(), 1)
    # Fields live in two tables -> two UPDATEs.
    with self.assertNumQueries(2):
        s.save(update_fields=['name', 'employee_num'])
| agpl-3.0 |
dstockwell/blink | Tools/Scripts/webkitpy/tool/commands/commitannouncer.py | 39 | 2787 | # Copyright (c) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from optparse import make_option
import time
import traceback
from webkitpy.common.config.irc import update_wait_seconds
from webkitpy.tool.bot.commitannouncer import CommitAnnouncer, CommitAnnouncerThread
from webkitpy.tool.multicommandtool import AbstractDeclarativeCommand
_log = logging.getLogger(__name__)
class CommitAnnouncerCommand(AbstractDeclarativeCommand):
    """Command that runs an IRC bot announcing new git commits until
    interrupted or the bot thread dies (Python 2 codebase)."""
    name = "commit-announcer"
    help_text = "Start an IRC bot for announcing new git commits."
    show_in_main_help = True

    def __init__(self):
        # --irc-password lets the bot authenticate without prompting.
        options = [
            make_option("--irc-password", default=None, help="Specify IRC password to use."),
        ]
        AbstractDeclarativeCommand.__init__(self, options)

    def execute(self, options, args, tool):
        """Poll for new commits while the bot thread is alive; always
        attempt a clean shutdown on exit, interrupt, or error."""
        bot_thread = CommitAnnouncerThread(tool, options.irc_password)
        bot_thread.start()
        _log.info("Bot started")
        try:
            while bot_thread.is_alive():
                bot_thread.bot.post_new_commits()
                time.sleep(update_wait_seconds)
        except KeyboardInterrupt:
            _log.error("Terminated by keyboard interrupt")
        except Exception, e:  # Python 2 syntax; 'e' unused, traceback logged below.
            _log.error("Unexpected error:")
            _log.error(traceback.format_exc())
        if bot_thread.is_alive():
            _log.info("Disconnecting bot")
            bot_thread.stop()
        else:
            _log.info("Bot offline")
        _log.info("Done")
| bsd-3-clause |
alcobar/asuswrt-merlin | release/src-rt-6.x.4708/toolchains/hndtools-armeabi-2011.09/arm-none-eabi/lib/thumb2/libstdc++.a-gdb.py | 12 | 2354 | # -*- python -*-
# Copyright (C) 2009, 2010 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import gdb
import os
import os.path
# GCC-generated gdb auto-load script: adds the libstdc++ pretty-printer
# package to sys.path (relocatably, relative to the objfile) and registers
# the printers.
pythondir = '/opt/codesourcery/arm-none-eabi/share/gcc-4.6.1/python'
libdir = '/opt/codesourcery/arm-none-eabi/lib/thumb2'

# This file might be loaded when there is no current objfile.  This
# can happen if the user loads it manually.  In this case we don't
# update sys.path; instead we just hope the user managed to do that
# beforehand.
if gdb.current_objfile () is not None:
    # Update module path.  We want to find the relative path from libdir
    # to pythondir, and then we want to apply that relative path to the
    # directory holding the objfile with which this file is associated.
    # This preserves relocatability of the gcc tree.

    # Do a simple normalization that removes duplicate separators.
    pythondir = os.path.normpath (pythondir)
    libdir = os.path.normpath (libdir)
    prefix = os.path.commonprefix ([libdir, pythondir])
    # In some bizarre configuration we might have found a match in the
    # middle of a directory name.
    # NOTE(review): compares against '/' while later code uses os.sep --
    # presumably fine on the POSIX hosts this toolchain targets; confirm
    # before relying on it on Windows.
    if prefix[-1] != '/':
        prefix = os.path.dirname (prefix) + '/'
    # Strip off the prefix.
    pythondir = pythondir[len (prefix):]
    libdir = libdir[len (prefix):]
    # Compute the ".."s needed to get from libdir to the prefix.
    dotdots = ('..' + os.sep) * len (libdir.split (os.sep))
    objfile = gdb.current_objfile ().filename
    dir_ = os.path.join (os.path.dirname (objfile), dotdots, pythondir)
    if not dir_ in sys.path:
        sys.path.insert(0, dir_)

# Load the pretty-printers.
from libstdcxx.v6.printers import register_libstdcxx_printers
register_libstdcxx_printers (gdb.current_objfile ())
| gpl-2.0 |
blaggacao/odoo | addons/l10n_be_hr_payroll_account/__init__.py | 430 | 1046 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
nju520/django | tests/m2m_recursive/models.py | 410 | 1120 | """
Many-to-many relationships between the same two tables
In this example, a ``Person`` can have many friends, who are also ``Person``
objects. Friendship is a symmetrical relationship - if I am your friend, you
are my friend. Here, ``friends`` is an example of a symmetrical
``ManyToManyField``.
A ``Person`` can also have many idols - but while I may idolize you, you may
not think the same of me. Here, ``idols`` is an example of a non-symmetrical
``ManyToManyField``. Only recursive ``ManyToManyField`` fields may be
non-symmetrical, and they are symmetrical by default.
This test validates that the many-to-many table is created using a mangled name
if there is a name clash, and tests that symmetry is preserved where
appropriate.
"""
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Person(models.Model):
    """Model with two recursive M2M fields; see the module docstring."""
    name = models.CharField(max_length=20)
    # Symmetrical by default: if A is B's friend, B is A's friend.
    friends = models.ManyToManyField('self')
    # Non-symmetrical self-reference; reverse accessor is 'stalkers'.
    idols = models.ManyToManyField('self', symmetrical=False, related_name='stalkers')

    def __str__(self):
        return self.name
| bsd-3-clause |
proxysh/Safejumper-for-Desktop | buildlinux/env32/lib/python2.7/site-packages/twisted/protocols/haproxy/_parser.py | 16 | 2051 | # -*- test-case-name: twisted.protocols.haproxy.test.test_parser -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Parser for 'haproxy:' string endpoint.
"""
from zope.interface import implementer
from twisted.plugin import IPlugin
from twisted.internet.endpoints import (
quoteStringArgument, serverFromString, IStreamServerEndpointStringParser
)
from twisted.python.compat import iteritems
from . import proxyEndpoint
def unparseEndpoint(args, kwargs):
    """
    Un-parse the already-parsed args and kwargs back into endpoint syntax.

    @param args: C{:}-separated arguments
    @type args: L{tuple} of native L{str}

    @param kwargs: C{:} and then C{=}-separated keyword arguments
    @type kwargs: L{tuple} of native L{str}

    @return: a string equivalent to the original format which this was parsed
        as.
    @rtype: native L{str}
    """
    # Positional pieces first, then key=value pieces sorted by key so the
    # result is deterministic; every piece is quoted for endpoint syntax.
    description = ':'.join(
        [quoteStringArgument(str(arg)) for arg in args] +
        sorted(['%s=%s' % (quoteStringArgument(str(key)),
                           quoteStringArgument(str(value)))
                for key, value in iteritems(kwargs)
                ]))
    return description
@implementer(IPlugin, IStreamServerEndpointStringParser)
class HAProxyServerParser(object):
    """
    Stream server endpoint string parser for the HAProxyServerEndpoint type.

    @ivar prefix: See L{IStreamServerEndpointStringParser.prefix}.
    """
    prefix = "haproxy"

    def parseStreamServer(self, reactor, *args, **kwargs):
        """
        Parse a stream server endpoint from a reactor and string-only arguments
        and keyword arguments.

        @param reactor: The reactor.

        @param args: The parsed string arguments.

        @param kwargs: The parsed keyword arguments.

        @return: a stream server endpoint
        @rtype: L{IStreamServerEndpoint}
        """
        # Re-serialize the leftover args/kwargs, parse them as a normal
        # endpoint, then wrap the result in a PROXY-protocol endpoint.
        subdescription = unparseEndpoint(args, kwargs)
        wrappedEndpoint = serverFromString(reactor, subdescription)
        return proxyEndpoint(wrappedEndpoint)
| gpl-2.0 |
samepage-labs/luigi | test/scheduler_test.py | 6 | 4367 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import pickle
import tempfile
import time
from helpers import unittest
import luigi.scheduler
from helpers import with_config
import logging
logging.config.fileConfig('test/testconfig/logging.cfg', disable_existing_loggers=False)
luigi.notifications.DEBUG = True
class SchedulerTest(unittest.TestCase):
    """Tests for luigi scheduler state persistence and configuration."""

    def test_load_old_state(self):
        """A pickled (tasks, active_workers) tuple on disk round-trips
        the worker ids through SimpleTaskState.load()."""
        tasks = {}
        active_workers = {'Worker1': 1e9, 'Worker2': time.time()}
        with tempfile.NamedTemporaryFile(delete=True) as fn:
            with open(fn.name, 'wb') as fobj:
                state = (tasks, active_workers)
                pickle.dump(state, fobj)
            state = luigi.scheduler.SimpleTaskState(
                state_path=fn.name)
            state.load()
            self.assertEqual(set(state.get_worker_ids()),
                             set(['Worker1', 'Worker2']))

    def test_load_broken_state(self):
        """An unpicklable state file is ignored instead of crashing load()."""
        with tempfile.NamedTemporaryFile(delete=True) as fn:
            with open(fn.name, 'w') as fobj:
                print("b0rk", file=fobj)
            state = luigi.scheduler.SimpleTaskState(
                state_path=fn.name)
            state.load()  # bad if this crashes
            self.assertEqual(list(state.get_worker_ids()), [])

    @with_config({'scheduler': {'disable-num-failures': '44', 'worker-disconnect-delay': '55'}})
    def test_scheduler_with_config(self):
        """Config-file values populate the scheduler; explicit keyword
        arguments override them."""
        cps = luigi.scheduler.CentralPlannerScheduler()
        self.assertEqual(44, cps._config.disable_failures)
        self.assertEqual(55, cps._config.worker_disconnect_delay)
        # Override
        cps = luigi.scheduler.CentralPlannerScheduler(disable_failures=66,
                                                      worker_disconnect_delay=77)
        self.assertEqual(66, cps._config.disable_failures)
        self.assertEqual(77, cps._config.worker_disconnect_delay)

    @with_config({'resources': {'a': '100', 'b': '200'}})
    def test_scheduler_with_resources(self):
        """Resource limits from the config are parsed into ints."""
        cps = luigi.scheduler.CentralPlannerScheduler()
        self.assertEqual({'a': 100, 'b': 200}, cps._resources)

    @with_config({'scheduler': {'record_task_history': 'True'},
                  'task_history': {'db_connection': 'sqlite:////none/existing/path/hist.db'}})
    def test_local_scheduler_task_history_status(self):
        """The local scheduler never records task history, even when the
        config asks for it."""
        ls = luigi.interface._WorkerSchedulerFactory().create_local_scheduler()
        self.assertEqual(False, ls._config.record_task_history)

    def test_load_recovers_tasks_index(self):
        """Dumping and reloading scheduler state preserves the pending
        task index, so get_work() keeps handing out remaining tasks."""
        cps = luigi.scheduler.CentralPlannerScheduler()
        cps.add_task(worker='A', task_id='1')
        cps.add_task(worker='B', task_id='2')
        cps.add_task(worker='C', task_id='3')
        cps.add_task(worker='D', task_id='4')
        self.assertEqual(cps.get_work(worker='A')['task_id'], '1')
        with tempfile.NamedTemporaryFile(delete=True) as fn:
            def reload_from_disk(cps):
                # Persist to the temp file, then build a fresh scheduler
                # from that file.
                cps._state._state_path = fn.name
                cps.dump()
                cps = luigi.scheduler.CentralPlannerScheduler()
                cps._state._state_path = fn.name
                cps.load()
                return cps
            cps = reload_from_disk(cps=cps)
            self.assertEqual(cps.get_work(worker='B')['task_id'], '2')
            self.assertEqual(cps.get_work(worker='C')['task_id'], '3')
            cps = reload_from_disk(cps=cps)
            self.assertEqual(cps.get_work(worker='D')['task_id'], '4')

    def test_worker_prune_after_init(self):
        """prune() right after construction must not fail on a worker
        with no recorded activity yet."""
        worker = luigi.scheduler.Worker(123)

        class TmpCfg:
            def __init__(self):
                self.worker_disconnect_delay = 10

        worker.prune(TmpCfg())
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
majetideepak/arrow | python/pyarrow/tests/test_builder.py | 7 | 2015 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import numpy as np
import pyarrow as pa
from pyarrow.lib import StringBuilder
def test_string_builder_append():
    """append() accepts bytes, str, and null markers (nan/None), counting
    nulls; finish() empties the builder and yields a str array."""
    sbuilder = StringBuilder()
    sbuilder.append(b"a byte string")
    sbuilder.append("a string")
    sbuilder.append(np.nan)
    sbuilder.append(None)
    assert len(sbuilder) == 4
    assert sbuilder.null_count == 2
    arr = sbuilder.finish()
    # finish() resets the builder to empty.
    assert len(sbuilder) == 0
    assert isinstance(arr, pa.Array)
    assert arr.null_count == 2
    assert arr.type == 'str'
    expected = ["a byte string", "a string", None, None]
    assert arr.to_pylist() == expected
def test_string_builder_append_values():
    """append_values() treats both nan and None uniformly as nulls."""
    sbuilder = StringBuilder()
    sbuilder.append_values([np.nan, None, "text", None, "other text"])
    assert sbuilder.null_count == 3
    arr = sbuilder.finish()
    assert arr.null_count == 3
    expected = [None, None, "text", None, "other text"]
    assert arr.to_pylist() == expected
def test_string_builder_append_after_finish():
    """Appending after finish() must not mutate the finished array."""
    builder = StringBuilder()
    builder.append_values([np.nan, None, "text", None, "other text"])
    result = builder.finish()
    builder.append("No effect")
    assert result.to_pylist() == [None, None, "text", None, "other text"]
| apache-2.0 |
waelrash1/or-tools | examples/python/data/nonogram_regular/nonogram_ps.py | 74 | 1042 | # Copyright 2010 Hakan Kjellerstrand hakank@bonetmail.com
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# From
# http://www-lp.doc.ic.ac.uk/UserPages/staff/ft/alp/humour/visual/nono.html
# Via ECLiPSe http://87.230.22.228/examples/nono_regular.ecl.txt
#
# Nonogram puzzle clues.  Each rule list is left-padded with zeros to the
# fixed rule length so every row/column clue has a uniform shape.
rows = 9
row_rule_len = 2
row_rules = [
    [0,3],
    [2,1],
    [3,2],
    [2,2],
    [0,6],
    [1,5],
    [0,6],
    [0,1],
    [0,2]
]
cols = 8
col_rule_len = 2
col_rules = [
    [1,2],
    [3,1],
    [1,5],
    [7,1],
    [0,5],
    [0,3],
    [0,4],
    [0,3]
]
| apache-2.0 |
BartoszCichecki/onlinepython | onlinepython/pypy-2.4.0-win32/lib-python/2.7/test/test_timeout.py | 46 | 7092 | """Unit tests for socket timeout feature."""
import unittest
from test import test_support
# This requires the 'network' resource as given on the regrtest command line.
skip_expected = not test_support.is_resource_enabled('network')
import time
import socket
class CreationTestCase(unittest.TestCase):
    """Test case for socket.gettimeout() and socket.settimeout()"""
    # Python 2 module: note the 0L / u"" literals below.

    def setUp(self):
        # A fresh TCP socket per test; closed again in tearDown.
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    def tearDown(self):
        self.sock.close()

    def testObjectCreation(self):
        # Test Socket creation
        self.assertEqual(self.sock.gettimeout(), None,
                         "timeout not disabled by default")

    def testFloatReturnValue(self):
        # Test return value of gettimeout()
        self.sock.settimeout(7.345)
        self.assertEqual(self.sock.gettimeout(), 7.345)
        self.sock.settimeout(3)
        self.assertEqual(self.sock.gettimeout(), 3)
        self.sock.settimeout(None)
        self.assertEqual(self.sock.gettimeout(), None)

    def testReturnType(self):
        # Test return type of gettimeout(): always a float, even when the
        # timeout was set from an int.
        self.sock.settimeout(1)
        self.assertEqual(type(self.sock.gettimeout()), type(1.0))
        self.sock.settimeout(3.9)
        self.assertEqual(type(self.sock.gettimeout()), type(1.0))

    def testTypeCheck(self):
        # Test type checking by settimeout(): numbers and None are accepted,
        # everything else raises TypeError.
        self.sock.settimeout(0)
        self.sock.settimeout(0L)
        self.sock.settimeout(0.0)
        self.sock.settimeout(None)
        self.assertRaises(TypeError, self.sock.settimeout, "")
        self.assertRaises(TypeError, self.sock.settimeout, u"")
        self.assertRaises(TypeError, self.sock.settimeout, ())
        self.assertRaises(TypeError, self.sock.settimeout, [])
        self.assertRaises(TypeError, self.sock.settimeout, {})
        self.assertRaises(TypeError, self.sock.settimeout, 0j)

    def testRangeCheck(self):
        # Test range checking by settimeout(): negative timeouts are invalid.
        self.assertRaises(ValueError, self.sock.settimeout, -1)
        self.assertRaises(ValueError, self.sock.settimeout, -1L)
        self.assertRaises(ValueError, self.sock.settimeout, -1.0)

    def testTimeoutThenBlocking(self):
        # Test settimeout() followed by setblocking(): setblocking(1)
        # clears the timeout, setblocking(0) maps to a zero timeout.
        self.sock.settimeout(10)
        self.sock.setblocking(1)
        self.assertEqual(self.sock.gettimeout(), None)
        self.sock.setblocking(0)
        self.assertEqual(self.sock.gettimeout(), 0.0)
        self.sock.settimeout(10)
        self.sock.setblocking(0)
        self.assertEqual(self.sock.gettimeout(), 0.0)
        self.sock.setblocking(1)
        self.assertEqual(self.sock.gettimeout(), None)

    def testBlockingThenTimeout(self):
        # Test setblocking() followed by settimeout(): the later
        # settimeout() always wins.
        self.sock.setblocking(0)
        self.sock.settimeout(1)
        self.assertEqual(self.sock.gettimeout(), 1)
        self.sock.setblocking(1)
        self.sock.settimeout(1)
        self.assertEqual(self.sock.gettimeout(), 1)
class TimeoutTestCase(unittest.TestCase):
    """Test case for socket.socket() timeout functions"""

    # There are a number of tests here trying to make sure that an operation
    # doesn't take too much longer than expected.  But competing machine
    # activity makes it inevitable that such tests will fail at times.
    # When fuzz was at 1.0, I (tim) routinely saw bogus failures on Win2K
    # and Win98SE.  Boosting it to 2.0 helped a lot, but isn't a real
    # solution.
    fuzz = 2.0

    def setUp(self):
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.addr_remote = ('www.python.org.', 80)
        self.localhost = '127.0.0.1'

    def tearDown(self):
        self.sock.close()

    def testConnectTimeout(self):
        # Choose a private address that is unlikely to exist to prevent
        # failures due to the connect succeeding before the timeout.
        # Use a dotted IP address to avoid including the DNS lookup time
        # with the connect time.  This avoids failing the assertion that
        # the timeout occurred fast enough.
        addr = ('10.0.0.0', 12345)
        # Test connect() timeout
        _timeout = 0.001
        self.sock.settimeout(_timeout)
        _t1 = time.time()
        self.assertRaises(socket.error, self.sock.connect, addr)
        _t2 = time.time()
        _delta = abs(_t1 - _t2)
        self.assertTrue(_delta < _timeout + self.fuzz,
                        "timeout (%g) is more than %g seconds more than expected (%g)"
                        %(_delta, self.fuzz, _timeout))

    def testRecvTimeout(self):
        # Test recv() timeout
        _timeout = 0.02
        with test_support.transient_internet(self.addr_remote[0]):
            self.sock.connect(self.addr_remote)
            self.sock.settimeout(_timeout)
            _t1 = time.time()
            self.assertRaises(socket.timeout, self.sock.recv, 1024)
            _t2 = time.time()
            _delta = abs(_t1 - _t2)
            self.assertTrue(_delta < _timeout + self.fuzz,
                            "timeout (%g) is %g seconds more than expected (%g)"
                            %(_delta, self.fuzz, _timeout))

    def testAcceptTimeout(self):
        # Test accept() timeout
        _timeout = 2
        self.sock.settimeout(_timeout)
        # Prevent "Address already in use" socket exceptions
        test_support.bind_port(self.sock, self.localhost)
        self.sock.listen(5)
        _t1 = time.time()
        self.assertRaises(socket.error, self.sock.accept)
        _t2 = time.time()
        _delta = abs(_t1 - _t2)
        self.assertTrue(_delta < _timeout + self.fuzz,
                        "timeout (%g) is %g seconds more than expected (%g)"
                        %(_delta, self.fuzz, _timeout))

    def testRecvfromTimeout(self):
        # Test recvfrom() timeout; needs a UDP socket, so replace the
        # TCP socket created in setUp().
        _timeout = 2
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sock.settimeout(_timeout)
        # Prevent "Address already in use" socket exceptions
        test_support.bind_port(self.sock, self.localhost)
        _t1 = time.time()
        self.assertRaises(socket.error, self.sock.recvfrom, 8192)
        _t2 = time.time()
        _delta = abs(_t1 - _t2)
        self.assertTrue(_delta < _timeout + self.fuzz,
                        "timeout (%g) is %g seconds more than expected (%g)"
                        %(_delta, self.fuzz, _timeout))

    @unittest.skip('test not implemented')
    def testSend(self):
        # Test send() timeout
        # couldn't figure out how to test it
        pass

    @unittest.skip('test not implemented')
    def testSendto(self):
        # Test sendto() timeout
        # couldn't figure out how to test it
        pass

    @unittest.skip('test not implemented')
    def testSendall(self):
        # Test sendall() timeout
        # couldn't figure out how to test it
        pass
def test_main():
    # These tests hit the network, so require the 'network' regrtest resource.
    test_support.requires('network')
    test_support.run_unittest(CreationTestCase, TimeoutTestCase)
# Allow running this test module directly.
if __name__ == "__main__":
    test_main()
| gpl-2.0 |
sujoykroy/motion-picture | editor/MotionPicture/shapes/polygon_shape.py | 1 | 17847 | from ..commons import *
from .shape import Shape
from xml.etree.ElementTree import Element as XmlElement
from .mirror import *
class PolygonFormRenderer(object):
    """Adapter that lets a form-chooser UI list a named form of a
    PolygonShape and render a thumbnail preview for it."""

    def __init__(self, polygon_shape, form_name):
        self.polygon_shape = polygon_shape
        self.form_name = form_name

    def get_name(self):
        """The display name is simply the stored form name."""
        return self.form_name

    def get_id(self):
        """Forms are keyed by name, so the name doubles as the id."""
        return self.form_name

    def get_pixbuf(self):
        """Render a 64x64 thumbnail of the shape posed in this form."""
        preview = self.polygon_shape.copy()
        preview.set_form_raw(self.polygon_shape.get_form_by_name(self.form_name))
        preview.reset_transformations()
        preview.parent_shape = None
        return preview.get_pixbuf(64, 64)
class PolygonShape(Shape, Mirror):
TYPE_NAME = "polygon_shape"
def __init__(self, anchor_at=None,
             border_color="000000", border_width=1,
             fill_color=None, width=1., height=1.):
    """Shape made of free-form polygons with named, morphable forms."""
    # Default the anchor to the geometric center of the shape.
    if anchor_at is None:
        anchor_at = Point(width*.5, height*.5)
    Shape.__init__(self, anchor_at, border_color, border_width, fill_color, width, height)
    Mirror.__init__(self)
    # Polygons making up the shape.  NOTE(review): point coordinates appear
    # to be stored as fractions of width/height (see get_form_raw's
    # 1/width, 1/height scaling) -- confirm before relying on it.
    self.polygons = []
    # Saved named poses: form-name -> PolygonsForm.
    self.forms = dict()
def has_poses(self):
    """Polygon shapes support named poses (forms)."""
    return True
@classmethod
def get_pose_prop_names(cls):
    """Extend the base pose properties with the raw form snapshot."""
    prop_names = super(PolygonShape, cls).get_pose_prop_names()
    prop_names.extend(["form_raw"])
    return prop_names
def save_form(self, form_name):
    """Store the current geometry under *form_name*.

    When form_name is None, an unused "Form_<n>" name is generated.
    Returns the name the form was saved under.
    """
    if form_name is None:
        i = len(self.forms)
        # Probe upward until a free name is found (deletions can leave gaps).
        while True:
            i += 1
            form_name = "Form_{0}".format(i)
            if form_name not in self.forms:
                break
    form = self.get_form_raw()
    form.set_name(form_name)
    self.forms[form_name] = form
    return form_name
def get_form_raw(self):
    """Snapshot the current polygons, re-centered on the anchor, as a
    PolygonsForm carrying the current width/height."""
    polygons = []
    anchor_at = self.anchor_at.copy()
    # Bring the anchor into the same fractional space as the points.
    anchor_at.scale(1./self.width, 1./self.height)
    for polygon in self.polygons:
        polygon = polygon.copy()
        for point in polygon.points:
            # Store points relative to the anchor so forms are position-free.
            point.translate(-anchor_at.x, -anchor_at.y)
        polygons.append(polygon)
    form = PolygonsForm(width=self.width, height=self.height, polygons=polygons)
    return form
def get_form_by_name(self, form):
    """Return the saved form stored under *form*, or None when unknown."""
    return self.forms.get(form)
# wrapper around set_form
def set_pose(self, pose_name):
    """Pose and form are synonyms for this shape type."""
    self.set_form(pose_name)
def set_form(self, form_name):
    """Apply the named saved form; unknown names are silently ignored."""
    if form_name not in self.forms:
        return
    form = self.forms[form_name]
    self.set_form_raw(form)
def set_form_raw(self, form):
    """Morph this shape's geometry to match *form*.

    Adopts the form's size, copies each matched point (shifted back from
    anchor-relative to shape-local space), then refits the bounds.
    """
    diff_width = form.width - self.width
    diff_height = form.height - self.height
    abs_anchor_at = self.get_abs_anchor_at()
    self.width = form.width
    self.height = form.height
    form_polygons = form.polygons
    anchor_at = self.anchor_at.copy()
    anchor_at.scale(1./self.width, 1./self.height)
    # Pair polygons/points by index; extras on either side are ignored.
    for i in range(min(len(form_polygons), len(self.polygons))):
        self_polygon = self.polygons[i]
        form_polygon = form_polygons[i]
        for j in range(min(len(self_polygon.points), len(form_polygon.points))):
            self_point = self_polygon.points[j]
            form_point = form_polygon.points[j]
            self_point.copy_from(form_point)
            # Undo the anchor-relative storage applied in get_form_raw().
            self_point.translate(anchor_at.x, anchor_at.y)
    self.fit_size_to_include_all()
    #self.move_to(abs_anchor_at.x, abs_anchor_at.y)
# wrapper around get_form_list
def get_pose_list(self, interior_shape=None):
    """interior_shape is accepted for interface parity but unused here."""
    return self.get_form_list()
def get_form_list(self):
    """Return a renderer for every saved form, sorted by form name."""
    return [PolygonFormRenderer(self, form_name)
            for form_name in sorted(self.forms.keys())]
def set_prop_value(self, prop_name, value, prop_data=None):
    """Animate between two forms when prop_name is "internal".

    *value* in [0, 1] linearly interpolates the size and every matched
    point between a start and end form (given either by name via
    prop_data["start_form"]/["end_form"] or as raw snapshots via
    ["start_form_raw"]/["end_form_raw"]).  Other properties defer to
    Shape.set_prop_value.
    """
    if prop_name == "internal":
        if "start_form" in prop_data:
            start_form_name = prop_data["start_form"]
            end_form_name = prop_data.get("end_form")
            # Without a valid end form, just snap to the start form.
            if end_form_name is None or end_form_name not in self.forms:
                self.set_form(start_form_name)
                return
            start_form = self.forms[start_form_name]
            end_form = self.forms[end_form_name]
        else:
            start_form = prop_data["start_form_raw"]
            end_form = prop_data.get("end_form_raw")
        # Blend the overall size first.
        new_width = start_form.width + (end_form.width-start_form.width)*value
        new_height = start_form.height + (end_form.height-start_form.height)*value
        diff_width = new_width - self.width
        diff_height = new_height - self.height
        abs_anchor_at = self.get_abs_anchor_at()
        self.width = new_width
        self.height = new_height
        start_form_polygons = start_form.polygons
        end_form_polygons = end_form.polygons
        anchor_at = self.anchor_at.copy()
        anchor_at.scale(1./self.width, 1./self.height)
        # Pair polygons/points by index across both forms and this shape.
        for i in range(min(len(start_form_polygons), len(end_form_polygons), len(self.polygons))):
            self_polygon = self.polygons[i]
            start_form_polygon = start_form_polygons[i]
            end_form_polygon = end_form_polygons[i]
            for j in range(min(len(self_polygon.points), len(start_form_polygon.points), \
                    len(end_form_polygon.points))):
                self_point = self_polygon.points[j]
                start_form_point = start_form_polygon.points[j]
                end_form_point = end_form_polygon.points[j]
                # Interpolate in absolute units, then renormalize by the
                # blended size so differently-sized forms morph smoothly.
                self_point.x = (start_form_point.x*start_form.width*(1-value)+\
                    end_form_point.x*end_form.width*value)/self.width
                self_point.y = (start_form_point.y*start_form.height*(1-value)+\
                    end_form_point.y*end_form.height*value)/self.height
                #self_point.set_inbetween(start_form_point, end_form_point, value)
                self_point.translate(anchor_at.x, anchor_at.y)
        self.fit_size_to_include_all()
        #self.move_to(abs_anchor_at.x, abs_anchor_at.y)
    else:
        Shape.set_prop_value(self, prop_name, value, prop_data)
def rename_form(self, old_form, new_form):
if new_form in self.forms: return
self.forms[new_form] = self.forms[old_form]
del self.forms[old_form]
def delete_form(self, form_name):
if form_name in self.forms:
del self.forms[form_name]
    def copy_data_from_linked(self, build_lock=True):
        """Re-sync this shape's polygons and forms from the shape it is linked to.

        Each linked polygon is copied and shifted by the difference between the
        two shapes' normalized anchor positions, then the bounds are refitted.
        """
        super(PolygonShape, self).copy_data_from_linked(build_lock)
        if not self.linked_to: return
        del self.polygons[:]
        linked_to_anchor_at = self.linked_to.anchor_at.copy()
        linked_to_anchor_at.scale(1./self.linked_to.width, 1./self.linked_to.height)
        self_anchor_at = self.anchor_at.copy()
        self_anchor_at.scale(1./self.width, 1./self.height)
        diff_x = self_anchor_at.x-linked_to_anchor_at.x
        diff_y = self_anchor_at.y-linked_to_anchor_at.y
        for polygon in self.linked_to.polygons:
            polygon = polygon.copy()
            polygon.translate(diff_x, diff_y)
            self.polygons.append(polygon)
        self.fit_size_to_include_all()
        self.forms = copy_value(self.linked_to.forms)
    def get_xml_element(self):
        """Serialize: base Shape element plus one child per polygon and per form."""
        elm = Shape.get_xml_element(self)
        for polygon in self.polygons:
            elm.append(polygon.get_xml_element())
        for form_name, form in self.forms.items():
            elm.append(form.get_xml_element())
        return elm
@classmethod
def create_from_xml_element(cls, elm):
arr = Shape.get_params_array_from_xml_element(elm)
shape = cls(*arr)
default_point = Point(0,0)
for polygon_elm in elm.findall(Polygon.TAG_NAME):
polygon = Polygon.create_from_xml_element(polygon_elm)
shape.polygons.append(polygon)
for form_elm in elm.findall(PolygonsForm.TAG_NAME):
form = PolygonsForm.create_from_xml_element(form_elm)
shape.forms[form.name] = form
shape.assign_params_from_xml_element(elm)
return shape
    def copy(self, copy_name=False, deep_copy=False):
        """Return a copy of this shape; polygons are always copied,
        forms only when deep_copy is True."""
        newob = PolygonShape(self.anchor_at.copy(), copy_value(self.border_color), self.border_width, copy_value(self.fill_color), self.width, self.height)
        self.copy_into(newob, copy_name)
        for polygon in self.polygons:
            newob.polygons.append(polygon.copy())
        if deep_copy:
            newob.forms = copy_value(self.forms)
        return newob
    def add_polygon(self, polygon):
        """Append a polygon and refit the shape's bounding box around all polygons."""
        self.polygons.append(polygon)
        self.fit_size_to_include_all()
    def _draw_polygon(self, ctx, polygon, scale=None, angle=None):
        """Trace one polygon's path on the cairo context.

        scale: optional (sx, sy) mirror factors of +/-1; angle: optional
        rotation in degrees around the anchor.  Context state is restored
        before returning.
        """
        ctx.save()
        if angle is not None:
            # Rotate about the anchor point, not the origin.
            ctx.translate(self.anchor_at.x, self.anchor_at.y)
            ctx.rotate(angle*RAD_PER_DEG)
            ctx.translate(-self.anchor_at.x, -self.anchor_at.y)
        ctx.scale(self.width, self.height)
        if scale:
            # Pre-translate so the mirror axis passes through the anchor
            # (coordinates here are normalized, hence the division by size).
            if scale[0] == -1 and scale[1] == 1:
                ctx.translate(2*self.anchor_at.x/self.width, 0)
            elif scale[0] == 1 and scale[1] == -1:
                ctx.translate(0, 2*self.anchor_at.y/self.height)
            elif scale[0] == -1 and scale[1] == -1:
                ctx.translate(2*self.anchor_at.x/self.width, 2*self.anchor_at.y/self.height)
            ctx.scale(*scale)
        polygon.draw_path(ctx)
        ctx.restore()
    def draw_path(self, ctx, for_fill=False):
        """Build the full drawing path, including mirrored/rotated copies.

        When for_fill is True only closed polygons contribute (open ones
        cannot be filled).  Paths are collected and re-appended at the end
        so the context holds one combined path.
        """
        paths = []
        for polygon in self.polygons:
            if not for_fill or (for_fill and polygon.closed):
                self._draw_polygon(ctx, polygon)
                paths.append(ctx.copy_path())
        if self.mirror != 0:
            scales, rotations = self.get_scales_n_rotations()
            for scale in scales:
                for polygon in self.polygons:
                    if not for_fill or (for_fill and polygon.closed):
                        self._draw_polygon(ctx, polygon, scale=scale)
                        paths.append(ctx.copy_path())
            for angle in rotations:
                for polygon in self.polygons:
                    if not for_fill or (for_fill and polygon.closed):
                        self._draw_polygon(ctx, polygon, angle=angle)
                        paths.append(ctx.copy_path())
        ctx.new_path()
        for path in paths:
            ctx.append_path(path)
    def fit_size_to_include_all(self):
        """Shrink/grow the shape so its box exactly bounds all polygons,
        then renormalize every polygon point back into [0, 1] coordinates.
        The absolute anchor position is preserved."""
        outline = None
        for polygon in self.polygons:
            if outline is None:
                outline = polygon.get_outline()
            else:
                outline.expand_include(polygon.get_outline())
        if not outline: return
        # Guard against degenerate (zero-area) outlines.
        if outline.width==0.:
            outline.width=1./self.width
        if outline.height==0.:
            outline.height=1./self.height
        abs_anchor_at = self.get_abs_anchor_at()
        self.anchor_at.translate(-self.width*outline.left, -self.height*outline.top)
        self.move_to(abs_anchor_at.x, abs_anchor_at.y)
        self.set_width(outline.width*self.width, fixed_anchor=False)
        self.set_height(outline.height*self.height, fixed_anchor=False)
        if outline.height==0:
            sy = None
        else:
            sy = 1/outline.height
        if outline.width==0:
            sx = None
        else:
            sx = 1/outline.width
        for polygon in self.polygons:
            polygon.translate(-outline.left, -outline.top)
            if sx is not None and sy is not None:
                polygon.scale(sx, sy)
def find_point_location(self, point):
point = point.copy()
point.scale(1./self.width, 1./self.height)
for polygon_index in range(len(self.polygons)):
polygon = self.polygons[polygon_index]
found = polygon.get_closest_point(point, self.width, self.height)
if found:
point_index, t = found
return (polygon_index, point_index, t)
return None
def insert_point_at(self, point):
found = self.find_point_location(point)
if not found: return False
polygon_index, point_index, t = found
polygon = self.polygons[polygon_index]
polygon.insert_point_at(point_index, t)
return True
    def insert_break_at(self, polygon_index, point_index):
        """Break a polygon at the given vertex.

        A closed polygon is merely reopened: its point list is rotated so the
        break vertex becomes the start, and the start is duplicated at the end.
        An open polygon is split into two polygons at the vertex.
        Returns True on success, False for out-of-range indices.
        """
        if polygon_index>=len(self.polygons): return False
        prev_polygon = self.polygons[polygon_index]
        if point_index>= len(prev_polygon.points): return False
        if prev_polygon.closed:
            #Just open up the closed polygon
            prev_polygon.closed = False
            # Rotate the point list so point_index becomes index 0.
            for i in range(0, point_index, 1):
                prev_polygon.points.append(prev_polygon.points[0])
                del prev_polygon.points[0]
            prev_polygon.points.append(prev_polygon.points[0].copy())
            return True
        point = prev_polygon.points[point_index]
        new_polygon = Polygon(points=[point.copy()])
        new_polygon.points.extend(prev_polygon.points[point_index+1:])
        del prev_polygon.points[point_index+1:]
        prev_polygon.closed = False
        self.polygons.insert(polygon_index+1, new_polygon)
        return True
    def join_points(self, polygon_index_1, is_start_1, polygon_index_2, is_start_2):
        """Join two polygon end-points.

        Same polygon + opposite ends: close the polygon (merging the two end
        vertices when they are within ~10px).  Different open polygons: splice
        polygon 2 onto the end of polygon 1 (reversing/swapping as needed so
        the joined ends meet) and drop polygon 2.  Returns True on success.
        """
        if polygon_index_1>=len(self.polygons): return False
        if polygon_index_2>=len(self.polygons): return False
        polygon_1 = self.polygons[polygon_index_1]
        polygon_2 = self.polygons[polygon_index_2]
        if polygon_index_1 == polygon_index_2:
            if is_start_1 != is_start_2:
                polygon_1.closed = True
                # Merge coincident end points (threshold ~10 px).
                if abs(polygon_1.points[0].distance(polygon_1.points[-1])*self.width)<10:
                    polygon_1.points[0].x = (polygon_1.points[0].x+polygon_1.points[-1].x)*.5
                    polygon_1.points[0].y = (polygon_1.points[0].y+polygon_1.points[-1].y)*.5
                    del polygon_1.points[-1]
                return True
            return False
        if polygon_1.closed: return False
        if polygon_2.closed: return False
        # NOTE(review): dist_lapse is unused -- confirm whether a tolerance
        # check was intended here.
        dist_lapse = .01
        if is_start_1 == is_start_2:#reverse polygon_2
            rev_polygon = polygon_2.reverse_copy()
            for pi in range(len(rev_polygon.points)):
                polygon_2.points[pi].copy_from(rev_polygon.points[pi])
        if is_start_1:#swap polygons
            polygon_1, polygon_2 = polygon_2, polygon_1
            polygon_index_1, polygon_index_2 = polygon_index_2, polygon_index_1
        #polygon_2 get attached at the end of polygon_1
        if abs(polygon_1.points[-1].distance(polygon_2.points[0])*self.width)<10:
            polygon_1.points[-1].x = (polygon_1.points[-1].x + polygon_2.points[0].x)*.5
            polygon_1.points[-1].y = (polygon_1.points[-1].y + polygon_2.points[0].y)*.5
            del polygon_2.points[0]
        polygon_1.points.extend(polygon_2.points)
        del self.polygons[polygon_index_2]
        return True
    def delete_point_at(self, polygon_index, point_index):
        """Delete one vertex; refuses to empty the last 2-point polygon.

        A polygon dropping below 3 points is opened; a polygon left with a
        single point is removed entirely.  Bounds are refitted afterwards.
        Returns True on success.
        """
        if polygon_index>=len(self.polygons): return False
        polygon = self.polygons[polygon_index]
        if point_index>=len(polygon.points): return False
        if len(self.polygons) == 1 and len(polygon.points)<=2: return False
        del polygon.points[point_index]
        if len(polygon.points)<3:
            polygon.closed=False
        if len(polygon.points)==1:
            del self.polygons[polygon_index]
        self.fit_size_to_include_all()
        return True
    def extend_point(self, polygon_index, is_start, point_index):
        """Duplicate an end vertex of an open polygon so it can be dragged out.

        NOTE(review): when is_start is True the given point_index is ignored
        and point 0 is always duplicated -- confirm this is intentional.
        Returns True on success, False for bad index or a closed polygon.
        """
        if polygon_index>=len(self.polygons): return False
        polygon = self.polygons[polygon_index]
        if polygon.closed: return False
        if is_start:
            polygon.points.insert(0, polygon.points[0].copy())
        else:
            polygon.points.insert(point_index, polygon.points[point_index].copy())
        return True
    @staticmethod
    def create_from_rectangle_shape(rect_shape):
        """Convert a sharp-cornered RectangleShape into an equivalent
        PolygonShape (one closed 4-point polygon); rounded rectangles
        are not supported and yield None."""
        if rect_shape.corner_radius>0: return None
        polygon_shape = PolygonShape(Point(0,0), None, None, None, None, None)
        polygon_shape.polygons.append(Polygon(
            points=[Point(0.,0.), Point(1., 0.), Point(1., 1.), Point(0., 1.)], closed=True))
        rect_shape.copy_into(polygon_shape, all_fields=True, copy_name=False)
        polygon_shape.fit_size_to_include_all()
        return polygon_shape
    def flip(self, direction):
        """Mirror the shape horizontally ("x") or vertically ("y"),
        reflecting every normalized point about 0.5 and refitting bounds."""
        Shape.flip(self, direction)
        for polygon in self.polygons:
            for point in polygon.points:
                if direction == "x":
                    point.x = 1.-point.x
                elif direction == "y":
                    point.y = 1.-point.y
        self.fit_size_to_include_all()
    def include_inside(self, shape):
        """Absorb another PolygonShape's polygons into this shape.

        Each point is mapped from the other shape's local coordinates into
        this shape's normalized space via the two transforms.  Returns False
        for non-PolygonShape arguments.
        """
        if not isinstance(shape, PolygonShape): return False
        for polygon in shape.polygons:
            polygon = polygon.copy()
            for i in range(len(polygon.points)):
                point = polygon.points[i]
                point.scale(shape.width, shape.height)
                point = shape.reverse_transform_point(point)
                point = self.transform_point(point)
                point.scale(1./self.width, 1./self.height)
                polygon.points[i] = point
            self.polygons.append(polygon)
        return True
| gpl-3.0 |
altairpearl/scikit-learn | sklearn/feature_extraction/tests/test_image.py | 25 | 11187 | # Authors: Emmanuelle Gouillart <emmanuelle.gouillart@normalesup.org>
# Gael Varoquaux <gael.varoquaux@normalesup.org>
# License: BSD 3 clause
import numpy as np
import scipy as sp
from scipy import ndimage
from nose.tools import assert_equal, assert_true
from numpy.testing import assert_raises
from sklearn.feature_extraction.image import (
img_to_graph, grid_to_graph, extract_patches_2d,
reconstruct_from_patches_2d, PatchExtractor, extract_patches)
from sklearn.utils.graph import connected_components
from sklearn.utils.testing import SkipTest
from sklearn.utils.fixes import sp_version
# scipy.misc.face() (used as the test image below) first shipped in SciPy 0.12.
if sp_version < (0, 12):
    raise SkipTest("Skipping because SciPy version earlier than 0.12.0 and "
                   "thus does not include the scipy.misc.face() image.")
def test_img_to_graph():
    """Gradients of x and y ramps must have the same sparsity and values."""
    x, y = np.mgrid[:4, :4] - 10
    gradient_x = img_to_graph(x)
    gradient_y = img_to_graph(y)
    assert_equal(gradient_x.nnz, gradient_y.nnz)
    # Negative elements are the diagonal: the elements of the original
    # image. Positive elements are the values of the gradient, they
    # should all be equal on grad_x and grad_y
    np.testing.assert_array_equal(gradient_x.data[gradient_x.data > 0],
                                  gradient_y.data[gradient_y.data > 0])
def test_grid_to_graph():
    """grid_to_graph must handle edgeless masks, any mask dtype, and
    honor the requested output dtype."""
    # Checking that the function works with graphs containing no edges
    size = 2
    roi_size = 1
    # Generating two convex parts with one vertex
    # Thus, edges will be empty in _to_graph
    mask = np.zeros((size, size), dtype=np.bool)
    mask[0:roi_size, 0:roi_size] = True
    mask[-roi_size:, -roi_size:] = True
    mask = mask.reshape(size ** 2)
    A = grid_to_graph(n_x=size, n_y=size, mask=mask, return_as=np.ndarray)
    assert_true(connected_components(A)[0] == 2)
    # Checking that the function works whatever the type of mask is
    mask = np.ones((size, size), dtype=np.int16)
    A = grid_to_graph(n_x=size, n_y=size, n_z=size, mask=mask)
    assert_true(connected_components(A)[0] == 1)
    # Checking dtype of the graph
    mask = np.ones((size, size))
    A = grid_to_graph(n_x=size, n_y=size, n_z=size, mask=mask, dtype=np.bool)
    assert_true(A.dtype == np.bool)
    A = grid_to_graph(n_x=size, n_y=size, n_z=size, mask=mask, dtype=np.int)
    assert_true(A.dtype == np.int)
    A = grid_to_graph(n_x=size, n_y=size, n_z=size, mask=mask,
                      dtype=np.float64)
    assert_true(A.dtype == np.float64)
def test_connect_regions():
    """Graph connected components must match ndimage's label count on
    thresholded masks of the face image."""
    try:
        face = sp.face(gray=True)
    except AttributeError:
        # Newer versions of scipy have face in misc
        from scipy import misc
        face = misc.face(gray=True)
    for thr in (50, 150):
        mask = face > thr
        graph = img_to_graph(face, mask)
        assert_equal(ndimage.label(mask)[1], connected_components(graph)[0])
def test_connect_regions_with_grid():
    """Same as test_connect_regions but via grid_to_graph (with and
    without an explicit dtype)."""
    try:
        face = sp.face(gray=True)
    except AttributeError:
        # Newer versions of scipy have face in misc
        from scipy import misc
        face = misc.face(gray=True)
    mask = face > 50
    graph = grid_to_graph(*face.shape, mask=mask)
    assert_equal(ndimage.label(mask)[1], connected_components(graph)[0])
    mask = face > 150
    graph = grid_to_graph(*face.shape, mask=mask, dtype=None)
    assert_equal(ndimage.label(mask)[1], connected_components(graph)[0])
def _downsampled_face():
    """Return the grayscale scipy face image downsampled 4x in each
    dimension (two rounds of 2x2 block averaging), as float32."""
    try:
        face = sp.face(gray=True)
    except AttributeError:
        # Newer versions of scipy have face in misc
        from scipy import misc
        face = misc.face(gray=True)
    face = face.astype(np.float32)
    face = (face[::2, ::2] + face[1::2, ::2] + face[::2, 1::2]
            + face[1::2, 1::2])
    face = (face[::2, ::2] + face[1::2, ::2] + face[::2, 1::2]
            + face[1::2, 1::2])
    face = face.astype(np.float32)
    # Each pixel is now the sum of 16 original pixels; divide to average.
    face /= 16.0
    return face
def _orange_face(face=None):
face = _downsampled_face() if face is None else face
face_color = np.zeros(face.shape + (3,))
face_color[:, :, 0] = 256 - face
face_color[:, :, 1] = 256 - face / 2
face_color[:, :, 2] = 256 - face / 4
return face_color
def _make_images(face=None):
face = _downsampled_face() if face is None else face
# make a collection of faces
images = np.zeros((3,) + face.shape)
images[0] = face
images[1] = face + 1
images[2] = face + 2
return images
# Module-level fixtures shared by the tests below (computed once at import).
downsampled_face = _downsampled_face()
orange_face = _orange_face(downsampled_face)
face_collection = _make_images(downsampled_face)
def test_extract_patches_all():
    """Every possible 16x16 patch of a grayscale image is extracted."""
    image = downsampled_face
    i_h, i_w = image.shape
    p_h, p_w = 16, 16
    n_expected = (i_h - p_h + 1) * (i_w - p_w + 1)
    patches = extract_patches_2d(image, (p_h, p_w))
    assert_equal(patches.shape, (n_expected, p_h, p_w))
def test_extract_patches_all_color():
    """Color images keep their channel axis in the extracted patches."""
    image = orange_face
    i_h, i_w = image.shape[:2]
    p_h, p_w = 16, 16
    n_expected = (i_h - p_h + 1) * (i_w - p_w + 1)
    patches = extract_patches_2d(image, (p_h, p_w))
    assert_equal(patches.shape, (n_expected, p_h, p_w, 3))
def test_extract_patches_all_rect():
    """Non-square patch sizes on a cropped (rectangular) image."""
    image = downsampled_face[:, 32:97]
    i_h, i_w = image.shape
    p_h, p_w = 16, 12
    n_expected = (i_h - p_h + 1) * (i_w - p_w + 1)
    patches = extract_patches_2d(image, (p_h, p_w))
    assert_equal(patches.shape, (n_expected, p_h, p_w))
def test_extract_patches_max_patches():
    """max_patches accepts an int count or a fraction; values outside
    (0, 1] that are not positive ints must raise ValueError."""
    face = downsampled_face
    i_h, i_w = face.shape
    p_h, p_w = 16, 16
    patches = extract_patches_2d(face, (p_h, p_w), max_patches=100)
    assert_equal(patches.shape, (100, p_h, p_w))
    expected_n_patches = int(0.5 * (i_h - p_h + 1) * (i_w - p_w + 1))
    patches = extract_patches_2d(face, (p_h, p_w), max_patches=0.5)
    assert_equal(patches.shape, (expected_n_patches, p_h, p_w))
    assert_raises(ValueError, extract_patches_2d, face, (p_h, p_w),
                  max_patches=2.0)
    assert_raises(ValueError, extract_patches_2d, face, (p_h, p_w),
                  max_patches=-1.0)
def test_reconstruct_patches_perfect():
    """Extract-then-reconstruct must reproduce the grayscale image."""
    image = downsampled_face
    patch_shape = (16, 16)
    patches = extract_patches_2d(image, patch_shape)
    rebuilt = reconstruct_from_patches_2d(patches, image.shape)
    np.testing.assert_array_almost_equal(image, rebuilt)
def test_reconstruct_patches_perfect_color():
    """Extract-then-reconstruct must reproduce the color image."""
    image = orange_face
    patch_shape = (16, 16)
    patches = extract_patches_2d(image, patch_shape)
    rebuilt = reconstruct_from_patches_2d(patches, image.shape)
    np.testing.assert_array_almost_equal(image, rebuilt)
def test_patch_extractor_fit():
    """fit() is a no-op that returns the estimator itself."""
    extractor = PatchExtractor(patch_size=(8, 8), max_patches=100,
                               random_state=0)
    assert_true(extractor == extractor.fit(face_collection))
def test_patch_extractor_max_patches():
    """PatchExtractor honors max_patches both as an absolute count per
    image and as a fraction of all possible patches."""
    faces = face_collection
    i_h, i_w = faces.shape[1:3]
    p_h, p_w = 8, 8
    max_patches = 100
    expected_n_patches = len(faces) * max_patches
    extr = PatchExtractor(patch_size=(p_h, p_w), max_patches=max_patches,
                          random_state=0)
    patches = extr.transform(faces)
    assert_true(patches.shape == (expected_n_patches, p_h, p_w))
    max_patches = 0.5
    expected_n_patches = len(faces) * int((i_h - p_h + 1) * (i_w - p_w + 1)
                                          * max_patches)
    extr = PatchExtractor(patch_size=(p_h, p_w), max_patches=max_patches,
                          random_state=0)
    patches = extr.transform(faces)
    assert_true(patches.shape == (expected_n_patches, p_h, p_w))
def test_patch_extractor_max_patches_default():
    """With no patch_size given, the default is 1/10th of the image size."""
    extractor = PatchExtractor(max_patches=100, random_state=0)
    patches = extractor.transform(face_collection)
    assert_equal(patches.shape, (len(face_collection) * 100, 19, 25))
def test_patch_extractor_all_patches():
    """Without max_patches, every possible patch of every image is returned."""
    faces = face_collection
    i_h, i_w = faces.shape[1:3]
    p_h, p_w = 8, 8
    n_expected = len(faces) * (i_h - p_h + 1) * (i_w - p_w + 1)
    extractor = PatchExtractor(patch_size=(p_h, p_w), random_state=0)
    patches = extractor.transform(faces)
    assert_true(patches.shape == (n_expected, p_h, p_w))
def test_patch_extractor_color():
    """Color inputs keep their channel axis through PatchExtractor."""
    faces = _make_images(orange_face)
    i_h, i_w = faces.shape[1:3]
    p_h, p_w = 8, 8
    n_expected = len(faces) * (i_h - p_h + 1) * (i_w - p_w + 1)
    extractor = PatchExtractor(patch_size=(p_h, p_w), random_state=0)
    patches = extractor.transform(faces)
    assert_true(patches.shape == (n_expected, p_h, p_w, 3))
def test_extract_patches_strided():
    """Strided extraction over 1D/2D/3D arrays: check both the view shape
    and that the last patch contains the expected trailing elements."""
    image_shapes_1D = [(10,), (10,), (11,), (10,)]
    patch_sizes_1D = [(1,), (2,), (3,), (8,)]
    patch_steps_1D = [(1,), (1,), (4,), (2,)]
    expected_views_1D = [(10,), (9,), (3,), (2,)]
    last_patch_1D = [(10,), (8,), (8,), (2,)]
    image_shapes_2D = [(10, 20), (10, 20), (10, 20), (11, 20)]
    patch_sizes_2D = [(2, 2), (10, 10), (10, 11), (6, 6)]
    patch_steps_2D = [(5, 5), (3, 10), (3, 4), (4, 2)]
    expected_views_2D = [(2, 4), (1, 2), (1, 3), (2, 8)]
    last_patch_2D = [(5, 15), (0, 10), (0, 8), (4, 14)]
    image_shapes_3D = [(5, 4, 3), (3, 3, 3), (7, 8, 9), (7, 8, 9)]
    patch_sizes_3D = [(2, 2, 3), (2, 2, 2), (1, 7, 3), (1, 3, 3)]
    patch_steps_3D = [(1, 2, 10), (1, 1, 1), (2, 1, 3), (3, 3, 4)]
    expected_views_3D = [(4, 2, 1), (2, 2, 2), (4, 2, 3), (3, 2, 2)]
    last_patch_3D = [(3, 2, 0), (1, 1, 1), (6, 1, 6), (6, 3, 4)]
    image_shapes = image_shapes_1D + image_shapes_2D + image_shapes_3D
    patch_sizes = patch_sizes_1D + patch_sizes_2D + patch_sizes_3D
    patch_steps = patch_steps_1D + patch_steps_2D + patch_steps_3D
    expected_views = expected_views_1D + expected_views_2D + expected_views_3D
    last_patches = last_patch_1D + last_patch_2D + last_patch_3D
    for (image_shape, patch_size, patch_step, expected_view,
         last_patch) in zip(image_shapes, patch_sizes, patch_steps,
                            expected_views, last_patches):
        image = np.arange(np.prod(image_shape)).reshape(image_shape)
        patches = extract_patches(image, patch_shape=patch_size,
                                  extraction_step=patch_step)
        ndim = len(image_shape)
        assert_true(patches.shape[:ndim] == expected_view)
        # Slice out the region the last patch should cover and compare.
        last_patch_slices = [slice(i, i + j, None) for i, j in
                             zip(last_patch, patch_size)]
        assert_true((patches[[slice(-1, None, None)] * ndim] ==
                     image[last_patch_slices].squeeze()).all())
def test_extract_patches_square():
    """A scalar patch_shape is broadcast to every image dimension."""
    image = downsampled_face
    i_h, i_w = image.shape
    side = 8
    n_rows, n_cols = i_h - side + 1, i_w - side + 1
    patches = extract_patches(image, patch_shape=side)
    assert_true(patches.shape == (n_rows, n_cols, side, side))
def test_width_patch():
    """Patch dimensions larger than the image must raise ValueError."""
    image = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
    for bad_shape in ((4, 1), (1, 4)):
        assert_raises(ValueError, extract_patches_2d, image, bad_shape)
| bsd-3-clause |
xylsxyls/xueyelingshuang | src/StockCharge/scripts/rebuild_StockCharge.py | 1 | 14091 | #!python3
# -*- coding:utf-8 -*-
import os
import sys
import time
import ctypes
import shutil
import subprocess
IsPy3 = sys.version_info[0] >= 3
if IsPy3:
import winreg
else:
import codecs
import _winreg as winreg
# Build configuration defaults; most are overridden by command-line flags in
# the __main__ block at the bottom of this file.
BuildType = 'Release'
IsRebuild = True
Build = 'Rebuild'
Update = False
Copy = False
CleanAll = False
BuildTimeout = 30*60
Bit = 'Win32'
Dlllib = 'dll'
MSBuild = None
IncrediBuild = None
UseMSBuild = True #Compile with MSBuild by default; set False to use IncrediBuild
#Only these 5 variables need changing for a different project
SlnFile = '../StockCharge.sln' #Path relative to this script
UpdateDir = [] #Paths relative to this script; empty means no git update
ExecBatList = [] #Scripts (relative to this script) run before building; runner cd's into each bat's directory first
MSBuildFirstProjects = [r'StockCharge'] #For MSBuild: project-file paths inside the solution
# Projects MSBuild compiles first; empty means no forced order
IncrediBuildFirstProjects = ['StockCharge'] #For IncrediBuild only project names are needed
#Projects IncrediBuild compiles first; empty means no forced order
class ConsoleColor():
    '''This class defines the values of color for printing on console window'''
    # Values match the Windows console text attribute constants
    # (bit 3 = intensity, bits 0-2 = blue/green/red).
    Black = 0
    DarkBlue = 1
    DarkGreen = 2
    DarkCyan = 3
    DarkRed = 4
    DarkMagenta = 5
    DarkYellow = 6
    Gray = 7
    DarkGray = 8
    Blue = 9
    Green = 10
    Cyan = 11
    Red = 12
    Magenta = 13
    Yellow = 14
    White = 15
# ctypes mirror of the Win32 COORD structure.
class Coord(ctypes.Structure):
    _fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)]
# ctypes mirror of the Win32 SMALL_RECT structure.
class SmallRect(ctypes.Structure):
    _fields_ = [('Left', ctypes.c_short),
                ('Top', ctypes.c_short),
                ('Right', ctypes.c_short),
                ('Bottom', ctypes.c_short),
               ]
# ctypes mirror of the Win32 CONSOLE_SCREEN_BUFFER_INFO structure.
class ConsoleScreenBufferInfo(ctypes.Structure):
    _fields_ = [('dwSize', Coord),
                ('dwCursorPosition', Coord),
                ('wAttributes', ctypes.c_uint),
                ('srWindow', SmallRect),
                ('dwMaximumWindowSize', Coord),
               ]
class Win32API():
    '''Some native methods for python calling'''
    # Windows-only: relies on kernel32 console APIs via ctypes.
    StdOutputHandle = -11
    ConsoleOutputHandle = None
    DefaultColor = None
    @staticmethod
    def SetConsoleColor(color):
        '''Change the text color on console window'''
        if not Win32API.DefaultColor:
            # Lazily capture the console's current attributes so they can be
            # restored later by ResetConsoleColor.
            if not Win32API.ConsoleOutputHandle:
                Win32API.ConsoleOutputHandle = ctypes.windll.kernel32.GetStdHandle(Win32API.StdOutputHandle)
            bufferInfo = ConsoleScreenBufferInfo()
            ctypes.windll.kernel32.GetConsoleScreenBufferInfo(Win32API.ConsoleOutputHandle, ctypes.byref(bufferInfo))
            Win32API.DefaultColor = int(bufferInfo.wAttributes & 0xFF)
        if IsPy3:
            sys.stdout.flush() # need flush stdout in python 3
        ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, color)
    @staticmethod
    def ResetConsoleColor():
        '''Reset the default text color on console window'''
        if IsPy3:
            sys.stdout.flush() # need flush stdout in python 3
        ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, Win32API.DefaultColor)
class Logger():
    """Console + file logger with optional colored stdout output."""
    LogFile = '@AutomationLog.txt'
    LineSep = '\n'
    @staticmethod
    def Write(log, consoleColor = -1, writeToFile = True, printToStdout = True):
        '''
        consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
        if consoleColor == -1, use default color
        '''
        if printToStdout:
            isValidColor = (consoleColor >= ConsoleColor.Black and consoleColor <= ConsoleColor.White)
            if isValidColor:
                Win32API.SetConsoleColor(consoleColor)
            try:
                sys.stdout.write(log)
            except UnicodeError as e:
                # Console code page may not support the text; warn in red.
                Win32API.SetConsoleColor(ConsoleColor.Red)
                isValidColor = True
                sys.stdout.write(str(type(e)) + ' can\'t print the log!\n')
            if isValidColor:
                Win32API.ResetConsoleColor()
        if not writeToFile:
            return
        if IsPy3:
            logFile = open(Logger.LogFile, 'a+', encoding = 'utf-8')
        else:
            logFile = codecs.open(Logger.LogFile, 'a+', 'utf-8')
        try:
            logFile.write(log)
        except Exception as ex:
            sys.stdout.write('can not write log with exception: {0} {1}'.format(type(ex), ex))
        finally:
            # Bug fix: the original only closed the file on exception,
            # leaking a handle on every successful write.
            logFile.close()
    @staticmethod
    def WriteLine(log, consoleColor = -1, writeToFile = True, printToStdout = True):
        '''
        consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
        if consoleColor == -1, use default color
        '''
        Logger.Write(log + Logger.LineSep, consoleColor, writeToFile, printToStdout)
    @staticmethod
    def Log(log, consoleColor = -1, writeToFile = True, printToStdout = True):
        '''
        Prefix the message with a timestamp, then delegate to Write.
        consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
        if consoleColor == -1, use default color
        '''
        t = time.localtime()
        log = '{0}-{1:02}-{2:02} {3:02}:{4:02}:{5:02} - {6}{7}'.format(t.tm_year, t.tm_mon, t.tm_mday,
            t.tm_hour, t.tm_min, t.tm_sec, log, Logger.LineSep)
        Logger.Write(log, consoleColor, writeToFile, printToStdout)
    @staticmethod
    def DeleteLog():
        """Remove the log file if it exists."""
        if os.path.exists(Logger.LogFile):
            os.remove(Logger.LogFile)
def GetMSBuildPath():
    """Locate msbuild.exe via the VS2013 environment; returns the path or None.

    NOTE(review): if the global Bit is neither 'Win32' nor 'x64', `cmd` is
    never assigned and this raises NameError -- confirm that is acceptable.
    """
    if Bit == 'Win32':
        cmd = 'call "%VS120COMNTOOLS%..\\..\\VC\\vcvarsall.bat" x86\nwhere msbuild'
    elif Bit == 'x64':
        cmd = 'call "%VS120COMNTOOLS%..\\..\\VC\\vcvarsall.bat" amd64\nwhere msbuild'
    ftemp = open('GetMSBuildPath.bat', 'wt')
    ftemp.write(cmd)
    ftemp.close()
    p = subprocess.Popen('GetMSBuildPath.bat', stdout = subprocess.PIPE)
    p.wait()
    lines = p.stdout.read().decode().splitlines()
    os.remove('GetMSBuildPath.bat')
    for line in lines:
        if 'MSBuild.exe' in line:
            return line
def GetIncrediBuildPath():
    """Look up BuildConsole.exe next to the registered IncrediBuild monitor.

    Returns the path or None.  NOTE(review): FileNotFoundError exists only on
    Python 3; on Python 2 a missing registry key raises WindowsError and is
    not caught here -- confirm whether py2 support is still required.
    """
    try:
        key=winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\Classes\IncrediBuild.MonitorFile\shell\open\command')
        value, typeId = winreg.QueryValueEx(key, '')
        if value:
            # Registry value looks like: "<path>" "%1" -- extract the quoted path.
            start = value.find('"')
            end = value.find('"', start + 1)
            path = value[start+1:end]
            buildConsole = os.path.join(os.path.dirname(path), 'BuildConsole.exe')
            return buildConsole
    except FileNotFoundError as e:
        Logger.WriteLine('can not find IncrediBuild', ConsoleColor.Red)
def UpdateCode():
    """Run 'git pull' in every directory listed in UpdateDir.

    Returns True when all pulls succeed, False otherwise.
    NOTE(review): shutil.which exists only on Python 3; on Python 2 this
    raises AttributeError -- confirm whether py2 support is still required.
    """
    # put git to path first
    if not shutil.which('git.exe'):
        # Bug fix: 'git\bin' contained a literal backspace escape; escape it.
        Logger.Log('找不到git.exe. 请确认安装git时将git\\bin目录路径加入到环境变量path中!!!\n, 跳过更新代码!!!', ConsoleColor.Yellow)
        return False  # bug fix: was lowercase 'false' (NameError at runtime)
    oldDir = os.getcwd()
    for repo_dir in UpdateDir:
        os.chdir(repo_dir)
        ret = os.system('git pull')
        os.chdir(oldDir)
        if ret != 0:
            Logger.Log('update {0} failed'.format(repo_dir), ConsoleColor.Yellow)
            return False  # bug fix: was lowercase 'false' (NameError at runtime)
    return True
def BuildProject(cmd):
    """Run one build command, retrying up to 6 times; True on success.

    For IncrediBuild the process exit code is unreliable, so the produced
    IncrediBuild.log is parsed for the failure count instead.
    """
    for i in range(6):
        Logger.WriteLine(cmd, ConsoleColor.Cyan)
        buildFailed = True
        startTime = time.time()
        p = subprocess.Popen(cmd) #IncrediBuild cannot use stdout=subprocess.PIPE, otherwise p.wait() never returns; possibly an IncrediBuild bug
        if IsPy3:
            try:
                buildFailed = p.wait(BuildTimeout)
            except subprocess.TimeoutExpired as e:
                Logger.Log('{0}'.format(e), ConsoleColor.Yellow)
                p.kill()
        else:
            buildFailed = p.wait()
        if not UseMSBuild:
            #IncrediBuild's return code does not indicate success; parse the log instead
            fin = open('IncrediBuild.log')
            for line in fin:
                if line.startswith('=========='):
                    Logger.Write(line, ConsoleColor.Cyan, writeToFile = True if IsPy3 else False)
                    if IsPy3:
                        start = line.find('失败') + 3 #========== Build: N succeeded, M failed, ... ==========
                    else:#ugly special-case handling for py2 compatibility
                        start = 0
                        n2 = 0
                        while 1:
                            if line[start].isdigit():
                                n2 += 1
                                if n2 == 2:
                                    break
                            start = line.find(' ', start)
                            start += 1
                        end = line.find(' ', start)
                    failCount = int(line[start:end])
                    buildFailed = failCount > 0
                else:
                    Logger.Write(line, ConsoleColor.Red, writeToFile = True if IsPy3 else False, printToStdout = True if ' error ' in line else False)
            fin.close()
        costTime = time.time() - startTime
        Logger.WriteLine('build cost time: {0:.1f}s\n'.format(costTime), ConsoleColor.Green)
        if not buildFailed:
            return True
    return False
def BuildAllProjects():
    """Assemble the clean/build command list (MSBuild or IncrediBuild,
    honoring IsRebuild/CleanAll and the 'first projects' lists) and run
    each in order.  Returns True only if every command succeeds."""
    buildSuccess = False
    cmds = []
    if UseMSBuild:
        if IsRebuild:
            if CleanAll:
                cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Debug'))
                cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Release'))
            else:
                cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType))
        for project in MSBuildFirstProjects:
            cmds.append('{0} {1} /t:{2} /p:Configuration={3};platform={4} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, project, BuildType, Bit))
        cmds.append('{0} {1} /p:Configuration={2};platform={3} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType, Bit))
    else: #IncrediBuild
        if IsRebuild:
            if CleanAll:
                cmds.append('"{0}" {1} /clean /cfg="{2}|{3}" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Debug', Bit))
                cmds.append('"{0}" {1} /clean /cfg="{2}|{3}" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Release', Bit))
            else:
                cmds.append('"{0}" {1} /clean /cfg="{2}|{3}" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType, Bit))
        for project in IncrediBuildFirstProjects:
            cmds.append('"{0}" {1} /build /prj={2} /cfg="{3}|{4}" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, project, BuildType, Bit))
        cmds.append('"{0}" {1} /build /cfg="{2}|{3}" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType, Bit))
    for cmd in cmds:
        buildSuccess = BuildProject(cmd)
        if not buildSuccess:
            break
    return buildSuccess
def main():
    """Drive the whole build: optional git update, optional pre-build bats
    (gated by the Copy flag), then compile everything.  Returns 0/1."""
    if UseMSBuild:
        if not os.path.exists(MSBuild):
            Logger.Log('can not find msbuild.exe', ConsoleColor.Red)
            return 1
    else:
        if not os.path.exists(IncrediBuild):
            Logger.Log('can not find msbuild.exe', ConsoleColor.Red)
            return 1
    # Work from this script's directory so the relative paths resolve.
    dir = os.path.dirname(__file__)
    if dir:
        oldDir = os.getcwd()
        os.chdir(dir)
    if Update:
        if not UpdateCode():
            return 1
        Logger.Log('git update succeed', ConsoleColor.Green)
    if Copy:
        for bat in ExecBatList:
            oldBatDir = os.getcwd()
            batDir = os.path.dirname(bat)
            batName = os.path.basename(bat)
            if batDir:
                os.chdir(batDir)
            start = time.clock()
            os.system(batName)
            Logger.Log('run "{}" cost {:.1f} seconds'.format(batName, time.clock() - start), ConsoleColor.Green)
            if batDir:
                os.chdir(oldBatDir)
    buildSuccess = BuildAllProjects()
    if buildSuccess:
        Logger.Log('build succeed', ConsoleColor.Green)
    else:
        Logger.Log('build failed', ConsoleColor.Red)
    if dir:
        os.chdir(oldDir)
    return 0 if buildSuccess else 1
if __name__ == '__main__':
    # Parse the (case-insensitive) command-line flags into the module-level
    # configuration globals, locate the chosen build tool, then run main().
    Logger.Log('run with argv ' + str(sys.argv), ConsoleColor.Green)
    sys.argv = [x.lower() for x in sys.argv]
    start_time = time.time()
    if 'debug' in sys.argv:
        BuildType = 'Debug'
    if 'lib' in sys.argv:
        # Static-library build uses a separate solution/projects.
        Dlllib = 'lib'
        SlnFile = '../StockCharge_lib.sln'
        MSBuildFirstProjects = [r'StockCharge_lib']
        IncrediBuildFirstProjects = ['StockCharge_lib']
    if '64' in sys.argv:
        Bit = 'x64'
    if 'build' in sys.argv:
        IsRebuild = False
        Build = 'Build'
    if 'update' in sys.argv:
        Update = True
    if 'copy' in sys.argv:
        Copy = True
    if 'clean' in sys.argv:
        CleanAll = True
    if 'incredibuild' in sys.argv:
        UseMSBuild = False
    if UseMSBuild:
        MSBuild = GetMSBuildPath()
        if not MSBuild:
            Logger.Log('can not find MSBuild.exe', ConsoleColor.Red)
            exit(1)
    else:
        IncrediBuild = GetIncrediBuildPath()
        if not IncrediBuild:
            Logger.Log('can not find BuildConsole.exe', ConsoleColor.Red)
            exit(1)
    cwd = os.getcwd()
    Logger.WriteLine('current dir is: {0}, {1}: {2}'.format(cwd, Build, BuildType))
    ret = main()
    end_time = time.time()
    cost_time = end_time-start_time
    Logger.WriteLine('all build cost time: {0:.2f} seconds'.format(cost_time), ConsoleColor.Green)
    exit(ret)
| mit |
saucelabs/Diamond | src/collectors/dseopscenter/test/testdseopscenter.py | 29 | 1863 | #!/usr/bin/python
# coding=utf-8
################################################################################
from test import CollectorTestCase
from test import get_collector_config
from mock import Mock
from mock import patch
from diamond.collector import Collector
from dseopscenter import DseOpsCenterCollector
################################################################################
class TestDseOpsCenterCollector(CollectorTestCase):
    """Tests for DseOpsCenterCollector using canned HTTP fixture responses."""
    def setUp(self):
        # Build a collector pointed at a dummy cluster; no handler needed.
        config = get_collector_config('DseOpsCenterCollector',
                                      {'cluster_id': 'MyTestCluster'})
        self.collector = DseOpsCenterCollector(config, None)
    def test_import(self):
        self.assertTrue(DseOpsCenterCollector)
    @patch.object(Collector, 'publish')
    def test_should_work_with_real_data(self, publish_mock):
        # First request: schema discovery; second: the metrics payload.
        urlopen_mock1 = patch('urllib2.urlopen', Mock(
            side_effect=lambda *args: self.getFixture('keyspaces.json')))
        urlopen_mock1.start()
        self.collector._get_schema()
        urlopen_mock1.stop()
        urlopen_mock2 = patch('urllib2.urlopen', Mock(
            side_effect=lambda *args: self.getFixture('new-metrics.json')))
        urlopen_mock2.start()
        self.collector.collect()
        urlopen_mock2.stop()
        metrics = {
            'cf-bf-false-positives.dse_system.leases': 0,
            'key-cache-requests': 38.28847822050253,
            'key-cache-hits': 9.114316945274672,
            'nonheap-max': 136314880,
            'nonheap-used': 48491696.666666664,
            'read-ops': 55.91526222229004,
        }
        self.setDocExample(collector=self.collector.__class__.__name__,
                           metrics=metrics,
                           defaultpath=self.collector.config['path'])
        self.assertPublishedMany(publish_mock, metrics)
| mit |
brinbois/Sick-Beard | lib/hachoir_parser/image/common.py | 90 | 1433 | from lib.hachoir_core.field import FieldSet, UserVector, UInt8
class RGB(FieldSet):
    """24-bit RGB color field: three consecutive unsigned bytes (R, G, B)."""
    # Well-known colors shown by name; anything else as #RRGGBB.
    color_name = {
        (  0,   0,   0): "Black",
        (255,   0,   0): "Red",
        (  0, 255,   0): "Green",
        (  0,   0, 255): "Blue",
        (255, 255, 255): "White",
    }
    static_size = 24
    def createFields(self):
        yield UInt8(self, "red", "Red")
        yield UInt8(self, "green", "Green")
        yield UInt8(self, "blue", "Blue")
    def createDescription(self):
        rgb = self["red"].value, self["green"].value, self["blue"].value
        name = self.color_name.get(rgb)
        if not name:
            name = "#%02X%02X%02X" % rgb
        return "RGB color: " + name
class RGBA(RGB):
    """32-bit RGBA color: RGB plus a trailing alpha (opacity) byte."""

    static_size = 32  # bits: four UInt8 fields

    def createFields(self):
        yield UInt8(self, "red", "Red")
        yield UInt8(self, "green", "Green")
        yield UInt8(self, "blue", "Blue")
        yield UInt8(self, "alpha", "Alpha")

    def createDescription(self):
        description = RGB.createDescription(self)
        # Map alpha 0..255 to a percentage (integer division under Python 2).
        opacity = self["alpha"].value*100/255
        return "%s (opacity: %s%%)" % (description, opacity)
class PaletteRGB(UserVector):
    """Fixed-length vector of RGB colors (a color palette)."""

    item_class = RGB
    item_name = "color"

    def createDescription(self):
        return "Palette of %u RGB colors" % len(self)
class PaletteRGBA(PaletteRGB):
    """Fixed-length vector of RGBA colors (palette with per-entry alpha)."""

    item_class = RGBA

    def createDescription(self):
        return "Palette of %u RGBA colors" % len(self)
| gpl-3.0 |
matsumoto-r/synciga | src/build/android/pylib/gtest/gtest_config.py | 3 | 1094 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Configuration file for android gtest suites."""
# Add new suites here before upgrading them to the stable list below.
EXPERIMENTAL_TEST_SUITES = [
# The JNI version of the sandbox_linux_unittests. Should be changed to
# 'sandbox_linux_unittests' once it can be run with --exe.
'sandbox_linux_jni_unittests',
]
# Do not modify this list without approval of an android owner.
# This list determines which suites are run by default, both for local
# testing and on android trybots running on commit-queue.
STABLE_TEST_SUITES = [
'TestWebKitAPI',
'android_webview_unittests',
'base_unittests',
'cc_unittests',
'components_unittests',
'content_unittests',
'gpu_unittests',
'ipc_tests',
'media_unittests',
'net_unittests',
'sql_unittests',
'sync_unit_tests',
'ui_unittests',
'unit_tests',
'webkit_compositor_bindings_unittests',
'webkit_unit_tests',
]
| bsd-3-clause |
hfp/tensorflow-xsmm | tensorflow/contrib/opt/python/training/addsign.py | 55 | 6067 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implementation of AddSign."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.training import optimizer
from tensorflow.python.training import training_ops
class AddSignOptimizer(optimizer.Optimizer):
  """Optimizer that implements the AddSign update.

  See [Bello et al., ICML2017],
  [Neural Optimizer Search with RL](https://arxiv.org/abs/1709.07417).
  """

  def __init__(self,
               learning_rate=0.1,
               alpha=1.0,
               beta=0.9,
               sign_decay_fn=None,
               use_locking=False,
               name='AddSignOptimizer'):
    """Constructs a new AddSignOptimizer object.

    Initialization:

    ```
    m_0 <- 0 (Initialize initial 1st moment vector)
    t <- 0 (Initialize timestep)
    ```

    Update:

    ```
    t <- t + 1
    m_t <- beta1 * m_{t-1} + (1 - beta1) * g
    sign_decay <- sign_decay_fn(t)
    update <- (alpha + sign_decay * sign(g) *sign(m)) * g
    variable <- variable - lr_t * update
    ```

    Example for AddSign-ld (AddSign with linear sign decay)
    ```
    decay_steps = 1000
    linear_decay_fn = sign_decays.get_linear_decay_fn(decay_steps)
    opt = AddSignOptimizer(learning_rate=0.1, sign_decay_fn=linear_decay_fn)
    ```

    Args:
      learning_rate: learning_rate used when taking a step.
      alpha: alpha used in optimizer.
      beta: decay used for computing the moving average m.
      sign_decay_fn: decay function applied to the sign(g) sign(m) quantity.
          Takes global_step as an argument. See sign_decay.py for some examples.
      use_locking: If True, use locks for update operations.
      name: Optional name for the operations created when applying gradients.
        Defaults to "AddSignOptimizer".
    """
    super(AddSignOptimizer, self).__init__(use_locking, name)
    self._lr = learning_rate
    self._alpha = alpha
    self._beta = beta

    self._sign_decay_fn = sign_decay_fn

    # Tensor versions of the constructor arguments, created in _prepare().
    self._lr_t = None
    self._alpha_t = None
    self._beta_t = None

  def apply_gradients(self, grads_and_vars, global_step=None, name=None):
    # Evaluate the (step-dependent) sign decay before the base class builds
    # the per-variable update ops, so _apply_* can read self._sign_decay_t.
    if self._sign_decay_fn is not None:
      self._sign_decay_t = ops.convert_to_tensor(
          self._sign_decay_fn(global_step), name='sign_decay')
    return super(AddSignOptimizer, self).apply_gradients(
        grads_and_vars, global_step=global_step, name=name)

  def _create_slots(self, var_list):
    # Create slots for the first moment.
    for v in var_list:
      self._zeros_slot(v, 'm', self._name)

  def _prepare(self):
    self._lr_t = ops.convert_to_tensor(self._lr, name='learning_rate')
    self._beta_t = ops.convert_to_tensor(self._beta, name='beta')
    self._alpha_t = ops.convert_to_tensor(self._alpha, name='alpha')
    if self._sign_decay_fn is None:
      # No decay schedule: the sign term keeps full weight at every step.
      self._sign_decay_t = ops.convert_to_tensor(1.0, name='sign_decay')

  def _apply_dense(self, grad, var):
    # Dense path delegates the whole fused update to the C++ kernel.
    m = self.get_slot(var, 'm')
    return training_ops.apply_add_sign(
        var,
        m,
        math_ops.cast(self._lr_t, var.dtype.base_dtype),
        math_ops.cast(self._alpha_t, var.dtype.base_dtype),
        math_ops.cast(self._sign_decay_t, var.dtype.base_dtype),
        math_ops.cast(self._beta_t, var.dtype.base_dtype),
        grad,
        use_locking=self._use_locking).op

  def _resource_apply_dense(self, grad, var):
    # Same as _apply_dense but for resource variables (handle-based).
    m = self.get_slot(var, 'm')
    return training_ops.resource_apply_add_sign(
        var.handle,
        m.handle,
        math_ops.cast(self._lr_t, var.dtype.base_dtype),
        math_ops.cast(self._alpha_t, var.dtype.base_dtype),
        math_ops.cast(self._sign_decay_t, var.dtype.base_dtype),
        math_ops.cast(self._beta_t, var.dtype.base_dtype),
        grad,
        use_locking=self._use_locking)

  def _apply_sparse(self, grad, var):
    # Sparse path: build the update manually from IndexedSlices since there
    # is no fused sparse kernel for AddSign.
    lr_t = math_ops.cast(self._lr_t, var.dtype.base_dtype)
    alpha_t = math_ops.cast(self._alpha_t, var.dtype.base_dtype)
    beta_t = math_ops.cast(self._beta_t, var.dtype.base_dtype)

    # m_t <- beta * m + (1 - beta) * grad (moving average of the gradient).
    # NOTE(review): this assigns the full dense moment tensor even though
    # only grad.indices carry new information — TODO confirm intended.
    m = self.get_slot(var, 'm')
    m_t = state_ops.assign(
        m, (m * beta_t) + (grad * (1 - beta_t)), use_locking=self._use_locking)

    sign_g = ops.IndexedSlices(
        math_ops.sign(grad.values), grad.indices, dense_shape=grad.dense_shape)
    # sign(g) * sign(m), gathered at the updated rows only.
    sign_gm = ops.IndexedSlices(
        array_ops.gather(math_ops.sign(m_t), sign_g.indices) * sign_g.values,
        sign_g.indices,
        dense_shape=sign_g.dense_shape)

    sign_decayed = math_ops.cast(
        self._sign_decay_t, var.dtype.base_dtype)
    # multiplier = alpha + sign_decay * sign(g) * sign(m)
    multiplier_values = alpha_t + sign_decayed * sign_gm.values
    multiplier = ops.IndexedSlices(
        multiplier_values, sign_gm.indices, dense_shape=sign_gm.dense_shape)

    # var <- var - lr * multiplier * grad (scatter only the touched rows).
    final_update = ops.IndexedSlices(
        lr_t * multiplier.values * grad.values,
        multiplier.indices,
        dense_shape=multiplier.dense_shape)

    var_update = state_ops.scatter_sub(
        var,
        final_update.indices,
        final_update.values,
        use_locking=self._use_locking)

    return control_flow_ops.group(* [var_update, m_t])
efortuna/AndroidSDKClone | ndk_experimental/toolchains/aarch64-linux-android-4.9/prebuilt/linux-x86_64/share/gdb/python/gdb/command/pretty_printers.py | 137 | 14474 | # Pretty-printer commands.
# Copyright (C) 2010-2013 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""GDB commands for working with pretty-printers."""
import copy
import gdb
import re
def parse_printer_regexps(arg):
    """Internal utility to parse a pretty-printer command argv.

    Arguments:
        arg: The arguments to the command.  The format is:
             [object-regexp [name-regexp]].
             Individual printers in a collection are named as
             printer-name;subprinter-name.

    Returns:
        The result is a 3-tuple of compiled regular expressions, except that
        the resulting compiled subprinter regexp is None if not provided.

    Raises:
        SyntaxError: an error processing ARG
    """

    argv = gdb.string_to_argv(arg);
    argc = len(argv)
    # Empty patterns match everything.
    object_regexp = ""  # match everything
    name_regexp = ""  # match everything
    subname_regexp = None
    if argc > 3:
        raise SyntaxError("too many arguments")
    if argc >= 1:
        object_regexp = argv[0]
    if argc >= 2:
        # "name;subname" splits into printer and subprinter patterns.
        name_subname = argv[1].split(";", 1)
        name_regexp = name_subname[0]
        if len(name_subname) == 2:
            subname_regexp = name_subname[1]
    # That re.compile raises SyntaxError was determined empirically.
    # We catch it and reraise it to provide a slightly more useful
    # error message for the user.
    # NOTE(review): on modern CPython an invalid pattern raises re.error,
    # not SyntaxError, so these handlers may never fire — confirm against
    # the Python interpreter bundled with this gdb.
    try:
        object_re = re.compile(object_regexp)
    except SyntaxError:
        raise SyntaxError("invalid object regexp: %s" % object_regexp)
    try:
        name_re = re.compile (name_regexp)
    except SyntaxError:
        raise SyntaxError("invalid name regexp: %s" % name_regexp)
    if subname_regexp is not None:
        try:
            subname_re = re.compile(subname_regexp)
        except SyntaxError:
            raise SyntaxError("invalid subname regexp: %s" % subname_regexp)
    else:
        subname_re = None
    return(object_re, name_re, subname_re)
def printer_enabled_p(printer):
    """Internal utility to see if printer (or subprinter) is enabled.

    Printers registered through the public API are not required to carry an
    ``enabled`` attribute; those without one are treated as always enabled.
    """
    # getattr with a default expresses the hasattr/else pair in one step.
    return getattr(printer, "enabled", True)
class InfoPrettyPrinter(gdb.Command):
    """GDB command to list all registered pretty-printers.

    Usage: info pretty-printer [object-regexp [name-regexp]]

    OBJECT-REGEXP is a regular expression matching the objects to list.
    Objects are "global", the program space's file, and the objfiles within
    that program space.

    NAME-REGEXP matches the name of the pretty-printer.
    Individual printers in a collection are named as
    printer-name;subprinter-name.
    """

    def __init__ (self):
        super(InfoPrettyPrinter, self).__init__("info pretty-printer",
                                                gdb.COMMAND_DATA)

    @staticmethod
    def enabled_string(printer):
        """Return "" if PRINTER is enabled, otherwise " [disabled]"."""
        if printer_enabled_p(printer):
            return ""
        else:
            return " [disabled]"

    @staticmethod
    def printer_name(printer):
        """Return the printer's name."""
        if hasattr(printer, "name"):
            return printer.name
        if hasattr(printer, "__name__"):
            return printer.__name__
        # This "shouldn't happen", but the public API allows for
        # direct additions to the pretty-printer list, and we shouldn't
        # crash because someone added a bogus printer.
        # Plus we want to give the user a way to list unknown printers.
        return "unknown"

    def list_pretty_printers(self, pretty_printers, name_re, subname_re):
        """Print a list of pretty-printers."""
        # A potential enhancement is to provide an option to list printers in
        # "lookup order" (i.e. unsorted).
        # Sort a copy so we never reorder the live registration list.
        sorted_pretty_printers = sorted (copy.copy(pretty_printers),
                                         key = self.printer_name)
        for printer in sorted_pretty_printers:
            name = self.printer_name(printer)
            enabled = self.enabled_string(printer)
            if name_re.match(name):
                print ("  %s%s" % (name, enabled))
                if (hasattr(printer, "subprinters") and
                    printer.subprinters is not None):
                    sorted_subprinters = sorted (copy.copy(printer.subprinters),
                                                 key = self.printer_name)
                    for subprinter in sorted_subprinters:
                        if (not subname_re or
                            subname_re.match(subprinter.name)):
                            print ("    %s%s" %
                                   (subprinter.name,
                                    self.enabled_string(subprinter)))

    def invoke1(self, title, printer_list,
                obj_name_to_match, object_re, name_re, subname_re):
        """Subroutine of invoke to simplify it."""
        # Only emit the section when its owner matches the object regexp.
        if printer_list and object_re.match(obj_name_to_match):
            print (title)
            self.list_pretty_printers(printer_list, name_re, subname_re)

    def invoke(self, arg, from_tty):
        """GDB calls this to perform the command."""
        (object_re, name_re, subname_re) = parse_printer_regexps(arg)
        # Walk the three registration scopes: global, progspace, objfiles.
        self.invoke1("global pretty-printers:", gdb.pretty_printers,
                     "global", object_re, name_re, subname_re)
        cp = gdb.current_progspace()
        self.invoke1("progspace %s pretty-printers:" % cp.filename,
                     cp.pretty_printers, "progspace",
                     object_re, name_re, subname_re)
        for objfile in gdb.objfiles():
            self.invoke1("  objfile %s pretty-printers:" % objfile.filename,
                         objfile.pretty_printers, objfile.filename,
                         object_re, name_re, subname_re)
def count_enabled_printers(pretty_printers):
    """Return a 2-tuple of number of enabled and total printers."""
    num_enabled = 0
    num_total = 0
    for p in pretty_printers:
        subs = getattr(p, "subprinters", None)
        if subs is not None:
            # A collection: count its subprinters individually; they only
            # count as enabled when the parent printer itself is enabled.
            num_total += len(subs)
            if printer_enabled_p(p):
                num_enabled += sum(1 for sp in subs if printer_enabled_p(sp))
        else:
            # A plain printer counts as a single entry.
            num_total += 1
            if printer_enabled_p(p):
                num_enabled += 1
    return (num_enabled, num_total)
def count_all_enabled_printers():
    """Return a 2-tuble of the enabled state and total number of all printers.
    This includes subprinters.
    """
    enabled_count = 0
    total_count = 0
    # Gather every registration scope, then tally them uniformly.
    printer_lists = [gdb.pretty_printers,
                     gdb.current_progspace().pretty_printers]
    for objfile in gdb.objfiles():
        printer_lists.append(objfile.pretty_printers)
    for printers in printer_lists:
        (t_enabled, t_total) = count_enabled_printers(printers)
        enabled_count += t_enabled
        total_count += t_total
    return (enabled_count, total_count)
def pluralize(text, n, suffix="s"):
"""Return TEXT pluralized if N != 1."""
if n != 1:
return "%s%s" % (text, suffix)
else:
return text
def show_pretty_printer_enabled_summary():
    """Print the number of printers enabled/disabled.
    We count subprinters individually.
    """
    counts = count_all_enabled_printers()
    print ("%d of %d printers enabled" % counts)
def do_enable_pretty_printer_1 (pretty_printers, name_re, subname_re, flag):
    """Worker for enabling/disabling pretty-printers.

    Arguments:
        pretty_printers: list of pretty-printers
        name_re: regular-expression object to select printers
        subname_re: regular expression object to select subprinters or None
                    if all are affected
        flag: True for Enable, False for Disable

    Returns:
        The number of printers affected.
        This is just for informational purposes for the user.
    """

    total = 0
    for printer in pretty_printers:
        # A printer may expose its name either as .name or as __name__.
        if (hasattr(printer, "name") and name_re.match(printer.name) or
            hasattr(printer, "__name__") and name_re.match(printer.__name__)):
            if (hasattr(printer, "subprinters") and
                printer.subprinters is not None):
                if not subname_re:
                    # No subprinter pattern: toggle the whole collection.
                    # Only record printers that change state.
                    if printer_enabled_p(printer) != flag:
                        for subprinter in printer.subprinters:
                            if printer_enabled_p(subprinter):
                                total += 1
                    # NOTE: We preserve individual subprinter settings.
                    printer.enabled = flag
                else:
                    # NOTE: Whether this actually disables the subprinter
                    # depends on whether the printer's lookup function supports
                    # the "enable" API.  We can only assume it does.
                    for subprinter in printer.subprinters:
                        if subname_re.match(subprinter.name):
                            # Only record printers that change state.
                            if (printer_enabled_p(printer) and
                                printer_enabled_p(subprinter) != flag):
                                total += 1
                            subprinter.enabled = flag
            else:
                # This printer has no subprinters.
                # If the user does "disable pretty-printer .* .* foo"
                # should we disable printers that don't have subprinters?
                # How do we apply "foo" in this context?  Since there is no
                # "foo" subprinter it feels like we should skip this printer.
                # There's still the issue of how to handle
                # "disable pretty-printer .* .* .*", and every other variation
                # that can match everything.  For now punt and only support
                # "disable pretty-printer .* .*" (i.e. subname is elided)
                # to disable everything.
                if not subname_re:
                    # Only record printers that change state.
                    if printer_enabled_p(printer) != flag:
                        total += 1
                    printer.enabled = flag
    return total
def do_enable_pretty_printer (arg, flag):
    """Internal worker for enabling/disabling pretty-printers.

    ARG is the raw command argument string; FLAG is True to enable,
    False to disable.  Applies to every registration scope whose owner
    name matches the object regexp.
    """
    (object_re, name_re, subname_re) = parse_printer_regexps(arg)

    total = 0
    if object_re.match("global"):
        total += do_enable_pretty_printer_1(gdb.pretty_printers,
                                            name_re, subname_re, flag)
    cp = gdb.current_progspace()
    if object_re.match("progspace"):
        total += do_enable_pretty_printer_1(cp.pretty_printers,
                                            name_re, subname_re, flag)
    for objfile in gdb.objfiles():
        # Objfiles are matched by file name rather than a fixed keyword.
        if object_re.match(objfile.filename):
            total += do_enable_pretty_printer_1(objfile.pretty_printers,
                                                name_re, subname_re, flag)

    if flag:
        state = "enabled"
    else:
        state = "disabled"
    print ("%d %s %s" % (total, pluralize("printer", total), state))
    # Print the total list of printers currently enabled/disabled.
    # This is to further assist the user in determining whether the result
    # is expected.  Since we use regexps to select it's useful.
    show_pretty_printer_enabled_summary()
# Enable/Disable one or more pretty-printers.
#
# This is intended for use when a broken pretty-printer is shipped/installed
# and the user wants to disable that printer without disabling all the other
# printers.
#
# A useful addition would be -v (verbose) to show each printer affected.
class EnablePrettyPrinter (gdb.Command):
    """GDB command to enable the specified pretty-printer.

    Usage: enable pretty-printer [object-regexp [name-regexp]]

    OBJECT-REGEXP is a regular expression matching the objects to examine.
    Objects are "global", the program space's file, and the objfiles within
    that program space.

    NAME-REGEXP matches the name of the pretty-printer.
    Individual printers in a collection are named as
    printer-name;subprinter-name.
    """
    # NOTE: the class docstring doubles as the command's help text inside
    # gdb, so it is written for end users rather than maintainers.

    def __init__(self):
        super(EnablePrettyPrinter, self).__init__("enable pretty-printer",
                                                  gdb.COMMAND_DATA)

    def invoke(self, arg, from_tty):
        """GDB calls this to perform the command."""
        do_enable_pretty_printer(arg, True)
class DisablePrettyPrinter (gdb.Command):
    """GDB command to disable the specified pretty-printer.

    Usage: disable pretty-printer [object-regexp [name-regexp]]

    OBJECT-REGEXP is a regular expression matching the objects to examine.
    Objects are "global", the program space's file, and the objfiles within
    that program space.

    NAME-REGEXP matches the name of the pretty-printer.
    Individual printers in a collection are named as
    printer-name;subprinter-name.
    """
    # NOTE: the class docstring doubles as the command's help text inside
    # gdb, so it is written for end users rather than maintainers.

    def __init__(self):
        super(DisablePrettyPrinter, self).__init__("disable pretty-printer",
                                                   gdb.COMMAND_DATA)

    def invoke(self, arg, from_tty):
        """GDB calls this to perform the command."""
        do_enable_pretty_printer(arg, False)
def register_pretty_printer_commands():
    """Call from a top level script to install the pretty-printer commands."""
    # Instantiating a gdb.Command subclass registers it with gdb.
    InfoPrettyPrinter()
    EnablePrettyPrinter()
    DisablePrettyPrinter()

# Register at import time so loading this module is sufficient.
register_pretty_printer_commands()
| apache-2.0 |
axelspringer/ansible-modules-core | cloud/rackspace/rax_cdb_database.py | 51 | 4856 | #!/usr/bin/python -tt
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
# DOCUMENTATION and EXAMPLES are parsed by Ansible's documentation tooling
# and must remain valid YAML.
DOCUMENTATION = '''
module: rax_cdb_database
short_description: 'create / delete a database in the Cloud Databases'
description:
  - create / delete a database in the Cloud Databases.
version_added: "1.8"
options:
  cdb_id:
    description:
      - The databases server UUID
    default: null
  name:
    description:
      - Name to give to the database
    default: null
  character_set:
    description:
      - Set of symbols and encodings
    default: 'utf8'
  collate:
    description:
      - Set of rules for comparing characters in a character set
    default: 'utf8_general_ci'
  state:
    description:
      - Indicate desired state of the resource
    choices: ['present', 'absent']
    default: present
author: "Simon JAILLET (@jails)"
extends_documentation_fragment: rackspace
'''

EXAMPLES = '''
- name: Build a database in Cloud Databases
  tasks:
    - name: Database build request
      local_action:
        module: rax_cdb_database
        credentials: ~/.raxpub
        region: IAD
        cdb_id: 323e7ce0-9cb0-11e3-a5e2-0800200c9a66
        name: db1
        state: present
      register: rax_db_database
'''

# Import pyrax defensively so main() can emit a clean failure message when
# the SDK is missing instead of a traceback at import time.
try:
    import pyrax
    HAS_PYRAX = True
except ImportError:
    HAS_PYRAX = False
def find_database(instance, name):
    """Look up *name* on a Cloud Databases *instance*.

    Returns the database object, or False when the lookup raises
    (e.g. the database does not exist).
    """
    try:
        return instance.get_database(name)
    except Exception:
        return False
def save_database(module, cdb_id, name, character_set, collate):
    """Ensure the named database exists on the instance (idempotent create).

    Exits the Ansible module via exit_json on success or fail_json on error.
    """
    cdb = pyrax.cloud_databases

    try:
        instance = cdb.get(cdb_id)
    except Exception as e:
        module.fail_json(msg='%s' % e.message)

    changed = False

    database = find_database(instance, name)

    # Only create when the database is not already present.
    if not database:
        try:
            database = instance.create_database(name=name,
                                                character_set=character_set,
                                                collate=collate)
        except Exception as e:
            module.fail_json(msg='%s' % e.message)
        else:
            changed = True

    module.exit_json(changed=changed, action='create',
                     database=rax_to_dict(database))
def delete_database(module, cdb_id, name):
    """Ensure the named database is absent from the instance (idempotent delete).

    Exits the Ansible module via exit_json on success or fail_json on error.
    """
    cdb = pyrax.cloud_databases

    try:
        instance = cdb.get(cdb_id)
    except Exception as e:
        module.fail_json(msg='%s' % e.message)

    changed = False

    database = find_database(instance, name)

    # Only delete when the database actually exists.
    if database:
        try:
            database.delete()
        except Exception as e:
            module.fail_json(msg='%s' % e.message)
        else:
            changed = True

    module.exit_json(changed=changed, action='delete',
                     database=rax_to_dict(database))
def rax_cdb_database(module, state, cdb_id, name, character_set, collate):
    """Dispatch to the create or delete worker based on the desired state."""
    if state == 'absent':
        delete_database(module, cdb_id, name)
    elif state == 'present':
        save_database(module, cdb_id, name, character_set, collate)
def main():
    """Ansible module entry point: parse arguments and apply the desired state."""
    # Start from the shared Rackspace argument spec and add module options.
    argument_spec = rax_argument_spec()
    argument_spec.update(
        dict(
            cdb_id=dict(type='str', required=True),
            name=dict(type='str', required=True),
            character_set=dict(type='str', default='utf8'),
            collate=dict(type='str', default='utf8_general_ci'),
            state=dict(default='present', choices=['present', 'absent'])
        )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=rax_required_together(),
    )

    # Fail cleanly if the pyrax SDK could not be imported.
    if not HAS_PYRAX:
        module.fail_json(msg='pyrax is required for this module')

    cdb_id = module.params.get('cdb_id')
    name = module.params.get('name')
    character_set = module.params.get('character_set')
    collate = module.params.get('collate')
    state = module.params.get('state')

    setup_rax_module(module, pyrax)

    rax_cdb_database(module, state, cdb_id, name, character_set, collate)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
# invoke the module
main()
| gpl-3.0 |
AOtools/soapy | soapy/pyqtgraph/widgets/SpinBox.py | 25 | 19875 | # -*- coding: utf-8 -*-
from ..Qt import QtGui, QtCore
from ..python2_3 import asUnicode
from ..SignalProxy import SignalProxy
from .. import functions as fn
from math import log
from decimal import Decimal as D ## Use decimal to avoid accumulating floating-point errors
from decimal import *
import weakref
__all__ = ['SpinBox']
class SpinBox(QtGui.QAbstractSpinBox):
"""
**Bases:** QtGui.QAbstractSpinBox
QSpinBox widget on steroids. Allows selection of numerical value, with extra features:
- SI prefix notation (eg, automatically display "300 mV" instead of "0.003 V")
- Float values with linear and decimal stepping (1-9, 10-90, 100-900, etc.)
- Option for unbounded values
- Delayed signals (allows multiple rapid changes with only one change signal)
============================= ==============================================
**Signals:**
valueChanged(value) Same as QSpinBox; emitted every time the value
has changed.
sigValueChanged(self) Emitted when value has changed, but also combines
multiple rapid changes into one signal (eg,
when rolling the mouse wheel).
sigValueChanging(self, value) Emitted immediately for all value changes.
============================= ==============================================
"""
## There's a PyQt bug that leaks a reference to the
## QLineEdit returned from QAbstractSpinBox.lineEdit()
## This makes it possible to crash the entire program
## by making accesses to the LineEdit after the spinBox has been deleted.
## I have no idea how to get around this..
valueChanged = QtCore.Signal(object) # (value) for compatibility with QSpinBox
sigValueChanged = QtCore.Signal(object) # (self)
sigValueChanging = QtCore.Signal(object, object) # (self, value) sent immediately; no delay.
def __init__(self, parent=None, value=0.0, **kwargs):
"""
============== ========================================================================
**Arguments:**
parent Sets the parent widget for this SpinBox (optional). Default is None.
value (float/int) initial value. Default is 0.0.
bounds (min,max) Minimum and maximum values allowed in the SpinBox.
Either may be None to leave the value unbounded. By default, values are unbounded.
suffix (str) suffix (units) to display after the numerical value. By default, suffix is an empty str.
siPrefix (bool) If True, then an SI prefix is automatically prepended
to the units and the value is scaled accordingly. For example,
if value=0.003 and suffix='V', then the SpinBox will display
"300 mV" (but a call to SpinBox.value will still return 0.003). Default is False.
step (float) The size of a single step. This is used when clicking the up/
down arrows, when rolling the mouse wheel, or when pressing
keyboard arrows while the widget has keyboard focus. Note that
the interpretation of this value is different when specifying
the 'dec' argument. Default is 0.01.
dec (bool) If True, then the step value will be adjusted to match
the current size of the variable (for example, a value of 15
might step in increments of 1 whereas a value of 1500 would
step in increments of 100). In this case, the 'step' argument
is interpreted *relative* to the current value. The most common
'step' values when dec=True are 0.1, 0.2, 0.5, and 1.0. Default is False.
minStep (float) When dec=True, this specifies the minimum allowable step size.
int (bool) if True, the value is forced to integer type. Default is False
decimals (int) Number of decimal values to display. Default is 2.
============== ========================================================================
"""
QtGui.QAbstractSpinBox.__init__(self, parent)
self.lastValEmitted = None
self.lastText = ''
self.textValid = True ## If false, we draw a red border
self.setMinimumWidth(0)
self.setMaximumHeight(20)
self.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
self.opts = {
'bounds': [None, None],
## Log scaling options #### Log mode is no longer supported.
#'step': 0.1,
#'minStep': 0.001,
#'log': True,
#'dec': False,
## decimal scaling option - example
#'step': 0.1,
#'minStep': .001,
#'log': False,
#'dec': True,
## normal arithmetic step
'step': D('0.01'), ## if 'dec' is false, the spinBox steps by 'step' every time
## if 'dec' is True, the step size is relative to the value
## 'step' needs to be an integral divisor of ten, ie 'step'*n=10 for some integer value of n (but only if dec is True)
'log': False,
'dec': False, ## if true, does decimal stepping. ie from 1-10 it steps by 'step', from 10 to 100 it steps by 10*'step', etc.
## if true, minStep must be set in order to cross zero.
'int': False, ## Set True to force value to be integer
'suffix': '',
'siPrefix': False, ## Set to True to display numbers with SI prefix (ie, 100pA instead of 1e-10A)
'delay': 0.3, ## delay sending wheel update signals for 300ms
'delayUntilEditFinished': True, ## do not send signals until text editing has finished
'decimals': 3,
}
self.decOpts = ['step', 'minStep']
self.val = D(asUnicode(value)) ## Value is precise decimal. Ordinary math not allowed.
self.updateText()
self.skipValidate = False
self.setCorrectionMode(self.CorrectToPreviousValue)
self.setKeyboardTracking(False)
self.setOpts(**kwargs)
self.editingFinished.connect(self.editingFinishedEvent)
self.proxy = SignalProxy(self.sigValueChanging, slot=self.delayedChange, delay=self.opts['delay'])
def event(self, ev):
ret = QtGui.QAbstractSpinBox.event(self, ev)
if ev.type() == QtCore.QEvent.KeyPress and ev.key() == QtCore.Qt.Key_Return:
ret = True ## For some reason, spinbox pretends to ignore return key press
return ret
##lots of config options, just gonna stuff 'em all in here rather than do the get/set crap.
def setOpts(self, **opts):
"""
Changes the behavior of the SpinBox. Accepts most of the arguments
allowed in :func:`__init__ <pyqtgraph.SpinBox.__init__>`.
"""
#print opts
for k in opts:
if k == 'bounds':
self.setMinimum(opts[k][0], update=False)
self.setMaximum(opts[k][1], update=False)
elif k == 'min':
self.setMinimum(opts[k], update=False)
elif k == 'max':
self.setMaximum(opts[k], update=False)
elif k in ['step', 'minStep']:
self.opts[k] = D(asUnicode(opts[k]))
elif k == 'value':
pass ## don't set value until bounds have been set
elif k in self.opts:
self.opts[k] = opts[k]
else:
raise TypeError("Invalid keyword argument '%s'." % k)
if 'value' in opts:
self.setValue(opts['value'])
## If bounds have changed, update value to match
if 'bounds' in opts and 'value' not in opts:
self.setValue()
## sanity checks:
if self.opts['int']:
if 'step' in opts:
step = opts['step']
## not necessary..
#if int(step) != step:
#raise Exception('Integer SpinBox must have integer step size.')
else:
self.opts['step'] = int(self.opts['step'])
if 'minStep' in opts:
step = opts['minStep']
if int(step) != step:
raise Exception('Integer SpinBox must have integer minStep size.')
else:
ms = int(self.opts.get('minStep', 1))
if ms < 1:
ms = 1
self.opts['minStep'] = ms
if 'delay' in opts:
self.proxy.setDelay(opts['delay'])
self.updateText()
def setMaximum(self, m, update=True):
"""Set the maximum allowed value (or None for no limit)"""
if m is not None:
m = D(asUnicode(m))
self.opts['bounds'][1] = m
if update:
self.setValue()
def setMinimum(self, m, update=True):
"""Set the minimum allowed value (or None for no limit)"""
if m is not None:
m = D(asUnicode(m))
self.opts['bounds'][0] = m
if update:
self.setValue()
def setPrefix(self, p):
"""Set a string prefix.
"""
self.setOpts(prefix=p)
def setRange(self, r0, r1):
"""Set the upper and lower limits for values in the spinbox.
"""
self.setOpts(bounds = [r0,r1])
def setProperty(self, prop, val):
## for QSpinBox compatibility
if prop == 'value':
#if type(val) is QtCore.QVariant:
#val = val.toDouble()[0]
self.setValue(val)
else:
print("Warning: SpinBox.setProperty('%s', ..) not supported." % prop)
def setSuffix(self, suf):
"""Set the string suffix appended to the spinbox text.
"""
self.setOpts(suffix=suf)
def setSingleStep(self, step):
"""Set the step size used when responding to the mouse wheel, arrow
buttons, or arrow keys.
"""
self.setOpts(step=step)
def setDecimals(self, decimals):
"""Set the number of decimals to be displayed when formatting numeric
values.
"""
self.setOpts(decimals=decimals)
def selectNumber(self):
"""
Select the numerical portion of the text to allow quick editing by the user.
"""
le = self.lineEdit()
text = asUnicode(le.text())
if self.opts['suffix'] == '':
le.setSelection(0, len(text))
else:
try:
index = text.index(' ')
except ValueError:
return
le.setSelection(0, index)
def value(self):
"""
Return the value of this SpinBox.
"""
if self.opts['int']:
return int(self.val)
else:
return float(self.val)
def setValue(self, value=None, update=True, delaySignal=False):
    """
    Set the value of this spin.
    If the value is out of bounds, it will be clipped to the nearest boundary.
    If the spin is integer type, the value will be coerced to int.
    Returns the actual value set.
    If value is None, then the current value is used (this is for resetting
    the value after bounds, etc. have changed)

    NOTE(review): when the (clipped) value equals the current value this
    returns None rather than the value -- callers relying on the documented
    return should verify.
    """
    if value is None:
        value = self.value()

    bounds = self.opts['bounds']
    # Clip into [lower, upper]; either bound may be None (unbounded).
    if bounds[0] is not None and value < bounds[0]:
        value = bounds[0]
    if bounds[1] is not None and value > bounds[1]:
        value = bounds[1]

    if self.opts['int']:
        value = int(value)

    # Store internally as Decimal (via string round-trip) to avoid
    # accumulating binary float error across repeated steps.
    value = D(asUnicode(value))
    if value == self.val:
        return
    prev = self.val

    self.val = value
    if update:
        self.updateText(prev=prev)

    self.sigValueChanging.emit(self, float(self.val))  ## change will be emitted in 300ms if there are no subsequent changes.
    if not delaySignal:
        self.emitChanged()

    return value
def emitChanged(self):
    """Emit both value-changed signals and record the value that was emitted."""
    self.lastValEmitted = self.val
    self.valueChanged.emit(float(self.val))
    self.sigValueChanged.emit(self)
def delayedChange(self):
    """Delayed-signal target: emit only if the value moved since last emission."""
    try:
        if self.val != self.lastValEmitted:
            self.emitChanged()
    except RuntimeError:
        # The underlying C++ object may already be deleted by the time the
        # delayed signal fires; ignore the late call in that case.
        pass
def widgetGroupInterface(self):
    """Describe this widget to WidgetGroup: (change signal, getter, setter)."""
    return (self.valueChanged, SpinBox.value, SpinBox.setValue)
def sizeHint(self):
    """Suggest a default size; only the 120px width is meaningful."""
    return QtCore.QSize(120, 0)
def stepEnabled(self):
    """Both up and down stepping are always enabled for this spinbox."""
    return self.StepUpEnabled | self.StepDownEnabled
def stepBy(self, n):
    """Step the value by *n* increments (sign gives direction).

    Invoked for arrow buttons, arrow keys and the mouse wheel; the final
    value is set with a delayed signal so rapid stepping does not flood
    listeners with intermediate changes.
    """
    n = D(int(n))   ## n must be integral number of steps.
    s = [D(-1), D(1)][n >= 0]  ## determine sign of step
    val = self.val

    for i in range(int(abs(n))):
        if self.opts['log']:
            raise Exception("Log mode no longer supported.")
        if self.opts['dec']:
            # Decimal-step mode: step size scales with the current magnitude.
            if val == 0:
                # Can't compute a magnitude for zero; use the minimum step.
                step = self.opts['minStep']
                exp = None
            else:
                vs = [D(-1), D(1)][val >= 0]
                ## fudge factor. at some places, the step size depends on the step sign.
                fudge = D('1.01')**(s*vs)
                # Decimal exponent (order of magnitude) of the fudged value.
                exp = abs(val * fudge).log10().quantize(1, ROUND_FLOOR)
                step = self.opts['step'] * D(10)**exp
            if 'minStep' in self.opts:
                step = max(step, self.opts['minStep'])
            val += s * step
        else:
            # Fixed-step mode; values within minStep of zero snap to zero.
            val += s*self.opts['step']
            if 'minStep' in self.opts and abs(val) < self.opts['minStep']:
                val = D(0)
    self.setValue(val, delaySignal=True)  ## note all steps (arrow buttons, wheel, up/down keys..) emit delayed signals only.
def valueInRange(self, value):
    """Return whether *value* satisfies the bounds and integer constraints."""
    lower, upper = self.opts['bounds']
    if lower is not None and value < lower:
        return False
    if upper is not None and value > upper:
        return False
    if self.opts.get('int', False) and int(value) != value:
        return False
    return True
def updateText(self, prev=None):
    """Regenerate the line-edit text from the current value.

    *prev* is the previous value; it is only consulted to choose an SI
    prefix when the current value is zero (zero has no magnitude of its
    own to derive a prefix from).
    """
    # get the number of decimal places to print
    decimals = self.opts.get('decimals')

    # temporarily disable validation (we are writing the text ourselves)
    self.skipValidate = True

    # add a prefix to the units if requested
    if self.opts['siPrefix']:
        # special case: if it's zero use the previous prefix
        if self.val == 0 and prev is not None:
            (s, p) = fn.siScale(prev)
            # NOTE: insert optional format string here?
            txt = ("%."+str(decimals)+"g %s%s") % (0, p, self.opts['suffix'])
        else:
            # NOTE: insert optional format string here as an argument?
            txt = fn.siFormat(float(self.val), precision=decimals, suffix=self.opts['suffix'])
    # otherwise, format the string manually
    else:
        # NOTE: insert optional format string here?
        txt = ('%.'+str(decimals)+'g%s') % (self.val, self.opts['suffix'])

    # actually set the text
    self.lineEdit().setText(txt)
    self.lastText = txt

    # re-enable the validation
    self.skipValidate = False
def validate(self, strn, pos):
    """QValidator hook: classify the current text as Acceptable /
    Intermediate / Invalid and update the error border accordingly.

    Returns a (state, pos) or (state, string, pos) tuple depending on
    which PyQt string API is in use.
    """
    if self.skipValidate:
        # updateText() is rewriting the text programmatically; accept as-is.
        ret = QtGui.QValidator.Acceptable
    else:
        try:
            ## first make sure we didn't mess with the suffix
            suff = self.opts.get('suffix', '')
            if len(suff) > 0 and asUnicode(strn)[-len(suff):] != suff:
                ret = QtGui.QValidator.Invalid

            ## next see if we actually have an interpretable value
            else:
                val = self.interpret()
                if val is False:
                    ret = QtGui.QValidator.Intermediate
                else:
                    if self.valueInRange(val):
                        if not self.opts['delayUntilEditFinished']:
                            # Apply immediately but skip the text rewrite,
                            # since the user is still typing.
                            self.setValue(val, update=False)
                        ret = QtGui.QValidator.Acceptable
                    else:
                        ret = QtGui.QValidator.Intermediate

        except:
            # Any parsing error leaves the text in an intermediate state.
            ret = QtGui.QValidator.Intermediate

    ## draw / clear border
    if ret == QtGui.QValidator.Intermediate:
        self.textValid = False
    elif ret == QtGui.QValidator.Acceptable:
        self.textValid = True
    ## note: if text is invalid, we don't change the textValid flag
    ## since the text will be forced to its previous state anyway
    self.update()

    ## support 2 different pyqt APIs. Bleh.
    if hasattr(QtCore, 'QString'):
        return (ret, pos)
    else:
        return (ret, strn, pos)
def paintEvent(self, ev):
    """Paint normally, then overlay a red rounded border while invalid text is shown."""
    QtGui.QAbstractSpinBox.paintEvent(self, ev)
    if self.textValid:
        return
    painter = QtGui.QPainter(self)
    painter.setRenderHint(painter.Antialiasing)
    painter.setPen(fn.mkPen((200,50,50), width=2))
    painter.drawRoundedRect(self.rect().adjusted(2, 2, -2, -2), 4, 4)
    painter.end()
def interpret(self):
    """Parse the line-edit text into a numeric value.

    Return False if the text is invalid (wrong suffix or unparseable
    number); raise only on genuinely unexpected errors upstream.
    """
    text = self.lineEdit().text()
    suffix = self.opts['suffix']
    if len(suffix) > 0:
        if text[-len(suffix):] != suffix:
            # Suffix was edited away or corrupted -> invalid.
            return False
        text = text[:-len(suffix)]
    try:
        return fn.siEval(text)
    except:
        # Not an interpretable SI-formatted number.
        return False
def editingFinishedEvent(self):
    """Edit has finished: interpret the text and apply the value immediately."""
    if asUnicode(self.lineEdit().text()) == self.lastText:
        return  # text unchanged; nothing to apply
    try:
        val = self.interpret()
    except:
        return
    if val is False:
        return  # text is not a valid value
    if val == self.val:
        return  # no actual value change
    # Non-delayed signal; also reformats the displayed text prettily.
    self.setValue(val, delaySignal=False)
| gpl-3.0 |
jnewland/home-assistant | homeassistant/components/decora/light.py | 7 | 4088 | """Support for Decora dimmers."""
import importlib
import logging
from functools import wraps
import time
import voluptuous as vol
from homeassistant.const import CONF_API_KEY, CONF_DEVICES, CONF_NAME
from homeassistant.components.light import (
ATTR_BRIGHTNESS, SUPPORT_BRIGHTNESS, Light,
PLATFORM_SCHEMA)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)

# Decora dimmers only support brightness control.
SUPPORT_DECORA_LED = (SUPPORT_BRIGHTNESS)

# Per-device configuration: required API key plus an optional friendly name.
DEVICE_SCHEMA = vol.Schema({
    vol.Optional(CONF_NAME): cv.string,
    vol.Required(CONF_API_KEY): cv.string,
})

# Platform configuration: mapping of BLE address -> device configuration.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_DEVICES, default={}): {cv.string: DEVICE_SCHEMA},
})
def retry(method):
    """Retry bluetooth commands.

    Decorated methods are retried on known BLE errors for up to ~10
    seconds, reconnecting the switch between attempts; None is returned
    if the command never succeeds within that window.
    """
    @wraps(method)
    def wrapper_retry(device, *args, **kwargs):
        """Try send command and retry on error."""
        # pylint: disable=import-error, no-member
        import decora
        import bluepy

        start = time.monotonic()
        while time.monotonic() - start < 10:
            try:
                return method(device, *args, **kwargs)
            except (decora.decoraException, AttributeError,
                    bluepy.btle.BTLEException):
                _LOGGER.warning("Decora connect error for device %s. "
                                "Reconnecting...", device.name)
                # pylint: disable=protected-access
                device._switch.connect()
        return None
    return wrapper_retry
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up an Decora switch."""
    lights = [
        DecoraLight({
            'name': device_config[CONF_NAME],
            'key': device_config[CONF_API_KEY],
            'address': address,
        })
        for address, device_config in config[CONF_DEVICES].items()
    ]
    add_entities(lights)
class DecoraLight(Light):
    """Representation of an Decora light."""

    def __init__(self, device):
        """Initialize the light.

        *device* is a dict with 'name', 'address' (BLE MAC) and 'key'
        (API key) entries, as built by setup_platform.
        """
        # pylint: disable=no-member
        decora = importlib.import_module('decora')

        self._name = device['name']
        self._address = device['address']
        self._key = device["key"]
        # BLE switch handle; all hardware access goes through this object.
        self._switch = decora.decora(self._address, self._key)
        # Cached state, refreshed by update() and the command methods.
        self._brightness = 0
        self._state = False

    @property
    def unique_id(self):
        """Return the ID of this light."""
        return self._address

    @property
    def name(self):
        """Return the name of the device if any."""
        return self._name

    @property
    def is_on(self):
        """Return true if device is on."""
        return self._state

    @property
    def brightness(self):
        """Return the brightness of this light between 0..255."""
        return self._brightness

    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORT_DECORA_LED

    @property
    def should_poll(self):
        """We can read the device state, so poll."""
        return True

    @property
    def assumed_state(self):
        """We can read the actual state."""
        return False

    @retry
    def set_state(self, brightness):
        """Set the state of this lamp to the provided brightness."""
        # Device expects 0..100; Home Assistant uses 0..255.
        self._switch.set_brightness(int(brightness / 2.55))
        self._brightness = brightness

    @retry
    def turn_on(self, **kwargs):
        """Turn the specified or all lights on."""
        brightness = kwargs.get(ATTR_BRIGHTNESS)
        self._switch.on()
        self._state = True

        if brightness is not None:
            self.set_state(brightness)

    @retry
    def turn_off(self, **kwargs):
        """Turn the specified or all lights off."""
        self._switch.off()
        self._state = False

    @retry
    def update(self):
        """Synchronise internal state with the actual light state."""
        # Device reports 0..100; convert back to Home Assistant's 0..255.
        self._brightness = self._switch.get_brightness() * 2.55
        self._state = self._switch.get_on()
| apache-2.0 |
Ivoz/ajenti | plugins/pkgman/api.py | 17 | 1380 | from ajenti.com import *
from ajenti.api import *
from ajenti.apis import API
class PkgMan(API):
    # API namespace for package-manager plugins (apt, yum, ...).

    class IPackageManager(Interface):
        # Interface a backend package manager must implement.  *st* is a
        # PkgMan.Status instance that implementations fill in.

        def refresh(self, st):
            """Refresh package metadata from the system package manager."""
            pass

        def get_lists(self, st):
            """Fetch/update the package lists (e.g. apt-get update)."""
            pass

        def search(self, q):
            """Return packages matching the query string *q*."""
            pass

        def mark_install(self, st, name):
            """Queue package *name* for installation."""
            pass

        def mark_remove(self, st, name):
            """Queue package *name* for removal."""
            pass

        def mark_cancel(self, st, name):
            """Cancel the pending action for package *name*."""
            pass

        def mark_cancel_all(self, st):
            """Cancel all pending package actions."""
            pass

        def apply(self, st):
            """Execute the queued install/remove operations."""
            pass

        def is_busy(self):
            """Return whether an operation is currently running."""
            pass

        def get_busy_status(self):
            """Return a human-readable description of the running operation."""
            pass

        def get_expected_result(self, st):
            """Preview what apply() would do without executing it."""
            pass

        def abort(self):
            """Abort the currently running operation."""
            pass

        def get_info(self, pkg):
            """Return a PackageInfo for *pkg*."""
            pass

        def get_info_ui(self, pkg):
            """Return a UI fragment describing *pkg*."""
            pass

    class Package(object):
        # Lightweight record describing one package entry in a listing.
        def __init__(self):
            self.name = ''
            self.version = ''
            self.state = ''
            self.description = ''

    class PackageInfo(object):
        # Detailed information about a single package.
        def __init__(self):
            self.installed = ''
            self.available = ''
            self.description = ''

    class Status(object):
        # Shared mutable state passed to IPackageManager methods.
        upgradeable = {}  # packages with newer versions available
        pending = {}      # queued install/remove actions
        full = {}         # complete package index
| lgpl-3.0 |
takis/django | tests/template_tests/filter_tests/test_wordcount.py | 521 | 1107 | from django.template.defaultfilters import wordcount
from django.test import SimpleTestCase
from django.utils.safestring import mark_safe
from ..utils import setup
class WordcountTests(SimpleTestCase):
    """Template tests: |wordcount output must be identical with and without
    autoescaping, and for safe vs. unsafe input strings."""

    @setup({'wordcount01': '{% autoescape off %}{{ a|wordcount }} {{ b|wordcount }}{% endautoescape %}'})
    def test_wordcount01(self):
        # 'a & b' is three words regardless of safe-string marking.
        output = self.engine.render_to_string('wordcount01', {'a': 'a & b', 'b': mark_safe('a & b')})
        self.assertEqual(output, '3 3')

    @setup({'wordcount02': '{{ a|wordcount }} {{ b|wordcount }}'})
    def test_wordcount02(self):
        # Same expectation with autoescaping enabled (the default).
        output = self.engine.render_to_string('wordcount02', {'a': 'a & b', 'b': mark_safe('a & b')})
        self.assertEqual(output, '3 3')
class FunctionTests(SimpleTestCase):
    """Direct unit tests for the wordcount filter function."""

    def test_empty_string(self):
        self.assertEqual(wordcount(''), 0)

    def test_count_one(self):
        self.assertEqual(wordcount('oneword'), 1)

    def test_count_multiple(self):
        self.assertEqual(wordcount('lots of words'), 3)

    def test_non_string_input(self):
        # Non-string input is stringified first, so 123 -> '123' -> one word.
        self.assertEqual(wordcount(123), 1)
| bsd-3-clause |
jrwdunham/old | onlinelinguisticdatabase/controllers/morphologybackups.py | 1 | 3615 | # Copyright 2016 Joel Dunham
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the :class:`MorphologybackupsController`.
.. module:: morphologybackups
:synopsis: Contains the morphology backups controller.
"""
import logging
from pylons import request, response, config
from formencode.validators import Invalid
from onlinelinguisticdatabase.lib.base import BaseController
import onlinelinguisticdatabase.lib.helpers as h
from onlinelinguisticdatabase.lib.SQLAQueryBuilder import SQLAQueryBuilder
from onlinelinguisticdatabase.model.meta import Session
from onlinelinguisticdatabase.model import MorphologyBackup
log = logging.getLogger(__name__)
class MorphologybackupsController(BaseController):
    """Generate responses to requests on morphology backup resources.

    REST Controller styled on the Atom Publishing Protocol.

    .. note::

       The ``h.jsonify`` decorator converts the return value of the methods to
       JSON.

    .. note::

        Morphology backups are created when updating and deleting morphologies;
        they cannot be created directly and they should never be deleted.  This
        controller facilitates retrieval of morphology backups only.

    NOTE: this codebase targets Python 2 (``except Invalid, e:`` syntax).
    """

    # Translates request query parameters into SQLAlchemy filters/ordering.
    query_builder = SQLAQueryBuilder('MorphologyBackup', config=config)

    @h.jsonify
    @h.restrict('GET')
    @h.authenticate
    def index(self):
        """Get all morphology backup resources.

        :URL: ``GET /morphologybackups``
        :returns: a list of all morphology backup resources.

        """
        try:
            query = Session.query(MorphologyBackup)
            # Apply ordering and pagination from the query string.
            query = h.add_order_by(query, dict(request.GET), self.query_builder)
            return h.add_pagination(query, dict(request.GET))
        except Invalid, e:
            response.status_int = 400
            return {'errors': e.unpack_errors()}

    # Backups are read-only: all mutating verbs return 404.
    @h.jsonify
    def create(self):
        response.status_int = 404
        return {'error': 'This resource is read-only.'}

    @h.jsonify
    def new(self, format='html'):
        response.status_int = 404
        return {'error': 'This resource is read-only.'}

    @h.jsonify
    def update(self, id):
        response.status_int = 404
        return {'error': 'This resource is read-only.'}

    @h.jsonify
    def delete(self, id):
        response.status_int = 404
        return {'error': 'This resource is read-only.'}

    @h.jsonify
    @h.restrict('GET')
    @h.authenticate
    def show(self, id):
        """Return a morphology backup.

        :URL: ``GET /morphologybackups/id``
        :param str id: the ``id`` value of the morphology backup to be returned.
        :returns: a morphology backup model object.

        """
        morphology_backup = Session.query(MorphologyBackup).get(id)
        if morphology_backup:
            return morphology_backup
        else:
            response.status_int = 404
            return {'error': 'There is no morphology backup with id %s' % id}

    @h.jsonify
    def edit(self, id, format='html'):
        response.status_int = 404
        return {'error': 'This resource is read-only.'}
| apache-2.0 |
videetssinghai/Blog-Rest-Api | lib/python2.7/site-packages/pip/commands/search.py | 343 | 4502 | from __future__ import absolute_import
import logging
import sys
import textwrap
from pip.basecommand import Command, SUCCESS
from pip.compat import OrderedDict
from pip.download import PipXmlrpcTransport
from pip.models import PyPI
from pip.utils import get_terminal_size
from pip.utils.logging import indent_log
from pip.exceptions import CommandError
from pip.status_codes import NO_MATCHES_FOUND
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor import pkg_resources
from pip._vendor.six.moves import xmlrpc_client
logger = logging.getLogger(__name__)
class SearchCommand(Command):
    """Search for PyPI packages whose name or summary contains <query>."""
    name = 'search'
    usage = """
      %prog [options] <query>"""
    summary = 'Search PyPI for packages.'

    def __init__(self, *args, **kw):
        super(SearchCommand, self).__init__(*args, **kw)
        # Allow searching an alternative (PyPI-compatible) index.
        self.cmd_opts.add_option(
            '-i', '--index',
            dest='index',
            metavar='URL',
            default=PyPI.pypi_url,
            help='Base URL of Python Package Index (default %default)')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        """Execute the search and print results.

        Returns SUCCESS when at least one hit was found, otherwise
        NO_MATCHES_FOUND; raises CommandError when no query was given.
        """
        if not args:
            raise CommandError('Missing required argument (search query).')
        query = args
        pypi_hits = self.search(query, options)
        # Collapse the per-version hit list into one entry per package.
        hits = transform_hits(pypi_hits)

        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = get_terminal_size()[0]

        print_results(hits, terminal_width=terminal_width)
        if pypi_hits:
            return SUCCESS
        return NO_MATCHES_FOUND

    def search(self, query, options):
        """Query the index's XML-RPC search endpoint; return raw hit dicts."""
        index_url = options.index
        with self._build_session(options) as session:
            transport = PipXmlrpcTransport(index_url, session)
            pypi = xmlrpc_client.ServerProxy(index_url, transport)
            # Match the query against either the name or the summary.
            hits = pypi.search({'name': query, 'summary': query}, 'or')
            return hits
def transform_hits(hits):
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.

    Each returned dict has 'name', 'summary' (for the highest version seen)
    and 'versions' keys; input order of first appearance is preserved.
    """
    packages = OrderedDict()
    for hit in hits:
        name = hit['name']
        summary = hit['summary']
        version = hit['version']

        # Idiomatic membership test (was: `name not in packages.keys()`).
        if name not in packages:
            packages[name] = {
                'name': name,
                'summary': summary,
                'versions': [version],
            }
        else:
            packages[name]['versions'].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]['versions']):
                packages[name]['summary'] = summary

    return list(packages.values())
def print_results(hits, name_column_width=None, terminal_width=None):
    """Pretty-print search hits, wrapping summaries to the terminal width.

    Packages that are already installed are annotated with their installed
    version and the latest version found on the index.  Does nothing for an
    empty hit list.
    """
    if not hits:
        return
    if name_column_width is None:
        # Widest "name (version)" cell, plus padding.  Use a generator
        # expression -- no need to materialize a list just for max().
        name_column_width = max(
            len(hit['name']) + len(hit.get('versions', ['-'])[-1])
            for hit in hits
        ) + 4

    installed_packages = [p.project_name for p in pkg_resources.working_set]
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        version = hit.get('versions', ['-'])[-1]
        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                summary = textwrap.wrap(summary, target_width)
                summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)

        line = '%-*s - %s' % (name_column_width,
                              '%s (%s)' % (name, version), summary)
        try:
            logger.info(line)
            if name in installed_packages:
                dist = pkg_resources.get_distribution(name)
                with indent_log():
                    latest = highest_version(hit['versions'])
                    if dist.version == latest:
                        logger.info('INSTALLED: %s (latest)', dist.version)
                    else:
                        logger.info('INSTALLED: %s', dist.version)
                        logger.info('LATEST: %s', latest)
        except UnicodeEncodeError:
            # The terminal encoding cannot represent this text; skip entry.
            pass
def highest_version(versions):
    """Return the highest version string in *versions*, using PEP 440 ordering."""
    return max(versions, key=parse_version)
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.