Dataset schema:

  repo_name : stringlengths (5 to 100)
  path      : stringlengths (4 to 294)
  copies    : stringclasses (990 values)
  size      : stringlengths (4 to 7)
  content   : stringlengths (666 to 1M)
  license   : stringclasses (15 values)
repo_name: jonyroda97/redbot-amigosprovaveis
path: lib/numpy/ma/mrecords.py
copies: 16
size: 27418
""":mod:`numpy.ma..mrecords` Defines the equivalent of :class:`numpy.recarrays` for masked arrays, where fields can be accessed as attributes. Note that :class:`numpy.ma.MaskedArray` already supports structured datatypes and the masking of individual fields. .. moduleauthor:: Pierre Gerard-Marchant """ from __future__ import division, absolute_import, print_function # We should make sure that no field is called '_mask','mask','_fieldmask', # or whatever restricted keywords. An idea would be to no bother in the # first place, and then rename the invalid fields with a trailing # underscore. Maybe we could just overload the parser function ? import sys import warnings import numpy as np import numpy.core.numerictypes as ntypes from numpy.compat import basestring from numpy import ( bool_, dtype, ndarray, recarray, array as narray ) from numpy.core.records import ( fromarrays as recfromarrays, fromrecords as recfromrecords ) _byteorderconv = np.core.records._byteorderconv _typestr = ntypes._typestr import numpy.ma as ma from numpy.ma import ( MAError, MaskedArray, masked, nomask, masked_array, getdata, getmaskarray, filled ) _check_fill_value = ma.core._check_fill_value __all__ = [ 'MaskedRecords', 'mrecarray', 'fromarrays', 'fromrecords', 'fromtextfile', 'addfield', ] reserved_fields = ['_data', '_mask', '_fieldmask', 'dtype'] def _getformats(data): """ Returns the formats of arrays in arraylist as a comma-separated string. """ if hasattr(data, 'dtype'): return ",".join([desc[1] for desc in data.dtype.descr]) formats = '' for obj in data: obj = np.asarray(obj) formats += _typestr[obj.dtype.type] if issubclass(obj.dtype.type, ntypes.flexible): formats += repr(obj.itemsize) formats += ',' return formats[:-1] def _checknames(descr, names=None): """ Checks that field names ``descr`` are not reserved keywords. If this is the case, a default 'f%i' is substituted. If the argument `names` is not None, updates the field names to valid names. """ ndescr = len(descr) default_names = ['f%i' % i for i in range(ndescr)] if names is None: new_names = default_names else: if isinstance(names, (tuple, list)): new_names = names elif isinstance(names, str): new_names = names.split(',') else: raise NameError("illegal input names %s" % repr(names)) nnames = len(new_names) if nnames < ndescr: new_names += default_names[nnames:] ndescr = [] for (n, d, t) in zip(new_names, default_names, descr.descr): if n in reserved_fields: if t[0] in reserved_fields: ndescr.append((d, t[1])) else: ndescr.append(t) else: ndescr.append((n, t[1])) return np.dtype(ndescr) def _get_fieldmask(self): mdescr = [(n, '|b1') for n in self.dtype.names] fdmask = np.empty(self.shape, dtype=mdescr) fdmask.flat = tuple([False] * len(mdescr)) return fdmask class MaskedRecords(MaskedArray, object): """ Attributes ---------- _data : recarray Underlying data, as a record array. _mask : boolean array Mask of the records. A record is masked when all its fields are masked. _fieldmask : boolean recarray Record array of booleans, setting the mask of each individual field of each record. _fill_value : record Filling values for each field. 
""" def __new__(cls, shape, dtype=None, buf=None, offset=0, strides=None, formats=None, names=None, titles=None, byteorder=None, aligned=False, mask=nomask, hard_mask=False, fill_value=None, keep_mask=True, copy=False, **options): self = recarray.__new__(cls, shape, dtype=dtype, buf=buf, offset=offset, strides=strides, formats=formats, names=names, titles=titles, byteorder=byteorder, aligned=aligned,) mdtype = ma.make_mask_descr(self.dtype) if mask is nomask or not np.size(mask): if not keep_mask: self._mask = tuple([False] * len(mdtype)) else: mask = np.array(mask, copy=copy) if mask.shape != self.shape: (nd, nm) = (self.size, mask.size) if nm == 1: mask = np.resize(mask, self.shape) elif nm == nd: mask = np.reshape(mask, self.shape) else: msg = "Mask and data not compatible: data size is %i, " + \ "mask size is %i." raise MAError(msg % (nd, nm)) copy = True if not keep_mask: self.__setmask__(mask) self._sharedmask = True else: if mask.dtype == mdtype: _mask = mask else: _mask = np.array([tuple([m] * len(mdtype)) for m in mask], dtype=mdtype) self._mask = _mask return self def __array_finalize__(self, obj): # Make sure we have a _fieldmask by default _mask = getattr(obj, '_mask', None) if _mask is None: objmask = getattr(obj, '_mask', nomask) _dtype = ndarray.__getattribute__(self, 'dtype') if objmask is nomask: _mask = ma.make_mask_none(self.shape, dtype=_dtype) else: mdescr = ma.make_mask_descr(_dtype) _mask = narray([tuple([m] * len(mdescr)) for m in objmask], dtype=mdescr).view(recarray) # Update some of the attributes _dict = self.__dict__ _dict.update(_mask=_mask) self._update_from(obj) if _dict['_baseclass'] == ndarray: _dict['_baseclass'] = recarray return def _getdata(self): """ Returns the data as a recarray. """ return ndarray.view(self, recarray) _data = property(fget=_getdata) def _getfieldmask(self): """ Alias to mask. """ return self._mask _fieldmask = property(fget=_getfieldmask) def __len__(self): """ Returns the length """ # We have more than one record if self.ndim: return len(self._data) # We have only one record: return the nb of fields return len(self.dtype) def __getattribute__(self, attr): try: return object.__getattribute__(self, attr) except AttributeError: # attr must be a fieldname pass fielddict = ndarray.__getattribute__(self, 'dtype').fields try: res = fielddict[attr][:2] except (TypeError, KeyError): raise AttributeError("record array has no attribute %s" % attr) # So far, so good _localdict = ndarray.__getattribute__(self, '__dict__') _data = ndarray.view(self, _localdict['_baseclass']) obj = _data.getfield(*res) if obj.dtype.fields: raise NotImplementedError("MaskedRecords is currently limited to" "simple records.") # Get some special attributes # Reset the object's mask hasmasked = False _mask = _localdict.get('_mask', None) if _mask is not None: try: _mask = _mask[attr] except IndexError: # Couldn't find a mask: use the default (nomask) pass hasmasked = _mask.view((np.bool, (len(_mask.dtype) or 1))).any() if (obj.shape or hasmasked): obj = obj.view(MaskedArray) obj._baseclass = ndarray obj._isfield = True obj._mask = _mask # Reset the field values _fill_value = _localdict.get('_fill_value', None) if _fill_value is not None: try: obj._fill_value = _fill_value[attr] except ValueError: obj._fill_value = None else: obj = obj.item() return obj def __setattr__(self, attr, val): """ Sets the attribute attr to the value val. """ # Should we call __setmask__ first ? 
if attr in ['mask', 'fieldmask']: self.__setmask__(val) return # Create a shortcut (so that we don't have to call getattr all the time) _localdict = object.__getattribute__(self, '__dict__') # Check whether we're creating a new field newattr = attr not in _localdict try: # Is attr a generic attribute ? ret = object.__setattr__(self, attr, val) except: # Not a generic attribute: exit if it's not a valid field fielddict = ndarray.__getattribute__(self, 'dtype').fields or {} optinfo = ndarray.__getattribute__(self, '_optinfo') or {} if not (attr in fielddict or attr in optinfo): exctype, value = sys.exc_info()[:2] raise exctype(value) else: # Get the list of names fielddict = ndarray.__getattribute__(self, 'dtype').fields or {} # Check the attribute if attr not in fielddict: return ret if newattr: # We just added this one or this setattr worked on an # internal attribute. try: object.__delattr__(self, attr) except: return ret # Let's try to set the field try: res = fielddict[attr][:2] except (TypeError, KeyError): raise AttributeError("record array has no attribute %s" % attr) if val is masked: _fill_value = _localdict['_fill_value'] if _fill_value is not None: dval = _localdict['_fill_value'][attr] else: dval = val mval = True else: dval = filled(val) mval = getmaskarray(val) obj = ndarray.__getattribute__(self, '_data').setfield(dval, *res) _localdict['_mask'].__setitem__(attr, mval) return obj def __getitem__(self, indx): """ Returns all the fields sharing the same fieldname base. The fieldname base is either `_data` or `_mask`. """ _localdict = self.__dict__ _mask = ndarray.__getattribute__(self, '_mask') _data = ndarray.view(self, _localdict['_baseclass']) # We want a field if isinstance(indx, basestring): # Make sure _sharedmask is True to propagate back to _fieldmask # Don't use _set_mask, there are some copies being made that # break propagation Don't force the mask to nomask, that wreaks # easy masking obj = _data[indx].view(MaskedArray) obj._mask = _mask[indx] obj._sharedmask = True fval = _localdict['_fill_value'] if fval is not None: obj._fill_value = fval[indx] # Force to masked if the mask is True if not obj.ndim and obj._mask: return masked return obj # We want some elements. # First, the data. obj = np.array(_data[indx], copy=False).view(mrecarray) obj._mask = np.array(_mask[indx], copy=False).view(recarray) return obj def __setitem__(self, indx, value): """ Sets the given record to value. """ MaskedArray.__setitem__(self, indx, value) if isinstance(indx, basestring): self._mask[indx] = ma.getmaskarray(value) def __str__(self): """ Calculates the string representation. """ if self.size > 1: mstr = ["(%s)" % ",".join([str(i) for i in s]) for s in zip(*[getattr(self, f) for f in self.dtype.names])] return "[%s]" % ", ".join(mstr) else: mstr = ["%s" % ",".join([str(i) for i in s]) for s in zip([getattr(self, f) for f in self.dtype.names])] return "(%s)" % ", ".join(mstr) def __repr__(self): """ Calculates the repr representation. """ _names = self.dtype.names fmt = "%%%is : %%s" % (max([len(n) for n in _names]) + 4,) reprstr = [fmt % (f, getattr(self, f)) for f in self.dtype.names] reprstr.insert(0, 'masked_records(') reprstr.extend([fmt % (' fill_value', self.fill_value), ' )']) return str("\n".join(reprstr)) def view(self, dtype=None, type=None): """ Returns a view of the mrecarray. """ # OK, basic copy-paste from MaskedArray.view. if dtype is None: if type is None: output = ndarray.view(self) else: output = ndarray.view(self, type) # Here again. 
elif type is None: try: if issubclass(dtype, ndarray): output = ndarray.view(self, dtype) dtype = None else: output = ndarray.view(self, dtype) # OK, there's the change except TypeError: dtype = np.dtype(dtype) # we need to revert to MaskedArray, but keeping the possibility # of subclasses (eg, TimeSeriesRecords), so we'll force a type # set to the first parent if dtype.fields is None: basetype = self.__class__.__bases__[0] output = self.__array__().view(dtype, basetype) output._update_from(self) else: output = ndarray.view(self, dtype) output._fill_value = None else: output = ndarray.view(self, dtype, type) # Update the mask, just like in MaskedArray.view if (getattr(output, '_mask', nomask) is not nomask): mdtype = ma.make_mask_descr(output.dtype) output._mask = self._mask.view(mdtype, ndarray) output._mask.shape = output.shape return output def harden_mask(self): """ Forces the mask to hard. """ self._hardmask = True def soften_mask(self): """ Forces the mask to soft """ self._hardmask = False def copy(self): """ Returns a copy of the masked record. """ copied = self._data.copy().view(type(self)) copied._mask = self._mask.copy() return copied def tolist(self, fill_value=None): """ Return the data portion of the array as a list. Data items are converted to the nearest compatible Python type. Masked values are converted to fill_value. If fill_value is None, the corresponding entries in the output list will be ``None``. """ if fill_value is not None: return self.filled(fill_value).tolist() result = narray(self.filled().tolist(), dtype=object) mask = narray(self._mask.tolist()) result[mask] = None return result.tolist() def __getstate__(self): """Return the internal state of the masked array. This is for pickling. """ state = (1, self.shape, self.dtype, self.flags.fnc, self._data.tobytes(), self._mask.tobytes(), self._fill_value, ) return state def __setstate__(self, state): """ Restore the internal state of the masked array. This is for pickling. ``state`` is typically the output of the ``__getstate__`` output, and is a 5-tuple: - class name - a tuple giving the shape of the data - a typecode for the data - a binary string for the data - a binary string for the mask. """ (ver, shp, typ, isf, raw, msk, flv) = state ndarray.__setstate__(self, (shp, typ, isf, raw)) mdtype = dtype([(k, bool_) for (k, _) in self.dtype.descr]) self.__dict__['_mask'].__setstate__((shp, mdtype, isf, msk)) self.fill_value = flv def __reduce__(self): """ Return a 3-tuple for pickling a MaskedArray. """ return (_mrreconstruct, (self.__class__, self._baseclass, (0,), 'b',), self.__getstate__()) def _mrreconstruct(subtype, baseclass, baseshape, basetype,): """ Build a new MaskedArray from the information stored in a pickle. """ _data = ndarray.__new__(baseclass, baseshape, basetype).view(subtype) _mask = ndarray.__new__(ndarray, baseshape, 'b1') return subtype.__new__(subtype, _data, mask=_mask, dtype=basetype,) mrecarray = MaskedRecords ############################################################################### # Constructors # ############################################################################### def fromarrays(arraylist, dtype=None, shape=None, formats=None, names=None, titles=None, aligned=False, byteorder=None, fill_value=None): """ Creates a mrecarray from a (flat) list of masked arrays. Parameters ---------- arraylist : sequence A list of (masked) arrays. Each element of the sequence is first converted to a masked array if needed. 
If a 2D array is passed as argument, it is processed line by line dtype : {None, dtype}, optional Data type descriptor. shape : {None, integer}, optional Number of records. If None, shape is defined from the shape of the first array in the list. formats : {None, sequence}, optional Sequence of formats for each individual field. If None, the formats will be autodetected by inspecting the fields and selecting the highest dtype possible. names : {None, sequence}, optional Sequence of the names of each field. fill_value : {None, sequence}, optional Sequence of data to be used as filling values. Notes ----- Lists of tuples should be preferred over lists of lists for faster processing. """ datalist = [getdata(x) for x in arraylist] masklist = [np.atleast_1d(getmaskarray(x)) for x in arraylist] _array = recfromarrays(datalist, dtype=dtype, shape=shape, formats=formats, names=names, titles=titles, aligned=aligned, byteorder=byteorder).view(mrecarray) _array._mask.flat = list(zip(*masklist)) if fill_value is not None: _array.fill_value = fill_value return _array def fromrecords(reclist, dtype=None, shape=None, formats=None, names=None, titles=None, aligned=False, byteorder=None, fill_value=None, mask=nomask): """ Creates a MaskedRecords from a list of records. Parameters ---------- reclist : sequence A list of records. Each element of the sequence is first converted to a masked array if needed. If a 2D array is passed as argument, it is processed line by line dtype : {None, dtype}, optional Data type descriptor. shape : {None,int}, optional Number of records. If None, ``shape`` is defined from the shape of the first array in the list. formats : {None, sequence}, optional Sequence of formats for each individual field. If None, the formats will be autodetected by inspecting the fields and selecting the highest dtype possible. names : {None, sequence}, optional Sequence of the names of each field. fill_value : {None, sequence}, optional Sequence of data to be used as filling values. mask : {nomask, sequence}, optional. External mask to apply on the data. Notes ----- Lists of tuples should be preferred over lists of lists for faster processing. """ # Grab the initial _fieldmask, if needed: _mask = getattr(reclist, '_mask', None) # Get the list of records. if isinstance(reclist, ndarray): # Make sure we don't have some hidden mask if isinstance(reclist, MaskedArray): reclist = reclist.filled().view(ndarray) # Grab the initial dtype, just in case if dtype is None: dtype = reclist.dtype reclist = reclist.tolist() mrec = recfromrecords(reclist, dtype=dtype, shape=shape, formats=formats, names=names, titles=titles, aligned=aligned, byteorder=byteorder).view(mrecarray) # Set the fill_value if needed if fill_value is not None: mrec.fill_value = fill_value # Now, let's deal w/ the mask if mask is not nomask: mask = np.array(mask, copy=False) maskrecordlength = len(mask.dtype) if maskrecordlength: mrec._mask.flat = mask elif mask.ndim == 2: mrec._mask.flat = [tuple(m) for m in mask] else: mrec.__setmask__(mask) if _mask is not None: mrec._mask[:] = _mask return mrec def _guessvartypes(arr): """ Tries to guess the dtypes of the str_ ndarray `arr`. Guesses by testing element-wise conversion. Returns a list of dtypes. The array is first converted to ndarray. If the array is 2D, the test is performed on the first line. An exception is raised if the file is 3D or more. 
""" vartypes = [] arr = np.asarray(arr) if arr.ndim == 2: arr = arr[0] elif arr.ndim > 2: raise ValueError("The array should be 2D at most!") # Start the conversion loop. for f in arr: try: int(f) except (ValueError, TypeError): try: float(f) except (ValueError, TypeError): try: complex(f) except (ValueError, TypeError): vartypes.append(arr.dtype) else: vartypes.append(np.dtype(complex)) else: vartypes.append(np.dtype(float)) else: vartypes.append(np.dtype(int)) return vartypes def openfile(fname): """ Opens the file handle of file `fname`. """ # A file handle if hasattr(fname, 'readline'): return fname # Try to open the file and guess its type try: f = open(fname) except IOError: raise IOError("No such file: '%s'" % fname) if f.readline()[:2] != "\\x": f.seek(0, 0) return f f.close() raise NotImplementedError("Wow, binary file") def fromtextfile(fname, delimitor=None, commentchar='#', missingchar='', varnames=None, vartypes=None): """ Creates a mrecarray from data stored in the file `filename`. Parameters ---------- fname : {file name/handle} Handle of an opened file. delimitor : {None, string}, optional Alphanumeric character used to separate columns in the file. If None, any (group of) white spacestring(s) will be used. commentchar : {'#', string}, optional Alphanumeric character used to mark the start of a comment. missingchar : {'', string}, optional String indicating missing data, and used to create the masks. varnames : {None, sequence}, optional Sequence of the variable names. If None, a list will be created from the first non empty line of the file. vartypes : {None, sequence}, optional Sequence of the variables dtypes. If None, it will be estimated from the first non-commented line. Ultra simple: the varnames are in the header, one line""" # Try to open the file. ftext = openfile(fname) # Get the first non-empty line as the varnames while True: line = ftext.readline() firstline = line[:line.find(commentchar)].strip() _varnames = firstline.split(delimitor) if len(_varnames) > 1: break if varnames is None: varnames = _varnames # Get the data. _variables = masked_array([line.strip().split(delimitor) for line in ftext if line[0] != commentchar and len(line) > 1]) (_, nfields) = _variables.shape ftext.close() # Try to guess the dtype. if vartypes is None: vartypes = _guessvartypes(_variables[0]) else: vartypes = [np.dtype(v) for v in vartypes] if len(vartypes) != nfields: msg = "Attempting to %i dtypes for %i fields!" msg += " Reverting to default." warnings.warn(msg % (len(vartypes), nfields), stacklevel=2) vartypes = _guessvartypes(_variables[0]) # Construct the descriptor. mdescr = [(n, f) for (n, f) in zip(varnames, vartypes)] mfillv = [ma.default_fill_value(f) for f in vartypes] # Get the data and the mask. # We just need a list of masked_arrays. It's easier to create it like that: _mask = (_variables.T == missingchar) _datalist = [masked_array(a, mask=m, dtype=t, fill_value=f) for (a, m, t, f) in zip(_variables.T, _mask, vartypes, mfillv)] return fromarrays(_datalist, dtype=mdescr) def addfield(mrecord, newfield, newfieldname=None): """Adds a new field to the masked record array Uses `newfield` as data and `newfieldname` as name. If `newfieldname` is None, the new field name is set to 'fi', where `i` is the number of existing fields. """ _data = mrecord._data _mask = mrecord._mask if newfieldname is None or newfieldname in reserved_fields: newfieldname = 'f%i' % len(_data.dtype) newfield = ma.array(newfield) # Get the new data. 
# Create a new empty recarray newdtype = np.dtype(_data.dtype.descr + [(newfieldname, newfield.dtype)]) newdata = recarray(_data.shape, newdtype) # Add the existing field [newdata.setfield(_data.getfield(*f), *f) for f in _data.dtype.fields.values()] # Add the new field newdata.setfield(newfield._data, *newdata.dtype.fields[newfieldname]) newdata = newdata.view(MaskedRecords) # Get the new mask # Create a new empty recarray newmdtype = np.dtype([(n, bool_) for n in newdtype.names]) newmask = recarray(_data.shape, newmdtype) # Add the old masks [newmask.setfield(_mask.getfield(*f), *f) for f in _mask.dtype.fields.values()] # Add the mask of the new field newmask.setfield(getmaskarray(newfield), *newmask.dtype.fields[newfieldname]) newdata._mask = newmask return newdata
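A minimal usage sketch of the constructors documented above, assuming a NumPy version that ships numpy.ma.mrecords (the field names and data are illustrative):

import numpy.ma as ma
from numpy.ma import mrecords

# Two masked arrays become the two fields of one masked record array.
age = ma.array([25, 45, 60], mask=[0, 1, 0])
name = ma.array(['Alice', 'Bob', 'Carol'], mask=[0, 0, 1])

rec = mrecords.fromarrays([age, name], names='age,name')
print(rec.age)    # [25 -- 60]: each field keeps its per-element mask
print(rec[1])     # the 'age' field of the second record is masked

# addfield appends a new column; with no name given it becomes 'f2'.
rec2 = mrecords.addfield(rec, ma.array([1.5, 2.5, 3.5]))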
license: gpl-3.0
repo_name: nhorelik/openmc
path: tests/test_statepoint_restart/results.py
copies: 3
size: 1045
#!/usr/bin/env python

import sys

import numpy as np

# import statepoint
sys.path.insert(0, '../../src/utils')
import statepoint

# read in statepoint file
if len(sys.argv) > 1:
    sp = statepoint.StatePoint(sys.argv[1])
else:
    sp = statepoint.StatePoint('statepoint.07.binary')
sp.read_results()

# extract tally results and convert to vector
results1 = sp.tallies[0].results
shape1 = results1.shape
size1 = (np.product(shape1))
results1 = np.reshape(results1, size1)
results2 = sp.tallies[1].results
shape2 = results2.shape
size2 = (np.product(shape2))
results2 = np.reshape(results2, size2)

# set up output string
outstr = ''

# write out k-combined
outstr += 'k-combined:\n'
outstr += "{0:12.6E} {1:12.6E}\n".format(sp.k_combined[0], sp.k_combined[1])

# write out tally results
outstr += 'tally 1:\n'
for item in results1:
    outstr += "{0:12.6E}\n".format(item)
outstr += 'tally 2:\n'
for item in results2:
    outstr += "{0:12.6E}\n".format(item)

# write results to file
with open('results_test.dat', 'w') as fh:
    fh.write(outstr)
license: mit
repo_name: abhinavp13/IITBX-edx-platform-dev
path: common/lib/xmodule/xmodule/tests/test_html_module.py
copies: 6
size: 1232
import unittest
from mock import Mock
from xmodule.html_module import HtmlModule
from . import get_test_system


class HtmlModuleSubstitutionTestCase(unittest.TestCase):
    descriptor = Mock()

    def test_substitution_works(self):
        sample_xml = '''%%USER_ID%%'''
        module_data = {'data': sample_xml}
        module_system = get_test_system()
        module = HtmlModule(module_system, self.descriptor, module_data)
        self.assertEqual(module.get_html(), str(module_system.anonymous_student_id))

    def test_substitution_without_magic_string(self):
        sample_xml = '''
            <html>
                <p>Hi USER_ID!11!</p>
            </html>
        '''
        module_data = {'data': sample_xml}
        module = HtmlModule(get_test_system(), self.descriptor, module_data)
        self.assertEqual(module.get_html(), sample_xml)

    def test_substitution_without_anonymous_student_id(self):
        sample_xml = '''%%USER_ID%%'''
        module_data = {'data': sample_xml}
        module_system = get_test_system()
        module_system.anonymous_student_id = None
        module = HtmlModule(module_system, self.descriptor, module_data)
        self.assertEqual(module.get_html(), sample_xml)
license: agpl-3.0
repo_name: dymkowsk/mantid
path: docs/sphinxext/mantiddoc/directives/properties.py
copies: 3
size: 10231
#pylint: disable=invalid-name,deprecated-module from __future__ import (absolute_import, division, print_function) from mantiddoc.directives.base import AlgorithmBaseDirective #pylint: disable=unused-import import re from string import punctuation from six.moves import range SUBSTITUTE_REF_RE = re.compile(r'\|(.+?)\|') class PropertiesDirective(AlgorithmBaseDirective): """ Outputs the given algorithm's properties into a ReST formatted table. """ # Accept one required argument and no optional arguments. required_arguments, optional_arguments = 0, 0 def execute(self): """ Called by Sphinx when the ..properties:: directive is encountered. """ self._create_properties_table() return [] def _create_properties_table(self): """ Populates the ReST table with algorithm properties. """ if self.algorithm_version() is None: # This is an IFunction ifunc = self.create_mantid_ifunction(self.algorithm_name()) if ifunc.numParams() <= 0: return False # Stores each property of the algorithm in a tuple. properties = [] # names for the table headers. header = ('Name', 'Default', 'Description') for i in range(ifunc.numParams()): properties.append((ifunc.parameterName(i), str(ifunc.getParameterValue(i)), ifunc.paramDescription(i) )) self.add_rst(self.make_header("Properties (fitting parameters)")) else: # this is an Algorithm alg = self.create_mantid_algorithm(self.algorithm_name(), self.algorithm_version()) alg_properties = alg.getProperties() if len(alg_properties) == 0: return False # Stores each property of the algorithm in a tuple. properties = [] # names for the table headers. header = ('Name', 'Direction', 'Type', 'Default', 'Description') # Used to obtain the name for the direction property rather than an # int. direction_string = ["Input", "Output", "InOut", "None"] #dictionary to convert from property type to link to category page (where possible) property_type_dict = { "Workspace":":ref:`Workspace <Workspace>`", "Workspace2D":":ref:`Workspace2D <Workspace2D>`", "EventWorkspace":":ref:`EventWorkspace <EventWorkspace>`", "MatrixWorkspace":":ref:`MatrixWorkspace <MatrixWorkspace>`", "GroupWorkspace":":ref:`GroupWorkspace <WorkspaceGroup>`", "MDEventWorkspace":":ref:`MDEventWorkspace <MDWorkspace>`", "MDHistoWorkspace":":ref:`MDHistoWorkspace <MDHistoWorkspace>`", "TableWorkspace":":ref:`TableWorkspace <Table Workspaces>`" } for prop in alg_properties: # Append a tuple of properties to the list. properties.append(( str(prop.name), str(direction_string[prop.direction]), property_type_dict.get(str(prop.type),str(prop.type)), str(self._get_default_prop(prop)), self._create_property_description_string(prop) )) self.add_rst(self.make_header("Properties")) self.add_rst(self._build_table(header, properties)) return True def _build_table(self, header_content, table_content): """ Build the ReST format Args: header_content (list): Header for the table. Must be the same length as the rows table_content (list of tuples): Each tuple (row) container property values for a unique property of that algorithm. Returns: str: ReST formatted table containing algorithm properties. """ # The width of the columns. Multiply row length by 10 to ensure small # properties format correctly. # Added 10 to the length to ensure if table_content is 0 that # the table is still displayed. col_sizes = [max( (len(row[i] * 10) + 10) for row in table_content) for i in range(len(header_content))] # Use the column widths as a means to formatting columns. 
formatter = ' '.join('{%d:<%d}' % (index,col) for index, col in enumerate(col_sizes)) # Add whitespace to each column. This depends on the values returned by # col_sizes. table_content_formatted = [ formatter.format(*item) for item in table_content] # Create a seperator for each column seperator = formatter.format(*['=' * col for col in col_sizes]) # Build the table. header = '\n' + seperator + '\n' + formatter.format(*header_content) + '\n' content = seperator + '\n' + \ '\n'.join(table_content_formatted) + '\n' + seperator # Join the header and footer. return header + content def _get_default_prop(self, prop): """ Converts the default value of the property to a more use-friendly one. Args: prop (str): The algorithm property to use. Returns: str: The default value of the property. """ from mantid.api import IWorkspaceProperty # Used to obtain the name for the direction property rather than # outputting an int. direction_string = ["Input", "Output", "InOut", "None"] # Nothing to show under the default section for an output properties # that are not workspace properties. if (direction_string[prop.direction] == "Output") and \ (not isinstance(prop, IWorkspaceProperty)): default_prop = "" elif prop.isValid == "": default_prop = self._create_property_default_string(prop) else: default_prop = "*Mandatory*" return default_prop def _create_property_default_string(self, prop): """ Converts the default value of the property to a more use-friendly one. Args: prop. The property to find the default value of. Returns: str: The string to add to the property table default section. """ default = prop.getDefault defaultstr = "" # Convert to int, then float, then any string try: val = int(default) if val >= 2147483647: defaultstr = "*Optional*" else: defaultstr = str(val) except ValueError: try: val = float(default) if val >= 1e+307: defaultstr = "*Optional*" else: defaultstr = str(val) except ValueError: # Fall-back default for anything defaultstr = str(default) # Replace nonprintable characters with their printable # representations, such as \n, \t, ... defaultstr = repr(defaultstr)[1:-1] defaultstr = defaultstr.replace('\\','\\\\') # A special case for single-character default values (e.g. + or *, see MuonLoad). We don't # want them to be interpreted as list items. if len(defaultstr) == 1 and defaultstr in punctuation: defaultstr = "\\" + defaultstr # Values ending with underscores should just be literals if defaultstr.endswith('_'): defaultstr = defaultstr[:-1] + '\\_' # Replace the ugly default values with "Optional" if (defaultstr == "8.9884656743115785e+307") or \ (defaultstr == "1.7976931348623157e+308") or \ (defaultstr == "2147483647"): defaultstr = "*Optional*" if str(prop.type) == "boolean": if defaultstr == "1": defaultstr = "True" else: defaultstr = "False" return defaultstr def _create_property_description_string(self, prop): """ Converts the description of the property to a more use-friendly one. Args: prop. The property to find the default value of. Returns: str: The string to add to the property table description section. """ desc = str(prop.documentation.replace("\n", " ")) allowedValueString = str(prop.allowedValues) # 4 allows for [''] if len(allowedValueString) > 4: ##make sure the last sentence ended with a full stop (or equivalent) if (not desc.rstrip().endswith(".")) \ and (not desc.rstrip().endswith("!")) \ and (not desc.rstrip().endswith("?")) \ and (len(desc.strip())>0): desc += "." 
isFileExts = True for item in prop.allowedValues: #check it does not look like a file extension if (not item.startswith(".")) and (not item[-4:].startswith(".")): isFileExts = False break prefixString = " Allowed values: " if isFileExts: prefixString = " Allowed extensions: " #put a space in between entries to allow the line to break allowedValueString = allowedValueString.replace("','","', '") desc += prefixString + allowedValueString return self._escape_subsitution_refs(desc) def _escape_subsitution_refs(self, desc): """ Find occurrences of text surrounded by vertical bars and assume they are not docutils subsitution referencess by esacping them """ def repl(match): return r'\|' + match.group(1) + r'\|' return SUBSTITUTE_REF_RE.sub(repl, desc) def setup(app): """ Setup the directives when the extension is activated Args: app: The main Sphinx application object """ app.add_directive('properties', PropertiesDirective)
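The core of _build_table above is a fixed-width reStructuredText "simple table": pad every cell to its column width and bracket the rows with rules of '=' characters. A self-contained sketch of the same technique, simplified (the function and sample data are illustrative, not part of the Mantid API):

def build_rst_table(header, rows):
    """Render a header tuple and a list of row tuples as a simple RST table."""
    # Column width = widest cell in each column, header included.
    widths = [max(len(str(row[i])) for row in [header] + rows)
              for i in range(len(header))]
    fmt = ' '.join('{%d:<%d}' % (i, w) for i, w in enumerate(widths))
    sep = fmt.format(*['=' * w for w in widths])
    lines = [sep, fmt.format(*header), sep]
    lines += [fmt.format(*[str(c) for c in row]) for row in rows]
    lines.append(sep)
    return '\n'.join(lines)

print(build_rst_table(('Name', 'Default'),
                      [('Height', '1.0'), ('PeakCentre', '0.0')]))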
license: gpl-3.0
repo_name: rvykydal/anaconda
path: pyanaconda/modules/boss/module_manager/start_modules.py
copies: 3
size: 6918
#
# Support for object containers
#
# Copyright (C) 2019 Red Hat, Inc. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from functools import partial
from queue import SimpleQueue

from pyanaconda.anaconda_loggers import get_module_logger
from dasbus.constants import DBUS_FLAG_NONE, DBUS_START_REPLY_SUCCESS
from dasbus.namespace import get_dbus_name
from pyanaconda.modules.boss.module_manager import ModuleObserver
from pyanaconda.modules.common.constants.namespaces import ADDONS_NAMESPACE
from pyanaconda.modules.common.errors.module import UnavailableModuleError
from pyanaconda.modules.common.task import Task

log = get_module_logger(__name__)

__all__ = ["StartModulesTask"]


class StartModulesTask(Task):
    """A task for starting DBus modules.

    The timeout service_start_timeout from the Anaconda bus configuration
    file is applied by default when the DBus method StartServiceByName
    is called.
    """

    def __init__(self, message_bus, module_names, addons_enabled):
        """Create a new task.

        :param message_bus: a message bus
        :param module_names: a list of DBus names of modules
        :param addons_enabled: True to enable addons, otherwise False
        """
        super().__init__()
        self._message_bus = message_bus
        self._module_names = module_names
        self._addons_enabled = addons_enabled
        self._module_observers = []
        self._callbacks = SimpleQueue()

    @property
    def name(self):
        """Name of the task."""
        return "Start the modules"

    def run(self):
        """Run the task.

        :return: a list of observers
        """
        # Collect the modules.
        self._module_observers = self._find_modules() + self._find_addons()

        # Asynchronously start the modules.
        self._start_modules(self._module_observers)

        # Process the callbacks of the asynchronous calls.
        self._process_callbacks(self._module_observers)
        return self._module_observers

    def _find_modules(self):
        """Find modules."""
        modules = []

        for service_name in self._module_names:
            log.debug("Found %s.", service_name)
            modules.append(ModuleObserver(
                self._message_bus,
                service_name
            ))

        return modules

    def _find_addons(self):
        """Find additional modules."""
        modules = []

        if not self._addons_enabled:
            return modules

        dbus = self._message_bus.proxy
        names = dbus.ListActivatableNames()
        prefix = get_dbus_name(*ADDONS_NAMESPACE)

        for service_name in names:
            if not service_name.startswith(prefix):
                continue

            log.debug("Found %s.", service_name)
            modules.append(ModuleObserver(
                self._message_bus,
                service_name,
                is_addon=True
            ))

        return modules

    def _start_modules(self, module_observers):
        """Start the modules."""
        dbus = self._message_bus.proxy

        for observer in module_observers:
            log.debug("Starting %s.", observer)
            dbus.StartServiceByName(
                observer.service_name,
                DBUS_FLAG_NONE,
                callback=self._start_service_by_name_callback,
                callback_args=(observer,)
            )

    def _start_service_by_name_callback(self, call, observer):
        """Callback for the StartServiceByName method."""
        self._callbacks.put((observer, partial(self._start_service_by_name_handler, call)))

    def _start_service_by_name_handler(self, call, observer):
        """Handler for the StartServiceByName method."""
        try:
            returned = call()
        except Exception as error:  # pylint: disable=broad-except
            raise UnavailableModuleError(
                "Service {} has failed to start: {}".format(observer, error)
            ) from error

        if returned != DBUS_START_REPLY_SUCCESS:
            log.warning("Service %s is already running.", observer)
        else:
            log.debug("Service %s started successfully.", observer)

        # Connect the observer once the service is available.
        observer.service_available.connect(self._service_available_callback)
        observer.connect_once_available()
        return False

    def _service_available_callback(self, observer):
        """Callback for the service_available signal."""
        self._callbacks.put((observer, self._service_available_handler))

    def _service_available_handler(self, observer):
        """Handler for the service_available signal."""
        log.debug("%s is available.", observer)
        observer.proxy.Ping()
        return True

    def _process_callbacks(self, module_observers):
        """Process callbacks of the asynchronous calls.

        Process callbacks of the asynchronous calls until all modules
        are processed. A callback returns True if the module is processed,
        otherwise False.

        If a DBus call fails with an error, we raise an exception in the
        callback and immediately quit the task unless it comes from an
        add-on. A failure of an add-on module is not fatal, we just remove
        its observer from the list of available modules and continue.

        :param module_observers: a list of module observers
        """
        available = module_observers
        unprocessed = set(module_observers)

        while unprocessed:
            # Call the next scheduled callback.
            observer, callback = self._callbacks.get()

            try:
                is_available = callback(observer)

                # The module is not processed yet.
                if not is_available:
                    continue
            except UnavailableModuleError:
                # The failure of an Anaconda module is fatal.
                if not observer.is_addon:
                    raise

                # The failure of an add-on module is not fatal. Remove
                # it from the list of available modules and continue.
                log.warning("Skipping %s.", observer)
                available.remove(observer)

            # The module is processed.
            unprocessed.discard(observer)
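The task above funnels every asynchronous DBus reply through one SimpleQueue, so all handlers run serially inside run() rather than on the event-loop thread. A stripped-down, DBus-free illustration of that pattern (the service names are made up):

from queue import SimpleQueue

callbacks = SimpleQueue()

def on_reply(name):
    # Producer side: the bus would invoke this from its event loop.
    callbacks.put((name, lambda n: print(n, "started")))

for service in ("Timezone", "Network"):
    on_reply(service)                 # simulate two asynchronous replies

unprocessed = {"Timezone", "Network"}
while unprocessed:
    name, handler = callbacks.get()   # consumer side: one callback at a time
    handler(name)
    unprocessed.discard(name)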
license: gpl-2.0
repo_name: scipy/scipy
path: scipy/stats/setup.py
copies: 1
size: 3148
import os
from os.path import join

from numpy.distutils.misc_util import get_info


def pre_build_hook(build_ext, ext):
    from scipy._build_utils.compiler_helper import get_cxx_std_flag
    std_flag = get_cxx_std_flag(build_ext._cxx_compiler)
    if std_flag is not None:
        ext.extra_compile_args.append(std_flag)


def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    from scipy._build_utils.compiler_helper import set_cxx_flags_hook
    import numpy as np
    config = Configuration('stats', parent_package, top_path)

    config.add_data_dir('tests')

    statlib_src = [join('statlib', '*.f')]
    config.add_library('statlib', sources=statlib_src)

    # add statlib module
    config.add_extension('statlib',
                         sources=['statlib.pyf'],
                         f2py_options=['--no-wrap-functions'],
                         libraries=['statlib'],
                         depends=statlib_src)

    # add _stats module
    config.add_extension('_stats', sources=['_stats.c'])

    # add mvn module
    config.add_extension('mvn', sources=['mvn.pyf', 'mvndst.f'])

    # add _sobol module
    config.add_extension('_sobol', sources=['_sobol.c'])
    config.add_data_files('_sobol_direction_numbers.npz')

    # add _qmc_cy module
    ext = config.add_extension('_qmc_cy', sources=['_qmc_cy.cxx'])
    ext._pre_build_hook = set_cxx_flags_hook

    if int(os.environ.get('SCIPY_USE_PYTHRAN', 1)):
        import pythran
        ext = pythran.dist.PythranExtension(
            'scipy.stats._hypotests_pythran',
            sources=["scipy/stats/_hypotests_pythran.py"],
            config=['compiler.blas=none'])
        config.ext_modules.append(ext)

    # add BiasedUrn module
    config.add_data_files('biasedurn.pxd')
    from _generate_pyx import isNPY_OLD  # type: ignore[import]
    NPY_OLD = isNPY_OLD()

    if NPY_OLD:
        biasedurn_libs = []
        biasedurn_libdirs = []
    else:
        biasedurn_libs = ['npyrandom', 'npymath']
        biasedurn_libdirs = [join(np.get_include(), '..', '..', 'random', 'lib')]
        biasedurn_libdirs += get_info('npymath')['library_dirs']

    ext = config.add_extension(
        'biasedurn',
        sources=[
            'biasedurn.cxx',
            'biasedurn/impls.cpp',
            'biasedurn/fnchyppr.cpp',
            'biasedurn/wnchyppr.cpp',
            'biasedurn/stoc1.cpp',
            'biasedurn/stoc3.cpp'],
        include_dirs=[np.get_include()],
        library_dirs=biasedurn_libdirs,
        libraries=biasedurn_libs,
        define_macros=[('R_BUILD', None)],
        language='c++',
        depends=['biasedurn/stocR.h'],
    )
    ext._pre_build_hook = pre_build_hook

    # add boost stats distributions
    config.add_subpackage('_boost')

    # Type stubs
    config.add_data_files('*.pyi')

    return config


if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
license: bsd-3-clause
repo_name: kchristidis/fabric
path: bddtests/peer/events_pb2.py
copies: 17
size: 21087
# Generated by the protocol buffer compiler. DO NOT EDIT! # source: peer/events.proto import sys _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() from common import common_pb2 as common_dot_common__pb2 from peer import chaincode_event_pb2 as peer_dot_chaincode__event__pb2 from peer import transaction_pb2 as peer_dot_transaction__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='peer/events.proto', package='protos', syntax='proto3', serialized_pb=_b('\n\x11peer/events.proto\x12\x06protos\x1a\x13\x63ommon/common.proto\x1a\x1apeer/chaincode_event.proto\x1a\x16peer/transaction.proto\"8\n\x0c\x43haincodeReg\x12\x14\n\x0c\x63haincode_id\x18\x01 \x01(\t\x12\x12\n\nevent_name\x18\x02 \x01(\t\"\x81\x01\n\x08Interest\x12%\n\nevent_type\x18\x01 \x01(\x0e\x32\x11.protos.EventType\x12\x32\n\x12\x63haincode_reg_info\x18\x02 \x01(\x0b\x32\x14.protos.ChaincodeRegH\x00\x12\x0f\n\x07\x63hainID\x18\x03 \x01(\tB\t\n\x07RegInfo\",\n\x08Register\x12 \n\x06\x65vents\x18\x01 \x03(\x0b\x32\x10.protos.Interest\"?\n\tRejection\x12\x1f\n\x02tx\x18\x01 \x01(\x0b\x32\x13.protos.Transaction\x12\x11\n\terror_msg\x18\x02 \x01(\t\".\n\nUnregister\x12 \n\x06\x65vents\x18\x01 \x03(\x0b\x32\x10.protos.Interest\"4\n\x0bSignedEvent\x12\x11\n\tsignature\x18\x01 \x01(\x0c\x12\x12\n\neventBytes\x18\x02 \x01(\x0c\"\xec\x01\n\x05\x45vent\x12$\n\x08register\x18\x01 \x01(\x0b\x32\x10.protos.RegisterH\x00\x12\x1e\n\x05\x62lock\x18\x02 \x01(\x0b\x32\r.common.BlockH\x00\x12\x31\n\x0f\x63haincode_event\x18\x03 \x01(\x0b\x32\x16.protos.ChaincodeEventH\x00\x12&\n\trejection\x18\x04 \x01(\x0b\x32\x11.protos.RejectionH\x00\x12(\n\nunregister\x18\x05 \x01(\x0b\x32\x12.protos.UnregisterH\x00\x12\x0f\n\x07\x63reator\x18\x06 \x01(\x0c\x42\x07\n\x05\x45vent*B\n\tEventType\x12\x0c\n\x08REGISTER\x10\x00\x12\t\n\x05\x42LOCK\x10\x01\x12\r\n\tCHAINCODE\x10\x02\x12\r\n\tREJECTION\x10\x03\x32\x34\n\x06\x45vents\x12*\n\x04\x43hat\x12\r.protos.Event\x1a\r.protos.Event\"\x00(\x01\x30\x01\x42^\n\"org.hyperledger.fabric.protos.peerB\rEventsPackageZ)github.com/hyperledger/fabric/protos/peerb\x06proto3') , dependencies=[common_dot_common__pb2.DESCRIPTOR,peer_dot_chaincode__event__pb2.DESCRIPTOR,peer_dot_transaction__pb2.DESCRIPTOR,]) _sym_db.RegisterFileDescriptor(DESCRIPTOR) _EVENTTYPE = _descriptor.EnumDescriptor( name='EventType', full_name='protos.EventType', filename=None, file=DESCRIPTOR, values=[ _descriptor.EnumValueDescriptor( name='REGISTER', index=0, number=0, options=None, type=None), _descriptor.EnumValueDescriptor( name='BLOCK', index=1, number=1, options=None, type=None), _descriptor.EnumValueDescriptor( name='CHAINCODE', index=2, number=2, options=None, type=None), _descriptor.EnumValueDescriptor( name='REJECTION', index=3, number=3, options=None, type=None), ], containing_type=None, options=None, serialized_start=744, serialized_end=810, ) _sym_db.RegisterEnumDescriptor(_EVENTTYPE) EventType = enum_type_wrapper.EnumTypeWrapper(_EVENTTYPE) REGISTER = 0 BLOCK = 1 CHAINCODE = 2 REJECTION = 3 _CHAINCODEREG = _descriptor.Descriptor( name='ChaincodeReg', full_name='protos.ChaincodeReg', filename=None, file=DESCRIPTOR, containing_type=None, 
fields=[ _descriptor.FieldDescriptor( name='chaincode_id', full_name='protos.ChaincodeReg.chaincode_id', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='event_name', full_name='protos.ChaincodeReg.event_name', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=102, serialized_end=158, ) _INTEREST = _descriptor.Descriptor( name='Interest', full_name='protos.Interest', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='event_type', full_name='protos.Interest.event_type', index=0, number=1, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='chaincode_reg_info', full_name='protos.Interest.chaincode_reg_info', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='chainID', full_name='protos.Interest.chainID', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='RegInfo', full_name='protos.Interest.RegInfo', index=0, containing_type=None, fields=[]), ], serialized_start=161, serialized_end=290, ) _REGISTER = _descriptor.Descriptor( name='Register', full_name='protos.Register', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='events', full_name='protos.Register.events', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=292, serialized_end=336, ) _REJECTION = _descriptor.Descriptor( name='Rejection', full_name='protos.Rejection', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='tx', full_name='protos.Rejection.tx', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='error_msg', full_name='protos.Rejection.error_msg', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], 
extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=338, serialized_end=401, ) _UNREGISTER = _descriptor.Descriptor( name='Unregister', full_name='protos.Unregister', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='events', full_name='protos.Unregister.events', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=403, serialized_end=449, ) _SIGNEDEVENT = _descriptor.Descriptor( name='SignedEvent', full_name='protos.SignedEvent', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='signature', full_name='protos.SignedEvent.signature', index=0, number=1, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='eventBytes', full_name='protos.SignedEvent.eventBytes', index=1, number=2, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=451, serialized_end=503, ) _EVENT = _descriptor.Descriptor( name='Event', full_name='protos.Event', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='register', full_name='protos.Event.register', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='block', full_name='protos.Event.block', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='chaincode_event', full_name='protos.Event.chaincode_event', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='rejection', full_name='protos.Event.rejection', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='unregister', full_name='protos.Event.unregister', index=4, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='creator', full_name='protos.Event.creator', index=5, number=6, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='Event', full_name='protos.Event.Event', index=0, containing_type=None, fields=[]), ], serialized_start=506, serialized_end=742, ) _INTEREST.fields_by_name['event_type'].enum_type = _EVENTTYPE _INTEREST.fields_by_name['chaincode_reg_info'].message_type = _CHAINCODEREG _INTEREST.oneofs_by_name['RegInfo'].fields.append( _INTEREST.fields_by_name['chaincode_reg_info']) _INTEREST.fields_by_name['chaincode_reg_info'].containing_oneof = _INTEREST.oneofs_by_name['RegInfo'] _REGISTER.fields_by_name['events'].message_type = _INTEREST _REJECTION.fields_by_name['tx'].message_type = peer_dot_transaction__pb2._TRANSACTION _UNREGISTER.fields_by_name['events'].message_type = _INTEREST _EVENT.fields_by_name['register'].message_type = _REGISTER _EVENT.fields_by_name['block'].message_type = common_dot_common__pb2._BLOCK _EVENT.fields_by_name['chaincode_event'].message_type = peer_dot_chaincode__event__pb2._CHAINCODEEVENT _EVENT.fields_by_name['rejection'].message_type = _REJECTION _EVENT.fields_by_name['unregister'].message_type = _UNREGISTER _EVENT.oneofs_by_name['Event'].fields.append( _EVENT.fields_by_name['register']) _EVENT.fields_by_name['register'].containing_oneof = _EVENT.oneofs_by_name['Event'] _EVENT.oneofs_by_name['Event'].fields.append( _EVENT.fields_by_name['block']) _EVENT.fields_by_name['block'].containing_oneof = _EVENT.oneofs_by_name['Event'] _EVENT.oneofs_by_name['Event'].fields.append( _EVENT.fields_by_name['chaincode_event']) _EVENT.fields_by_name['chaincode_event'].containing_oneof = _EVENT.oneofs_by_name['Event'] _EVENT.oneofs_by_name['Event'].fields.append( _EVENT.fields_by_name['rejection']) _EVENT.fields_by_name['rejection'].containing_oneof = _EVENT.oneofs_by_name['Event'] _EVENT.oneofs_by_name['Event'].fields.append( _EVENT.fields_by_name['unregister']) _EVENT.fields_by_name['unregister'].containing_oneof = _EVENT.oneofs_by_name['Event'] DESCRIPTOR.message_types_by_name['ChaincodeReg'] = _CHAINCODEREG DESCRIPTOR.message_types_by_name['Interest'] = _INTEREST DESCRIPTOR.message_types_by_name['Register'] = _REGISTER DESCRIPTOR.message_types_by_name['Rejection'] = _REJECTION DESCRIPTOR.message_types_by_name['Unregister'] = _UNREGISTER DESCRIPTOR.message_types_by_name['SignedEvent'] = _SIGNEDEVENT DESCRIPTOR.message_types_by_name['Event'] = _EVENT DESCRIPTOR.enum_types_by_name['EventType'] = _EVENTTYPE ChaincodeReg = _reflection.GeneratedProtocolMessageType('ChaincodeReg', (_message.Message,), dict( DESCRIPTOR = _CHAINCODEREG, __module__ = 'peer.events_pb2' # @@protoc_insertion_point(class_scope:protos.ChaincodeReg) )) _sym_db.RegisterMessage(ChaincodeReg) Interest = _reflection.GeneratedProtocolMessageType('Interest', (_message.Message,), dict( DESCRIPTOR = _INTEREST, __module__ = 'peer.events_pb2' # @@protoc_insertion_point(class_scope:protos.Interest) )) _sym_db.RegisterMessage(Interest) Register = _reflection.GeneratedProtocolMessageType('Register', (_message.Message,), dict( DESCRIPTOR = _REGISTER, __module__ = 'peer.events_pb2' # @@protoc_insertion_point(class_scope:protos.Register) )) _sym_db.RegisterMessage(Register) Rejection = _reflection.GeneratedProtocolMessageType('Rejection', (_message.Message,), dict( DESCRIPTOR = _REJECTION, __module__ = 'peer.events_pb2' # @@protoc_insertion_point(class_scope:protos.Rejection) )) 
_sym_db.RegisterMessage(Rejection) Unregister = _reflection.GeneratedProtocolMessageType('Unregister', (_message.Message,), dict( DESCRIPTOR = _UNREGISTER, __module__ = 'peer.events_pb2' # @@protoc_insertion_point(class_scope:protos.Unregister) )) _sym_db.RegisterMessage(Unregister) SignedEvent = _reflection.GeneratedProtocolMessageType('SignedEvent', (_message.Message,), dict( DESCRIPTOR = _SIGNEDEVENT, __module__ = 'peer.events_pb2' # @@protoc_insertion_point(class_scope:protos.SignedEvent) )) _sym_db.RegisterMessage(SignedEvent) Event = _reflection.GeneratedProtocolMessageType('Event', (_message.Message,), dict( DESCRIPTOR = _EVENT, __module__ = 'peer.events_pb2' # @@protoc_insertion_point(class_scope:protos.Event) )) _sym_db.RegisterMessage(Event) DESCRIPTOR.has_options = True DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\"org.hyperledger.fabric.protos.peerB\rEventsPackageZ)github.com/hyperledger/fabric/protos/peer')) try: # THESE ELEMENTS WILL BE DEPRECATED. # Please use the generated *_pb2_grpc.py files instead. import grpc from grpc.framework.common import cardinality from grpc.framework.interfaces.face import utilities as face_utilities from grpc.beta import implementations as beta_implementations from grpc.beta import interfaces as beta_interfaces class EventsStub(object): """Interface exported by the events server """ def __init__(self, channel): """Constructor. Args: channel: A grpc.Channel. """ self.Chat = channel.stream_stream( '/protos.Events/Chat', request_serializer=Event.SerializeToString, response_deserializer=Event.FromString, ) class EventsServicer(object): """Interface exported by the events server """ def Chat(self, request_iterator, context): """event chatting using Event """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def add_EventsServicer_to_server(servicer, server): rpc_method_handlers = { 'Chat': grpc.stream_stream_rpc_method_handler( servicer.Chat, request_deserializer=Event.FromString, response_serializer=Event.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'protos.Events', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) class BetaEventsServicer(object): """The Beta API is deprecated for 0.15.0 and later. It is recommended to use the GA API (classes and functions in this file not marked beta) for all further purposes. This class was generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" """Interface exported by the events server """ def Chat(self, request_iterator, context): """event chatting using Event """ context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) class BetaEventsStub(object): """The Beta API is deprecated for 0.15.0 and later. It is recommended to use the GA API (classes and functions in this file not marked beta) for all further purposes. This class was generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" """Interface exported by the events server """ def Chat(self, request_iterator, timeout, metadata=None, with_call=False, protocol_options=None): """event chatting using Event """ raise NotImplementedError() def beta_create_Events_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): """The Beta API is deprecated for 0.15.0 and later. It is recommended to use the GA API (classes and functions in this file not marked beta) for all further purposes. 
This function was generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" request_deserializers = { ('protos.Events', 'Chat'): Event.FromString, } response_serializers = { ('protos.Events', 'Chat'): Event.SerializeToString, } method_implementations = { ('protos.Events', 'Chat'): face_utilities.stream_stream_inline(servicer.Chat), } server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) return beta_implementations.server(method_implementations, options=server_options) def beta_create_Events_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): """The Beta API is deprecated for 0.15.0 and later. It is recommended to use the GA API (classes and functions in this file not marked beta) for all further purposes. This function was generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" request_serializers = { ('protos.Events', 'Chat'): Event.SerializeToString, } response_deserializers = { ('protos.Events', 'Chat'): Event.FromString, } cardinalities = { 'Chat': cardinality.Cardinality.STREAM_STREAM, } stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) return beta_implementations.dynamic_stub(channel, 'protos.Events', cardinalities, options=stub_options) except ImportError: pass # @@protoc_insertion_point(module_scope)
apache-2.0
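A minimal sketch of how the generated EventsStub above might drive the bidirectional Chat stream. The endpoint address, the BLOCK enum member, and the assumption that Register.events is a repeated Interest field are illustrative, not taken from the generated module.

import grpc
from peer import events_pb2

def watch_events(address='localhost:7053'):   # hypothetical event-hub endpoint
    channel = grpc.insecure_channel(address)
    stub = events_pb2.EventsStub(channel)

    def requests():
        # Assumes EventType exposes a BLOCK member and that Register.events
        # is a repeated Interest field; both are assumptions for illustration.
        interest = events_pb2.Interest(event_type=events_pb2.BLOCK)
        yield events_pb2.Event(register=events_pb2.Register(events=[interest]))

    # Chat is stream-stream: send the registration, then iterate replies.
    for event in stub.Chat(requests()):
        print(event.WhichOneof('Event'))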
watspidererik/testenv
flask/lib/python2.7/site-packages/migrate/versioning/version.py
32
8389
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import re import shutil import logging from migrate import exceptions from migrate.versioning import pathed, script from datetime import datetime import six log = logging.getLogger(__name__) class VerNum(object): """A version number that behaves like a string and int at the same time""" _instances = dict() def __new__(cls, value): val = str(value) if val not in cls._instances: cls._instances[val] = super(VerNum, cls).__new__(cls) ret = cls._instances[val] return ret def __init__(self,value): self.value = str(int(value)) if self < 0: raise ValueError("Version number cannot be negative") def __add__(self, value): ret = int(self) + int(value) return VerNum(ret) def __sub__(self, value): return self + (int(value) * -1) def __eq__(self, value): return int(self) == int(value) def __ne__(self, value): return int(self) != int(value) def __lt__(self, value): return int(self) < int(value) def __gt__(self, value): return int(self) > int(value) def __ge__(self, value): return int(self) >= int(value) def __le__(self, value): return int(self) <= int(value) def __repr__(self): return "<VerNum(%s)>" % self.value def __str__(self): return str(self.value) def __int__(self): return int(self.value) if six.PY3: def __hash__(self): return hash(self.value) class Collection(pathed.Pathed): """A collection of versioning scripts in a repository""" FILENAME_WITH_VERSION = re.compile(r'^(\d{3,}).*') def __init__(self, path): """Collect current version scripts in repository and store them in self.versions """ super(Collection, self).__init__(path) # Create temporary list of files, allowing skipped version numbers. files = os.listdir(path) if '1' in files: # deprecation raise Exception('It looks like you have a repository in the old ' 'format (with directories for each version). ' 'Please convert repository before proceeding.') tempVersions = dict() for filename in files: match = self.FILENAME_WITH_VERSION.match(filename) if match: num = int(match.group(1)) tempVersions.setdefault(num, []).append(filename) else: pass # Must be a helper file or something, let's ignore it. # Create the versions member where the keys # are VerNum's and the values are Version's. self.versions = dict() for num, files in tempVersions.items(): self.versions[VerNum(num)] = Version(num, path, files) @property def latest(self): """:returns: Latest version in Collection""" return max([VerNum(0)] + list(self.versions.keys())) def _next_ver_num(self, use_timestamp_numbering): if use_timestamp_numbering == True: return VerNum(int(datetime.utcnow().strftime('%Y%m%d%H%M%S'))) else: return self.latest + 1 def create_new_python_version(self, description, **k): """Create Python files for new version""" ver = self._next_ver_num(k.pop('use_timestamp_numbering', False)) extra = str_to_filename(description) if extra: if extra == '_': extra = '' elif not extra.startswith('_'): extra = '_%s' % extra filename = '%03d%s.py' % (ver, extra) filepath = self._version_path(filename) script.PythonScript.create(filepath, **k) self.versions[ver] = Version(ver, self.path, [filename]) def create_new_sql_version(self, database, description, **k): """Create SQL files for new version""" ver = self._next_ver_num(k.pop('use_timestamp_numbering', False)) self.versions[ver] = Version(ver, self.path, []) extra = str_to_filename(description) if extra: if extra == '_': extra = '' elif not extra.startswith('_'): extra = '_%s' % extra # Create new files. 
for op in ('upgrade', 'downgrade'): filename = '%03d%s_%s_%s.sql' % (ver, extra, database, op) filepath = self._version_path(filename) script.SqlScript.create(filepath, **k) self.versions[ver].add_script(filepath) def version(self, vernum=None): """Returns latest Version if vernum is not given. Otherwise, returns wanted version""" if vernum is None: vernum = self.latest return self.versions[VerNum(vernum)] @classmethod def clear(cls): super(Collection, cls).clear() def _version_path(self, ver): """Returns path of file in versions repository""" return os.path.join(self.path, str(ver)) class Version(object): """A single version in a collection :param vernum: Version Number :param path: Path to script files :param filelist: List of scripts :type vernum: int, VerNum :type path: string :type filelist: list """ def __init__(self, vernum, path, filelist): self.version = VerNum(vernum) # Collect scripts in this folder self.sql = dict() self.python = None for script in filelist: self.add_script(os.path.join(path, script)) def script(self, database=None, operation=None): """Returns SQL or Python Script""" for db in (database, 'default'): # Try to return a .sql script first try: return self.sql[db][operation] except KeyError: continue # No .sql script exists # TODO: maybe add force Python parameter? ret = self.python assert ret is not None, \ "There is no script for %d version" % self.version return ret def add_script(self, path): """Add script to Collection/Version""" if path.endswith(Extensions.py): self._add_script_py(path) elif path.endswith(Extensions.sql): self._add_script_sql(path) SQL_FILENAME = re.compile(r'^.*\.sql') def _add_script_sql(self, path): basename = os.path.basename(path) match = self.SQL_FILENAME.match(basename) if match: basename = basename.replace('.sql', '') parts = basename.split('_') if len(parts) < 3: raise exceptions.ScriptError( "Invalid SQL script name %s " % basename + \ "(needs to be ###_description_database_operation.sql)") version = parts[0] op = parts[-1] # NOTE(mriedem): check for ibm_db_sa as the database in the name if 'ibm_db_sa' in basename: if len(parts) == 6: dbms = '_'.join(parts[-4: -1]) else: raise exceptions.ScriptError( "Invalid ibm_db_sa SQL script name '%s'; " "(needs to be " "###_description_ibm_db_sa_operation.sql)" % basename) else: dbms = parts[-2] else: raise exceptions.ScriptError( "Invalid SQL script name %s " % basename + \ "(needs to be ###_description_database_operation.sql)") # File the script into a dictionary self.sql.setdefault(dbms, {})[op] = script.SqlScript(path) def _add_script_py(self, path): if self.python is not None: raise exceptions.ScriptError('You can only have one Python script ' 'per version, but you have: %s and %s' % (self.python, path)) self.python = script.PythonScript(path) class Extensions: """A namespace for file extensions""" py = 'py' sql = 'sql' def str_to_filename(s): """Replaces spaces, (double and single) quotes and double underscores to underscores """ s = s.replace(' ', '_').replace('"', '_').replace("'", '_').replace(".", "_") while '__' in s: s = s.replace('__', '_') return s
mit
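An illustrative snippet of the VerNum semantics implemented above: one interned instance per value, with int-like arithmetic and comparisons.

from migrate.versioning.version import VerNum

v1 = VerNum(1)
v2 = v1 + 1               # __add__ returns VerNum(2)
assert v2 == 2            # comparisons coerce through int()
assert v2 is VerNum(2)    # instances are interned per value in __new__
assert v2 - 1 == v1       # __sub__ is implemented via __add__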
korbenzhang/vim-ycm-win
python/ycm/tests/syntax_parse_test.py
37
13259
#!/usr/bin/env python # # Copyright (C) 2013 Google Inc. # # This file is part of YouCompleteMe. # # YouCompleteMe is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # YouCompleteMe is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>. import os from nose.tools import eq_ from hamcrest import assert_that, has_items from ycm.test_utils import MockVimModule vim_mock = MockVimModule() from ycm import syntax_parse def ContentsOfTestFile( test_file ): dir_of_script = os.path.dirname( os.path.abspath( __file__ ) ) full_path_to_test_file = os.path.join( dir_of_script, 'testdata', test_file ) return open( full_path_to_test_file ).read() def KeywordsFromSyntaxListOutput_PythonSyntax_test(): eq_( set(['bytearray', 'IndexError', 'all', 'help', 'vars', 'SyntaxError', 'global', 'elif', 'unicode', 'sorted', 'memoryview', 'isinstance', 'except', 'nonlocal', 'NameError', 'finally', 'BytesWarning', 'dict', 'IOError', 'pass', 'oct', 'match', 'bin', 'SystemExit', 'return', 'StandardError', 'format', 'TabError', 'break', 'next', 'not', 'UnicodeDecodeError', 'False', 'RuntimeWarning', 'list', 'iter', 'try', 'reload', 'Warning', 'round', 'dir', 'cmp', 'set', 'bytes', 'UnicodeTranslateError', 'intern', 'issubclass', 'yield', 'Ellipsis', 'hash', 'locals', 'BufferError', 'slice', 'for', 'FloatingPointError', 'sum', 'VMSError', 'getattr', 'abs', 'print', 'import', 'True', 'FutureWarning', 'ImportWarning', 'None', 'EOFError', 'len', 'frozenset', 'ord', 'super', 'raise', 'TypeError', 'KeyboardInterrupt', 'UserWarning', 'filter', 'range', 'staticmethod', 'SystemError', 'or', 'BaseException', 'pow', 'RuntimeError', 'float', 'MemoryError', 'StopIteration', 'globals', 'divmod', 'enumerate', 'apply', 'LookupError', 'open', 'basestring', 'from', 'UnicodeError', 'zip', 'hex', 'long', 'IndentationError', 'int', 'chr', '__import__', 'type', 'Exception', 'continue', 'tuple', 'reduce', 'reversed', 'else', 'assert', 'UnicodeEncodeError', 'input', 'with', 'hasattr', 'delattr', 'setattr', 'raw_input', 'PendingDeprecationWarning', 'compile', 'ArithmeticError', 'while', 'del', 'str', 'property', 'def', 'and', 'GeneratorExit', 'ImportError', 'xrange', 'is', 'EnvironmentError', 'KeyError', 'coerce', 'SyntaxWarning', 'file', 'in', 'unichr', 'ascii', 'any', 'as', 'if', 'OSError', 'DeprecationWarning', 'min', 'UnicodeWarning', 'execfile', 'id', 'complex', 'bool', 'ValueError', 'NotImplemented', 'map', 'exec', 'buffer', 'max', 'class', 'object', 'repr', 'callable', 'ZeroDivisionError', 'eval', '__debug__', 'ReferenceError', 'AssertionError', 'classmethod', 'UnboundLocalError', 'NotImplementedError', 'lambda', 'AttributeError', 'OverflowError', 'WindowsError'] ), syntax_parse._KeywordsFromSyntaxListOutput( ContentsOfTestFile( 'python_syntax' ) ) ) def KeywordsFromSyntaxListOutput_CppSyntax_test(): eq_( set(['int_fast32_t', 'FILE', 'size_t', 'bitor', 'typedef', 'const', 'struct', 'uint8_t', 'fpos_t', 'thread_local', 'unsigned', 'uint_least16_t', 'match', 'do', 'intptr_t', 'uint_least64_t', 'return', 'auto', 'void', '_Complex', 'break', '_Alignof', 
'not', 'using', '_Static_assert', '_Thread_local', 'public', 'uint_fast16_t', 'this', 'continue', 'char32_t', 'int16_t', 'intmax_t', 'static', 'clock_t', 'sizeof', 'int_fast64_t', 'mbstate_t', 'try', 'xor', 'uint_fast32_t', 'int_least8_t', 'div_t', 'volatile', 'template', 'char16_t', 'new', 'ldiv_t', 'int_least16_t', 'va_list', 'uint_least8_t', 'goto', 'noreturn', 'enum', 'static_assert', 'bitand', 'compl', 'imaginary', 'jmp_buf', 'throw', 'asm', 'ptrdiff_t', 'uint16_t', 'or', 'uint_fast8_t', '_Bool', 'int32_t', 'float', 'private', 'restrict', 'wint_t', 'operator', 'not_eq', '_Imaginary', 'alignas', 'union', 'long', 'uint_least32_t', 'int_least64_t', 'friend', 'uintptr_t', 'int8_t', 'else', 'export', 'int_fast8_t', 'catch', 'true', 'case', 'default', 'double', '_Noreturn', 'signed', 'typename', 'while', 'protected', 'wchar_t', 'wctrans_t', 'uint64_t', 'delete', 'and', 'register', 'false', 'int', 'uintmax_t', 'off_t', 'char', 'int64_t', 'int_fast16_t', 'DIR', '_Atomic', 'time_t', 'xor_eq', 'namespace', 'virtual', 'complex', 'bool', 'mutable', 'if', 'int_least32_t', 'sig_atomic_t', 'and_eq', 'ssize_t', 'alignof', '_Alignas', '_Generic', 'extern', 'class', 'typeid', 'short', 'for', 'uint_fast64_t', 'wctype_t', 'explicit', 'or_eq', 'switch', 'uint32_t', 'inline']), syntax_parse._KeywordsFromSyntaxListOutput( ContentsOfTestFile( 'cpp_syntax' ) ) ) def KeywordsFromSyntaxListOutput_JavaSyntax_test(): eq_( set(['code', 'text', 'cols', 'datetime', 'disabled', 'shape', 'codetype', 'alt', 'compact', 'style', 'valuetype', 'short', 'finally', 'continue', 'extends', 'valign', 'match', 'bordercolor', 'do', 'return', 'rel', 'rules', 'void', 'nohref', 'abbr', 'background', 'scrolling', 'instanceof', 'name', 'summary', 'try', 'default', 'noshade', 'coords', 'dir', 'frame', 'usemap', 'ismap', 'static', 'hspace', 'vlink', 'for', 'selected', 'rev', 'vspace', 'content', 'method', 'version', 'volatile', 'above', 'new', 'charoff', 'public', 'alink', 'enum', 'codebase', 'if', 'noresize', 'interface', 'checked', 'byte', 'super', 'throw', 'src', 'language', 'package', 'standby', 'script', 'longdesc', 'maxlength', 'cellpadding', 'throws', 'tabindex', 'color', 'colspan', 'accesskey', 'float', 'while', 'private', 'height', 'boolean', 'wrap', 'prompt', 'nowrap', 'size', 'rows', 'span', 'clip', 'bgcolor', 'top', 'long', 'start', 'scope', 'scheme', 'type', 'final', 'lang', 'visibility', 'else', 'assert', 'transient', 'link', 'catch', 'true', 'serializable', 'target', 'lowsrc', 'this', 'double', 'align', 'value', 'cite', 'headers', 'below', 'protected', 'declare', 'classid', 'defer', 'false', 'synchronized', 'int', 'abstract', 'accept', 'hreflang', 'char', 'border', 'id', 'native', 'rowspan', 'charset', 'archive', 'strictfp', 'readonly', 'axis', 'cellspacing', 'profile', 'multiple', 'object', 'action', 'pagex', 'pagey', 'marginheight', 'data', 'class', 'frameborder', 'enctype', 'implements', 'break', 'gutter', 'url', 'clear', 'face', 'switch', 'marginwidth', 'width', 'left']), syntax_parse._KeywordsFromSyntaxListOutput( ContentsOfTestFile( 'java_syntax' ) ) ) def KeywordsFromSyntaxListOutput_PhpSyntax_ContainsFunctions_test(): assert_that( syntax_parse._KeywordsFromSyntaxListOutput( ContentsOfTestFile( 'php_syntax' ) ), has_items( 'array_change_key_case' ) ) def KeywordsFromSyntaxListOutput_Basic_test(): eq_( set([ 'foo', 'bar', 'zoo', 'goo' ]), syntax_parse._KeywordsFromSyntaxListOutput( """ foogroup xxx foo bar zoo goo links to Statement""" ) ) def KeywordsFromSyntaxListOutput_Function_test(): eq_( set([ 'foo', 'bar', 
'zoo', 'goo' ]), syntax_parse._KeywordsFromSyntaxListOutput( """ foogroup xxx foo bar zoo goo links to Function""" ) ) def KeywordsFromSyntaxListOutput_ContainedArgAllowed_test(): assert_that( syntax_parse._KeywordsFromSyntaxListOutput( """ phpFunctions xxx contained gzclose yaz_syntax html_entity_decode fbsql_read_blob png2wbmp mssql_init cpdf_set_title gztell fbsql_insert_id empty cpdf_restore mysql_field_type closelog swftext ldap_search curl_errno gmp_div_r mssql_data_seek getmyinode printer_draw_pie mcve_initconn ncurses_getmaxyx defined contained replace_child has_attributes specified insertdocument assign node_name hwstat addshape get_attribute_node html_dump_mem userlist links to Function""" ), has_items( 'gzclose', 'userlist', 'ldap_search' ) ) def KeywordsFromSyntaxListOutput_JunkIgnored_test(): eq_( set([ 'foo', 'bar', 'zoo', 'goo' ]), syntax_parse._KeywordsFromSyntaxListOutput( """ --- Syntax items --- foogroup xxx foo bar zoo goo links to Statement Spell cluster=NONE NoSpell cluster=NONE""" ) ) def KeywordsFromSyntaxListOutput_MultipleStatementGroups_test(): eq_( set([ 'foo', 'bar', 'zoo', 'goo' ]), syntax_parse._KeywordsFromSyntaxListOutput( """ foogroup xxx foo bar links to Statement bargroup xxx zoo goo links to Statement""" ) ) def KeywordsFromSyntaxListOutput_StatementAndTypeGroups_test(): eq_( set([ 'foo', 'bar', 'zoo', 'goo' ]), syntax_parse._KeywordsFromSyntaxListOutput( """ foogroup xxx foo bar links to Statement bargroup xxx zoo goo links to Type""" ) ) def KeywordsFromSyntaxListOutput_StatementHierarchy_test(): eq_( set([ 'foo', 'bar', 'zoo', 'goo', 'qux', 'moo' ]), syntax_parse._KeywordsFromSyntaxListOutput( """ baa xxx foo bar links to Foo Foo xxx zoo goo links to Bar Bar xxx qux moo links to Statement""" ) ) def KeywordsFromSyntaxListOutput_TypeHierarchy_test(): eq_( set([ 'foo', 'bar', 'zoo', 'goo', 'qux', 'moo' ]), syntax_parse._KeywordsFromSyntaxListOutput( """ baa xxx foo bar links to Foo Foo xxx zoo goo links to Bar Bar xxx qux moo links to Type""" ) ) def KeywordsFromSyntaxListOutput_StatementAndTypeHierarchy_test(): eq_( set([ 'foo', 'bar', 'zoo', 'goo', 'qux', 'moo', 'na', 'nb', 'nc' ]), syntax_parse._KeywordsFromSyntaxListOutput( """ tBaa xxx foo bar links to tFoo tFoo xxx zoo goo links to tBar tBar xxx qux moo links to Type sBaa xxx na bar links to sFoo sFoo xxx zoo nb links to sBar sBar xxx qux nc links to Statement""" ) ) def SyntaxGroupsFromOutput_Basic_test(): groups = syntax_parse._SyntaxGroupsFromOutput( """foogroup xxx foo bar zoo goo links to Statement""" ) assert 'foogroup' in groups def ExtractKeywordsFromGroup_Basic_test(): eq_( ['foo', 'bar', 'zoo', 'goo' ], syntax_parse._ExtractKeywordsFromGroup( syntax_parse.SyntaxGroup('', [ 'foo bar', 'zoo goo', ] ) ) ) def ExtractKeywordsFromGroup_Commas_test(): eq_( ['foo', 'bar', 'zoo', 'goo' ], syntax_parse._ExtractKeywordsFromGroup( syntax_parse.SyntaxGroup('', [ 'foo, bar,', 'zoo goo', ] ) ) ) def ExtractKeywordsFromGroup_WithLinksTo_test(): eq_( ['foo', 'bar', 'zoo', 'goo' ], syntax_parse._ExtractKeywordsFromGroup( syntax_parse.SyntaxGroup('', [ 'foo bar', 'zoo goo', 'links to Statement' ] ) ) ) def ExtractKeywordsFromGroup_KeywordStarts_test(): eq_( ['foo', 'bar', 'zoo', 'goo' ], syntax_parse._ExtractKeywordsFromGroup( syntax_parse.SyntaxGroup('', [ 'foo bar', 'transparent boo baa', 'zoo goo', ] ) ) ) def ExtractKeywordsFromGroup_KeywordMiddle_test(): eq_( ['foo', 'bar', 'zoo', 'goo' ], syntax_parse._ExtractKeywordsFromGroup( syntax_parse.SyntaxGroup('', [ 'foo oneline bar', 'zoo goo', ] ) ) ) 
def ExtractKeywordsFromGroup_KeywordAssign_test(): eq_( ['foo', 'bar', 'zoo', 'goo' ], syntax_parse._ExtractKeywordsFromGroup( syntax_parse.SyntaxGroup('', [ 'foo end=zoo((^^//)) bar', 'zoo goo', ] ) ) ) def ExtractKeywordsFromGroup_KeywordAssignAndMiddle_test(): eq_( ['foo', 'bar', 'zoo', 'goo' ], syntax_parse._ExtractKeywordsFromGroup( syntax_parse.SyntaxGroup('', [ 'foo end=zoo((^^//)) transparent bar', 'zoo goo', ] ) ) ) def ExtractKeywordsFromGroup_ContainedSyntaxArgAllowed_test(): eq_( ['foo', 'zoq', 'bar', 'goo', 'far' ], syntax_parse._ExtractKeywordsFromGroup( syntax_parse.SyntaxGroup('', [ 'contained foo zoq', 'contained bar goo', 'far', ] ) ) )
apache-2.0
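One detail of the test module above worth making explicit: the Vim mock has to be installed before anything from ycm is imported. A sketch of that ordering; that MockVimModule() stubs sys.modules['vim'] is an assumption based on its name and usage.

from ycm.test_utils import MockVimModule
vim_mock = MockVimModule()      # must run before any ycm import
from ycm import syntax_parse    # ycm modules do `import vim` at import time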
jhprinz/openpathsampling
openpathsampling/netcdfplus/stores/indexed.py
4
3288
from .object import ObjectStore import logging logger = logging.getLogger(__name__) init_log = logging.getLogger('openpathsampling.initialization') class IndexedObjectStore(ObjectStore): """ ObjectStore storing objects in arbitrary order This has a prefilled .index which knows at which position a certain index is stored. This way you can circumvent holes and keep the file smaller """ # ========================================================================== # LOAD/SAVE DECORATORS FOR CACHE HANDLING # ========================================================================== def load(self, idx): """ Returns an object from the storage. Parameters ---------- idx : int the integer index of the object to be loaded Returns ------- :py:class:`openpathsampling.netcdfplus.base.StorableObject` the loaded object """ # we want to load by uuid and it was not in cache. if idx in self.index: n_idx = self.index[idx] else: raise KeyError(idx) if n_idx < 0: return None # if it is in the cache, return it try: obj = self.cache[n_idx] return obj except KeyError: pass obj = self._load(n_idx) self.cache[n_idx] = obj return obj # def create_uuid_index(self): # return dict() def save(self, obj, idx=None): """ Saves an object to the storage. Parameters ---------- obj : :py:class:`openpathsampling.netcdfplus.base.StorableObject` the object to be stored idx : int or string or `None` the index to be used for storing. This is highly discouraged since it changes an immutable object (at least in the storage). It is better to store also the new object and just ignore the previously stored one. """ if idx in self.index: # has been saved so quit and do nothing return idx # n_idx = self.free() n_idx = len(self.index) # mark as saved so circular dependencies will not cause infinite loops self.index.append(idx) # make sure in nested saving that an IDX is not used twice! # self.reserve_idx(n_idx) logger.debug('Saving ' + str(type(obj)) + ' using IDX #' + str(n_idx)) try: self._save(obj, n_idx) self.vars['index'][n_idx] = idx # store the name in the cache # if hasattr(self, 'cache'): self.cache[n_idx] = obj except: logger.debug('Problem saving %d !' % n_idx) # in case we did not succeed remove the mark as being saved del self.index[idx] # self.release_idx(n_idx) raise # self.release_idx(n_idx) self._set_id(n_idx, obj) return idx def restore(self): self.index.clear() self.index.extend(self.vars['index'][:]) def initialize(self): super(IndexedObjectStore, self).initialize() self.create_variable( 'index', 'index' )
lgpl-2.1
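A simplified model of the bookkeeping that save() and load() perform above. The real self.index is a netcdfplus structure supporting both key lookup and positional append; a plain dict and a backing mapping stand in for it here, so this is an assumption-laden sketch, not the store's implementation.

index = {}    # external idx -> storage position (persisted via vars['index'])
cache = {}    # storage position -> object, consulted before _load()

def save(idx, obj, backing):
    if idx in index:          # already stored: return immediately, as above
        return idx
    n_idx = len(index)        # next free slot
    index[idx] = n_idx        # mark as saved before writing (cycle guard)
    backing[n_idx] = obj      # plays the role of self._save(obj, n_idx)
    cache[n_idx] = obj
    return idx

def load(idx, backing):
    n_idx = index[idx]        # KeyError when never saved, as in load() above
    return cache.get(n_idx, backing[n_idx])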
anthonypdawson/LazyLibrarian
cherrypy/process/win32.py
93
5870
"""Windows service. Requires pywin32.""" import os import win32api import win32con import win32event import win32service import win32serviceutil from cherrypy.process import wspbus, plugins class ConsoleCtrlHandler(plugins.SimplePlugin): """A WSPBus plugin for handling Win32 console events (like Ctrl-C).""" def __init__(self, bus): self.is_set = False plugins.SimplePlugin.__init__(self, bus) def start(self): if self.is_set: self.bus.log('Handler for console events already set.', level=40) return result = win32api.SetConsoleCtrlHandler(self.handle, 1) if result == 0: self.bus.log('Could not SetConsoleCtrlHandler (error %r)' % win32api.GetLastError(), level=40) else: self.bus.log('Set handler for console events.', level=40) self.is_set = True def stop(self): if not self.is_set: self.bus.log('Handler for console events already off.', level=40) return try: result = win32api.SetConsoleCtrlHandler(self.handle, 0) except ValueError: # "ValueError: The object has not been registered" result = 1 if result == 0: self.bus.log('Could not remove SetConsoleCtrlHandler (error %r)' % win32api.GetLastError(), level=40) else: self.bus.log('Removed handler for console events.', level=40) self.is_set = False def handle(self, event): """Handle console control events (like Ctrl-C).""" if event in (win32con.CTRL_C_EVENT, win32con.CTRL_LOGOFF_EVENT, win32con.CTRL_BREAK_EVENT, win32con.CTRL_SHUTDOWN_EVENT, win32con.CTRL_CLOSE_EVENT): self.bus.log('Console event %s: shutting down bus' % event) # Remove self immediately so repeated Ctrl-C doesn't re-call it. try: self.stop() except ValueError: pass self.bus.exit() # 'First to return True stops the calls' return 1 return 0 class Win32Bus(wspbus.Bus): """A Web Site Process Bus implementation for Win32. Instead of time.sleep, this bus blocks using native win32event objects. """ def __init__(self): self.events = {} wspbus.Bus.__init__(self) def _get_state_event(self, state): """Return a win32event for the given state (creating it if needed).""" try: return self.events[state] except KeyError: event = win32event.CreateEvent(None, 0, 0, "WSPBus %s Event (pid=%r)" % (state.name, os.getpid())) self.events[state] = event return event def _get_state(self): return self._state def _set_state(self, value): self._state = value event = self._get_state_event(value) win32event.PulseEvent(event) state = property(_get_state, _set_state) def wait(self, state, interval=0.1, channel=None): """Wait for the given state(s), KeyboardInterrupt or SystemExit. Since this class uses native win32event objects, the interval argument is ignored. """ if isinstance(state, (tuple, list)): # Don't wait for an event that beat us to the punch ;) if self.state not in state: events = tuple([self._get_state_event(s) for s in state]) win32event.WaitForMultipleObjects(events, 0, win32event.INFINITE) else: # Don't wait for an event that beat us to the punch ;) if self.state != state: event = self._get_state_event(state) win32event.WaitForSingleObject(event, win32event.INFINITE) class _ControlCodes(dict): """Control codes used to "signal" a service via ControlService. User-defined control codes are in the range 128-255. We generally use the standard Python value for the Linux signal and add 128. 
Example:

        >>> signal.SIGUSR1
        10
        control_codes['graceful'] = 128 + 10
    """

    def key_for(self, obj):
        """For the given value, return its corresponding key."""
        for key, val in self.items():
            if val is obj:
                return key
        raise ValueError("The given object could not be found: %r" % obj)

control_codes = _ControlCodes({'graceful': 138})

def signal_child(service, command):
    if command == 'stop':
        win32serviceutil.StopService(service)
    elif command == 'restart':
        win32serviceutil.RestartService(service)
    else:
        win32serviceutil.ControlService(service, control_codes[command])

class PyWebService(win32serviceutil.ServiceFramework):
    """Python Web Service."""

    _svc_name_ = "Python Web Service"
    _svc_display_name_ = "Python Web Service"
    _svc_deps_ = None        # sequence of service names on which this depends
    _exe_name_ = "pywebsvc"
    _exe_args_ = None        # Default to no arguments

    # Only exists on Windows 2000 or later, ignored on windows NT
    _svc_description_ = "Python Web Service"

    def SvcDoRun(self):
        from cherrypy import process
        process.bus.start()
        process.bus.block()

    def SvcStop(self):
        from cherrypy import process
        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
        process.bus.exit()

    def SvcOther(self, control):
        # Import locally, as SvcDoRun/SvcStop do; the original referenced
        # `process` here without importing it, which raised NameError.
        from cherrypy import process
        process.bus.publish(control_codes.key_for(control))

if __name__ == '__main__':
    win32serviceutil.HandleCommandLine(PyWebService)
gpl-3.0
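A short usage sketch for the service-control helpers above; 'MyWebService' is a hypothetical installed service name.

from cherrypy.process import win32

win32.signal_child('MyWebService', 'stop')      # win32serviceutil.StopService
win32.signal_child('MyWebService', 'restart')   # win32serviceutil.RestartService
win32.signal_child('MyWebService', 'graceful')  # ControlService with user code 138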
nitinitprof/odoo
addons/mrp_operations/mrp_operations.py
193
27173
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields from openerp.osv import osv import time from datetime import datetime from openerp.tools.translate import _ #---------------------------------------------------------- # Work Centers #---------------------------------------------------------- # capacity_hour : capacity per hour. default: 1.0. # Eg: If 5 concurrent operations at one time: capacity = 5 (because 5 employees) # unit_per_cycle : how many units are produced for one cycle class stock_move(osv.osv): _inherit = 'stock.move' _columns = { 'move_dest_id_lines': fields.one2many('stock.move','move_dest_id', 'Children Moves') } class mrp_production_workcenter_line(osv.osv): def _get_date_end(self, cr, uid, ids, field_name, arg, context=None): """ Finds ending date. @return: Dictionary of values. """ ops = self.browse(cr, uid, ids, context=context) date_and_hours_by_cal = [(op.date_planned, op.hour, op.workcenter_id.calendar_id.id) for op in ops if op.date_planned] intervals = self.pool.get('resource.calendar').interval_get_multi(cr, uid, date_and_hours_by_cal) res = {} for op in ops: res[op.id] = False if op.date_planned: i = intervals.get((op.date_planned, op.hour, op.workcenter_id.calendar_id.id)) if i: res[op.id] = i[-1][1].strftime('%Y-%m-%d %H:%M:%S') else: res[op.id] = op.date_planned return res def onchange_production_id(self, cr, uid, ids, production_id, context=None): if not production_id: return {} production = self.pool.get('mrp.production').browse(cr, uid, production_id, context=None) result = { 'product': production.product_id.id, 'qty': production.product_qty, 'uom': production.product_uom.id, } return {'value': result} _inherit = 'mrp.production.workcenter.line' _order = "sequence, date_planned" _columns = { 'state': fields.selection([('draft','Draft'),('cancel','Cancelled'),('pause','Pending'),('startworking', 'In Progress'),('done','Finished')],'Status', readonly=True, copy=False, help="* When a work order is created it is set in 'Draft' status.\n" \ "* When user sets work order in start mode that time it will be set in 'In Progress' status.\n" \ "* When work order is in running mode, during that time if user wants to stop or to make changes in order then can set in 'Pending' status.\n" \ "* When the user cancels the work order it will be set in 'Canceled' status.\n" \ "* When order is completely processed that time it is set in 'Finished' status."), 'date_planned': fields.datetime('Scheduled Date', select=True), 'date_planned_end': fields.function(_get_date_end, string='End Date', type='datetime'), 'date_start': fields.datetime('Start Date'), 'date_finished': 
fields.datetime('End Date'), 'delay': fields.float('Working Hours',help="The elapsed time between operation start and stop in this Work Center",readonly=True), 'production_state':fields.related('production_id','state', type='selection', selection=[('draft','Draft'),('confirmed','Waiting Goods'),('ready','Ready to Produce'),('in_production','In Production'),('cancel','Canceled'),('done','Done')], string='Production Status', readonly=True), 'product':fields.related('production_id','product_id',type='many2one',relation='product.product',string='Product', readonly=True), 'qty':fields.related('production_id','product_qty',type='float',string='Qty',readonly=True, store=True), 'uom':fields.related('production_id','product_uom',type='many2one',relation='product.uom',string='Unit of Measure',readonly=True), } _defaults = { 'state': 'draft', 'delay': 0.0, 'production_state': 'draft' } def modify_production_order_state(self, cr, uid, ids, action): """ Modifies production order state if work order state is changed. @param action: Action to perform. @return: Nothing """ prod_obj_pool = self.pool.get('mrp.production') oper_obj = self.browse(cr, uid, ids)[0] prod_obj = oper_obj.production_id if action == 'start': if prod_obj.state =='confirmed': prod_obj_pool.force_production(cr, uid, [prod_obj.id]) prod_obj_pool.signal_workflow(cr, uid, [prod_obj.id], 'button_produce') elif prod_obj.state =='ready': prod_obj_pool.signal_workflow(cr, uid, [prod_obj.id], 'button_produce') elif prod_obj.state =='in_production': return else: raise osv.except_osv(_('Error!'),_('Manufacturing order cannot be started in state "%s"!') % (prod_obj.state,)) else: open_count = self.search_count(cr,uid,[('production_id','=',prod_obj.id), ('state', '!=', 'done')]) flag = not bool(open_count) if flag: for production in prod_obj_pool.browse(cr, uid, [prod_obj.id], context= None): if production.move_lines or production.move_created_ids: prod_obj_pool.action_produce(cr,uid, production.id, production.product_qty, 'consume_produce', context = None) prod_obj_pool.signal_workflow(cr, uid, [oper_obj.production_id.id], 'button_produce_done') return def write(self, cr, uid, ids, vals, context=None, update=True): result = super(mrp_production_workcenter_line, self).write(cr, uid, ids, vals, context=context) prod_obj = self.pool.get('mrp.production') if vals.get('date_planned', False) and update: for prod in self.browse(cr, uid, ids, context=context): if prod.production_id.workcenter_lines: dstart = min(vals['date_planned'], prod.production_id.workcenter_lines[0]['date_planned']) prod_obj.write(cr, uid, [prod.production_id.id], {'date_start':dstart}, context=context, mini=False) return result def action_draft(self, cr, uid, ids, context=None): """ Sets state to draft. @return: True """ return self.write(cr, uid, ids, {'state': 'draft'}, context=context) def action_start_working(self, cr, uid, ids, context=None): """ Sets state to start working and writes starting date. @return: True """ self.modify_production_order_state(cr, uid, ids, 'start') self.write(cr, uid, ids, {'state':'startworking', 'date_start': time.strftime('%Y-%m-%d %H:%M:%S')}, context=context) return True def action_done(self, cr, uid, ids, context=None): """ Sets state to done, writes finish date and calculates delay. 
@return: True """ delay = 0.0 date_now = time.strftime('%Y-%m-%d %H:%M:%S') obj_line = self.browse(cr, uid, ids[0]) date_start = datetime.strptime(obj_line.date_start,'%Y-%m-%d %H:%M:%S') date_finished = datetime.strptime(date_now,'%Y-%m-%d %H:%M:%S') delay += (date_finished-date_start).days * 24 delay += (date_finished-date_start).seconds / float(60*60) self.write(cr, uid, ids, {'state':'done', 'date_finished': date_now,'delay':delay}, context=context) self.modify_production_order_state(cr,uid,ids,'done') return True def action_cancel(self, cr, uid, ids, context=None): """ Sets state to cancel. @return: True """ return self.write(cr, uid, ids, {'state':'cancel'}, context=context) def action_pause(self, cr, uid, ids, context=None): """ Sets state to pause. @return: True """ return self.write(cr, uid, ids, {'state':'pause'}, context=context) def action_resume(self, cr, uid, ids, context=None): """ Sets state to startworking. @return: True """ return self.write(cr, uid, ids, {'state':'startworking'}, context=context) class mrp_production(osv.osv): _inherit = 'mrp.production' _columns = { 'allow_reorder': fields.boolean('Free Serialisation', help="Check this to be able to move independently all production orders, without moving dependent ones."), } def _production_date_end(self, cr, uid, ids, prop, unknow_none, context=None): """ Calculates planned end date of production order. @return: Dictionary of values """ result = {} for prod in self.browse(cr, uid, ids, context=context): result[prod.id] = prod.date_planned for line in prod.workcenter_lines: result[prod.id] = max(line.date_planned_end, result[prod.id]) return result def action_production_end(self, cr, uid, ids, context=None): """ Finishes work order if production order is done. @return: Super method """ obj = self.browse(cr, uid, ids, context=context)[0] workcenter_pool = self.pool.get('mrp.production.workcenter.line') for workcenter_line in obj.workcenter_lines: if workcenter_line.state == 'draft': workcenter_line.signal_workflow('button_start_working') workcenter_line.signal_workflow('button_done') return super(mrp_production,self).action_production_end(cr, uid, ids, context=context) def action_in_production(self, cr, uid, ids, context=None): """ Changes state to In Production and writes starting date. @return: True """ workcenter_pool = self.pool.get('mrp.production.workcenter.line') for prod in self.browse(cr, uid, ids): if prod.workcenter_lines: workcenter_pool.signal_workflow(cr, uid, [prod.workcenter_lines[0].id], 'button_start_working') return super(mrp_production,self).action_in_production(cr, uid, ids, context=context) def action_cancel(self, cr, uid, ids, context=None): """ Cancels work order if production order is canceled. @return: Super method """ workcenter_pool = self.pool.get('mrp.production.workcenter.line') obj = self.browse(cr, uid, ids,context=context)[0] workcenter_pool.signal_workflow(cr, uid, [record.id for record in obj.workcenter_lines], 'button_cancel') return super(mrp_production,self).action_cancel(cr,uid,ids,context=context) def _compute_planned_workcenter(self, cr, uid, ids, context=None, mini=False): """ Computes planned and finished dates for work order. 
@return: Calculated date """ dt_end = datetime.now() if context is None: context = {} for po in self.browse(cr, uid, ids, context=context): dt_end = datetime.strptime(po.date_planned, '%Y-%m-%d %H:%M:%S') if not po.date_start: self.write(cr, uid, [po.id], { 'date_start': po.date_planned }, context=context, update=False) old = None for wci in range(len(po.workcenter_lines)): wc = po.workcenter_lines[wci] if (old is None) or (wc.sequence>old): dt = dt_end if context.get('__last_update'): del context['__last_update'] if (wc.date_planned < dt.strftime('%Y-%m-%d %H:%M:%S')) or mini: self.pool.get('mrp.production.workcenter.line').write(cr, uid, [wc.id], { 'date_planned': dt.strftime('%Y-%m-%d %H:%M:%S') }, context=context, update=False) i = self.pool.get('resource.calendar').interval_get( cr, uid, #passing False makes resource_resource._schedule_hours run 1000 iterations doing nothing wc.workcenter_id.calendar_id and wc.workcenter_id.calendar_id.id or None, dt, wc.hour or 0.0 ) if i: dt_end = max(dt_end, i[-1][1]) else: dt_end = datetime.strptime(wc.date_planned_end, '%Y-%m-%d %H:%M:%S') old = wc.sequence or 0 super(mrp_production, self).write(cr, uid, [po.id], { 'date_finished': dt_end }) return dt_end def _move_pass(self, cr, uid, ids, context=None): """ Calculates start date for stock moves finding interval from resource calendar. @return: True """ for po in self.browse(cr, uid, ids, context=context): if po.allow_reorder: continue todo = list(po.move_lines) dt = datetime.strptime(po.date_start,'%Y-%m-%d %H:%M:%S') while todo: l = todo.pop(0) if l.state in ('done','cancel','draft'): continue todo += l.move_dest_id_lines date_end = l.production_id.date_finished if date_end and datetime.strptime(date_end, '%Y-%m-%d %H:%M:%S') > dt: if l.production_id.state not in ('done','cancel'): for wc in l.production_id.workcenter_lines: i = self.pool.get('resource.calendar').interval_min_get( cr, uid, wc.workcenter_id.calendar_id.id or False, dt, wc.hour or 0.0 ) dt = i[0][0] if l.production_id.date_start > dt.strftime('%Y-%m-%d %H:%M:%S'): self.write(cr, uid, [l.production_id.id], {'date_start':dt.strftime('%Y-%m-%d %H:%M:%S')}, mini=True) return True def _move_futur(self, cr, uid, ids, context=None): """ Calculates start date for stock moves. 
@return: True """ for po in self.browse(cr, uid, ids, context=context): if po.allow_reorder: continue for line in po.move_created_ids: l = line while l.move_dest_id: l = l.move_dest_id if l.state in ('done','cancel','draft'): break if l.production_id.state in ('done','cancel'): break if l.production_id and (l.production_id.date_start < po.date_finished): self.write(cr, uid, [l.production_id.id], {'date_start': po.date_finished}) break return True def write(self, cr, uid, ids, vals, context=None, update=True, mini=True): direction = {} if vals.get('date_start', False): for po in self.browse(cr, uid, ids, context=context): direction[po.id] = cmp(po.date_start, vals.get('date_start', False)) result = super(mrp_production, self).write(cr, uid, ids, vals, context=context) if (vals.get('workcenter_lines', False) or vals.get('date_start', False) or vals.get('date_planned', False)) and update: self._compute_planned_workcenter(cr, uid, ids, context=context, mini=mini) for d in direction: if direction[d] == 1: # the production order has been moved to the passed self._move_pass(cr, uid, [d], context=context) pass elif direction[d] == -1: self._move_futur(cr, uid, [d], context=context) # the production order has been moved to the future pass return result def action_compute(self, cr, uid, ids, properties=None, context=None): """ Computes bills of material of a product and planned date of work order. @param properties: List containing dictionaries of properties. @return: No. of products. """ result = super(mrp_production, self).action_compute(cr, uid, ids, properties=properties, context=context) self._compute_planned_workcenter(cr, uid, ids, context=context) return result class mrp_operations_operation_code(osv.osv): _name="mrp_operations.operation.code" _columns={ 'name': fields.char('Operation Name', required=True), 'code': fields.char('Code', size=16, required=True), 'start_stop': fields.selection([('start','Start'),('pause','Pause'),('resume','Resume'),('cancel','Cancelled'),('done','Done')], 'Status', required=True), } class mrp_operations_operation(osv.osv): _name="mrp_operations.operation" def _order_date_search_production(self, cr, uid, ids, context=None): """ Finds operations for a production order. @return: List of ids """ operation_ids = self.pool.get('mrp_operations.operation').search(cr, uid, [('production_id','=',ids[0])], context=context) return operation_ids def _get_order_date(self, cr, uid, ids, field_name, arg, context=None): """ Calculates planned date for an operation. @return: Dictionary of values """ res={} operation_obj = self.browse(cr, uid, ids, context=context) for operation in operation_obj: res[operation.id] = operation.production_id.date_planned return res def calc_delay(self, cr, uid, vals): """ Calculates delay of work order. 
@return: Delay """ code_lst = [] time_lst = [] code_ids = self.pool.get('mrp_operations.operation.code').search(cr, uid, [('id','=',vals['code_id'])]) code = self.pool.get('mrp_operations.operation.code').browse(cr, uid, code_ids)[0] oper_ids = self.search(cr,uid,[('production_id','=',vals['production_id']),('workcenter_id','=',vals['workcenter_id'])]) oper_objs = self.browse(cr,uid,oper_ids) for oper in oper_objs: code_lst.append(oper.code_id.start_stop) time_lst.append(oper.date_start) code_lst.append(code.start_stop) time_lst.append(vals['date_start']) diff = 0 for i in range(0,len(code_lst)): if code_lst[i] == 'pause' or code_lst[i] == 'done' or code_lst[i] == 'cancel': if not i: continue if code_lst[i-1] not in ('resume','start'): continue a = datetime.strptime(time_lst[i-1],'%Y-%m-%d %H:%M:%S') b = datetime.strptime(time_lst[i],'%Y-%m-%d %H:%M:%S') diff += (b-a).days * 24 diff += (b-a).seconds / float(60*60) return diff def check_operation(self, cr, uid, vals): """ Finds which operation is called ie. start, pause, done, cancel. @param vals: Dictionary of values. @return: True or False """ code_ids=self.pool.get('mrp_operations.operation.code').search(cr,uid,[('id','=',vals['code_id'])]) code=self.pool.get('mrp_operations.operation.code').browse(cr,uid,code_ids)[0] code_lst = [] oper_ids=self.search(cr,uid,[('production_id','=',vals['production_id']),('workcenter_id','=',vals['workcenter_id'])]) oper_objs=self.browse(cr,uid,oper_ids) if not oper_objs: if code.start_stop!='start': raise osv.except_osv(_('Sorry!'),_('Operation is not started yet!')) return False else: for oper in oper_objs: code_lst.append(oper.code_id.start_stop) if code.start_stop=='start': if 'start' in code_lst: raise osv.except_osv(_('Sorry!'),_('Operation has already started! 
You can either Pause/Finish/Cancel the operation.')) return False if code.start_stop=='pause': if code_lst[len(code_lst)-1]!='resume' and code_lst[len(code_lst)-1]!='start': raise osv.except_osv(_('Error!'),_('In order to Pause the operation, it must be in the Start or Resume state!')) return False if code.start_stop=='resume': if code_lst[len(code_lst)-1]!='pause': raise osv.except_osv(_('Error!'),_('In order to Resume the operation, it must be in the Pause state!')) return False if code.start_stop=='done': if code_lst[len(code_lst)-1]!='start' and code_lst[len(code_lst)-1]!='resume': raise osv.except_osv(_('Sorry!'),_('In order to Finish the operation, it must be in the Start or Resume state!')) return False if 'cancel' in code_lst: raise osv.except_osv(_('Sorry!'),_('Operation is Already Cancelled!')) return False if code.start_stop=='cancel': if not 'start' in code_lst : raise osv.except_osv(_('Error!'),_('No operation to cancel.')) return False if 'done' in code_lst: raise osv.except_osv(_('Error!'),_('Operation is already finished!')) return False return True def write(self, cr, uid, ids, vals, context=None): oper_objs = self.browse(cr, uid, ids, context=context)[0] vals['production_id']=oper_objs.production_id.id vals['workcenter_id']=oper_objs.workcenter_id.id if 'code_id' in vals: self.check_operation(cr, uid, vals) if 'date_start' in vals: vals['date_start']=vals['date_start'] vals['code_id']=oper_objs.code_id.id delay=self.calc_delay(cr, uid, vals) wc_op_id=self.pool.get('mrp.production.workcenter.line').search(cr,uid,[('workcenter_id','=',vals['workcenter_id']),('production_id','=',vals['production_id'])]) self.pool.get('mrp.production.workcenter.line').write(cr,uid,wc_op_id,{'delay':delay}) return super(mrp_operations_operation, self).write(cr, uid, ids, vals, context=context) def create(self, cr, uid, vals, context=None): workcenter_pool = self.pool.get('mrp.production.workcenter.line') code_ids=self.pool.get('mrp_operations.operation.code').search(cr,uid,[('id','=',vals['code_id'])]) code=self.pool.get('mrp_operations.operation.code').browse(cr, uid, code_ids, context=context)[0] wc_op_id=workcenter_pool.search(cr,uid,[('workcenter_id','=',vals['workcenter_id']),('production_id','=',vals['production_id'])]) if code.start_stop in ('start','done','pause','cancel','resume'): if not wc_op_id: production_obj=self.pool.get('mrp.production').browse(cr, uid, vals['production_id'], context=context) wc_op_id.append(workcenter_pool.create(cr,uid,{'production_id':vals['production_id'],'name':production_obj.product_id.name,'workcenter_id':vals['workcenter_id']})) if code.start_stop=='start': workcenter_pool.action_start_working(cr,uid,wc_op_id) workcenter_pool.signal_workflow(cr, uid, [wc_op_id[0]], 'button_start_working') if code.start_stop=='done': workcenter_pool.action_done(cr,uid,wc_op_id) workcenter_pool.signal_workflow(cr, uid, [wc_op_id[0]], 'button_done') self.pool.get('mrp.production').write(cr,uid,vals['production_id'],{'date_finished':datetime.now().strftime('%Y-%m-%d %H:%M:%S')}) if code.start_stop=='pause': workcenter_pool.action_pause(cr,uid,wc_op_id) workcenter_pool.signal_workflow(cr, uid, [wc_op_id[0]], 'button_pause') if code.start_stop=='resume': workcenter_pool.action_resume(cr,uid,wc_op_id) workcenter_pool.signal_workflow(cr, uid, [wc_op_id[0]], 'button_resume') if code.start_stop=='cancel': workcenter_pool.action_cancel(cr,uid,wc_op_id) workcenter_pool.signal_workflow(cr, uid, [wc_op_id[0]], 'button_cancel') if not self.check_operation(cr, uid, vals): return 
delay=self.calc_delay(cr, uid, vals) line_vals = {} line_vals['delay'] = delay if vals.get('date_start',False): if code.start_stop == 'done': line_vals['date_finished'] = vals['date_start'] elif code.start_stop == 'start': line_vals['date_start'] = vals['date_start'] self.pool.get('mrp.production.workcenter.line').write(cr, uid, wc_op_id, line_vals, context=context) return super(mrp_operations_operation, self).create(cr, uid, vals, context=context) def initialize_workflow_instance(self, cr, uid, context=None): mrp_production_workcenter_line = self.pool.get('mrp.production.workcenter.line') line_ids = mrp_production_workcenter_line.search(cr, uid, [], context=context) mrp_production_workcenter_line.create_workflow(cr, uid, line_ids) return True _columns={ 'production_id':fields.many2one('mrp.production','Production',required=True), 'workcenter_id':fields.many2one('mrp.workcenter','Work Center',required=True), 'code_id':fields.many2one('mrp_operations.operation.code','Code',required=True), 'date_start': fields.datetime('Start Date'), 'date_finished': fields.datetime('End Date'), 'order_date': fields.function(_get_order_date,string='Order Date',type='date',store={'mrp.production':(_order_date_search_production,['date_planned'], 10)}), } _defaults={ 'date_start': lambda *a:datetime.now().strftime('%Y-%m-%d %H:%M:%S') } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
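The delay bookkeeping in action_done() and calc_delay() above reduces to one formula: elapsed hours = days * 24 + seconds / 3600 of the timedelta. A worked example with invented timestamps:

from datetime import datetime

fmt = '%Y-%m-%d %H:%M:%S'
start = datetime.strptime('2015-01-01 08:00:00', fmt)
stop = datetime.strptime('2015-01-02 09:30:00', fmt)
diff = stop - start
hours = diff.days * 24 + diff.seconds / float(60 * 60)
assert hours == 25.5      # 24h + 1h30m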
captnjohnny1618/Pichat
client.py
1
2913
import sys
import socket
import select
import cPickle as pickle
import lib_pichat as lp

def help_string():
    f = open('help_message.txt', 'r')
    help_message = f.read()
    return help_message

class client():
    #SERVER_IP='192.168.2.5'
    #SERVER_IP='127.0.0.1'
    SERVER_IP = 'aurora.rip.ucla.edu'
    PORT = 8000
    BUFF_SIZE = 4096
    USER = None

    def __init__(self):
        ### Connect to server and set up user
        print('')
        print('----------------------')
        print('Pichat Client Started!')
        print('----------------------')
        print('')
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            sock.connect((self.SERVER_IP, self.PORT))
        except socket.error:
            # Retry once on the next port before giving up.
            self.PORT += 1
            sock.connect((self.SERVER_IP, self.PORT))
        print("Connected to server %s on port %i" % (self.SERVER_IP, self.PORT))
        username = raw_input('Please enter a username: ')
        print("Exchanging info with the Pichat server...")
        self.USER = lp.user(username, [], [])
        sock.send(pickle.dumps(self.USER))
        print("Welcome %s!" % self.USER.name)
        print(help_string())
        sys.stdout.write('[ME] ')
        sys.stdout.flush()
        ### Main loop to handle sending and receiving data
        while True:
            socket_list = [sys.stdin, sock]
            readable, writable, errored = select.select(socket_list, [], [], 0)
            for s in readable:
                if s == sock:
                    ## Receiving data from server
                    data = sock.recv(self.BUFF_SIZE)
                    if data == '':
                        # Check for disconnect *before* unpickling: recv()
                        # returns '' on a closed socket and pickle.loads('')
                        # would raise EOFError.
                        print('Disconnected from server')
                        sys.exit()
                    m = pickle.loads(data)  # load into message object
                    if m.signal == 100:
                        print('\n\n')
                        print('------------------------')
                        print('Thanks for using Pichat!')
                        print('------------------------')
                        print('\n\n')
                        sys.exit()
                    sys.stdout.write('\b\b\b\b\b')
                    sys.stdout.write(str(m))
                    sys.stdout.write('[ME] ')
                    sys.stdout.flush()
                else:
                    ## User is writing data
                    msg = sys.stdin.readline()
                    msg = lp.message(msg, self.USER.name)
                    sock.send(pickle.dumps(msg))
                    sys.stdout.write('[ME] ')
                    sys.stdout.flush()

if __name__ == '__main__':
    client()
gpl-2.0
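The client above writes one pickle per send() and assumes each recv() yields exactly one pickle; TCP guarantees neither. A hedged sketch of length-prefixed framing that would make message boundaries explicit (this is not part of the original protocol):

import pickle
import struct

def send_msg(sock, obj):
    payload = pickle.dumps(obj)
    sock.sendall(struct.pack('!I', len(payload)) + payload)  # 4-byte length header

def recv_msg(sock):
    header = sock.recv(4)                 # header read simplified for the sketch
    if len(header) < 4:
        return None                       # peer closed the connection
    (length,) = struct.unpack('!I', header)
    chunks = []
    while length:
        chunk = sock.recv(length)
        if not chunk:
            return None
        chunks.append(chunk)
        length -= len(chunk)
    return pickle.loads(b''.join(chunks))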
endlessm/chromium-browser
third_party/mako/mako/cache.py
8
7736
# mako/cache.py # Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file> # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from mako import compat from mako import util _cache_plugins = util.PluginLoader("mako.cache") register_plugin = _cache_plugins.register register_plugin("beaker", "mako.ext.beaker_cache", "BeakerCacheImpl") class Cache(object): """Represents a data content cache made available to the module space of a specific :class:`.Template` object. .. versionadded:: 0.6 :class:`.Cache` by itself is mostly a container for a :class:`.CacheImpl` object, which implements a fixed API to provide caching services; specific subclasses exist to implement different caching strategies. Mako includes a backend that works with the Beaker caching system. Beaker itself then supports a number of backends (i.e. file, memory, memcached, etc.) The construction of a :class:`.Cache` is part of the mechanics of a :class:`.Template`, and programmatic access to this cache is typically via the :attr:`.Template.cache` attribute. """ impl = None """Provide the :class:`.CacheImpl` in use by this :class:`.Cache`. This accessor allows a :class:`.CacheImpl` with additional methods beyond that of :class:`.Cache` to be used programmatically. """ id = None """Return the 'id' that identifies this cache. This is a value that should be globally unique to the :class:`.Template` associated with this cache, and can be used by a caching system to name a local container for data specific to this template. """ starttime = None """Epochal time value for when the owning :class:`.Template` was first compiled. A cache implementation may wish to invalidate data earlier than this timestamp; this has the effect of the cache for a specific :class:`.Template` starting clean any time the :class:`.Template` is recompiled, such as when the original template file changed on the filesystem. """ def __init__(self, template, *args): # check for a stale template calling the # constructor if isinstance(template, compat.string_types) and args: return self.template = template self.id = template.module.__name__ self.starttime = template.module._modified_time self._def_regions = {} self.impl = self._load_impl(self.template.cache_impl) def _load_impl(self, name): return _cache_plugins.load(name)(self) def get_or_create(self, key, creation_function, **kw): """Retrieve a value from the cache, using the given creation function to generate a new value.""" return self._ctx_get_or_create(key, creation_function, None, **kw) def _ctx_get_or_create(self, key, creation_function, context, **kw): """Retrieve a value from the cache, using the given creation function to generate a new value.""" if not self.template.cache_enabled: return creation_function() return self.impl.get_or_create( key, creation_function, **self._get_cache_kw(kw, context) ) def set(self, key, value, **kw): r"""Place a value in the cache. :param key: the value's key. :param value: the value. :param \**kw: cache configuration arguments. """ self.impl.set(key, value, **self._get_cache_kw(kw, None)) put = set """A synonym for :meth:`.Cache.set`. This is here for backwards compatibility. """ def get(self, key, **kw): r"""Retrieve a value from the cache. :param key: the value's key. :param \**kw: cache configuration arguments. The backend is configured using these arguments upon first request. Subsequent requests that use the same series of configuration values will use that same backend. 
""" return self.impl.get(key, **self._get_cache_kw(kw, None)) def invalidate(self, key, **kw): r"""Invalidate a value in the cache. :param key: the value's key. :param \**kw: cache configuration arguments. The backend is configured using these arguments upon first request. Subsequent requests that use the same series of configuration values will use that same backend. """ self.impl.invalidate(key, **self._get_cache_kw(kw, None)) def invalidate_body(self): """Invalidate the cached content of the "body" method for this template. """ self.invalidate("render_body", __M_defname="render_body") def invalidate_def(self, name): """Invalidate the cached content of a particular ``<%def>`` within this template. """ self.invalidate("render_%s" % name, __M_defname="render_%s" % name) def invalidate_closure(self, name): """Invalidate a nested ``<%def>`` within this template. Caching of nested defs is a blunt tool as there is no management of scope -- nested defs that use cache tags need to have names unique of all other nested defs in the template, else their content will be overwritten by each other. """ self.invalidate(name, __M_defname=name) def _get_cache_kw(self, kw, context): defname = kw.pop("__M_defname", None) if not defname: tmpl_kw = self.template.cache_args.copy() tmpl_kw.update(kw) elif defname in self._def_regions: tmpl_kw = self._def_regions[defname] else: tmpl_kw = self.template.cache_args.copy() tmpl_kw.update(kw) self._def_regions[defname] = tmpl_kw if context and self.impl.pass_context: tmpl_kw = tmpl_kw.copy() tmpl_kw.setdefault("context", context) return tmpl_kw class CacheImpl(object): """Provide a cache implementation for use by :class:`.Cache`.""" def __init__(self, cache): self.cache = cache pass_context = False """If ``True``, the :class:`.Context` will be passed to :meth:`get_or_create <.CacheImpl.get_or_create>` as the name ``'context'``. """ def get_or_create(self, key, creation_function, **kw): r"""Retrieve a value from the cache, using the given creation function to generate a new value. This function *must* return a value, either from the cache, or via the given creation function. If the creation function is called, the newly created value should be populated into the cache under the given key before being returned. :param key: the value's key. :param creation_function: function that when called generates a new value. :param \**kw: cache configuration arguments. """ raise NotImplementedError() def set(self, key, value, **kw): r"""Place a value in the cache. :param key: the value's key. :param value: the value. :param \**kw: cache configuration arguments. """ raise NotImplementedError() def get(self, key, **kw): r"""Retrieve a value from the cache. :param key: the value's key. :param \**kw: cache configuration arguments. """ raise NotImplementedError() def invalidate(self, key, **kw): r"""Invalidate a value in the cache. :param key: the value's key. :param \**kw: cache configuration arguments. """ raise NotImplementedError()
bsd-3-clause
sunils34/buffer-django-nonrel
django/contrib/gis/gdal/prototypes/srs.py
321
3378
from ctypes import c_char_p, c_int, c_void_p, POINTER
from django.contrib.gis.gdal.libgdal import lgdal, std_call
from django.contrib.gis.gdal.prototypes.generation import \
    const_string_output, double_output, int_output, \
    srs_output, string_output, void_output

## Shortcut generation for routines with known parameters.
def srs_double(f):
    """
    Creates a function prototype for the OSR routines that take
    the OSRSpatialReference object and return a double value.
    """
    return double_output(f, [c_void_p, POINTER(c_int)], errcheck=True)

def units_func(f):
    """
    Creates a ctypes function prototype for OSR units functions, e.g.,
    OSRGetAngularUnits, OSRGetLinearUnits.
    """
    return double_output(f, [c_void_p, POINTER(c_char_p)], strarg=True)

# Creation & destruction.
clone_srs = srs_output(std_call('OSRClone'), [c_void_p])
new_srs = srs_output(std_call('OSRNewSpatialReference'), [c_char_p])
release_srs = void_output(lgdal.OSRRelease, [c_void_p], errcheck=False)
destroy_srs = void_output(std_call('OSRDestroySpatialReference'), [c_void_p], errcheck=False)
srs_validate = void_output(lgdal.OSRValidate, [c_void_p])

# Getting the semi_major, semi_minor, and flattening functions.
semi_major = srs_double(lgdal.OSRGetSemiMajor)
semi_minor = srs_double(lgdal.OSRGetSemiMinor)
invflattening = srs_double(lgdal.OSRGetInvFlattening)

# WKT, PROJ, EPSG, XML importation routines.
from_wkt = void_output(lgdal.OSRImportFromWkt, [c_void_p, POINTER(c_char_p)])
from_proj = void_output(lgdal.OSRImportFromProj4, [c_void_p, c_char_p])
from_epsg = void_output(std_call('OSRImportFromEPSG'), [c_void_p, c_int])
from_xml = void_output(lgdal.OSRImportFromXML, [c_void_p, c_char_p])
from_user_input = void_output(std_call('OSRSetFromUserInput'), [c_void_p, c_char_p])

# Morphing to/from ESRI WKT.
morph_to_esri = void_output(lgdal.OSRMorphToESRI, [c_void_p])
morph_from_esri = void_output(lgdal.OSRMorphFromESRI, [c_void_p])

# Identifying the EPSG.
identify_epsg = void_output(lgdal.OSRAutoIdentifyEPSG, [c_void_p])

# Getting the angular_units, linear_units functions.
linear_units = units_func(lgdal.OSRGetLinearUnits)
angular_units = units_func(lgdal.OSRGetAngularUnits)

# For exporting to WKT, PROJ.4, "Pretty" WKT, and XML.
to_wkt = string_output(std_call('OSRExportToWkt'), [c_void_p, POINTER(c_char_p)])
to_proj = string_output(std_call('OSRExportToProj4'), [c_void_p, POINTER(c_char_p)])
to_pretty_wkt = string_output(std_call('OSRExportToPrettyWkt'), [c_void_p, POINTER(c_char_p), c_int], offset=-2)
# Memory leak fixed in GDAL 1.5; still exists in 1.4.
to_xml = string_output(lgdal.OSRExportToXML, [c_void_p, POINTER(c_char_p), c_char_p], offset=-2)

# String attribute retrieval routines.
get_attr_value = const_string_output(std_call('OSRGetAttrValue'), [c_void_p, c_char_p, c_int])
get_auth_name = const_string_output(lgdal.OSRGetAuthorityName, [c_void_p, c_char_p])
get_auth_code = const_string_output(lgdal.OSRGetAuthorityCode, [c_void_p, c_char_p])

# SRS properties.
isgeographic = int_output(lgdal.OSRIsGeographic, [c_void_p])
islocal = int_output(lgdal.OSRIsLocal, [c_void_p])
isprojected = int_output(lgdal.OSRIsProjected, [c_void_p])

# Coordinate transformation.
new_ct = srs_output(std_call('OCTNewCoordinateTransformation'), [c_void_p, c_void_p])
destroy_ct = void_output(std_call('OCTDestroyCoordinateTransformation'), [c_void_p], errcheck=False)
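These prototypes are rarely called directly; they back django.contrib.gis.gdal.SpatialReference, which is the intended public entry point. A brief usage sketch, assuming a working GDAL installation:

from django.contrib.gis.gdal import SpatialReference

srs = SpatialReference(4326)             # uses from_epsg under the hood
print(srs.geographic)                    # isgeographic -> True for WGS84
print(srs.semi_major, srs.semi_minor)    # the srs_double prototypes above
print(srs.wkt[:40])                      # to_wkt string output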
bsd-3-clause
kernevil/samba
python/samba/tests/getdcname.py
2
18962
# Unix SMB/CIFS implementation. # Copyright (C) Andrew Bartlett <abartlet@samba.org> 2018 # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # """ Tests GetDCNameEx calls in NETLOGON """ from samba import auth from samba import WERRORError, werror import samba.tests import os from samba.credentials import Credentials from samba.dcerpc import netlogon from samba.dcerpc.misc import GUID class GetDCNameEx(samba.tests.TestCase): def setUp(self): self.lp = samba.tests.env_loadparm() self.creds = Credentials() self.netlogon_conn = None self.server = os.environ.get('SERVER') self.realm = os.environ.get('REALM') self.domain = os.environ.get('DOMAIN') self.trust_realm = os.environ.get('TRUST_REALM') self.trust_domain = os.environ.get('TRUST_DOMAIN') def _call_get_dc_name(self, domain=None, domain_guid=None, site_name=None, ex2=False, flags=0): if self.netlogon_conn is None: self.netlogon_conn = netlogon.netlogon("ncalrpc:[schannel]", self.get_loadparm()) if ex2: return self.netlogon_conn.netr_DsRGetDCNameEx2(self.server, None, 0, domain, domain_guid, site_name, flags) else: return self.netlogon_conn.netr_DsRGetDCNameEx(self.server, domain, domain_guid, site_name, flags) def test_get_dc_ex2(self): """Check the most trivial requirements of Ex2 (no domain or site) a) The paths are prefixed with two backslashes b) The returned domains conform to the format requested c) The domain matches our own domain """ response = self._call_get_dc_name(ex2=True) self.assertTrue(response.dc_unc is not None) self.assertTrue(response.dc_unc.startswith('\\\\')) self.assertTrue(response.dc_address is not None) self.assertTrue(response.dc_address.startswith('\\\\')) self.assertTrue(response.domain_name.lower() == self.realm.lower() or response.domain_name.lower() == self.domain.lower()) response = self._call_get_dc_name(ex2=True, flags=netlogon.DS_RETURN_DNS_NAME) self.assertEqual(response.domain_name.lower(), self.realm.lower()) response = self._call_get_dc_name(ex2=True, flags=netlogon.DS_RETURN_FLAT_NAME) self.assertEqual(response.domain_name.lower(), self.domain.lower()) def test_get_dc_over_winbind_ex2(self): """Check what happens to Ex2 requests after being forwarded to winbind a) The paths must still have the same backslash prefixes b) The returned domain does not match our own domain c) The domain matches the format requested """ if self.trust_realm is None: return response_trust = self._call_get_dc_name(domain=self.trust_realm, ex2=True) response = self._call_get_dc_name(domain=self.realm, ex2=True) self.assertTrue(response_trust.dc_unc is not None) self.assertTrue(response_trust.dc_unc.startswith('\\\\')) self.assertTrue(response_trust.dc_address is not None) self.assertTrue(response_trust.dc_address.startswith('\\\\')) self.assertNotEqual(response_trust.dc_unc, response.dc_unc) self.assertNotEqual(response_trust.dc_address, response.dc_address) self.assertTrue(response_trust.domain_name.lower() == self.trust_realm.lower() or 
response_trust.domain_name.lower() == self.trust_domain.lower()) response_trust = self._call_get_dc_name(domain=self.trust_realm, flags=netlogon.DS_RETURN_DNS_NAME, ex2=True) self.assertEqual(response_trust.domain_name.lower(), self.trust_realm.lower()) response_trust = self._call_get_dc_name(domain=self.trust_realm, flags=netlogon.DS_RETURN_FLAT_NAME, ex2=True) self.assertEqual(response_trust.domain_name.lower(), self.trust_domain.lower()) def test_get_dc_over_winbind(self): """Test the standard Ex version (not Ex2) Ex calls Ex2 anyways, from now on, just test Ex. """ if self.trust_realm is None: return response_trust = self._call_get_dc_name(domain=self.trust_realm, flags=netlogon.DS_RETURN_DNS_NAME) self.assertTrue(response_trust.dc_unc is not None) self.assertTrue(response_trust.dc_unc.startswith('\\\\')) self.assertTrue(response_trust.dc_address is not None) self.assertTrue(response_trust.dc_address.startswith('\\\\')) self.assertEqual(response_trust.domain_name.lower(), self.trust_realm.lower()) def test_get_dc_over_winbind_with_site(self): """Test the standard Ex version (not Ex2) We assume that there is a Default-First-Site-Name site. """ if self.trust_realm is None: return site = 'Default-First-Site-Name' response_trust = self._call_get_dc_name(domain=self.trust_realm, site_name=site, flags=netlogon.DS_RETURN_DNS_NAME) self.assertTrue(response_trust.dc_unc is not None) self.assertTrue(response_trust.dc_unc.startswith('\\\\')) self.assertTrue(response_trust.dc_address is not None) self.assertTrue(response_trust.dc_address.startswith('\\\\')) self.assertEqual(response_trust.domain_name.lower(), self.trust_realm.lower()) self.assertEqual(site.lower(), response_trust.dc_site_name.lower()) def test_get_dc_over_winbind_invalid_site(self): """Test the standard Ex version (not Ex2) We assume that there is no Invalid-First-Site-Name site. """ if self.trust_realm is None: return site = 'Invalid-First-Site-Name' try: response_trust = self._call_get_dc_name(domain=self.trust_realm, site_name=site, flags=netlogon.DS_RETURN_DNS_NAME, ex2=False) self.fail("Failed to give the correct error for incorrect site") except WERRORError as e: enum, estr = e.args if enum != werror.WERR_NO_SUCH_DOMAIN: self.fail("Failed to detect an invalid site name") def test_get_dc_over_winbind_invalid_site_ex2(self): """Test the Ex2 version. We assume that there is no Invalid-First-Site-Name site. """ if self.trust_realm is None: return site = 'Invalid-First-Site-Name' try: response_trust = self._call_get_dc_name(domain=self.trust_realm, site_name=site, flags=netlogon.DS_RETURN_DNS_NAME, ex2=True) self.fail("Failed to give the correct error for incorrect site") except WERRORError as e: enum, estr = e.args if enum != werror.WERR_NO_SUCH_DOMAIN: self.fail("Failed to detect an invalid site name") def test_get_dc_over_winbind_empty_string_site(self): """Test the standard Ex version (not Ex2) We assume that there is a Default-First-Site-Name site. 
""" if self.trust_realm is None: return site = '' try: response_trust = self._call_get_dc_name(domain=self.trust_realm, site_name=site, flags=netlogon.DS_RETURN_DNS_NAME) except WERRORError as e: self.fail("Unable to get empty string site result: " + str(e)) self.assertTrue(response_trust.dc_unc is not None) self.assertTrue(response_trust.dc_unc.startswith('\\\\')) self.assertTrue(response_trust.dc_address is not None) self.assertTrue(response_trust.dc_address.startswith('\\\\')) self.assertEqual(response_trust.domain_name.lower(), self.trust_realm.lower()) self.assertTrue(response_trust.dc_site_name is not None) self.assertNotEqual('', response_trust.dc_site_name) def test_get_dc_over_winbind_netbios(self): """Supply a NETBIOS trust domain name.""" if self.trust_realm is None: return try: response_trust = self._call_get_dc_name(domain=self.trust_domain, flags=netlogon.DS_RETURN_DNS_NAME, ex2=False) except WERRORError as e: self.fail("Failed to succeed over winbind: " + str(e)) self.assertTrue(response_trust is not None) self.assertEqual(response_trust.domain_name.lower(), self.trust_realm.lower()) def test_get_dc_over_winbind_with_site_netbios(self): """Supply a NETBIOS trust domain name. Sporadically fails because NETBIOS queries do not return site name in winbind. The site check in NETLOGON will trigger and fail the request. Currently marked in flapping... """ if self.trust_realm is None: return site = 'Default-First-Site-Name' try: response_trust = self._call_get_dc_name(domain=self.trust_domain, site_name=site, flags=netlogon.DS_RETURN_DNS_NAME, ex2=False) except WERRORError as e: self.fail("get_dc_name (domain=%s,site=%s) over winbind failed: %s" % (self.trust_domain, site, e)) self.assertTrue(response_trust is not None) self.assertEqual(response_trust.domain_name.lower(), self.trust_realm.lower()) self.assertEqual(site.lower(), response_trust.dc_site_name.lower()) def test_get_dc_over_winbind_domain_guid(self): """Ensure that we do not reject requests supplied with a NULL GUID""" if self.trust_realm is None: return null_guid = GUID() try: response_trust = self._call_get_dc_name(domain=self.trust_realm, domain_guid=null_guid, flags=netlogon.DS_RETURN_DNS_NAME) except WERRORError as e: self.fail("Unable to get NULL domain GUID result: " + str(e)) self.assertTrue(response_trust.dc_unc is not None) self.assertTrue(response_trust.dc_unc.startswith('\\\\')) self.assertTrue(response_trust.dc_address is not None) self.assertTrue(response_trust.dc_address.startswith('\\\\')) self.assertEqual(response_trust.domain_name.lower(), self.trust_realm.lower()) def test_get_dc_with_site(self): """Test the standard Ex version (not Ex2) We assume that there is a Default-First-Site-Name site. """ site = 'Default-First-Site-Name' response = self._call_get_dc_name(domain=self.realm, site_name=site, flags=netlogon.DS_RETURN_DNS_NAME) self.assertTrue(response.dc_unc is not None) self.assertTrue(response.dc_unc.startswith('\\\\')) self.assertTrue(response.dc_address is not None) self.assertTrue(response.dc_address.startswith('\\\\')) self.assertEqual(response.domain_name.lower(), self.realm.lower()) self.assertEqual(site.lower(), response.dc_site_name.lower()) def test_get_dc_invalid_site(self): """Test the standard Ex version (not Ex2) We assume that there is no Invalid-First-Site-Name site. 
""" if self.realm is None: return site = 'Invalid-First-Site-Name' try: response = self._call_get_dc_name(domain=self.realm, site_name=site, flags=netlogon.DS_RETURN_DNS_NAME, ex2=False) self.fail("Failed to give the correct error for incorrect site") except WERRORError as e: enum, estr = e.args if enum != werror.WERR_NO_SUCH_DOMAIN: self.fail("Failed to detect an invalid site name") def test_get_dc_invalid_site_ex2(self): """Test the Ex2 version We assume that there is no Invalid-First-Site-Name site. """ site = 'Invalid-First-Site-Name' try: response = self._call_get_dc_name(domain=self.realm, site_name=site, flags=netlogon.DS_RETURN_DNS_NAME, ex2=True) self.fail("Failed to give the correct error for incorrect site") except WERRORError as e: enum, estr = e.args if enum != werror.WERR_NO_SUCH_DOMAIN: self.fail("Failed to detect an invalid site name") def test_get_dc_empty_string_site(self): """Test the standard Ex version (not Ex2) We assume that there is a Default-First-Site-Name site. """ site = '' try: response = self._call_get_dc_name(domain=self.realm, site_name=site, flags=netlogon.DS_RETURN_DNS_NAME) except WERRORError as e: self.fail("Unable to get empty string site result: " + str(e)) self.assertTrue(response.dc_unc is not None) self.assertTrue(response.dc_unc.startswith('\\\\')) self.assertTrue(response.dc_address is not None) self.assertTrue(response.dc_address.startswith('\\\\')) self.assertEqual(response.domain_name.lower(), self.realm.lower()) self.assertTrue(response.dc_site_name is not None) self.assertNotEqual('', response.dc_site_name) def test_get_dc_netbios(self): """Supply a NETBIOS domain name.""" try: response = self._call_get_dc_name(domain=self.domain, flags=netlogon.DS_RETURN_DNS_NAME, ex2=False) except WERRORError as e: self.fail("Failed to succeed over winbind: " + str(e)) self.assertTrue(response is not None) self.assertEqual(response.domain_name.lower(), self.realm.lower()) def test_get_dc_with_site_netbios(self): """Supply a NETBIOS domain name.""" site = 'Default-First-Site-Name' try: response = self._call_get_dc_name(domain=self.domain, site_name=site, flags=netlogon.DS_RETURN_DNS_NAME, ex2=False) except WERRORError as e: self.fail("Failed to succeed over winbind: " + str(e)) self.assertTrue(response is not None) self.assertEqual(response.domain_name.lower(), self.realm.lower()) self.assertEqual(site.lower(), response.dc_site_name.lower()) def test_get_dc_with_domain_guid(self): """Ensure that we do not reject requests supplied with a NULL GUID""" null_guid = GUID() response = self._call_get_dc_name(domain=self.realm, domain_guid=null_guid, flags=netlogon.DS_RETURN_DNS_NAME) self.assertTrue(response.dc_unc is not None) self.assertTrue(response.dc_unc.startswith('\\\\')) self.assertTrue(response.dc_address is not None) self.assertTrue(response.dc_address.startswith('\\\\')) self.assertEqual(response.domain_name.lower(), self.realm.lower()) def test_get_dc_with_empty_string_domain(self): """Ensure that empty domain resolve to the DC domain""" response = self._call_get_dc_name(domain='', flags=netlogon.DS_RETURN_DNS_NAME) self.assertTrue(response.dc_unc is not None) self.assertTrue(response.dc_unc.startswith('\\\\')) self.assertTrue(response.dc_address is not None) self.assertTrue(response.dc_address.startswith('\\\\')) self.assertEqual(response.domain_name.lower(), self.realm.lower()) # TODO Thorough tests of domain GUID # # The domain GUID does not seem to be authoritative, and seems to be a # fallback case for renamed domains.
gpl-3.0
nhicher/ansible
lib/ansible/modules/web_infrastructure/ansible_tower/tower_project.py
27
5886
#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2017, Wayne Witzel III <wayne@riotousliving.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type


ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}


DOCUMENTATION = '''
---
module: tower_project
author: "Wayne Witzel III (@wwitzel3)"
version_added: "2.3"
short_description: create, update, or destroy Ansible Tower projects
description:
    - Create, update, or destroy Ansible Tower projects. See
      U(https://www.ansible.com/tower) for an overview.
options:
    name:
      description:
        - Name to use for the project.
      required: True
    description:
      description:
        - Description to use for the project.
    scm_type:
      description:
        - Type of SCM resource.
      choices: ["manual", "git", "hg", "svn"]
      default: "manual"
    scm_url:
      description:
        - URL of SCM resource.
    local_path:
      description:
        - The server playbook directory for manual projects.
    scm_branch:
      description:
        - The branch to use for the SCM resource.
    scm_credential:
      description:
        - Name of the credential to use with this SCM resource.
    scm_clean:
      description:
        - Remove local modifications before updating.
      type: bool
      default: 'no'
    scm_delete_on_update:
      description:
        - Remove the repository completely before updating.
      type: bool
      default: 'no'
    scm_update_on_launch:
      description:
        - Perform an update of the local repository before launching a job with this project.
      type: bool
      default: 'no'
    organization:
      description:
        - Primary key of organization for project.
    state:
      description:
        - Desired state of the resource.
      default: "present"
      choices: ["present", "absent"]
extends_documentation_fragment: tower
'''


EXAMPLES = '''
- name: Add tower project
  tower_project:
    name: "Foo"
    description: "Foo bar project"
    organization: "test"
    state: present
    tower_config_file: "~/tower_cli.cfg"
'''

from ansible.module_utils.ansible_tower import TowerModule, tower_auth_config, tower_check_mode

try:
    import tower_cli
    import tower_cli.utils.exceptions as exc

    from tower_cli.conf import settings
except ImportError:
    pass


def main():
    argument_spec = dict(
        name=dict(),
        description=dict(),
        organization=dict(),
        scm_type=dict(choices=['manual', 'git', 'hg', 'svn'], default='manual'),
        scm_url=dict(),
        scm_branch=dict(),
        scm_credential=dict(),
        scm_clean=dict(type='bool', default=False),
        scm_delete_on_update=dict(type='bool', default=False),
        scm_update_on_launch=dict(type='bool', default=False),
        local_path=dict(),
        state=dict(choices=['present', 'absent'], default='present'),
    )

    module = TowerModule(argument_spec=argument_spec, supports_check_mode=True)

    name = module.params.get('name')
    description = module.params.get('description')
    organization = module.params.get('organization')
    scm_type = module.params.get('scm_type')
    if scm_type == "manual":
        scm_type = ""
    scm_url = module.params.get('scm_url')
    local_path = module.params.get('local_path')
    scm_branch = module.params.get('scm_branch')
    scm_credential = module.params.get('scm_credential')
    scm_clean = module.params.get('scm_clean')
    scm_delete_on_update = module.params.get('scm_delete_on_update')
    scm_update_on_launch = module.params.get('scm_update_on_launch')
    state = module.params.get('state')

    json_output = {'project': name, 'state': state}

    tower_auth = tower_auth_config(module)
    with settings.runtime_values(**tower_auth):
        tower_check_mode(module)
        project = tower_cli.get_resource('project')

        try:
            if state == 'present':
                try:
                    org_res =
tower_cli.get_resource('organization') org = org_res.get(name=organization) except (exc.NotFound) as excinfo: module.fail_json(msg='Failed to update project, organization not found: {0}'.format(organization), changed=False) if scm_credential: try: cred_res = tower_cli.get_resource('credential') cred = cred_res.get(name=scm_credential) scm_credential = cred['id'] except (exc.NotFound) as excinfo: module.fail_json(msg='Failed to update project, credential not found: {0}'.format(scm_credential), changed=False) result = project.modify(name=name, description=description, organization=org['id'], scm_type=scm_type, scm_url=scm_url, local_path=local_path, scm_branch=scm_branch, scm_clean=scm_clean, credential=scm_credential, scm_delete_on_update=scm_delete_on_update, scm_update_on_launch=scm_update_on_launch, create_on_missing=True) json_output['id'] = result['id'] elif state == 'absent': result = project.delete(name=name) except (exc.ConnectionError, exc.BadRequest) as excinfo: module.fail_json(msg='Failed to update project: {0}'.format(excinfo), changed=False) json_output['changed'] = result['changed'] module.exit_json(**json_output) if __name__ == '__main__': main()
gpl-3.0
MalloyPower/parsing-python
front-end/testsuite-python-lib/Python-2.3/Lib/test/test_shelve.py
9
3720
import os import unittest import shelve import glob from test import test_support class TestCase(unittest.TestCase): fn = "shelftemp" + os.extsep + "db" def test_ascii_file_shelf(self): try: s = shelve.open(self.fn, binary=False) s['key1'] = (1,2,3,4) self.assertEqual(s['key1'], (1,2,3,4)) s.close() finally: for f in glob.glob(self.fn+"*"): os.unlink(f) def test_binary_file_shelf(self): try: s = shelve.open(self.fn, binary=True) s['key1'] = (1,2,3,4) self.assertEqual(s['key1'], (1,2,3,4)) s.close() finally: for f in glob.glob(self.fn+"*"): os.unlink(f) def test_proto2_file_shelf(self): try: s = shelve.open(self.fn, protocol=2) s['key1'] = (1,2,3,4) self.assertEqual(s['key1'], (1,2,3,4)) s.close() finally: for f in glob.glob(self.fn+"*"): os.unlink(f) def test_in_memory_shelf(self): d1 = {} s = shelve.Shelf(d1, binary=False) s['key1'] = (1,2,3,4) self.assertEqual(s['key1'], (1,2,3,4)) s.close() d2 = {} s = shelve.Shelf(d2, binary=True) s['key1'] = (1,2,3,4) self.assertEqual(s['key1'], (1,2,3,4)) s.close() self.assertEqual(len(d1), 1) self.assertNotEqual(d1, d2) def test_mutable_entry(self): d1 = {} s = shelve.Shelf(d1, protocol=2, writeback=False) s['key1'] = [1,2,3,4] self.assertEqual(s['key1'], [1,2,3,4]) s['key1'].append(5) self.assertEqual(s['key1'], [1,2,3,4]) s.close() d2 = {} s = shelve.Shelf(d2, protocol=2, writeback=True) s['key1'] = [1,2,3,4] self.assertEqual(s['key1'], [1,2,3,4]) s['key1'].append(5) self.assertEqual(s['key1'], [1,2,3,4,5]) s.close() self.assertEqual(len(d1), 1) self.assertEqual(len(d2), 1) from test_userdict import TestMappingProtocol class TestShelveBase(TestMappingProtocol): fn = "shelftemp.db" counter = 0 def __init__(self, *args, **kw): self._db = [] TestMappingProtocol.__init__(self, *args, **kw) _tested_class = shelve.Shelf def _reference(self): return {"key1":"value1", "key2":2, "key3":(1,2,3)} def _empty_mapping(self): if self._in_mem: x= shelve.Shelf({}, **self._args) else: self.counter+=1 x= shelve.open(self.fn+str(self.counter), **self._args) self._db.append(x) return x def tearDown(self): for db in self._db: db.close() self._db = [] if not self._in_mem: for f in glob.glob(self.fn+"*"): os.unlink(f) class TestAsciiFileShelve(TestShelveBase): _args={'binary':False} _in_mem = False class TestBinaryFileShelve(TestShelveBase): _args={'binary':True} _in_mem = False class TestProto2FileShelve(TestShelveBase): _args={'protocol':2} _in_mem = False class TestAsciiMemShelve(TestShelveBase): _args={'binary':False} _in_mem = True class TestBinaryMemShelve(TestShelveBase): _args={'binary':True} _in_mem = True class TestProto2MemShelve(TestShelveBase): _args={'protocol':2} _in_mem = True def test_main(): test_support.run_unittest( TestAsciiFileShelve, TestBinaryFileShelve, TestProto2FileShelve, TestAsciiMemShelve, TestBinaryMemShelve, TestProto2MemShelve, TestCase ) if __name__ == "__main__": test_main()
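The mutable-entry behaviour exercised by test_mutable_entry comes down to shelve's writeback flag. A standalone sketch of the same semantics (the temporary file paths are illustrative):

import shelve

s = shelve.open('/tmp/demo_shelf', writeback=False)
s['key1'] = [1, 2, 3, 4]
s['key1'].append(5)                  # mutates a temporary unpickled copy
assert s['key1'] == [1, 2, 3, 4]     # the append was silently lost
s.close()

s = shelve.open('/tmp/demo_shelf_wb', writeback=True)
s['key1'] = [1, 2, 3, 4]
s['key1'].append(5)                  # cached; flushed on sync()/close()
assert s['key1'] == [1, 2, 3, 4, 5]
s.close()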
mit
pravinas/et-maslab-2016
modules/follow.py
1
1959
# follow.py from module import Module import sys, os sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) from constants import * class FollowModule(Module): def __init__(self, timer, leftMotor, rightMotor, intakeMotor, wallFollow, forwardSpeed, blockSwitch): self.timer = timer self.leftMotor = leftMotor self.rightMotor = rightMotor self.intakeMotor = intakeMotor self.movement = wallFollow self.forwardSpeed = forwardSpeed self.blockSwitch = blockSwitch def start(self): self.timer.reset() #turn intake on self.intakeMotor.write(INTAKE_IN, INTAKE_POWER) def run(self): # usually wall follow for a few seconds if self.timer.millis() < FOLLOW_WALL_TIME: #print "follow blockswitch", bool(self.blockSwitch.val) if self.blockSwitch.val: print "Going from FOLLOW to CHECK" self.intakeMotor.write(0,0) self.leftMotor.write(0,0) self.rightMotor.write(0,0) return MODULE_CHECK self.movement.followWall(self.movement.distance(), FORWARD_SPEED) # Back up a little elif self.timer.millis() < FOLLOW_WALL_TIME + FOLLOW_BACK_TIME: self.leftMotor.write(BACKWARD_DIR, FOLLOW_BACKUP_SPEED) self.rightMotor.write(BACKWARD_DIR, FOLLOW_BACKUP_SPEED) # turn aggressively for .3 seconds in case of being stuck elif self.timer.millis() < FOLLOW_WALL_TIME + FOLLOW_BACK_TIME + FOLLOW_TURN_TIME: self.leftMotor.write(FORWARD_DIR, TURN_FAST_SPEED) self.rightMotor.write(BACKWARD_DIR, TURN_FAST_SPEED) # reset everything and start over else: self.timer.reset() self.movement.reset() self.leftMotor.write(0,0) self.rightMotor.write(0,0) return MODULE_FOLLOW
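The run() method above selects behaviour from cumulative time windows. The same timed-phase pattern, isolated as a hedged sketch with hypothetical phase names and durations:

def phase_for(elapsed_ms, phases):
    # phases: ordered (duration_ms, name) pairs; returns the active phase,
    # or None once the schedule is exhausted (the caller resets its timer).
    t = 0
    for duration, name in phases:
        t += duration
        if elapsed_ms < t:
            return name
    return None

phases = [(5000, 'follow'), (800, 'backup'), (300, 'turn')]
assert phase_for(5100, phases) == 'backup'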
mit
rynomster/django
tests/forms_tests/tests/test_media.py
76
23851
# -*- coding: utf-8 -*- from django.forms import CharField, Form, Media, MultiWidget, TextInput from django.template import Context, Template from django.test import SimpleTestCase, override_settings from django.utils.encoding import force_text @override_settings( STATIC_URL='http://media.example.com/static/', ) class FormsMediaTestCase(SimpleTestCase): """Tests for the media handling on widgets and forms""" def test_construction(self): # Check construction of media objects m = Media( css={'all': ('path/to/css1', '/path/to/css2')}, js=('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3'), ) self.assertEqual( str(m), """<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="http://media.other.com/path/to/js2"></script> <script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>""" ) class Foo: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') m3 = Media(Foo) self.assertEqual( str(m3), """<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="http://media.other.com/path/to/js2"></script> <script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>""" ) # A widget can exist without a media definition class MyWidget(TextInput): pass w = MyWidget() self.assertEqual(str(w.media), '') def test_media_dsl(self): ############################################################### # DSL Class-based media definitions ############################################################### # A widget can define media if it needs to. # Any absolute path will be preserved; relative paths are combined # with the value of settings.MEDIA_URL class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') w1 = MyWidget1() self.assertEqual( str(w1.media), """<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="http://media.other.com/path/to/js2"></script> <script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>""" ) # Media objects can be interrogated by media type self.assertEqual( str(w1.media['css']), """<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />""" ) self.assertEqual( str(w1.media['js']), """<script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="http://media.other.com/path/to/js2"></script> <script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>""" ) def test_combine_media(self): # Media objects can be combined. Any given media resource will appear only # once. Duplicated media definitions are ignored. 
class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') class MyWidget2(TextInput): class Media: css = { 'all': ('/path/to/css2', '/path/to/css3') } js = ('/path/to/js1', '/path/to/js4') class MyWidget3(TextInput): class Media: css = { 'all': ('/path/to/css3', 'path/to/css1') } js = ('/path/to/js1', '/path/to/js4') w1 = MyWidget1() w2 = MyWidget2() w3 = MyWidget3() self.assertEqual( str(w1.media + w2.media + w3.media), """<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="http://media.other.com/path/to/js2"></script> <script type="text/javascript" src="https://secure.other.com/path/to/js3"></script> <script type="text/javascript" src="/path/to/js4"></script>""" ) # Check that media addition hasn't affected the original objects self.assertEqual( str(w1.media), """<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="http://media.other.com/path/to/js2"></script> <script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>""" ) # Regression check for #12879: specifying the same CSS or JS file # multiple times in a single Media instance should result in that file # only being included once. 
class MyWidget4(TextInput): class Media: css = {'all': ('/path/to/css1', '/path/to/css1')} js = ('/path/to/js1', '/path/to/js1') w4 = MyWidget4() self.assertEqual(str(w4.media), """<link href="/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script>""") def test_media_property(self): ############################################################### # Property-based media definitions ############################################################### # Widget media can be defined as a property class MyWidget4(TextInput): def _media(self): return Media(css={'all': ('/some/path',)}, js=('/some/js',)) media = property(_media) w4 = MyWidget4() self.assertEqual(str(w4.media), """<link href="/some/path" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/some/js"></script>""") # Media properties can reference the media of their parents class MyWidget5(MyWidget4): def _media(self): return super(MyWidget5, self).media + Media(css={'all': ('/other/path',)}, js=('/other/js',)) media = property(_media) w5 = MyWidget5() self.assertEqual(str(w5.media), """<link href="/some/path" type="text/css" media="all" rel="stylesheet" /> <link href="/other/path" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/some/js"></script> <script type="text/javascript" src="/other/js"></script>""") def test_media_property_parent_references(self): # Media properties can reference the media of their parents, # even if the parent media was defined using a class class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') class MyWidget6(MyWidget1): def _media(self): return super(MyWidget6, self).media + Media(css={'all': ('/other/path',)}, js=('/other/js',)) media = property(_media) w6 = MyWidget6() self.assertEqual( str(w6.media), """<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" /> <link href="/other/path" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="http://media.other.com/path/to/js2"></script> <script type="text/javascript" src="https://secure.other.com/path/to/js3"></script> <script type="text/javascript" src="/other/js"></script>""" ) def test_media_inheritance(self): ############################################################### # Inheritance of media ############################################################### # If a widget extends another but provides no media definition, it inherits the parent widget's media class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') class MyWidget7(MyWidget1): pass w7 = MyWidget7() self.assertEqual( str(w7.media), """<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="http://media.other.com/path/to/js2"></script> <script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>""" ) # If a widget extends another but defines media, 
it extends the parent widget's media by default class MyWidget8(MyWidget1): class Media: css = { 'all': ('/path/to/css3', 'path/to/css1') } js = ('/path/to/js1', '/path/to/js4') w8 = MyWidget8() self.assertEqual( str(w8.media), """<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="http://media.other.com/path/to/js2"></script> <script type="text/javascript" src="https://secure.other.com/path/to/js3"></script> <script type="text/javascript" src="/path/to/js4"></script>""" ) def test_media_inheritance_from_property(self): # If a widget extends another but defines media, it extends the parents widget's media, # even if the parent defined media using a property. class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') class MyWidget4(TextInput): def _media(self): return Media(css={'all': ('/some/path',)}, js=('/some/js',)) media = property(_media) class MyWidget9(MyWidget4): class Media: css = { 'all': ('/other/path',) } js = ('/other/js',) w9 = MyWidget9() self.assertEqual( str(w9.media), """<link href="/some/path" type="text/css" media="all" rel="stylesheet" /> <link href="/other/path" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/some/js"></script> <script type="text/javascript" src="/other/js"></script>""" ) # A widget can disable media inheritance by specifying 'extend=False' class MyWidget10(MyWidget1): class Media: extend = False css = { 'all': ('/path/to/css3', 'path/to/css1') } js = ('/path/to/js1', '/path/to/js4') w10 = MyWidget10() self.assertEqual(str(w10.media), """<link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" /> <link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="/path/to/js4"></script>""") def test_media_inheritance_extends(self): # A widget can explicitly enable full media inheritance by specifying 'extend=True' class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') class MyWidget11(MyWidget1): class Media: extend = True css = { 'all': ('/path/to/css3', 'path/to/css1') } js = ('/path/to/js1', '/path/to/js4') w11 = MyWidget11() self.assertEqual( str(w11.media), """<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="http://media.other.com/path/to/js2"></script> <script type="text/javascript" src="https://secure.other.com/path/to/js3"></script> <script type="text/javascript" src="/path/to/js4"></script>""" ) def test_media_inheritance_single_type(self): # A widget can enable inheritance of one media type by specifying extend as a tuple class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', 
'/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') class MyWidget12(MyWidget1): class Media: extend = ('css',) css = { 'all': ('/path/to/css3', 'path/to/css1') } js = ('/path/to/js1', '/path/to/js4') w12 = MyWidget12() self.assertEqual( str(w12.media), """<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="/path/to/js4"></script>""" ) def test_multi_media(self): ############################################################### # Multi-media handling for CSS ############################################################### # A widget can define CSS media for multiple output media types class MultimediaWidget(TextInput): class Media: css = { 'screen, print': ('/file1', '/file2'), 'screen': ('/file3',), 'print': ('/file4',) } js = ('/path/to/js1', '/path/to/js4') multimedia = MultimediaWidget() self.assertEqual( str(multimedia.media), """<link href="/file4" type="text/css" media="print" rel="stylesheet" /> <link href="/file3" type="text/css" media="screen" rel="stylesheet" /> <link href="/file1" type="text/css" media="screen, print" rel="stylesheet" /> <link href="/file2" type="text/css" media="screen, print" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="/path/to/js4"></script>""" ) def test_multi_widget(self): ############################################################### # Multiwidget media handling ############################################################### class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') class MyWidget2(TextInput): class Media: css = { 'all': ('/path/to/css2', '/path/to/css3') } js = ('/path/to/js1', '/path/to/js4') class MyWidget3(TextInput): class Media: css = { 'all': ('/path/to/css3', 'path/to/css1') } js = ('/path/to/js1', '/path/to/js4') # MultiWidgets have a default media definition that gets all the # media from the component widgets class MyMultiWidget(MultiWidget): def __init__(self, attrs=None): widgets = [MyWidget1, MyWidget2, MyWidget3] super(MyMultiWidget, self).__init__(widgets, attrs) mymulti = MyMultiWidget() self.assertEqual( str(mymulti.media), """<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="http://media.other.com/path/to/js2"></script> <script type="text/javascript" src="https://secure.other.com/path/to/js3"></script> <script type="text/javascript" src="/path/to/js4"></script>""" ) def test_form_media(self): ############################################################### # Media processing for forms ############################################################### class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') class MyWidget2(TextInput): 
class Media: css = { 'all': ('/path/to/css2', '/path/to/css3') } js = ('/path/to/js1', '/path/to/js4') class MyWidget3(TextInput): class Media: css = { 'all': ('/path/to/css3', 'path/to/css1') } js = ('/path/to/js1', '/path/to/js4') # You can ask a form for the media required by its widgets. class MyForm(Form): field1 = CharField(max_length=20, widget=MyWidget1()) field2 = CharField(max_length=20, widget=MyWidget2()) f1 = MyForm() self.assertEqual( str(f1.media), """<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="http://media.other.com/path/to/js2"></script> <script type="text/javascript" src="https://secure.other.com/path/to/js3"></script> <script type="text/javascript" src="/path/to/js4"></script>""" ) # Form media can be combined to produce a single media definition. class AnotherForm(Form): field3 = CharField(max_length=20, widget=MyWidget3()) f2 = AnotherForm() self.assertEqual( str(f1.media + f2.media), """<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="http://media.other.com/path/to/js2"></script> <script type="text/javascript" src="https://secure.other.com/path/to/js3"></script> <script type="text/javascript" src="/path/to/js4"></script>""" ) # Forms can also define media, following the same rules as widgets. 
class FormWithMedia(Form): field1 = CharField(max_length=20, widget=MyWidget1()) field2 = CharField(max_length=20, widget=MyWidget2()) class Media: js = ('/some/form/javascript',) css = { 'all': ('/some/form/css',) } f3 = FormWithMedia() self.assertEqual( str(f3.media), """<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" /> <link href="/some/form/css" type="text/css" media="all" rel="stylesheet" /> <script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="http://media.other.com/path/to/js2"></script> <script type="text/javascript" src="https://secure.other.com/path/to/js3"></script> <script type="text/javascript" src="/path/to/js4"></script> <script type="text/javascript" src="/some/form/javascript"></script>""" ) # Media works in templates self.assertEqual( Template("{{ form.media.js }}{{ form.media.css }}").render(Context({'form': f3})), """<script type="text/javascript" src="/path/to/js1"></script> <script type="text/javascript" src="http://media.other.com/path/to/js2"></script> <script type="text/javascript" src="https://secure.other.com/path/to/js3"></script> <script type="text/javascript" src="/path/to/js4"></script> <script type="text/javascript" src="/some/form/javascript"></script>""" """<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" /> <link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" /> <link href="/some/form/css" type="text/css" media="all" rel="stylesheet" />""" ) def test_html_safe(self): media = Media(css={'all': ['/path/to/css']}, js=['/path/to/js']) self.assertTrue(hasattr(Media, '__html__')) self.assertEqual(force_text(media), media.__html__())
bsd-3-clause
yudingding6197/fin_script
internal/update_tday_db.py
1
1212
#!/usr/bin/env python
# -*- coding:utf8 -*-
import os
import urllib2, time
from internal.url_163.service_163 import *

DB_PATH = './internal/db'

def update_latest_trade(latest_day):
    if latest_day == '':
        print "Invalid day", latest_day
        return -1
    #print(latest_day)
    filenm = 'sh000001'
    if not os.path.exists(DB_PATH):
        os.makedirs(DB_PATH)
        return get_index_history_byNetease(filenm)
    location = DB_PATH + '/' + filenm + '.csv'
    if not os.path.isfile(location):
        return get_index_history_byNetease(filenm)
    fl = open(location, 'r')
    line = fl.readline()
    line = fl.readline()   # second line holds the newest record
    fl.close()
    file_day = line.split(',')[0]
    print("_____ upd_trd", file_day, latest_day)
    if latest_day != file_day:
        return get_index_history_byNetease(filenm)
    #else:
    #    print("Already the latest")
    return 0

'''
if __name__ == '__main__':
    latest_day = get_lastday()
    print(latest_day)
    filenm = 'sh000001'
    if not os.path.exists(DB_PATH):
        os.makedirs(DB_PATH)
    get_index_history_byNetease(filenm)
    exit()
    location = DB_PATH + '/' + filenm + '.csv'
    fl = open(location, 'r')
    lines = fl.readlines(5)
    file_day = lines[1].split(',')[0]
    print(file_day)
    if latest_day != file_day:
        get_index_history_byNetease(filenm)
'''
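The freshness check in update_latest_trade boils down to comparing the newest stored CSV date with the latest trading day. A standalone sketch of that pattern (paths and names are illustrative, not this project's API):

import csv
import os

def needs_refresh(csv_path, latest_day):
    # No file yet -> must fetch.
    if not os.path.isfile(csv_path):
        return True
    with open(csv_path) as fh:
        reader = csv.reader(fh)
        next(reader, None)           # skip the header row
        row = next(reader, None)     # newest row is stored first
    return row is None or row[0] != latest_day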
gpl-2.0
GauravSahu/odoo
addons/analytic_user_function/analytic_user_function.py
174
7775
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields,osv from openerp.tools.translate import _ import openerp.addons.decimal_precision as dp class analytic_user_funct_grid(osv.osv): _name="analytic.user.funct.grid" _description= "Price per User" _rec_name="user_id" _columns={ 'user_id': fields.many2one("res.users", "User", required=True,), 'product_id': fields.many2one("product.product", "Service", required=True,), 'account_id': fields.many2one("account.analytic.account", "Analytic Account", required=True,), 'uom_id': fields.related("product_id", "uom_id", relation="product.uom", string="Unit of Measure", type="many2one", readonly=True), 'price': fields.float('Price', digits_compute=dp.get_precision('Product Price'), help="Price per hour for this user.", required=True), } def onchange_user_product_id(self, cr, uid, ids, user_id, product_id, context=None): if not user_id: return {} emp_obj = self.pool.get('hr.employee') emp_id = emp_obj.search(cr, uid, [('user_id', '=', user_id)], context=context) if not emp_id: return {} value = {} prod = False if product_id: prod = self.pool.get('product.product').browse(cr, uid, product_id, context=context) emp = emp_obj.browse(cr, uid, emp_id[0], context=context) if emp.product_id and not product_id: value['product_id'] = emp.product_id.id prod = emp.product_id if prod: value['price'] = prod.list_price value['uom_id'] = prod.uom_id.id return {'value': value} class account_analytic_account(osv.osv): _inherit = "account.analytic.account" _columns = { 'user_product_ids': fields.one2many('analytic.user.funct.grid', 'account_id', 'Users/Products Rel.', copy=True), } class hr_analytic_timesheet(osv.osv): _inherit = "hr.analytic.timesheet" # Look in account, if no value for the user => look in parent until there is no more parent to look # Take the first found... 
if nothing found => return False def _get_related_user_account_recursiv(self, cr, uid, user_id, account_id): temp=self.pool.get('analytic.user.funct.grid').search(cr, uid, [('user_id', '=', user_id),('account_id', '=', account_id) ]) account=self.pool.get('account.analytic.account').browse(cr, uid, account_id) if temp: return temp else: if account.parent_id: return self._get_related_user_account_recursiv(cr, uid, user_id, account.parent_id.id) else: return False def on_change_account_id(self, cr, uid, ids, account_id, user_id=False, unit_amount=0): res = {} if not (account_id): #avoid a useless call to super return res if not (user_id): return super(hr_analytic_timesheet, self).on_change_account_id(cr, uid, ids, account_id) #get the browse record related to user_id and account_id temp = self._get_related_user_account_recursiv(cr, uid, user_id, account_id) if not temp: #if there isn't any record for this user_id and account_id return super(hr_analytic_timesheet, self).on_change_account_id(cr, uid, ids, account_id) else: #get the old values from super and add the value from the new relation analytic_user_funct_grid r = self.pool.get('analytic.user.funct.grid').browse(cr, uid, temp)[0] res.setdefault('value',{}) res['value']= super(hr_analytic_timesheet, self).on_change_account_id(cr, uid, ids, account_id)['value'] res['value']['product_id'] = r.product_id.id res['value']['product_uom_id'] = r.product_id.uom_id.id #the change of product has to impact the amount, uom and general_account_id a = r.product_id.property_account_expense.id if not a: a = r.product_id.categ_id.property_account_expense_categ.id if not a: raise osv.except_osv(_('Error!'), _('There is no expense account defined ' \ 'for this product: "%s" (id:%d)') % \ (r.product_id.name, r.product_id.id,)) # Compute based on pricetype if unit_amount: amount_unit = self.on_change_unit_amount(cr, uid, ids, r.product_id.id, unit_amount, False, r.product_id.uom_id.id)['value']['amount'] amount = unit_amount * amount_unit res ['value']['amount']= - round(amount, 2) res ['value']['general_account_id']= a return res def on_change_user_id(self, cr, uid, ids, user_id, account_id=False, unit_amount=0): res = super(hr_analytic_timesheet, self).on_change_user_id(cr, uid, ids, user_id) if account_id: #get the browse record related to user_id and account_id temp = self._get_related_user_account_recursiv(cr, uid, user_id, account_id) if temp: #add the value from the new relation analytic_user_funct_grid r = self.pool.get('analytic.user.funct.grid').browse(cr, uid, temp)[0] res['value']['product_id'] = r.product_id.id #the change of product has to impact the amount, uom and general_account_id a = r.product_id.property_account_expense.id if not a: a = r.product_id.categ_id.property_account_expense_categ.id if not a: raise osv.except_osv(_('Error!'), _('There is no expense account defined ' \ 'for this product: "%s" (id:%d)') % \ (r.product_id.name, r.product_id.id,)) # Compute based on pricetype if unit_amount: amount_unit = self.on_change_unit_amount(cr, uid, ids, r.product_id.id, unit_amount, False, r.product_id.uom_id.id)['value']['amount'] amount = unit_amount * amount_unit res ['value']['amount']= - round(amount, 2) res ['value']['general_account_id']= a return res class account_analytic_line(osv.osv): _inherit = "account.analytic.line" def _get_invoice_price(self, cr, uid, account, product_id, user_id, qty, context = {}): for grid in account.user_product_ids: if grid.user_id.id==user_id: return grid.price return super(account_analytic_line, 
self)._get_invoice_price(cr, uid, account, product_id, user_id, qty, context)
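The price lookup in _get_related_user_account_recursiv walks the analytic account's parent chain until it finds a grid entry. The same idea as a plain-Python sketch over a toy tree (names and data are hypothetical, not Odoo API):

def find_price(grid, account_id, user_id, parents):
    # grid: (user_id, account_id) -> price; parents: child -> parent or None.
    while account_id is not None:
        if (user_id, account_id) in grid:
            return grid[(user_id, account_id)]   # first match wins
        account_id = parents.get(account_id)     # climb one level
    return None                                  # nothing found anywhere

grid = {('alice', 'sub'): 80.0}
parents = {'leaf': 'sub', 'sub': 'root', 'root': None}
assert find_price(grid, 'leaf', 'alice', parents) == 80.0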
agpl-3.0
uros-sipetic/PyLOH
pyloh/constants.py
2
1116
''' Created on 2013-07-20 @author: Yi Li ''' import numpy as np BAF_N_MIN = 0.4 BAF_N_MAX = 0.6 BAF_T_MIN = 0.35 BAF_T_MAX = 0.65 BAF_COUNTS_MIN = 10 BAF_COUNTS_MAX = 95 BAF_BINS = np.array(range(0, 100 + 1))/100.0 LOH_FREC_MAX = 0.25 LOH_FREC_MIN = 0.16 SITES_NUM_MIN_WGS = 100 SITES_NUM_MIN_WES = 20 PHI_INIT = [0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9] PHI_RANGE = [i/100.0 for i in range(2, 99)] ETA = 1.01 #ETA = 1 BURN_IN = 10 EPS = np.finfo(float).eps TAU = 1800 ALPHA = 0.5 SIGMA = 0.001 ERR = 0.01 EMPIRI_BAF = 0.485 EMPIRI_AAF = 1 - EMPIRI_BAF GENOTYPES_NORMAL = ['AB'] COPY_NUMBER_NORMAL = [2] COPY_NUMBER_BASE = [2, 4] MU_N = [EMPIRI_BAF/(EMPIRI_BAF + EMPIRI_AAF)] UPDATE_WEIGHTS = {} UPDATE_WEIGHTS['x1'] = 0.9 UPDATE_WEIGHTS['y1'] = 0.1 CHROM_START = 0 CHROM_ID_LIST = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22] #CHROM_LIST = ['chr1', 'chr2', 'chr3', 'chr4', 'chr5', 'chr6', 'chr7', 'chr8', # 'chr9', 'chr10', 'chr11', 'chr12', 'chr13', 'chr14', 'chr15', # 'chr16', 'chr17', 'chr18', 'chr19', 'chr20', 'chr21', 'chr22']
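To illustrate how thresholds like BAF_N_MIN/BAF_N_MAX and the BAF_BINS edges are typically applied -- a hedged sketch; the variable names are illustrative and this is not PyLOH's internal API:

import numpy as np

b_normal = np.array([12, 30, 3, 22])        # B-allele read counts (normal)
d_normal = np.array([25, 60, 40, 45])       # total read depths (normal)
baf_n = b_normal / d_normal.astype(float)   # B-allele frequencies

# Keep putative heterozygous sites, then histogram them over BAF_BINS.
het = (baf_n >= BAF_N_MIN) & (baf_n <= BAF_N_MAX)
counts, _ = np.histogram(baf_n[het], bins=BAF_BINS)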
gpl-2.0
greg-hellings/ansible-modules-extras
database/misc/riak.py
67
8061
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2013, James Martin <jmartin@basho.com>, Drew Kerrigan <dkerrigan@basho.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # DOCUMENTATION = ''' --- module: riak short_description: This module handles some common Riak operations description: - This module can be used to join nodes to a cluster, check the status of the cluster. version_added: "1.2" author: - "James Martin (@jsmartin)" - "Drew Kerrigan (@drewkerrigan)" options: command: description: - The command you would like to perform against the cluster. required: false default: null aliases: [] choices: ['ping', 'kv_test', 'join', 'plan', 'commit'] config_dir: description: - The path to the riak configuration directory required: false default: /etc/riak aliases: [] http_conn: description: - The ip address and port that is listening for Riak HTTP queries required: false default: 127.0.0.1:8098 aliases: [] target_node: description: - The target node for certain operations (join, ping) required: false default: riak@127.0.0.1 aliases: [] wait_for_handoffs: description: - Number of seconds to wait for handoffs to complete. required: false default: null aliases: [] type: 'int' wait_for_ring: description: - Number of seconds to wait for all nodes to agree on the ring. required: false default: null aliases: [] type: 'int' wait_for_service: description: - Waits for a riak service to come online before continuing. required: false default: None aliases: [] choices: ['kv'] validate_certs: description: - If C(no), SSL certificates will not be validated. This should only be used on personally controlled sites using self-signed certificates. required: false default: 'yes' choices: ['yes', 'no'] version_added: 1.5.1 ''' EXAMPLES = ''' # Join's a Riak node to another node - riak: command=join target_node=riak@10.1.1.1 # Wait for handoffs to finish. Use with async and poll. 
- riak: wait_for_handoffs=yes # Wait for riak_kv service to startup - riak: wait_for_service=kv ''' import time import socket import sys try: import json except ImportError: import simplejson as json def ring_check(module, riak_admin_bin): cmd = '%s ringready' % riak_admin_bin rc, out, err = module.run_command(cmd) if rc == 0 and 'TRUE All nodes agree on the ring' in out: return True else: return False def main(): module = AnsibleModule( argument_spec=dict( command=dict(required=False, default=None, choices=[ 'ping', 'kv_test', 'join', 'plan', 'commit']), config_dir=dict(default='/etc/riak'), http_conn=dict(required=False, default='127.0.0.1:8098'), target_node=dict(default='riak@127.0.0.1', required=False), wait_for_handoffs=dict(default=False, type='int'), wait_for_ring=dict(default=False, type='int'), wait_for_service=dict( required=False, default=None, choices=['kv']), validate_certs = dict(default='yes', type='bool')) ) command = module.params.get('command') config_dir = module.params.get('config_dir') http_conn = module.params.get('http_conn') target_node = module.params.get('target_node') wait_for_handoffs = module.params.get('wait_for_handoffs') wait_for_ring = module.params.get('wait_for_ring') wait_for_service = module.params.get('wait_for_service') validate_certs = module.params.get('validate_certs') #make sure riak commands are on the path riak_bin = module.get_bin_path('riak') riak_admin_bin = module.get_bin_path('riak-admin') timeout = time.time() + 120 while True: if time.time() > timeout: module.fail_json(msg='Timeout, could not fetch Riak stats.') (response, info) = fetch_url(module, 'http://%s/stats' % (http_conn), force=True, timeout=5) if info['status'] == 200: stats_raw = response.read() break time.sleep(5) # here we attempt to load those stats, try: stats = json.loads(stats_raw) except: module.fail_json(msg='Could not parse Riak stats.') node_name = stats['nodename'] nodes = stats['ring_members'] ring_size = stats['ring_creation_size'] rc, out, err = module.run_command([riak_bin, 'version'] ) version = out.strip() result = dict(node_name=node_name, nodes=nodes, ring_size=ring_size, version=version) if command == 'ping': cmd = '%s ping %s' % ( riak_bin, target_node ) rc, out, err = module.run_command(cmd) if rc == 0: result['ping'] = out else: module.fail_json(msg=out) elif command == 'kv_test': cmd = '%s test' % riak_admin_bin rc, out, err = module.run_command(cmd) if rc == 0: result['kv_test'] = out else: module.fail_json(msg=out) elif command == 'join': if nodes.count(node_name) == 1 and len(nodes) > 1: result['join'] = 'Node is already in cluster or staged to be in cluster.' else: cmd = '%s cluster join %s' % (riak_admin_bin, target_node) rc, out, err = module.run_command(cmd) if rc == 0: result['join'] = out result['changed'] = True else: module.fail_json(msg=out) elif command == 'plan': cmd = '%s cluster plan' % riak_admin_bin rc, out, err = module.run_command(cmd) if rc == 0: result['plan'] = out if 'Staged Changes' in out: result['changed'] = True else: module.fail_json(msg=out) elif command == 'commit': cmd = '%s cluster commit' % riak_admin_bin rc, out, err = module.run_command(cmd) if rc == 0: result['commit'] = out result['changed'] = True else: module.fail_json(msg=out) # this could take a while, recommend to run in async mode if wait_for_handoffs: timeout = time.time() + wait_for_handoffs while True: cmd = '%s transfers' % riak_admin_bin rc, out, err = module.run_command(cmd) if 'No transfers active' in out: result['handoffs'] = 'No transfers active.' 
break time.sleep(10) if time.time() > timeout: module.fail_json(msg='Timeout waiting for handoffs.') if wait_for_service: cmd = [riak_admin_bin, 'wait_for_service', 'riak_%s' % wait_for_service, node_name ] rc, out, err = module.run_command(cmd) result['service'] = out if wait_for_ring: timeout = time.time() + wait_for_ring while True: if ring_check(module, riak_admin_bin): break time.sleep(10) if time.time() > timeout: module.fail_json(msg='Timeout waiting for nodes to agree on ring.') result['ring_ready'] = ring_check(module, riak_admin_bin) module.exit_json(**result) # import module snippets from ansible.module_utils.basic import * from ansible.module_utils.urls import * if __name__ == '__main__': main()
gpl-3.0
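The module's cluster checks all reduce to shelling out to riak-admin and matching its stdout; a standalone sketch of the same ringready probe without AnsibleModule (the binary path is an assumption):

import subprocess

# Hedged sketch: the same check as ring_check() above, but using subprocess
# directly instead of module.run_command(). '/usr/sbin/riak-admin' is an
# assumed install location.
def ring_ready(riak_admin_bin='/usr/sbin/riak-admin'):
    proc = subprocess.Popen([riak_admin_bin, 'ringready'],
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, _ = proc.communicate()
    return proc.returncode == 0 and b'TRUE All nodes agree on the ring' in out

if __name__ == '__main__':
    print(ring_ready())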
nanditav/15712-TensorFlow
tensorflow/python/kernel_tests/bitcast_op_test.py
29
2317
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for tf.bitcast.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import tensorflow as tf class BitcastTest(tf.test.TestCase): def _testBitcast(self, x, datatype, shape): with self.test_session(): tf_ans = tf.bitcast(x, datatype) out = tf_ans.eval() buff_after = memoryview(out).tobytes() buff_before = memoryview(x).tobytes() self.assertEqual(buff_before, buff_after) self.assertEqual(tf_ans.get_shape(), shape) self.assertEqual(tf_ans.dtype, datatype) def testSmaller(self): x = np.random.rand(3, 2) datatype = tf.int8 shape = [3, 2, 8] self._testBitcast(x, datatype, shape) def testLarger(self): x = np.arange(16, dtype=np.int8).reshape([4, 4]) datatype = tf.int32 shape = [4] self._testBitcast(x, datatype, shape) def testSameDtype(self): x = np.random.rand(3, 4) shape = [3, 4] self._testBitcast(x, x.dtype, shape) def testSameSize(self): x = np.random.rand(3, 4) shape = [3, 4] self._testBitcast(x, tf.int64, shape) def testErrors(self): x = np.zeros([1, 1], np.int8) datatype = tf.int32 with self.assertRaisesRegexp(ValueError, "Cannot bitcast due to shape"): tf.bitcast(x, datatype, None) def testEmpty(self): x = np.ones([], np.int32) datatype = tf.int8 shape = [4] self._testBitcast(x, datatype, shape) def testUnknown(self): x = tf.placeholder(tf.float32) datatype = tf.int8 tf.bitcast(x, datatype, None) if __name__ == "__main__": tf.test.main()
apache-2.0
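tf.bitcast reinterprets the underlying bytes without copying, so the shape changes by the ratio of element sizes; numpy's ndarray.view shows the same arithmetic. This is a sketch of the semantics the test checks, not TensorFlow's implementation.

import numpy as np

# Hedged sketch: float64 -> int8 multiplies the trailing axis by 8, matching
# the [3, 2, 8] expectation in testSmaller above; the bytes are unchanged.
x = np.random.rand(3, 2)                     # float64, 8 bytes per element
y = x.view(np.int8).reshape(3, 2, 8)
assert memoryview(y).tobytes() == memoryview(x).tobytes()

# int8 (4, 4) -> int32 packs 4 bytes into each element; numpy reports (4, 1)
# where tf.bitcast folds the size-1 trailing axis away to give shape [4].
z = np.arange(16, dtype=np.int8).reshape(4, 4).view(np.int32)
print(z.shape)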
YosefLab/scVI
scvi/_settings.py
1
3583
import logging from typing import Union import numpy as np import torch from rich.console import Console from rich.logging import RichHandler from ._compat import Literal logger = logging.getLogger(__name__) scvi_logger = logging.getLogger("scvi") class ScviConfig: """ Config manager for scvi-tools. Examples -------- >>> import scvi >>> scvi.settings.seed = 1 """ def __init__( self, verbosity: int = logging.INFO, progress_bar_style: Literal["rich", "tqdm"] = "tqdm", batch_size: int = 128, seed: int = 0, ): self.verbosity = verbosity self.seed = seed self.batch_size = batch_size if progress_bar_style not in ["rich", "tqdm"]: raise ValueError("Progress bar style must be in ['rich', 'tqdm']") self.progress_bar_style = progress_bar_style @property def batch_size(self) -> int: """ Minibatch size for loading data into the model. This is only used after a model is trained. Trainers have specific `batch_size` parameters. """ return self._batch_size @batch_size.setter def batch_size(self, batch_size: int): """ Minibatch size for loading data into the model. This is only used after a model is trained. Trainers have specific `batch_size` parameters. """ self._batch_size = batch_size @property def progress_bar_style(self) -> str: """Library to use for progress bar.""" return self._pbar_style @progress_bar_style.setter def progress_bar_style(self, pbar_style: Literal["tqdm", "rich"]): """Library to use for progress bar.""" self._pbar_style = pbar_style @property def seed(self) -> int: """Random seed for torch and numpy.""" return self._seed @seed.setter def seed(self, seed: int): """Random seed for torch and numpy.""" torch.manual_seed(seed) torch.backends.cudnn.deterministic = True torch.backends.cudnn.benchmark = False np.random.seed(seed) self._seed = seed @property def verbosity(self) -> int: """Verbosity level (default `logging.INFO`).""" return self._verbosity @verbosity.setter def verbosity(self, level: Union[str, int]): """ Sets logging configuration for scvi based on chosen level of verbosity. Sets "scvi" logging level to `level` If "scvi" logger has no StreamHandler, add one. Else, set its level to `level`. """ self._verbosity = level scvi_logger.setLevel(level) has_streamhandler = False for handler in scvi_logger.handlers: if isinstance(handler, RichHandler): handler.setLevel(level) logger.info( "'scvi' logger already has a StreamHandler, set its level to {}.".format( level ) ) has_streamhandler = True if not has_streamhandler: console = Console(force_terminal=True) if console.is_jupyter is True: console.is_jupyter = False ch = RichHandler(show_path=False, console=console, show_time=False) formatter = logging.Formatter("%(message)s") ch.setFormatter(formatter) scvi_logger.addHandler(ch) logger.debug("Added StreamHandler with custom formatter to 'scvi' logger.") settings = ScviConfig()
bsd-3-clause
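The config object validates every assignment through property setters; a stripped-down sketch of that idiom (DemoConfig and its single option are invented names for illustration, not part of scvi-tools):

# Hedged sketch: the same property/setter validation pattern ScviConfig uses,
# reduced to one option.
class DemoConfig(object):
    def __init__(self, progress_bar_style="tqdm"):
        self.progress_bar_style = progress_bar_style  # routed through the setter

    @property
    def progress_bar_style(self):
        return self._pbar_style

    @progress_bar_style.setter
    def progress_bar_style(self, style):
        if style not in ("rich", "tqdm"):
            raise ValueError("Progress bar style must be in ['rich', 'tqdm']")
        self._pbar_style = style

settings = DemoConfig()
settings.progress_bar_style = "rich"   # ok
# settings.progress_bar_style = "bar"  # would raise ValueError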
gerald-yang/ubuntu-iotivity-demo
snappy/grovepi/python-env/lib/python2.7/encodings/iso8859_13.py
593
13527
""" Python Character Mapping Codec iso8859_13 generated from 'MAPPINGS/ISO8859/8859-13.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='iso8859-13', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Table decoding_table = ( u'\x00' # 0x00 -> NULL u'\x01' # 0x01 -> START OF HEADING u'\x02' # 0x02 -> START OF TEXT u'\x03' # 0x03 -> END OF TEXT u'\x04' # 0x04 -> END OF TRANSMISSION u'\x05' # 0x05 -> ENQUIRY u'\x06' # 0x06 -> ACKNOWLEDGE u'\x07' # 0x07 -> BELL u'\x08' # 0x08 -> BACKSPACE u'\t' # 0x09 -> HORIZONTAL TABULATION u'\n' # 0x0A -> LINE FEED u'\x0b' # 0x0B -> VERTICAL TABULATION u'\x0c' # 0x0C -> FORM FEED u'\r' # 0x0D -> CARRIAGE RETURN u'\x0e' # 0x0E -> SHIFT OUT u'\x0f' # 0x0F -> SHIFT IN u'\x10' # 0x10 -> DATA LINK ESCAPE u'\x11' # 0x11 -> DEVICE CONTROL ONE u'\x12' # 0x12 -> DEVICE CONTROL TWO u'\x13' # 0x13 -> DEVICE CONTROL THREE u'\x14' # 0x14 -> DEVICE CONTROL FOUR u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE u'\x16' # 0x16 -> SYNCHRONOUS IDLE u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK u'\x18' # 0x18 -> CANCEL u'\x19' # 0x19 -> END OF MEDIUM u'\x1a' # 0x1A -> SUBSTITUTE u'\x1b' # 0x1B -> ESCAPE u'\x1c' # 0x1C -> FILE SEPARATOR u'\x1d' # 0x1D -> GROUP SEPARATOR u'\x1e' # 0x1E -> RECORD SEPARATOR u'\x1f' # 0x1F -> UNIT SEPARATOR u' ' # 0x20 -> SPACE u'!' # 0x21 -> EXCLAMATION MARK u'"' # 0x22 -> QUOTATION MARK u'#' # 0x23 -> NUMBER SIGN u'$' # 0x24 -> DOLLAR SIGN u'%' # 0x25 -> PERCENT SIGN u'&' # 0x26 -> AMPERSAND u"'" # 0x27 -> APOSTROPHE u'(' # 0x28 -> LEFT PARENTHESIS u')' # 0x29 -> RIGHT PARENTHESIS u'*' # 0x2A -> ASTERISK u'+' # 0x2B -> PLUS SIGN u',' # 0x2C -> COMMA u'-' # 0x2D -> HYPHEN-MINUS u'.' # 0x2E -> FULL STOP u'/' # 0x2F -> SOLIDUS u'0' # 0x30 -> DIGIT ZERO u'1' # 0x31 -> DIGIT ONE u'2' # 0x32 -> DIGIT TWO u'3' # 0x33 -> DIGIT THREE u'4' # 0x34 -> DIGIT FOUR u'5' # 0x35 -> DIGIT FIVE u'6' # 0x36 -> DIGIT SIX u'7' # 0x37 -> DIGIT SEVEN u'8' # 0x38 -> DIGIT EIGHT u'9' # 0x39 -> DIGIT NINE u':' # 0x3A -> COLON u';' # 0x3B -> SEMICOLON u'<' # 0x3C -> LESS-THAN SIGN u'=' # 0x3D -> EQUALS SIGN u'>' # 0x3E -> GREATER-THAN SIGN u'?' 
# 0x3F -> QUESTION MARK u'@' # 0x40 -> COMMERCIAL AT u'A' # 0x41 -> LATIN CAPITAL LETTER A u'B' # 0x42 -> LATIN CAPITAL LETTER B u'C' # 0x43 -> LATIN CAPITAL LETTER C u'D' # 0x44 -> LATIN CAPITAL LETTER D u'E' # 0x45 -> LATIN CAPITAL LETTER E u'F' # 0x46 -> LATIN CAPITAL LETTER F u'G' # 0x47 -> LATIN CAPITAL LETTER G u'H' # 0x48 -> LATIN CAPITAL LETTER H u'I' # 0x49 -> LATIN CAPITAL LETTER I u'J' # 0x4A -> LATIN CAPITAL LETTER J u'K' # 0x4B -> LATIN CAPITAL LETTER K u'L' # 0x4C -> LATIN CAPITAL LETTER L u'M' # 0x4D -> LATIN CAPITAL LETTER M u'N' # 0x4E -> LATIN CAPITAL LETTER N u'O' # 0x4F -> LATIN CAPITAL LETTER O u'P' # 0x50 -> LATIN CAPITAL LETTER P u'Q' # 0x51 -> LATIN CAPITAL LETTER Q u'R' # 0x52 -> LATIN CAPITAL LETTER R u'S' # 0x53 -> LATIN CAPITAL LETTER S u'T' # 0x54 -> LATIN CAPITAL LETTER T u'U' # 0x55 -> LATIN CAPITAL LETTER U u'V' # 0x56 -> LATIN CAPITAL LETTER V u'W' # 0x57 -> LATIN CAPITAL LETTER W u'X' # 0x58 -> LATIN CAPITAL LETTER X u'Y' # 0x59 -> LATIN CAPITAL LETTER Y u'Z' # 0x5A -> LATIN CAPITAL LETTER Z u'[' # 0x5B -> LEFT SQUARE BRACKET u'\\' # 0x5C -> REVERSE SOLIDUS u']' # 0x5D -> RIGHT SQUARE BRACKET u'^' # 0x5E -> CIRCUMFLEX ACCENT u'_' # 0x5F -> LOW LINE u'`' # 0x60 -> GRAVE ACCENT u'a' # 0x61 -> LATIN SMALL LETTER A u'b' # 0x62 -> LATIN SMALL LETTER B u'c' # 0x63 -> LATIN SMALL LETTER C u'd' # 0x64 -> LATIN SMALL LETTER D u'e' # 0x65 -> LATIN SMALL LETTER E u'f' # 0x66 -> LATIN SMALL LETTER F u'g' # 0x67 -> LATIN SMALL LETTER G u'h' # 0x68 -> LATIN SMALL LETTER H u'i' # 0x69 -> LATIN SMALL LETTER I u'j' # 0x6A -> LATIN SMALL LETTER J u'k' # 0x6B -> LATIN SMALL LETTER K u'l' # 0x6C -> LATIN SMALL LETTER L u'm' # 0x6D -> LATIN SMALL LETTER M u'n' # 0x6E -> LATIN SMALL LETTER N u'o' # 0x6F -> LATIN SMALL LETTER O u'p' # 0x70 -> LATIN SMALL LETTER P u'q' # 0x71 -> LATIN SMALL LETTER Q u'r' # 0x72 -> LATIN SMALL LETTER R u's' # 0x73 -> LATIN SMALL LETTER S u't' # 0x74 -> LATIN SMALL LETTER T u'u' # 0x75 -> LATIN SMALL LETTER U u'v' # 0x76 -> LATIN SMALL LETTER V u'w' # 0x77 -> LATIN SMALL LETTER W u'x' # 0x78 -> LATIN SMALL LETTER X u'y' # 0x79 -> LATIN SMALL LETTER Y u'z' # 0x7A -> LATIN SMALL LETTER Z u'{' # 0x7B -> LEFT CURLY BRACKET u'|' # 0x7C -> VERTICAL LINE u'}' # 0x7D -> RIGHT CURLY BRACKET u'~' # 0x7E -> TILDE u'\x7f' # 0x7F -> DELETE u'\x80' # 0x80 -> <control> u'\x81' # 0x81 -> <control> u'\x82' # 0x82 -> <control> u'\x83' # 0x83 -> <control> u'\x84' # 0x84 -> <control> u'\x85' # 0x85 -> <control> u'\x86' # 0x86 -> <control> u'\x87' # 0x87 -> <control> u'\x88' # 0x88 -> <control> u'\x89' # 0x89 -> <control> u'\x8a' # 0x8A -> <control> u'\x8b' # 0x8B -> <control> u'\x8c' # 0x8C -> <control> u'\x8d' # 0x8D -> <control> u'\x8e' # 0x8E -> <control> u'\x8f' # 0x8F -> <control> u'\x90' # 0x90 -> <control> u'\x91' # 0x91 -> <control> u'\x92' # 0x92 -> <control> u'\x93' # 0x93 -> <control> u'\x94' # 0x94 -> <control> u'\x95' # 0x95 -> <control> u'\x96' # 0x96 -> <control> u'\x97' # 0x97 -> <control> u'\x98' # 0x98 -> <control> u'\x99' # 0x99 -> <control> u'\x9a' # 0x9A -> <control> u'\x9b' # 0x9B -> <control> u'\x9c' # 0x9C -> <control> u'\x9d' # 0x9D -> <control> u'\x9e' # 0x9E -> <control> u'\x9f' # 0x9F -> <control> u'\xa0' # 0xA0 -> NO-BREAK SPACE u'\u201d' # 0xA1 -> RIGHT DOUBLE QUOTATION MARK u'\xa2' # 0xA2 -> CENT SIGN u'\xa3' # 0xA3 -> POUND SIGN u'\xa4' # 0xA4 -> CURRENCY SIGN u'\u201e' # 0xA5 -> DOUBLE LOW-9 QUOTATION MARK u'\xa6' # 0xA6 -> BROKEN BAR u'\xa7' # 0xA7 -> SECTION SIGN u'\xd8' # 0xA8 -> LATIN CAPITAL LETTER O WITH STROKE u'\xa9' # 0xA9 -> 
COPYRIGHT SIGN u'\u0156' # 0xAA -> LATIN CAPITAL LETTER R WITH CEDILLA u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK u'\xac' # 0xAC -> NOT SIGN u'\xad' # 0xAD -> SOFT HYPHEN u'\xae' # 0xAE -> REGISTERED SIGN u'\xc6' # 0xAF -> LATIN CAPITAL LETTER AE u'\xb0' # 0xB0 -> DEGREE SIGN u'\xb1' # 0xB1 -> PLUS-MINUS SIGN u'\xb2' # 0xB2 -> SUPERSCRIPT TWO u'\xb3' # 0xB3 -> SUPERSCRIPT THREE u'\u201c' # 0xB4 -> LEFT DOUBLE QUOTATION MARK u'\xb5' # 0xB5 -> MICRO SIGN u'\xb6' # 0xB6 -> PILCROW SIGN u'\xb7' # 0xB7 -> MIDDLE DOT u'\xf8' # 0xB8 -> LATIN SMALL LETTER O WITH STROKE u'\xb9' # 0xB9 -> SUPERSCRIPT ONE u'\u0157' # 0xBA -> LATIN SMALL LETTER R WITH CEDILLA u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK u'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF u'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS u'\xe6' # 0xBF -> LATIN SMALL LETTER AE u'\u0104' # 0xC0 -> LATIN CAPITAL LETTER A WITH OGONEK u'\u012e' # 0xC1 -> LATIN CAPITAL LETTER I WITH OGONEK u'\u0100' # 0xC2 -> LATIN CAPITAL LETTER A WITH MACRON u'\u0106' # 0xC3 -> LATIN CAPITAL LETTER C WITH ACUTE u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS u'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE u'\u0118' # 0xC6 -> LATIN CAPITAL LETTER E WITH OGONEK u'\u0112' # 0xC7 -> LATIN CAPITAL LETTER E WITH MACRON u'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE u'\u0179' # 0xCA -> LATIN CAPITAL LETTER Z WITH ACUTE u'\u0116' # 0xCB -> LATIN CAPITAL LETTER E WITH DOT ABOVE u'\u0122' # 0xCC -> LATIN CAPITAL LETTER G WITH CEDILLA u'\u0136' # 0xCD -> LATIN CAPITAL LETTER K WITH CEDILLA u'\u012a' # 0xCE -> LATIN CAPITAL LETTER I WITH MACRON u'\u013b' # 0xCF -> LATIN CAPITAL LETTER L WITH CEDILLA u'\u0160' # 0xD0 -> LATIN CAPITAL LETTER S WITH CARON u'\u0143' # 0xD1 -> LATIN CAPITAL LETTER N WITH ACUTE u'\u0145' # 0xD2 -> LATIN CAPITAL LETTER N WITH CEDILLA u'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE u'\u014c' # 0xD4 -> LATIN CAPITAL LETTER O WITH MACRON u'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS u'\xd7' # 0xD7 -> MULTIPLICATION SIGN u'\u0172' # 0xD8 -> LATIN CAPITAL LETTER U WITH OGONEK u'\u0141' # 0xD9 -> LATIN CAPITAL LETTER L WITH STROKE u'\u015a' # 0xDA -> LATIN CAPITAL LETTER S WITH ACUTE u'\u016a' # 0xDB -> LATIN CAPITAL LETTER U WITH MACRON u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS u'\u017b' # 0xDD -> LATIN CAPITAL LETTER Z WITH DOT ABOVE u'\u017d' # 0xDE -> LATIN CAPITAL LETTER Z WITH CARON u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S (German) u'\u0105' # 0xE0 -> LATIN SMALL LETTER A WITH OGONEK u'\u012f' # 0xE1 -> LATIN SMALL LETTER I WITH OGONEK u'\u0101' # 0xE2 -> LATIN SMALL LETTER A WITH MACRON u'\u0107' # 0xE3 -> LATIN SMALL LETTER C WITH ACUTE u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS u'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE u'\u0119' # 0xE6 -> LATIN SMALL LETTER E WITH OGONEK u'\u0113' # 0xE7 -> LATIN SMALL LETTER E WITH MACRON u'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE u'\u017a' # 0xEA -> LATIN SMALL LETTER Z WITH ACUTE u'\u0117' # 0xEB -> LATIN SMALL LETTER E WITH DOT ABOVE u'\u0123' # 0xEC -> LATIN SMALL LETTER G WITH CEDILLA u'\u0137' # 0xED -> LATIN SMALL LETTER K WITH CEDILLA u'\u012b' # 0xEE -> LATIN SMALL LETTER I WITH MACRON u'\u013c' # 0xEF -> LATIN SMALL LETTER L WITH CEDILLA u'\u0161' # 0xF0 -> LATIN SMALL LETTER S WITH 
CARON u'\u0144' # 0xF1 -> LATIN SMALL LETTER N WITH ACUTE u'\u0146' # 0xF2 -> LATIN SMALL LETTER N WITH CEDILLA u'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE u'\u014d' # 0xF4 -> LATIN SMALL LETTER O WITH MACRON u'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS u'\xf7' # 0xF7 -> DIVISION SIGN u'\u0173' # 0xF8 -> LATIN SMALL LETTER U WITH OGONEK u'\u0142' # 0xF9 -> LATIN SMALL LETTER L WITH STROKE u'\u015b' # 0xFA -> LATIN SMALL LETTER S WITH ACUTE u'\u016b' # 0xFB -> LATIN SMALL LETTER U WITH MACRON u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS u'\u017c' # 0xFD -> LATIN SMALL LETTER Z WITH DOT ABOVE u'\u017e' # 0xFE -> LATIN SMALL LETTER Z WITH CARON u'\u2019' # 0xFF -> RIGHT SINGLE QUOTATION MARK ) ### Encoding table encoding_table=codecs.charmap_build(decoding_table)
apache-2.0
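A short round-trip through this codec via the stdlib registry, which exposes generated modules like this one under the 'iso8859-13' name; the sample characters and their byte values are taken from the decoding table above.

import codecs

# Hedged sketch: encode/decode through the charmap tables defined above.
text = u'\u0100\u017e'            # A WITH MACRON (0xC2), z WITH CARON (0xFE)
raw = codecs.encode(text, 'iso8859-13')
assert raw == b'\xc2\xfe'
assert codecs.decode(raw, 'iso8859-13') == text
print(repr(raw))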
blueboxgroup/keystone
keystone/trust/schema.py
5
1571
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from keystone.common import validation from keystone.common.validation import parameter_types _trust_properties = { 'trustor_user_id': parameter_types.id_string, 'trustee_user_id': parameter_types.id_string, 'impersonation': parameter_types.boolean, 'project_id': validation.nullable(parameter_types.id_string), 'remaining_uses': { 'type': ['integer', 'null'], 'minimum': 1 }, 'expires_at': { 'type': ['null', 'string'] }, 'allow_redelegation': { 'type': ['boolean', 'null'] }, 'redelegation_count': { 'type': ['integer', 'null'], 'minimum': 0 }, # TODO(lbragstad): Need to find a better way to do this. We should be # checking that a role is a list of IDs and/or names. 'roles': validation.add_array_type(parameter_types.id_string) } trust_create = { 'type': 'object', 'properties': _trust_properties, 'required': ['trustor_user_id', 'trustee_user_id', 'impersonation'], 'additionalProperties': True }
apache-2.0
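keystone.common.validation is a wrapper over JSON Schema, so an equivalent document can be checked with the jsonschema package directly; a hedged sketch with the parameter types inlined as plain string/boolean schemas and an invented payload:

import jsonschema

# Hedged sketch: a simplified stand-in for trust_create, with keystone's
# parameter_types replaced by inline schemas. The payload is made up.
trust_create = {
    'type': 'object',
    'properties': {
        'trustor_user_id': {'type': 'string'},
        'trustee_user_id': {'type': 'string'},
        'impersonation': {'type': 'boolean'},
        'remaining_uses': {'type': ['integer', 'null'], 'minimum': 1},
    },
    'required': ['trustor_user_id', 'trustee_user_id', 'impersonation'],
    'additionalProperties': True,
}

payload = {
    'trustor_user_id': 'abc123',
    'trustee_user_id': 'def456',
    'impersonation': False,
    'remaining_uses': 5,
}
jsonschema.validate(payload, trust_create)   # raises ValidationError on bad input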
ksmaheshkumar/grr
client/vfs_handlers/sleuthkit.py
2
11988
#!/usr/bin/env python """Implement low level disk access using the sleuthkit.""" import stat import pytsk3 from grr.client import client_utils from grr.client import vfs from grr.lib import rdfvalue from grr.lib import utils class CachedFilesystem(object): """A container for the filesystem and image.""" def __init__(self, fs, img): self.fs = fs self.img = img class MyImgInfo(pytsk3.Img_Info): """An Img_Info class using the regular python file handling.""" def __init__(self, fd=None, progress_callback=None): pytsk3.Img_Info.__init__(self) self.progress_callback = progress_callback self.fd = fd def read(self, offset, length): # pylint: disable=g-bad-name # Sleuthkit operations might take a long time so we periodically call the # progress indicator callback as long as there are still data reads. if self.progress_callback: self.progress_callback() self.fd.seek(offset) return self.fd.read(length) def get_size(self): # pylint: disable=g-bad-name # Windows is unable to report the true size of the raw device and allows # arbitrary reading past the end - so we lie here to force tsk to read it # anyway return long(1e12) class TSKFile(vfs.VFSHandler): """Read a regular file.""" supported_pathtype = rdfvalue.PathSpec.PathType.TSK auto_register = True # A mapping to encode TSK types to a stat.st_mode FILE_TYPE_LOOKUP = { pytsk3.TSK_FS_NAME_TYPE_UNDEF: 0, pytsk3.TSK_FS_NAME_TYPE_FIFO: stat.S_IFIFO, pytsk3.TSK_FS_NAME_TYPE_CHR: stat.S_IFCHR, pytsk3.TSK_FS_NAME_TYPE_DIR: stat.S_IFDIR, pytsk3.TSK_FS_NAME_TYPE_BLK: stat.S_IFBLK, pytsk3.TSK_FS_NAME_TYPE_REG: stat.S_IFREG, pytsk3.TSK_FS_NAME_TYPE_LNK: stat.S_IFLNK, pytsk3.TSK_FS_NAME_TYPE_SOCK: stat.S_IFSOCK, } META_TYPE_LOOKUP = { pytsk3.TSK_FS_META_TYPE_BLK: 0, pytsk3.TSK_FS_META_TYPE_CHR: stat.S_IFCHR, pytsk3.TSK_FS_META_TYPE_DIR: stat.S_IFDIR, pytsk3.TSK_FS_META_TYPE_FIFO: stat.S_IFIFO, pytsk3.TSK_FS_META_TYPE_LNK: stat.S_IFLNK, pytsk3.TSK_FS_META_TYPE_REG: stat.S_IFREG, pytsk3.TSK_FS_META_TYPE_SOCK: stat.S_IFSOCK, } # Files we won't return in directories. BLACKLIST_FILES = ["$OrphanFiles" # Special TSK dir that invokes processing. ] # The file like object we read our image from tsk_raw_device = None # NTFS files carry an attribute identified by ntfs_type and ntfs_id. tsk_attribute = None def __init__(self, base_fd, pathspec=None, progress_callback=None): """Use TSK to read the pathspec. Args: base_fd: The file like object we read this component from. pathspec: An optional pathspec to open directly. progress_callback: A callback to indicate that the open call is still working but needs more time. Raises: IOError: If the file can not be opened. """ super(TSKFile, self).__init__(base_fd, pathspec=pathspec, progress_callback=progress_callback) if self.base_fd is None: raise IOError("TSK driver must have a file base.") # If our base is another tsk driver - borrow the reference to the raw # device, and replace the last pathspec component with this one after # extending its path. elif isinstance(base_fd, TSKFile) and self.base_fd.IsDirectory(): self.tsk_raw_device = self.base_fd.tsk_raw_device last_path = utils.JoinPath(self.pathspec.last.path, pathspec.path) # Replace the last component with this one. self.pathspec.Pop(-1) self.pathspec.Append(pathspec) self.pathspec.last.path = last_path # Use the base fd as a base to parse the filesystem only if its file like. 
elif not self.base_fd.IsDirectory(): self.tsk_raw_device = self.base_fd self.pathspec.Append(pathspec) else: # If we get here we have a directory from a non sleuthkit driver - dont # know what to do with it. raise IOError("Unable to parse base using Sleuthkit.") # If we are successful in opening this path below the path casing is # correct. self.pathspec.last.path_options = rdfvalue.PathSpec.Options.CASE_LITERAL fd_hash = self.tsk_raw_device.pathspec.SerializeToString() # Cache the filesystem using the path of the raw device try: self.filesystem = vfs.DEVICE_CACHE.Get(fd_hash) self.fs = self.filesystem.fs except KeyError: self.img = MyImgInfo(fd=self.tsk_raw_device, progress_callback=progress_callback) self.fs = pytsk3.FS_Info(self.img, 0) self.filesystem = CachedFilesystem(self.fs, self.img) vfs.DEVICE_CACHE.Put(fd_hash, self.filesystem) # We prefer to open the file based on the inode because that is more # efficient. if pathspec.HasField("inode"): self.fd = self.fs.open_meta(pathspec.inode) self.tsk_attribute = self.GetAttribute( pathspec.ntfs_type, pathspec.ntfs_id) if self.tsk_attribute: self.size = self.tsk_attribute.info.size else: self.size = self.fd.info.meta.size else: # Does the filename exist in the image? self.fd = self.fs.open(utils.SmartStr(self.pathspec.last.path)) self.size = self.fd.info.meta.size self.pathspec.last.inode = self.fd.info.meta.addr def GetAttribute(self, ntfs_type, ntfs_id): for attribute in self.fd: if attribute.info.type == ntfs_type: # If ntfs_id is specified it has to also match. if ntfs_id != 0 and attribute.info.id != ntfs_id: continue return attribute return None def ListNames(self): directory_handle = self.fd.as_directory() for f in directory_handle: # TSK only deals with utf8 strings, but path components are always unicode # objects - so we convert to unicode as soon as we receive data from # TSK. Prefer to compare unicode objects to guarantee they are normalized. yield utils.SmartUnicode(f.info.name.name) def MakeStatResponse(self, tsk_file, tsk_attribute=None, append_name=False): """Given a TSK info object make a StatEntry. Note that tsk uses two things to uniquely identify a data stream - the inode object given in tsk_file and the attribute object which may correspond to an ADS of this file for filesystems which support ADS. We store both of these in the stat response. Args: tsk_file: A TSK File object for the specified inode. tsk_attribute: A TSK Attribute object for the ADS. If None we use the main stream. append_name: If specified we append this name to the last element of the pathspec. Returns: A StatEntry which can be used to re-open this exact VFS node. """ info = tsk_file.info response = rdfvalue.StatEntry() meta = info.meta if meta: response.st_ino = meta.addr for attribute in ["mode", "nlink", "uid", "gid", "size", "atime", "mtime", "ctime", "crtime"]: try: value = int(getattr(meta, attribute)) if value < 0: value &= 0xFFFFFFFF setattr(response, "st_%s" % attribute, value) except AttributeError: pass name = info.name child_pathspec = self.pathspec.Copy() if append_name: # Append the name to the most inner pathspec child_pathspec.last.path = utils.JoinPath(child_pathspec.last.path, utils.SmartUnicode(append_name)) child_pathspec.last.inode = meta.addr if tsk_attribute is not None: child_pathspec.last.ntfs_type = int(tsk_attribute.info.type) child_pathspec.last.ntfs_id = int(tsk_attribute.info.id) child_pathspec.last.stream_name = tsk_attribute.info.name # Update the size with the attribute size. 
response.st_size = tsk_attribute.info.size if name: # Encode the type onto the st_mode response response.st_mode |= self.FILE_TYPE_LOOKUP.get(int(name.type), 0) if meta: # What if the types are different? What to do here? response.st_mode |= self.META_TYPE_LOOKUP.get(int(meta.type), 0) # Write the pathspec on the response. response.pathspec = child_pathspec return response def Read(self, length): """Read from the file.""" if not self.IsFile(): raise IOError("%s is not a file." % self.pathspec.last.path) available = min(self.size - self.offset, length) if available > 0: # This raises a RuntimeError in some situations. try: data = self.fd.read_random(self.offset, available, self.pathspec.last.ntfs_type, self.pathspec.last.ntfs_id) except RuntimeError as e: raise IOError(e) self.offset += len(data) return data return "" def Stat(self): """Return a stat of the file.""" return self.MakeStatResponse(self.fd, tsk_attribute=self.tsk_attribute) def ListFiles(self): """List all the files in the directory.""" if self.IsDirectory(): dir_fd = self.fd.as_directory() for f in dir_fd: try: name = f.info.name.name # Drop these useless entries. if name in [".", ".."] or name in self.BLACKLIST_FILES: continue # First we yield a standard response using the default attributes. yield self.MakeStatResponse(f, tsk_attribute=None, append_name=name) # Now send back additional named attributes for the ADS. for attribute in f: if attribute.info.type in [pytsk3.TSK_FS_ATTR_TYPE_NTFS_DATA, pytsk3.TSK_FS_ATTR_TYPE_DEFAULT]: if attribute.info.name: yield self.MakeStatResponse(f, append_name=name, tsk_attribute=attribute) except AttributeError: pass else: raise IOError("%s is not a directory" % self.pathspec.CollapsePath()) def IsDirectory(self): return self.fd.info.meta.type == pytsk3.TSK_FS_META_TYPE_DIR def IsFile(self): return self.fd.info.meta.type == pytsk3.TSK_FS_META_TYPE_REG @classmethod def Open(cls, fd, component, pathspec=None, progress_callback=None): # A Pathspec which starts with TSK means we need to resolve the mount point # at runtime. if fd is None and component.pathtype == rdfvalue.PathSpec.PathType.TSK: # We are the top level handler. This means we need to check the system # mounts to work out the exact mount point and device we need to # open. We then modify the pathspec so we get nested in the raw # pathspec. raw_pathspec, corrected_path = client_utils.GetRawDevice(component.path) # Insert the raw device before the component in the pathspec and correct # the path component.path = corrected_path pathspec.Insert(0, component) pathspec.Insert(0, raw_pathspec) # Allow incoming pathspec to be given in the local system path # conventions. for component in pathspec: if component.path: component.path = client_utils.LocalPathToCanonicalPath( component.path) # We have not actually opened anything in this iteration, but modified the # pathspec. Next time we should be able to open it properly. return fd # If an inode is specified, just use it directly. elif component.HasField("inode"): return TSKFile(fd, component, progress_callback=progress_callback) # Otherwise do the usual case folding. else: return vfs.VFSHandler.Open(fd, component, pathspec=pathspec, progress_callback=progress_callback)
apache-2.0
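Outside GRR, the same pytsk3 entry points can be driven directly; a hedged sketch that opens a raw image and lists its root directory (the image path is hypothetical and error handling is omitted):

import pytsk3

# Hedged sketch: stock pytsk3.Img_Info instead of GRR's MyImgInfo wrapper.
# '/tmp/disk.img' is a hypothetical raw image path.
img = pytsk3.Img_Info('/tmp/disk.img')
fs = pytsk3.FS_Info(img, offset=0)           # same call TSKFile makes above
for entry in fs.open_dir(path='/'):
    name = entry.info.name.name
    if name not in ('.', '..', b'.', b'..'):
        print(name)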
M4573R/BerkeleyX-CS188.1x-Artificial-Intelligence
reinforcement/mdp.py
4
2214
# mdp.py # ------ # Licensing Information: You are free to use or extend these projects for # educational purposes provided that (1) you do not distribute or publish # solutions, (2) you retain this notice, and (3) you provide clear # attribution to UC Berkeley, including a link to # http://inst.eecs.berkeley.edu/~cs188/pacman/pacman.html # # Attribution Information: The Pacman AI projects were developed at UC Berkeley. # The core projects and autograders were primarily created by John DeNero # (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu). # Student side autograding was added by Brad Miller, Nick Hay, and # Pieter Abbeel (pabbeel@cs.berkeley.edu). import random class MarkovDecisionProcess: def getStates(self): """ Return a list of all states in the MDP. Not generally possible for large MDPs. """ abstract def getStartState(self): """ Return the start state of the MDP. """ abstract def getPossibleActions(self, state): """ Return list of possible actions from 'state'. """ abstract def getTransitionStatesAndProbs(self, state, action): """ Returns list of (nextState, prob) pairs representing the states reachable from 'state' by taking 'action' along with their transition probabilities. Note that in Q-Learning and reinforcement learning in general, we do not know these probabilities nor do we directly model them. """ abstract def getReward(self, state, action, nextState): """ Get the reward for the state, action, nextState transition. Not available in reinforcement learning. """ abstract def isTerminal(self, state): """ Returns true if the current state is a terminal state. By convention, a terminal state has zero future rewards. Sometimes the terminal state(s) may have no possible actions. It is also common to think of the terminal state as having a self-loop action 'pass' with zero reward; the formulations are equivalent. """ abstract
mit
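The class above is an abstract interface: the bare `abstract` name deliberately raises NameError if an unimplemented method is called. A hedged sketch of a concrete subclass, assuming the class above is importable; the states, actions, and rewards are invented for illustration:

# Hedged sketch: a concrete two-state MarkovDecisionProcess.
class TwoStateMDP(MarkovDecisionProcess):
    def getStates(self):
        return ['A', 'B', 'TERMINAL']

    def getStartState(self):
        return 'A'

    def getPossibleActions(self, state):
        return [] if state == 'TERMINAL' else ['go', 'stay']

    def getTransitionStatesAndProbs(self, state, action):
        if action == 'stay':
            return [(state, 1.0)]
        nxt = 'B' if state == 'A' else 'TERMINAL'
        return [(nxt, 0.9), (state, 0.1)]     # noisy 'go' action

    def getReward(self, state, action, nextState):
        return 10.0 if nextState == 'TERMINAL' else 0.0

    def isTerminal(self, state):
        return state == 'TERMINAL'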
KhronosGroup/COLLADA-CTS
StandardDataSets/collada/library_visual_scenes/visual_scene/node/instance_controller/same_material_different_skeletons/same_material_different_skeletons.py
2
4045
# Copyright (c) 2012 The Khronos Group Inc. # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and /or associated documentation files (the "Materials "), to deal in the Materials without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Materials, and to permit persons to whom the Materials are furnished to do so, subject to # the following conditions: # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Materials. # THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. # See Core.Logic.FJudgementContext for the information # of the 'context' parameter. # This sample judging object does the following: # # JudgeBaseline: just verifies that the standard steps did not crash. # JudgeSuperior: also verifies that the validation steps are not in error. # JudgeExemplary: same as intermediate badge. # We import an assistant script that includes the common verification # methods. The assistant buffers its checks, so that running them again # does not incur an unnecessary performance hit. from StandardDataSets.scripts import JudgeAssistant # Please feed your node list here: tagLst = [] attrName = '' attrVal = '' dataToCheck = '' class SimpleJudgingObject: def __init__(self, _tagLst, _attrName, _attrVal, _data): self.tagList = _tagLst self.attrName = _attrName self.attrVal = _attrVal self.dataToCheck = _data self.status_baseline = False self.status_superior = False self.status_exemplary = False self.__assistant = JudgeAssistant.JudgeAssistant() def JudgeBaseline(self, context): # No step should crash self.__assistant.CheckCrashes(context) # Import/export/validate must exist and pass, while Render must only exist. self.__assistant.CheckSteps(context, ["Import", "Export", "Validate"], ["Render"]) self.status_baseline = self.__assistant.GetResults() return self.status_baseline # To pass intermediate you need to pass basic; this object could also include additional # tests that were specific to the intermediate badge. def JudgeSuperior(self, context): # if baseline fails, no point in further checking if (self.status_baseline == False): self.status_superior = self.status_baseline return self.status_superior # Compare the rendered images between import and export # Then compare images against reference test to check for non-equivalence if ( self.__assistant.CompareRenderedImages(context) ): self.__assistant.CompareImagesAgainst(context, "different_materials_skeletons", None, None, 5, True, False) self.status_superior = self.__assistant.DeferJudgement(context) return self.status_superior # To pass advanced you need to pass intermediate; this object could also include additional # tests that were specific to the advanced badge def JudgeExemplary(self, context): self.status_exemplary = self.status_superior return self.status_exemplary # This is where all the work occurs: "judgingObject" is an absolutely necessary token.
# The dynamic loader looks very specifically for a class instance named "judgingObject". # judgingObject = SimpleJudgingObject(tagLst, attrName, attrVal, dataToCheck);
mit
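The three badges form a strict cascade: exemplary inherits superior, and superior bails out early when baseline failed. A hedged sketch of that control flow with the CTS framework replaced by plain booleans for illustration:

# Hedged sketch: the baseline -> superior -> exemplary short-circuit used by
# judgingObject, with CheckCrashes/CompareRenderedImages stubbed as inputs.
def judge(baseline_ok, images_match):
    if not baseline_ok:                  # superior can't pass without baseline
        return {'baseline': False, 'superior': False, 'exemplary': False}
    superior = images_match
    exemplary = superior                 # exemplary simply inherits superior here
    return {'baseline': True, 'superior': superior, 'exemplary': exemplary}

print(judge(True, False))   # {'baseline': True, 'superior': False, 'exemplary': False}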
bdrillard/spark
examples/src/main/python/ml/power_iteration_clustering_example.py
54
1604
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ An example demonstrating PowerIterationClustering. Run with: bin/spark-submit examples/src/main/python/ml/power_iteration_clustering_example.py """ # $example on$ from pyspark.ml.clustering import PowerIterationClustering # $example off$ from pyspark.sql import SparkSession if __name__ == "__main__": spark = SparkSession\ .builder\ .appName("PowerIterationClusteringExample")\ .getOrCreate() # $example on$ df = spark.createDataFrame([ (0, 1, 1.0), (0, 2, 1.0), (1, 2, 1.0), (3, 4, 1.0), (4, 0, 0.1) ], ["src", "dst", "weight"]) pic = PowerIterationClustering(k=2, maxIter=20, initMode="degree", weightCol="weight") # Shows the cluster assignment pic.assignClusters(df).show() # $example off$ spark.stop()
apache-2.0
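PowerIterationClustering embeds vertices by power iteration on the row-normalized affinity matrix; a hedged pure-numpy sketch on the same five-node graph. This illustrates the idea behind PIC, not Spark's implementation or its exact normalization.

import numpy as np

# Hedged sketch: build the symmetric affinity matrix from the example edges,
# row-normalize it, and iterate; nodes in the same cluster converge to
# near-identical embedding values ({0,1,2} vs {3,4}).
edges = [(0, 1, 1.0), (0, 2, 1.0), (1, 2, 1.0), (3, 4, 1.0), (4, 0, 0.1)]
A = np.zeros((5, 5))
for i, j, w in edges:
    A[i, j] = A[j, i] = w

W = A / A.sum(axis=1, keepdims=True)      # row-normalized affinity
v = np.ones(5) / 5.0
for _ in range(20):
    v = W.dot(v)
    v /= np.abs(v).sum()                  # rescale each step

print(np.round(v, 4))                     # near-duplicate values share a cluster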
tchernomax/ansible
test/units/modules/network/f5/test_bigip_gtm_monitor_http.py
23
4752
# -*- coding: utf-8 -*- # # Copyright: (c) 2017, F5 Networks Inc. # GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import json import pytest import sys from nose.plugins.skip import SkipTest if sys.version_info < (2, 7): raise SkipTest("F5 Ansible modules require Python >= 2.7") from ansible.compat.tests import unittest from ansible.compat.tests.mock import Mock from ansible.compat.tests.mock import patch from ansible.module_utils.basic import AnsibleModule try: from library.modules.bigip_gtm_monitor_http import ApiParameters from library.modules.bigip_gtm_monitor_http import ModuleParameters from library.modules.bigip_gtm_monitor_http import ModuleManager from library.modules.bigip_gtm_monitor_http import ArgumentSpec from library.module_utils.network.f5.common import F5ModuleError from library.module_utils.network.f5.common import iControlUnexpectedHTTPError from test.unit.modules.utils import set_module_args except ImportError: try: from ansible.modules.network.f5.bigip_gtm_monitor_http import ApiParameters from ansible.modules.network.f5.bigip_gtm_monitor_http import ModuleParameters from ansible.modules.network.f5.bigip_gtm_monitor_http import ModuleManager from ansible.modules.network.f5.bigip_gtm_monitor_http import ArgumentSpec from ansible.module_utils.network.f5.common import F5ModuleError from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError from units.modules.utils import set_module_args except ImportError: raise SkipTest("F5 Ansible modules require the f5-sdk Python library") fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures') fixture_data = {} def load_fixture(name): path = os.path.join(fixture_path, name) if path in fixture_data: return fixture_data[path] with open(path) as f: data = f.read() try: data = json.loads(data) except Exception: pass fixture_data[path] = data return data class TestParameters(unittest.TestCase): def test_module_parameters(self): args = dict( name='foo', parent='/Common/my-http', send='the send string', receive='the receive string', ip='1.1.1.1', port='80', interval='10', timeout='20', ignore_down_response=True, transparent=False, probe_timeout='30', reverse=True ) p = ModuleParameters(params=args) assert p.name == 'foo' assert p.parent == '/Common/my-http' assert p.send == 'the send string' assert p.receive == 'the receive string' assert p.destination == '1.1.1.1:80' assert p.ip == '1.1.1.1' assert p.port == 80 assert p.interval == 10 assert p.timeout == 20 assert p.ignore_down_response is True assert p.transparent is False assert p.probe_timeout == 30 assert p.reverse is True def test_api_parameters(self): args = load_fixture('load_gtm_monitor_http_1.json') p = ApiParameters(params=args) assert p.name == 'foo' assert p.parent == '/Common/http' assert p.send == 'GET /' assert p.receive == 'the receive string' assert p.destination == '3.3.3.3:8080' assert p.ip == '3.3.3.3' assert p.port == 8080 assert p.interval == 30 assert p.timeout == 120 assert p.ignore_down_response is False assert p.transparent is True assert p.probe_timeout == 5 assert p.reverse is True @patch('ansible.module_utils.f5_utils.AnsibleF5Client._get_mgmt_root', return_value=True) class TestManager(unittest.TestCase): def setUp(self): self.spec = ArgumentSpec() def test_create_monitor(self, *args): set_module_args(dict( name='foo', ip='10.10.10.10', port=80, interval=20, timeout=30, server='localhost', 
password='password', user='admin' )) module = AnsibleModule( argument_spec=self.spec.argument_spec, supports_check_mode=self.spec.supports_check_mode ) # Override methods in the specific type of manager mm = ModuleManager(module=module) mm.exists = Mock(side_effect=[False, True]) mm.create_on_device = Mock(return_value=True) results = mm.exec_module() assert results['changed'] is True
gpl-3.0
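set_module_args works by serializing the test parameters into the stdin payload AnsibleModule reads; a sketch of the helper as it appears in the Ansible test utilities (reproduced from memory, so treat the exact import paths as assumptions):

import json

from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes

# Hedged sketch: stuff the parameters into basic._ANSIBLE_ARGS so that the
# next AnsibleModule() constructed by the module under test picks them up.
def set_module_args(args):
    serialized = json.dumps({'ANSIBLE_MODULE_ARGS': args})
    basic._ANSIBLE_ARGS = to_bytes(serialized)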
jhg/django
tests/admin_docs/models.py
82
1592
""" Models for testing various aspects of the djang.contrib.admindocs app """ from django.db import models class Company(models.Model): name = models.CharField(max_length=200) class Group(models.Model): name = models.CharField(max_length=200) class Family(models.Model): last_name = models.CharField(max_length=200) class Person(models.Model): """ Stores information about a person, related to :model:`myapp.Company`. **Notes** Use ``save_changes()`` when saving this object. ``company`` Field storing :model:`myapp.Company` where the person works. (DESCRIPTION) .. raw:: html :file: admin_docs/evilfile.txt .. include:: admin_docs/evilfile.txt """ first_name = models.CharField(max_length=200, help_text="The person's first name") last_name = models.CharField(max_length=200, help_text="The person's last name") company = models.ForeignKey(Company, help_text="place of work") family = models.ForeignKey(Family, related_name='+', null=True) groups = models.ManyToManyField(Group, help_text="has membership") def _get_full_name(self): return "%s %s" % (self.first_name, self.last_name) def add_image(self): pass def delete_image(self): pass def save_changes(self): pass def set_status(self): pass def get_full_name(self): """ Get the full name of the person """ return self._get_full_name() def get_status_count(self): return 0 def get_groups_list(self): return []
bsd-3-clause
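admindocs renders these docstrings with docutils, resolving roles such as :model:`myapp.Company`; a hedged, stdlib-only sketch of the introspection step on a plain stand-in class (a real run would point django.contrib.admindocs at Person inside a configured Django project):

import inspect

# Hedged sketch: pull the class docstring and per-method docstrings the way
# the admindocs model detail view does, but on a plain class.
class Demo(object):
    """Stores information about a person, related to :model:`myapp.Company`."""
    def get_full_name(self):
        """Get the full name of the person"""

print(inspect.getdoc(Demo))
for name, member in inspect.getmembers(Demo):
    if not name.startswith('_') and callable(member):
        print(name, '->', inspect.getdoc(member))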
zero-ui/miniblink49
third_party/jinja2/utils.py
598
16165
# -*- coding: utf-8 -*- """ jinja2.utils ~~~~~~~~~~~~ Utility functions. :copyright: (c) 2010 by the Jinja Team. :license: BSD, see LICENSE for more details. """ import re import errno from collections import deque from jinja2._compat import text_type, string_types, implements_iterator, \ allocate_lock, url_quote _word_split_re = re.compile(r'(\s+)') _punctuation_re = re.compile( '^(?P<lead>(?:%s)*)(?P<middle>.*?)(?P<trail>(?:%s)*)$' % ( '|'.join(map(re.escape, ('(', '<', '&lt;'))), '|'.join(map(re.escape, ('.', ',', ')', '>', '\n', '&gt;'))) ) ) _simple_email_re = re.compile(r'^\S+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+$') _striptags_re = re.compile(r'(<!--.*?-->|<[^>]*>)') _entity_re = re.compile(r'&([^;]+);') _letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' _digits = '0123456789' # special singleton representing missing values for the runtime missing = type('MissingType', (), {'__repr__': lambda x: 'missing'})() # internal code internal_code = set() concat = u''.join def contextfunction(f): """This decorator can be used to mark a function or method context callable. A context callable is passed the active :class:`Context` as first argument when called from the template. This is useful if a function wants to get access to the context or functions provided on the context object. For example a function that returns a sorted list of template variables the current template exports could look like this:: @contextfunction def get_exported_names(context): return sorted(context.exported_vars) """ f.contextfunction = True return f def evalcontextfunction(f): """This decorator can be used to mark a function or method as an eval context callable. This is similar to the :func:`contextfunction` but instead of passing the context, an evaluation context object is passed. For more information about the eval context, see :ref:`eval-context`. .. versionadded:: 2.4 """ f.evalcontextfunction = True return f def environmentfunction(f): """This decorator can be used to mark a function or method as environment callable. This decorator works exactly like the :func:`contextfunction` decorator just that the first argument is the active :class:`Environment` and not context. """ f.environmentfunction = True return f def internalcode(f): """Marks the function as internally used""" internal_code.add(f.__code__) return f def is_undefined(obj): """Check if the object passed is undefined. This does nothing more than performing an instance check against :class:`Undefined` but looks nicer. This can be used for custom filters or tests that want to react to undefined variables. For example a custom default filter can look like this:: def default(var, default=''): if is_undefined(var): return default return var """ from jinja2.runtime import Undefined return isinstance(obj, Undefined) def consume(iterable): """Consumes an iterable without doing anything with it.""" for event in iterable: pass def clear_caches(): """Jinja2 keeps internal caches for environments and lexers. These are used so that Jinja2 doesn't have to recreate environments and lexers all the time. Normally you don't have to care about that but if you are measuring memory consumption you may want to clean the caches. """ from jinja2.environment import _spontaneous_environments from jinja2.lexer import _lexer_cache _spontaneous_environments.clear() _lexer_cache.clear() def import_string(import_name, silent=False): """Imports an object based on a string. This is useful if you want to use import paths as endpoints or something similar.
An import path can be specified either in dotted notation (``xml.sax.saxutils.escape``) or with a colon as object delimiter (``xml.sax.saxutils:escape``). If `silent` is True the return value will be `None` if the import fails. :return: imported object """ try: if ':' in import_name: module, obj = import_name.split(':', 1) elif '.' in import_name: items = import_name.split('.') module = '.'.join(items[:-1]) obj = items[-1] else: return __import__(import_name) return getattr(__import__(module, None, None, [obj]), obj) except (ImportError, AttributeError): if not silent: raise def open_if_exists(filename, mode='rb'): """Returns a file descriptor for the filename if that file exists, otherwise `None`. """ try: return open(filename, mode) except IOError as e: if e.errno not in (errno.ENOENT, errno.EISDIR): raise def object_type_repr(obj): """Returns the name of the object's type. For some recognized singletons the name of the object is returned instead. (For example for `None` and `Ellipsis`). """ if obj is None: return 'None' elif obj is Ellipsis: return 'Ellipsis' # __builtin__ in 2.x, builtins in 3.x if obj.__class__.__module__ in ('__builtin__', 'builtins'): name = obj.__class__.__name__ else: name = obj.__class__.__module__ + '.' + obj.__class__.__name__ return '%s object' % name def pformat(obj, verbose=False): """Prettyprint an object. Either use the `pretty` library or the builtin `pprint`. """ try: from pretty import pretty return pretty(obj, verbose=verbose) except ImportError: from pprint import pformat return pformat(obj) def urlize(text, trim_url_limit=None, nofollow=False): """Converts any URLs in text into clickable links. Works on http://, https:// and www. links. Links can have trailing punctuation (periods, commas, close-parens) and leading punctuation (opening parens) and it'll still do the right thing. If trim_url_limit is not None, the URLs in link text will be limited to trim_url_limit characters. If nofollow is True, the URLs in link text will get a rel="nofollow" attribute. """ trim_url = lambda x, limit=trim_url_limit: limit is not None \ and (x[:limit] + (len(x) >= limit and '...' or '')) or x words = _word_split_re.split(text_type(escape(text))) nofollow_attr = nofollow and ' rel="nofollow"' or '' for i, word in enumerate(words): match = _punctuation_re.match(word) if match: lead, middle, trail = match.groups() if middle.startswith('www.') or ( '@' not in middle and not middle.startswith('http://') and not middle.startswith('https://') and len(middle) > 0 and middle[0] in _letters + _digits and ( middle.endswith('.org') or middle.endswith('.net') or middle.endswith('.com') )): middle = '<a href="http://%s"%s>%s</a>' % (middle, nofollow_attr, trim_url(middle)) if middle.startswith('http://') or \ middle.startswith('https://'): middle = '<a href="%s"%s>%s</a>' % (middle, nofollow_attr, trim_url(middle)) if '@' in middle and not middle.startswith('www.') and \ not ':' in middle and _simple_email_re.match(middle): middle = '<a href="mailto:%s">%s</a>' % (middle, middle) if lead + middle + trail != word: words[i] = lead + middle + trail return u''.join(words) def generate_lorem_ipsum(n=5, html=True, min=20, max=100): """Generate some lorem ipsum for the template.""" from jinja2.constants import LOREM_IPSUM_WORDS from random import choice, randrange words = LOREM_IPSUM_WORDS.split() result = [] for _ in range(n): next_capitalized = True last_comma = last_fullstop = 0 word = None last = None p = [] # each paragraph contains 20 to 100 words.
for idx, _ in enumerate(range(randrange(min, max))): while True: word = choice(words) if word != last: last = word break if next_capitalized: word = word.capitalize() next_capitalized = False # add commas if idx - randrange(3, 8) > last_comma: last_comma = idx last_fullstop += 2 word += ',' # add end of sentences if idx - randrange(10, 20) > last_fullstop: last_comma = last_fullstop = idx word += '.' next_capitalized = True p.append(word) # ensure that the paragraph ends with a dot. p = u' '.join(p) if p.endswith(','): p = p[:-1] + '.' elif not p.endswith('.'): p += '.' result.append(p) if not html: return u'\n\n'.join(result) return Markup(u'\n'.join(u'<p>%s</p>' % escape(x) for x in result)) def unicode_urlencode(obj, charset='utf-8'): """URL escapes a single bytestring or unicode string with the given charset if applicable to URL safe quoting under all rules that need to be considered under all supported Python versions. If non strings are provided they are converted to their unicode representation first. """ if not isinstance(obj, string_types): obj = text_type(obj) if isinstance(obj, text_type): obj = obj.encode(charset) return text_type(url_quote(obj)) class LRUCache(object): """A simple LRU Cache implementation.""" # this is fast for small capacities (something below 1000) but doesn't # scale. But as long as it's only used as storage for templates this # won't do any harm. def __init__(self, capacity): self.capacity = capacity self._mapping = {} self._queue = deque() self._postinit() def _postinit(self): # alias all queue methods for faster lookup self._popleft = self._queue.popleft self._pop = self._queue.pop self._remove = self._queue.remove self._wlock = allocate_lock() self._append = self._queue.append def __getstate__(self): return { 'capacity': self.capacity, '_mapping': self._mapping, '_queue': self._queue } def __setstate__(self, d): self.__dict__.update(d) self._postinit() def __getnewargs__(self): return (self.capacity,) def copy(self): """Return a shallow copy of the instance.""" rv = self.__class__(self.capacity) rv._mapping.update(self._mapping) rv._queue = deque(self._queue) return rv def get(self, key, default=None): """Return an item from the cache dict or `default`""" try: return self[key] except KeyError: return default def setdefault(self, key, default=None): """Set `default` if the key is not in the cache otherwise leave unchanged. Return the value of this key. """ self._wlock.acquire() try: try: return self[key] except KeyError: self[key] = default return default finally: self._wlock.release() def clear(self): """Clear the cache.""" self._wlock.acquire() try: self._mapping.clear() self._queue.clear() finally: self._wlock.release() def __contains__(self, key): """Check if a key exists in this cache.""" return key in self._mapping def __len__(self): """Return the current size of the cache.""" return len(self._mapping) def __repr__(self): return '<%s %r>' % ( self.__class__.__name__, self._mapping ) def __getitem__(self, key): """Get an item from the cache. Moves the item up so that it has the highest priority then. Raise a `KeyError` if it does not exist. """ self._wlock.acquire() try: rv = self._mapping[key] if self._queue[-1] != key: try: self._remove(key) except ValueError: # if something removed the key from the container # when we read, ignore the ValueError that we would # get otherwise. pass self._append(key) return rv finally: self._wlock.release() def __setitem__(self, key, value): """Sets the value for an item. 
Moves the item up so that it has the highest priority then. """ self._wlock.acquire() try: if key in self._mapping: self._remove(key) elif len(self._mapping) == self.capacity: del self._mapping[self._popleft()] self._append(key) self._mapping[key] = value finally: self._wlock.release() def __delitem__(self, key): """Remove an item from the cache dict. Raise a `KeyError` if it does not exist. """ self._wlock.acquire() try: del self._mapping[key] try: self._remove(key) except ValueError: # __getitem__ is not locked, it might happen pass finally: self._wlock.release() def items(self): """Return a list of items.""" result = [(key, self._mapping[key]) for key in list(self._queue)] result.reverse() return result def iteritems(self): """Iterate over all items.""" return iter(self.items()) def values(self): """Return a list of all values.""" return [x[1] for x in self.items()] def itervalue(self): """Iterate over all values.""" return iter(self.values()) def keys(self): """Return a list of all keys ordered by most recent usage.""" return list(self) def iterkeys(self): """Iterate over all keys in the cache dict, ordered by the most recent usage. """ return reversed(tuple(self._queue)) __iter__ = iterkeys def __reversed__(self): """Iterate over the values in the cache dict, oldest items coming first. """ return iter(tuple(self._queue)) __copy__ = copy # register the LRU cache as mutable mapping if possible try: from collections import MutableMapping MutableMapping.register(LRUCache) except ImportError: pass @implements_iterator class Cycler(object): """A cycle helper for templates.""" def __init__(self, *items): if not items: raise RuntimeError('at least one item has to be provided') self.items = items self.reset() def reset(self): """Resets the cycle.""" self.pos = 0 @property def current(self): """Returns the current item.""" return self.items[self.pos] def __next__(self): """Goes one item ahead and returns it.""" rv = self.current self.pos = (self.pos + 1) % len(self.items) return rv class Joiner(object): """A joining helper for templates.""" def __init__(self, sep=u', '): self.sep = sep self.used = False def __call__(self): if not self.used: self.used = True return u'' return self.sep # Imported here because that's where it was in the past from markupsafe import Markup, escape, soft_unicode
gpl-3.0
talha81/TACTIC-DEV
src/pyasm/widget/error_wdg.py
5
7309
########################################################### # # Copyright (c) 2005, Southpaw Technology # All Rights Reserved # # PROPRIETARY INFORMATION. This software is proprietary to # Southpaw Technology, and is not to be reproduced, transmitted, # or disclosed in any way without written permission. # # __all__ = ['Error403Wdg', 'Error404Wdg' ] from pyasm.web import Widget, DivWdg, HtmlElement, Table, SpanWdg, WebContainer from input_wdg import HiddenWdg, TextWdg, PasswordWdg from web_wdg import SignOutLinkWdg from header_wdg import ProjectSwitchWdg class ErrorWdg(Widget): LOGIN_MSG = 'login_message' def get_display(my): box = DivWdg(css='login') box.add_style("margin-top: auto") box.add_style("margin-bottom: auto") box.add_style("text-align: center") script = HtmlElement.script('''function login(e) { if (!e) var e = window.event; if (e.keyCode == 13) { submit_icon_button('Submit'); }} ''') div = DivWdg() div.add_style("margin: 0px 0px") div.add_class("centered") div.add( HtmlElement.br(6) ) sthpw = SpanWdg("SOUTHPAW TECHNOLOGY INC", css="login_sthpw") sthpw.add_style("color: #333") div.add( sthpw ) div.add( HtmlElement.br(2) ) div.add(my.get_error_wdg() ) box.add(div) widget = Widget() #widget.add( HtmlElement.br(3) ) table = Table() table.add_style("width: 100%") table.add_style("height: 85%") table.add_row() td = table.add_cell() td.add_style("vertical-align: middle") td.add_style("text-align: center") td.add_style("background: transparent") td.add(box) widget.add(table) return widget def get_error_wdg(my): '''function to override''' pass def set_message(my, message): my.message = message def set_status(my, status): my.status = status class Error404Wdg(ErrorWdg): ''' this should be displaying the error status and message, not necessarily 404''' def __init__(my): # just defaults to 404 my.status = 404 my.message = '' super(Error404Wdg, my).__init__() def get_error_wdg(my): div = DivWdg() error_div = DivWdg() error_div.add("Error %s" % my.status) div.add(error_div) error_div.add_style("font-size: 16px") error_div.add_style("font-weight: bold") error_div.add_style("width: auto") error_div.add_gradient("background", "background") error_div.add_border() error_div.add_style("margin-left: 5px") error_div.add_style("margin-right: 5px") error_div.add_style("margin-top: -10px") div.add("<br/>") span = DivWdg() #span.add_color("color", "color") span.add_style("color", "#FFF") if my.status == 404: span.add(HtmlElement.b("You have tried to access a url that is not recognized.")) else: span.add(HtmlElement.b(my.message)) span.add(HtmlElement.br(2)) web = WebContainer.get_web() root = web.get_site_root() if my.message.startswith('No project ['): label = 'You may need to correct the default_project setting in the TACTIC config.' 
else: label = "Go to the Main page for a list of valid projects" span.add(label) div.add(span) div.add(HtmlElement.br()) from tactic.ui.widget import ActionButtonWdg button_div = DivWdg() button_div.add_style("width: 90px") button_div.add_style("margin: 0px auto") div.add(button_div) button = ActionButtonWdg(title="Go to Main", tip='Click to go to main page') button_div.add(button) button.add_behavior( { 'type': 'click_up', 'cbjs_action': ''' document.location = '/'; ''' } ) button.add_event("onmouseup", "document.location='/'") return div """ class Error403Wdg(ErrorWdg): def get_error_wdg(my): div = DivWdg() #div.add_color("color", "color3") div.add_style("color", "white") div.add("<h3>Error 403: Permission Denied</h3>") span = SpanWdg() span.add("<b>You have tried to access a url that is not permitted.</b>") span.add(HtmlElement.br(2)) #inner_div = DivWdg(my._get_project_switch()) inner_div.add_style('margin-right', '130px') span.add(inner_div) #span.add(SignOutLinkWdg()) div.add(span) return div def _get_project_switch(my): # can't use anything relying on behavior here widget = ProjectSwitchWdg() return widget """ class Error403Wdg(ErrorWdg): ''' this should be displaying the error status and message, not necessarily 404''' def __init__(my): # just defaults to 404 my.status = 403 my.message = '' super(Error403Wdg, my).__init__() def get_error_wdg(my): div = DivWdg() error_div = DivWdg() error_div.add("Error %s - Permission Denied" % my.status) div.add(error_div) error_div.add_style("font-size: 16px") error_div.add_style("font-weight: bold") error_div.add_style("width: 97%") error_div.add_gradient("background", "background") error_div.add_border() error_div.add_style("margin-left: 5px") error_div.add_style("margin-top: -10px") div.add("<br/>") span = DivWdg() #span.add_color("color", "color") span.add_style("color", "#FFF") if my.status == 403: span.add("<b>You have tried to access a url that is not permitted.</b>") else: span.add(HtmlElement.b(my.message)) span.add(HtmlElement.br(2)) web = WebContainer.get_web() root = web.get_site_root() span.add("Go back to the Main page for a list of valid projects") div.add(span) div.add(HtmlElement.br()) table = Table() div.add(table) table.add_row() table.add_style("margin-left: auto") table.add_style("margin-right: auto") from tactic.ui.widget import ActionButtonWdg button = ActionButtonWdg(title="Go to Main", tip='Click to go to main page') table.add_cell(button) button.add_behavior( { 'type': 'click_up', 'cbjs_action': ''' document.location = '/projects'; ''' } ) button.add_style("margin-left: auto") button.add_style("margin-right: auto") button = ActionButtonWdg(title="Sign Out", tip='Click to Sign Out') table.add_cell(button) button.add_behavior( { 'type': 'click_up', 'login': web.get_user_name(), 'cbjs_action': ''' var server = TacticServerStub.get(); server.execute_cmd("SignOutCmd", {login: bvr.login} ); window.location.href='%s'; ''' % root } ) button.add_style("margin-left: auto") button.add_style("margin-right: auto") return div
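Both concrete widgets above follow the same pattern: set `status`/`message` defaults in `__init__` and override `get_error_wdg()`, leaving the page chrome to the base class. A hedged sketch of a third subclass — hypothetical, not part of TACTIC — to show the extension point, using only APIs that appear in this file:

```python
class Error500Wdg(ErrorWdg):
    '''Hypothetical example: a generic server-error page built the same
    way as Error403Wdg/Error404Wdg, by overriding get_error_wdg().'''
    def __init__(my):
        my.status = 500
        my.message = ''
        super(Error500Wdg, my).__init__()

    def get_error_wdg(my):
        div = DivWdg()
        error_div = DivWdg()
        error_div.add("Error %s - Internal Server Error" % my.status)
        error_div.add_style("font-size: 16px")
        error_div.add_style("font-weight: bold")
        div.add(error_div)
        div.add("<br/>")
        span = DivWdg()
        span.add(HtmlElement.b(my.message or "An unexpected error occurred."))
        div.add(span)
        return div
```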
epl-1.0
chen0031/nupic
nupic/regions/ImageSensorFilters/AddBackgroundImage.py
17
5218
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc.  Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program.  If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------

"""
## @file
"""

import os
import random

from nupic.image import (createMask, isSimpleBBox)
from nupic.frameworks.vision2 import VisionUtils
from PIL import (Image, ImageChops)

from nupic.regions.ImageSensorFilters.BaseFilter import BaseFilter


class AddBackgroundImage(BaseFilter):
  """
  Fill in the background (around the mask or around the bounding box).
  """

  def __init__(self, img=None, threshold=10, maskScale=1.0, blurRadius=0.0):
    """
    @param img -- path to background image(s) to use
    """
    BaseFilter.__init__(self)
    self.bgPath = img
    self.bgImgs = None
    self._rng = random.Random()
    self._rng.seed(42)
    self._threshold = threshold
    self._maskScale = maskScale
    self._blurRadius = blurRadius

  def process(self, image):
    """
    @param image -- The image to process.
    Returns a single image, or a list containing one or more images.
    """
    BaseFilter.process(self, image)

    # If no background image, just return the input image as-is
    if self.bgPath is None:
      return image

    # ---------------------------------------------------------------------------
    # Open the background image(s) if we haven't done so already
    if self.bgImgs is None:
      # If given a relative path, make it relative to the vision data directory
      if not os.path.isabs(self.bgPath):
        basePath = os.path.abspath(os.curdir)
        basePath = os.path.split(basePath)
        while(basePath[0]):
          if basePath[1] == 'vision':
            break
          basePath = os.path.split(basePath[0])
        # Did we find the vision directory?
        if basePath[1] == 'vision':
          fullPath = VisionUtils.findData(os.path.join(basePath[0], 'vision'),
                                          self.bgPath, 'background',
                                          'background images', True)
          #basePath = os.path.join(basePath[0], 'vision', 'data')
        else:
          fullPath = self.bgPath
      else:
        fullPath = self.bgPath

      # If given a filename, we only have 1 image
      if os.path.isfile(fullPath):
        self.bgImgs = [Image.open(fullPath).convert('LA')]

      # Else, open up all images in this directory
      else:
        self.bgImgs = []
        w = os.walk(fullPath)
        while True:
          try:
            dirpath, dirnames, filenames = w.next()
          except StopIteration:
            break
          # Don't enter directories that begin with '.'
          for d in dirnames[:]:
            if d.startswith('.'):
              dirnames.remove(d)
          dirnames.sort()
          # Ignore files that begin with '.'
          filenames = [f for f in filenames if not f.startswith('.')]
          filenames.sort()
          imageFilenames = [os.path.join(dirpath, f) for f in filenames]

          # Process each image
          for filename in imageFilenames:
            self.bgImgs.append(Image.open(filename).convert('L'))

      # Keep a cache of all images, scaled to the input image size
      self.scaledBGImgs = [x.copy() for x in self.bgImgs]

    # Pick a background at random.
idx = self._rng.randint(0, len(self.bgImgs)-1) bgImg = self.scaledBGImgs[idx] # --------------------------------------------------------------------------- # re-scale the background to the source image if necessary if bgImg.size != image.size: bgImg = self.scaledBGImgs[idx] = self.bgImgs[idx].resize(image.size, Image.ANTIALIAS) # --------------------------------------------------------------------------- # Create the mask around the source image mask = image.split()[-1] if image.mode[-1] != 'A' or isSimpleBBox(mask): mask = createMask(image, threshold=self._threshold, fillHoles=True, backgroundColor=self.background, blurRadius=self._blurRadius, maskScale=self._maskScale) # --------------------------------------------------------------------------- # Paste the image onto the background newImage = bgImg.copy() newImage.paste(image, (0,0), mask) # Put an "all-on" alpha channel because we now want the network to consider the entire # image newImage.putalpha(ImageChops.constant(newImage, 255)) return newImage
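The core of the filter is the mask-then-paste compositing near the end of `process()`. A standalone sketch of that step using the same plain PIL calls the filter relies on; the file names are placeholders:

```python
from PIL import Image, ImageChops

fg = Image.open('object.png').convert('LA')      # foreground with alpha
bg = Image.open('backdrop.png').convert('LA')

if bg.size != fg.size:
    bg = bg.resize(fg.size, Image.ANTIALIAS)     # match the source image size

mask = fg.split()[-1]                            # alpha channel as the paste mask
out = bg.copy()
out.paste(fg, (0, 0), mask)                      # background fills the masked-out area
out.putalpha(ImageChops.constant(out, 255))      # "all-on" alpha for downstream stages
```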
agpl-3.0
crosswalk-project/chromium-crosswalk-efl
chrome/test/mini_installer/test_installer.py
27
14437
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """This script tests the installer with test cases specified in the config file. For each test case, it checks that the machine states after the execution of each command match the expected machine states. For more details, take a look at the design documentation at http://goo.gl/Q0rGM6 """ import argparse import datetime import inspect import json import os import subprocess import sys import time import unittest import _winreg from variable_expander import VariableExpander import verifier_runner def LogMessage(message): """Logs a message to stderr. Args: message: The message string to be logged. """ now = datetime.datetime.now() frameinfo = inspect.getframeinfo(inspect.currentframe().f_back) filename = os.path.basename(frameinfo.filename) line = frameinfo.lineno sys.stderr.write('[%s:%s(%s)] %s\n' % (now.strftime('%m%d/%H%M%S'), filename, line, message)) class Config: """Describes the machine states, actions, and test cases. Attributes: states: A dictionary where each key is a state name and the associated value is a property dictionary describing that state. actions: A dictionary where each key is an action name and the associated value is the action's command. tests: An array of test cases. """ def __init__(self): self.states = {} self.actions = {} self.tests = [] class InstallerTest(unittest.TestCase): """Tests a test case in the config file.""" def __init__(self, name, test, config, variable_expander, quiet): """Constructor. Args: name: The name of this test. test: An array of alternating state names and action names, starting and ending with state names. config: The Config object. variable_expander: A VariableExpander object. """ super(InstallerTest, self).__init__() self._name = name self._test = test self._config = config self._variable_expander = variable_expander self._quiet = quiet self._verifier_runner = verifier_runner.VerifierRunner() self._clean_on_teardown = True def __str__(self): """Returns a string representing the test case. Returns: A string created by joining state names and action names together with ' -> ', for example, 'Test: clean -> install chrome -> chrome_installed'. """ return '%s: %s\n' % (self._name, ' -> '.join(self._test)) def id(self): """Returns the name of the test.""" # Overridden from unittest.TestCase so that id() contains the name of the # test case from the config file in place of the name of this class's test # function. return unittest.TestCase.id(self).replace(self._testMethodName, self._name) def runTest(self): """Run the test case.""" # |test| is an array of alternating state names and action names, starting # and ending with state names. Therefore, its length must be odd. self.assertEqual(1, len(self._test) % 2, 'The length of test array must be odd') state = self._test[0] self._VerifyState(state) # Starting at index 1, we loop through pairs of (action, state). for i in range(1, len(self._test), 2): action = self._test[i] if not self._quiet: LogMessage('Beginning action %s' % action) RunCommand(self._config.actions[action], self._variable_expander) if not self._quiet: LogMessage('Finished action %s' % action) state = self._test[i + 1] self._VerifyState(state) # If the test makes it here, it means it was successful, because RunCommand # and _VerifyState throw an exception on failure. 
self._clean_on_teardown = False def tearDown(self): """Cleans up the machine if the test case fails.""" if self._clean_on_teardown: RunCleanCommand(True, self._variable_expander) def shortDescription(self): """Overridden from unittest.TestCase. We return None as the short description to suppress its printing. The default implementation of this method returns the docstring of the runTest method, which is not useful since it's the same for every test case. The description from the __str__ method is informative enough. """ return None def _VerifyState(self, state): """Verifies that the current machine state matches a given state. Args: state: A state name. """ if not self._quiet: LogMessage('Verifying state %s' % state) try: self._verifier_runner.VerifyAll(self._config.states[state], self._variable_expander) except AssertionError as e: # If an AssertionError occurs, we intercept it and add the state name # to the error message so that we know where the test fails. raise AssertionError("In state '%s', %s" % (state, e)) def RunCommand(command, variable_expander): """Runs the given command from the current file's directory. This function throws an Exception if the command returns with non-zero exit status. Args: command: A command to run. It is expanded using Expand. variable_expander: A VariableExpander object. """ expanded_command = variable_expander.Expand(command) script_dir = os.path.dirname(os.path.abspath(__file__)) exit_status = subprocess.call(expanded_command, shell=True, cwd=script_dir) if exit_status != 0: raise Exception('Command %s returned non-zero exit status %s' % ( expanded_command, exit_status)) def DeleteGoogleUpdateRegistration(system_level, variable_expander): """Deletes Chrome's registration with Google Update. Args: system_level: True if system-level Chrome is to be deleted. variable_expander: A VariableExpander object. """ root = (_winreg.HKEY_LOCAL_MACHINE if system_level else _winreg.HKEY_CURRENT_USER) key_name = variable_expander.Expand('$CHROME_UPDATE_REGISTRY_SUBKEY') try: key_handle = _winreg.OpenKey(root, key_name, 0, _winreg.KEY_SET_VALUE | _winreg.KEY_WOW64_32KEY) _winreg.DeleteValue(key_handle, 'pv') except WindowsError: # The key isn't present, so there is no value to delete. pass def RunCleanCommand(force_clean, variable_expander): """Puts the machine in the clean state (i.e. Chrome not installed). Args: force_clean: A boolean indicating whether to force cleaning existing installations. variable_expander: A VariableExpander object. """ # TODO(sukolsak): Handle Chrome SxS installs. interactive_option = '--interactive' if not force_clean else '' for system_level in (False, True): level_option = '--system-level' if system_level else '' command = ('python uninstall_chrome.py ' '--chrome-long-name="$CHROME_LONG_NAME" ' '--no-error-if-absent %s %s' % (level_option, interactive_option)) RunCommand(command, variable_expander) if force_clean: DeleteGoogleUpdateRegistration(system_level, variable_expander) def MergePropertyDictionaries(current_property, new_property): """Merges the new property dictionary into the current property dictionary. This is different from general dictionary merging in that, in case there are keys with the same name, we merge values together in the first level, and we override earlier values in the second level. For more details, take a look at http://goo.gl/uE0RoR Args: current_property: The property dictionary to be modified. new_property: The new property dictionary. 
""" for key, value in new_property.iteritems(): if key not in current_property: current_property[key] = value else: assert(isinstance(current_property[key], dict) and isinstance(value, dict)) # This merges two dictionaries together. In case there are keys with # the same name, the latter will override the former. current_property[key] = dict( current_property[key].items() + value.items()) def ParsePropertyFiles(directory, filenames): """Parses an array of .prop files. Args: property_filenames: An array of Property filenames. directory: The directory where the Config file and all Property files reside in. Returns: A property dictionary created by merging all property dictionaries specified in the array. """ current_property = {} for filename in filenames: path = os.path.join(directory, filename) new_property = json.load(open(path)) MergePropertyDictionaries(current_property, new_property) return current_property def ParseConfigFile(filename): """Parses a .config file. Args: config_filename: A Config filename. Returns: A Config object. """ with open(filename, 'r') as fp: config_data = json.load(fp) directory = os.path.dirname(os.path.abspath(filename)) config = Config() config.tests = config_data['tests'] for state_name, state_property_filenames in config_data['states']: config.states[state_name] = ParsePropertyFiles(directory, state_property_filenames) for action_name, action_command in config_data['actions']: config.actions[action_name] = action_command return config def IsComponentBuild(mini_installer_path): """ Invokes the mini_installer asking whether it is a component build. Args: mini_installer_path: The path to mini_installer.exe. Returns: True if the mini_installer is a component build, False otherwise. """ query_command = [ mini_installer_path, '--query-component-build' ] exit_status = subprocess.call(query_command) return exit_status == 0 def main(): parser = argparse.ArgumentParser() parser.add_argument('--build-dir', default='out', help='Path to main build directory (the parent of the ' 'Release or Debug directory)') parser.add_argument('--target', default='Release', help='Build target (Release or Debug)') parser.add_argument('--force-clean', action='store_true', default=False, help='Force cleaning existing installations') parser.add_argument('-q', '--quiet', action='store_true', default=False, help='Reduce test runner output') parser.add_argument('--write-full-results-to', metavar='FILENAME', help='Path to write the list of full results to.') parser.add_argument('--config', metavar='FILENAME', help='Path to test configuration file') parser.add_argument('test', nargs='*', help='Name(s) of tests to run.') args = parser.parse_args() if not args.config: parser.error('missing mandatory --config FILENAME argument') mini_installer_path = os.path.join(args.build_dir, args.target, 'mini_installer.exe') assert os.path.exists(mini_installer_path), ('Could not find file %s' % mini_installer_path) suite = unittest.TestSuite() # Set the env var used by mini_installer.exe to decide to not show UI. 
os.environ['MINI_INSTALLER_TEST'] = '1' is_component_build = IsComponentBuild(mini_installer_path) if not is_component_build: config = ParseConfigFile(args.config) variable_expander = VariableExpander(mini_installer_path) RunCleanCommand(args.force_clean, variable_expander) for test in config.tests: # If tests were specified via |tests|, their names are formatted like so: test_name = '%s/%s/%s' % (InstallerTest.__module__, InstallerTest.__name__, test['name']) if not args.test or test_name in args.test: suite.addTest(InstallerTest(test['name'], test['traversal'], config, variable_expander, args.quiet)) verbosity = 2 if not args.quiet else 1 result = unittest.TextTestRunner(verbosity=verbosity).run(suite) if is_component_build: sys.stderr.write('Component build is currently unsupported by the ' 'mini_installer: http://crbug.com/377839\n') if args.write_full_results_to: with open(args.write_full_results_to, 'w') as fp: json.dump(_FullResults(suite, result, {}), fp, indent=2) fp.write('\n') return 0 if result.wasSuccessful() else 1 # TODO(dpranke): Find a way for this to be shared with the mojo and other tests. TEST_SEPARATOR = '.' def _FullResults(suite, result, metadata): """Convert the unittest results to the Chromium JSON test result format. This matches run-webkit-tests (the layout tests) and the flakiness dashboard. """ full_results = {} full_results['interrupted'] = False full_results['path_delimiter'] = TEST_SEPARATOR full_results['version'] = 3 full_results['seconds_since_epoch'] = time.time() for md in metadata: key, val = md.split('=', 1) full_results[key] = val all_test_names = _AllTestNames(suite) failed_test_names = _FailedTestNames(result) full_results['num_failures_by_type'] = { 'FAIL': len(failed_test_names), 'PASS': len(all_test_names) - len(failed_test_names), } full_results['tests'] = {} for test_name in all_test_names: value = {} value['expected'] = 'PASS' if test_name in failed_test_names: value['actual'] = 'FAIL' value['is_unexpected'] = True else: value['actual'] = 'PASS' _AddPathToTrie(full_results['tests'], test_name, value) return full_results def _AllTestNames(suite): test_names = [] # _tests is protected pylint: disable=W0212 for test in suite._tests: if isinstance(test, unittest.suite.TestSuite): test_names.extend(_AllTestNames(test)) else: test_names.append(test.id()) return test_names def _FailedTestNames(result): return set(test.id() for test, _ in result.failures + result.errors) def _AddPathToTrie(trie, path, value): if TEST_SEPARATOR not in path: trie[path] = value return directory, rest = path.split(TEST_SEPARATOR, 1) if directory not in trie: trie[directory] = {} _AddPathToTrie(trie[directory], rest, value) if __name__ == '__main__': sys.exit(main())
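To make the parsing code above concrete, here is an illustrative config in the shape `ParseConfigFile()` consumes: `states` and `actions` are arrays of two-element entries, and each test alternates state and action names. The file and variable names are hypothetical stand-ins, not the real Chromium data.

```python
example_config = {
    "states": [
        ["clean", ["clean.prop"]],
        ["chrome_installed", ["chrome_installed.prop"]],
    ],
    "actions": [
        ["install_chrome", "\"$MINI_INSTALLER\" --do-not-launch-chrome"],
    ],
    # Each traversal alternates state and action names and must start and
    # end on a state, which is why InstallerTest asserts an odd length.
    "tests": [
        {
            "name": "InstallTest",
            "traversal": ["clean", "install_chrome", "chrome_installed"],
        },
    ],
}
```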
bsd-3-clause
aronsky/home-assistant
tests/components/auth/test_login_flow.py
13
3388
"""Tests for the login flow.""" from unittest.mock import patch from . import async_setup_auth from tests.common import CLIENT_ID, CLIENT_REDIRECT_URI async def test_fetch_auth_providers(hass, aiohttp_client): """Test fetching auth providers.""" client = await async_setup_auth(hass, aiohttp_client) resp = await client.get('/auth/providers') assert resp.status == 200 assert await resp.json() == [{ 'name': 'Example', 'type': 'insecure_example', 'id': None }] async def test_fetch_auth_providers_onboarding(hass, aiohttp_client): """Test fetching auth providers.""" client = await async_setup_auth(hass, aiohttp_client) with patch('homeassistant.components.onboarding.async_is_onboarded', return_value=False): resp = await client.get('/auth/providers') assert resp.status == 400 assert await resp.json() == { 'message': 'Onboarding not finished', 'code': 'onboarding_required', } async def test_cannot_get_flows_in_progress(hass, aiohttp_client): """Test we cannot get flows in progress.""" client = await async_setup_auth(hass, aiohttp_client, []) resp = await client.get('/auth/login_flow') assert resp.status == 405 async def test_invalid_username_password(hass, aiohttp_client): """Test we cannot get flows in progress.""" client = await async_setup_auth(hass, aiohttp_client) resp = await client.post('/auth/login_flow', json={ 'client_id': CLIENT_ID, 'handler': ['insecure_example', None], 'redirect_uri': CLIENT_REDIRECT_URI }) assert resp.status == 200 step = await resp.json() # Incorrect username resp = await client.post( '/auth/login_flow/{}'.format(step['flow_id']), json={ 'client_id': CLIENT_ID, 'username': 'wrong-user', 'password': 'test-pass', }) assert resp.status == 200 step = await resp.json() assert step['step_id'] == 'init' assert step['errors']['base'] == 'invalid_auth' # Incorrect password resp = await client.post( '/auth/login_flow/{}'.format(step['flow_id']), json={ 'client_id': CLIENT_ID, 'username': 'test-user', 'password': 'wrong-pass', }) assert resp.status == 200 step = await resp.json() assert step['step_id'] == 'init' assert step['errors']['base'] == 'invalid_auth' async def test_login_exist_user(hass, aiohttp_client): """Test logging in with exist user.""" client = await async_setup_auth(hass, aiohttp_client, setup_api=True) cred = await hass.auth.auth_providers[0].async_get_or_create_credentials( {'username': 'test-user'}) await hass.auth.async_get_or_create_user(cred) resp = await client.post('/auth/login_flow', json={ 'client_id': CLIENT_ID, 'handler': ['insecure_example', None], 'redirect_uri': CLIENT_REDIRECT_URI, }) assert resp.status == 200 step = await resp.json() resp = await client.post( '/auth/login_flow/{}'.format(step['flow_id']), json={ 'client_id': CLIENT_ID, 'username': 'test-user', 'password': 'test-pass', }) assert resp.status == 200 step = await resp.json() assert step['type'] == 'create_entry' assert len(step['result']) > 1
apache-2.0
Endika/sale-workflow
sale_stock_global_delivery_lead_time/sale_stock.py
37
4849
# -*- coding: utf-8 -*- # # # Author: Alexandre Fayolle # Copyright 2013 Camptocamp SA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # from datetime import datetime from dateutil.relativedelta import relativedelta from openerp.osv import orm, fields from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT class sale_order(orm.Model): _inherit = 'sale.order' def _min_max_date_planned( self, cr, uid, ids, field_names, arg, context=None ): res = {} if not ids: return res order_line_obj = self.pool.get('sale.order.line') sale_infos = self.read(cr, uid, ids, ['delay', 'date_order'], context=context, load='_classic_write') line_ids = order_line_obj.search(cr, uid, [('order_id', 'in', ids)], context=context) line_delays = order_line_obj.read(cr, uid, line_ids, ['order_id', 'delay'], context=context, load='_classic_write') order_line_delays = {} # dict order_id: [line delays] for line_info in line_delays: order_line_delays.setdefault( line_info['order_id'], []).append(line_info['delay']) for sale_info in sale_infos: sale_id = sale_info['id'] res[sale_id] = {} start_date = datetime.strptime( self.date_to_datetime( cr, uid, sale_info['date_order'], context), DEFAULT_SERVER_DATETIME_FORMAT) min_delay = sale_info['delay'] + min( order_line_delays.get(sale_id, [0])) max_delay = sale_info['delay'] + max( order_line_delays.get(sale_id, [0])) min_date = start_date + relativedelta(days=min_delay) max_date = start_date + relativedelta(days=max_delay) for name in field_names: if name == 'min_date_planned': date = min_date elif name == 'max_date_planned': date = max_date else: continue res[sale_id][name] = date.strftime( DEFAULT_SERVER_DATETIME_FORMAT) return res _columns = { 'delay': fields.float('Delivery Lead Time', required=True, help="Number of days between the order " "confirmation and the shipping of the " "products " "to the customer. This lead time is added " "to the lead time of each line.", readonly=True, states={'draft': [('readonly', False)]}), 'min_date_planned': fields.function(_min_max_date_planned, type='date', string='Earliest date planned', method=True, multi='date_planned'), 'max_date_planned': fields.function(_min_max_date_planned, type='date', string='Latest date planned', method=True, multi='date_planned'), } _defaults = {'delay': 0, } def _get_date_planned( self, cr, uid, order, line, start_date, context=None ): date_planned = super( sale_order, self)._get_date_planned(cr, uid, order, line, start_date, context) date_planned = datetime.strptime(date_planned, DEFAULT_SERVER_DATETIME_FORMAT) date_planned += relativedelta(days=order.delay or 0.0) return date_planned.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
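A worked, standalone illustration of the date arithmetic in `_min_max_date_planned`, under assumed inputs: an order confirmed on 2013-06-01 with a 2-day order-level delay and line delays of 1, 3 and 5 days. The format string mirrors `DEFAULT_SERVER_DATETIME_FORMAT`.

```python
from datetime import datetime
from dateutil.relativedelta import relativedelta

DATETIME_FMT = '%Y-%m-%d %H:%M:%S'

order_delay = 2.0                    # order-level Delivery Lead Time, in days
line_delays = [1.0, 5.0, 3.0]        # per-line delays
start = datetime.strptime('2013-06-01 00:00:00', DATETIME_FMT)

# The order delay is added to the smallest and largest line-level delays.
min_date = start + relativedelta(days=order_delay + min(line_delays))
max_date = start + relativedelta(days=order_delay + max(line_delays))

assert min_date.strftime(DATETIME_FMT) == '2013-06-04 00:00:00'
assert max_date.strftime(DATETIME_FMT) == '2013-06-08 00:00:00'
```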
agpl-3.0
sarvex/tensorflow
tensorflow/python/ops/distributions/distribution.py
9
46676
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Base classes for probability distributions.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import abc import contextlib import types import numpy as np import six from tensorflow.python.eager import context from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import tensor_shape from tensorflow.python.framework import tensor_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops.distributions import kullback_leibler from tensorflow.python.ops.distributions import util from tensorflow.python.util import deprecation from tensorflow.python.util import tf_inspect from tensorflow.python.util.tf_export import tf_export __all__ = [ "ReparameterizationType", "FULLY_REPARAMETERIZED", "NOT_REPARAMETERIZED", "Distribution", ] _DISTRIBUTION_PUBLIC_METHOD_WRAPPERS = [ "batch_shape", "batch_shape_tensor", "cdf", "covariance", "cross_entropy", "entropy", "event_shape", "event_shape_tensor", "kl_divergence", "log_cdf", "log_prob", "log_survival_function", "mean", "mode", "prob", "sample", "stddev", "survival_function", "variance", ] @six.add_metaclass(abc.ABCMeta) class _BaseDistribution(object): """Abstract base class needed for resolving subclass hierarchy.""" pass def _copy_fn(fn): """Create a deep copy of fn. Args: fn: a callable Returns: A `FunctionType`: a deep copy of fn. Raises: TypeError: if `fn` is not a callable. """ if not callable(fn): raise TypeError("fn is not callable: %s" % fn) # The blessed way to copy a function. copy.deepcopy fails to create a # non-reference copy. Since: # types.FunctionType == type(lambda: None), # and the docstring for the function type states: # # function(code, globals[, name[, argdefs[, closure]]]) # # Create a function object from a code object and a dictionary. # ... # # Here we can use this to create a new function with the old function's # code, globals, closure, etc. return types.FunctionType( code=fn.__code__, globals=fn.__globals__, name=fn.__name__, argdefs=fn.__defaults__, closure=fn.__closure__) def _update_docstring(old_str, append_str): """Update old_str by inserting append_str just before the "Args:" section.""" old_str = old_str or "" old_str_lines = old_str.split("\n") # Step 0: Prepend spaces to all lines of append_str. This is # necessary for correct markdown generation. 
append_str = "\n".join(" %s" % line for line in append_str.split("\n")) # Step 1: Find mention of "Args": has_args_ix = [ ix for ix, line in enumerate(old_str_lines) if line.strip().lower() == "args:"] if has_args_ix: final_args_ix = has_args_ix[-1] return ("\n".join(old_str_lines[:final_args_ix]) + "\n\n" + append_str + "\n\n" + "\n".join(old_str_lines[final_args_ix:])) else: return old_str + "\n\n" + append_str def _convert_to_tensor(value, name=None, preferred_dtype=None): """Converts to tensor avoiding an eager bug that loses float precision.""" # TODO(b/116672045): Remove this function. if (context.executing_eagerly() and preferred_dtype is not None and (preferred_dtype.is_integer or preferred_dtype.is_bool)): v = ops.convert_to_tensor(value, name=name) if v.dtype.is_floating: return v return ops.convert_to_tensor( value, name=name, preferred_dtype=preferred_dtype) class _DistributionMeta(abc.ABCMeta): def __new__(mcs, classname, baseclasses, attrs): """Control the creation of subclasses of the Distribution class. The main purpose of this method is to properly propagate docstrings from private Distribution methods, like `_log_prob`, into their public wrappers as inherited by the Distribution base class (e.g. `log_prob`). Args: classname: The name of the subclass being created. baseclasses: A tuple of parent classes. attrs: A dict mapping new attributes to their values. Returns: The class object. Raises: TypeError: If `Distribution` is not a subclass of `BaseDistribution`, or the new class is derived via multiple inheritance and the first parent class is not a subclass of `BaseDistribution`. AttributeError: If `Distribution` does not implement e.g. `log_prob`. ValueError: If a `Distribution` public method lacks a docstring. """ if not baseclasses: # Nothing to be done for Distribution raise TypeError("Expected non-empty baseclass. Does Distribution " "not subclass _BaseDistribution?") which_base = [ base for base in baseclasses if base == _BaseDistribution or issubclass(base, Distribution)] base = which_base[0] if base == _BaseDistribution: # Nothing to be done for Distribution return abc.ABCMeta.__new__(mcs, classname, baseclasses, attrs) if not issubclass(base, Distribution): raise TypeError("First parent class declared for %s must be " "Distribution, but saw '%s'" % (classname, base.__name__)) for attr in _DISTRIBUTION_PUBLIC_METHOD_WRAPPERS: special_attr = "_%s" % attr class_attr_value = attrs.get(attr, None) if attr in attrs: # The method is being overridden, do not update its docstring continue base_attr_value = getattr(base, attr, None) if not base_attr_value: raise AttributeError( "Internal error: expected base class '%s' to implement method '%s'" % (base.__name__, attr)) class_special_attr_value = attrs.get(special_attr, None) if class_special_attr_value is None: # No _special method available, no need to update the docstring. continue class_special_attr_docstring = tf_inspect.getdoc(class_special_attr_value) if not class_special_attr_docstring: # No docstring to append. 
continue class_attr_value = _copy_fn(base_attr_value) class_attr_docstring = tf_inspect.getdoc(base_attr_value) if class_attr_docstring is None: raise ValueError( "Expected base class fn to contain a docstring: %s.%s" % (base.__name__, attr)) class_attr_value.__doc__ = _update_docstring( class_attr_value.__doc__, ("Additional documentation from `%s`:\n\n%s" % (classname, class_special_attr_docstring))) attrs[attr] = class_attr_value return abc.ABCMeta.__new__(mcs, classname, baseclasses, attrs) @tf_export(v1=["distributions.ReparameterizationType"]) class ReparameterizationType(object): """Instances of this class represent how sampling is reparameterized. Two static instances exist in the distributions library, signifying one of two possible properties for samples from a distribution: `FULLY_REPARAMETERIZED`: Samples from the distribution are fully reparameterized, and straight-through gradients are supported. `NOT_REPARAMETERIZED`: Samples from the distribution are not fully reparameterized, and straight-through gradients are either partially unsupported or are not supported at all. In this case, for purposes of e.g. RL or variational inference, it is generally safest to wrap the sample results in a `stop_gradients` call and use policy gradients / surrogate loss instead. """ @deprecation.deprecated( "2019-01-01", "The TensorFlow Distributions library has moved to " "TensorFlow Probability " "(https://github.com/tensorflow/probability). You " "should update all references to use `tfp.distributions` " "instead of `tf.distributions`.", warn_once=True) def __init__(self, rep_type): self._rep_type = rep_type def __repr__(self): return "<Reparameterization Type: %s>" % self._rep_type def __eq__(self, other): """Determine if this `ReparameterizationType` is equal to another. Since ReparameterizationType instances are constant static global instances, equality checks if two instances' id() values are equal. Args: other: Object to compare against. Returns: `self is other`. """ return self is other # Fully reparameterized distribution: samples from a fully # reparameterized distribution support straight-through gradients with # respect to all parameters. FULLY_REPARAMETERIZED = ReparameterizationType("FULLY_REPARAMETERIZED") tf_export(v1=["distributions.FULLY_REPARAMETERIZED"]).export_constant( __name__, "FULLY_REPARAMETERIZED") # Not reparameterized distribution: samples from a non- # reparameterized distribution do not support straight-through gradients for # at least some of the parameters. NOT_REPARAMETERIZED = ReparameterizationType("NOT_REPARAMETERIZED") tf_export(v1=["distributions.NOT_REPARAMETERIZED"]).export_constant( __name__, "NOT_REPARAMETERIZED") @six.add_metaclass(_DistributionMeta) @tf_export(v1=["distributions.Distribution"]) class Distribution(_BaseDistribution): """A generic probability distribution base class. `Distribution` is a base class for constructing and organizing properties (e.g., mean, variance) of random variables (e.g, Bernoulli, Gaussian). #### Subclassing Subclasses are expected to implement a leading-underscore version of the same-named function. The argument signature should be identical except for the omission of `name="..."`. For example, to enable `log_prob(value, name="log_prob")` a subclass should implement `_log_prob(value)`. Subclasses can append to public-level docstrings by providing docstrings for their method specializations. For example: ```python @util.AppendDocstring("Some other details.") def _log_prob(self, value): ... 
``` would add the string "Some other details." to the `log_prob` function docstring. This is implemented as a simple decorator to avoid python linter complaining about missing Args/Returns/Raises sections in the partial docstrings. #### Broadcasting, batching, and shapes All distributions support batches of independent distributions of that type. The batch shape is determined by broadcasting together the parameters. The shape of arguments to `__init__`, `cdf`, `log_cdf`, `prob`, and `log_prob` reflect this broadcasting, as does the return value of `sample` and `sample_n`. `sample_n_shape = [n] + batch_shape + event_shape`, where `sample_n_shape` is the shape of the `Tensor` returned from `sample_n`, `n` is the number of samples, `batch_shape` defines how many independent distributions there are, and `event_shape` defines the shape of samples from each of those independent distributions. Samples are independent along the `batch_shape` dimensions, but not necessarily so along the `event_shape` dimensions (depending on the particulars of the underlying distribution). Using the `Uniform` distribution as an example: ```python minval = 3.0 maxval = [[4.0, 6.0], [10.0, 12.0]] # Broadcasting: # This instance represents 4 Uniform distributions. Each has a lower bound at # 3.0 as the `minval` parameter was broadcasted to match `maxval`'s shape. u = Uniform(minval, maxval) # `event_shape` is `TensorShape([])`. event_shape = u.event_shape # `event_shape_t` is a `Tensor` which will evaluate to []. event_shape_t = u.event_shape_tensor() # Sampling returns a sample per distribution. `samples` has shape # [5, 2, 2], which is [n] + batch_shape + event_shape, where n=5, # batch_shape=[2, 2], and event_shape=[]. samples = u.sample_n(5) # The broadcasting holds across methods. Here we use `cdf` as an example. The # same holds for `log_cdf` and the likelihood functions. # `cum_prob` has shape [2, 2] as the `value` argument was broadcasted to the # shape of the `Uniform` instance. cum_prob_broadcast = u.cdf(4.0) # `cum_prob`'s shape is [2, 2], one per distribution. No broadcasting # occurred. cum_prob_per_dist = u.cdf([[4.0, 5.0], [6.0, 7.0]]) # INVALID as the `value` argument is not broadcastable to the distribution's # shape. cum_prob_invalid = u.cdf([4.0, 5.0, 6.0]) ``` #### Shapes There are three important concepts associated with TensorFlow Distributions shapes: - Event shape describes the shape of a single draw from the distribution; it may be dependent across dimensions. For scalar distributions, the event shape is `[]`. For a 5-dimensional MultivariateNormal, the event shape is `[5]`. - Batch shape describes independent, not identically distributed draws, aka a "collection" or "bunch" of distributions. - Sample shape describes independent, identically distributed draws of batches from the distribution family. The event shape and the batch shape are properties of a Distribution object, whereas the sample shape is associated with a specific call to `sample` or `log_prob`. For detailed usage examples of TensorFlow Distributions shapes, see [this tutorial]( https://github.com/tensorflow/probability/blob/master/tensorflow_probability/examples/jupyter_notebooks/Understanding_TensorFlow_Distributions_Shapes.ipynb) #### Parameter values leading to undefined statistics or distributions. Some distributions do not have well-defined statistics for all initialization parameter values. 
For example, the beta distribution is parameterized by positive real numbers `concentration1` and `concentration0`, and does not have well-defined mode if `concentration1 < 1` or `concentration0 < 1`. The user is given the option of raising an exception or returning `NaN`. ```python a = tf.exp(tf.matmul(logits, weights_a)) b = tf.exp(tf.matmul(logits, weights_b)) # Will raise exception if ANY batch member has a < 1 or b < 1. dist = distributions.beta(a, b, allow_nan_stats=False) mode = dist.mode().eval() # Will return NaN for batch members with either a < 1 or b < 1. dist = distributions.beta(a, b, allow_nan_stats=True) # Default behavior mode = dist.mode().eval() ``` In all cases, an exception is raised if *invalid* parameters are passed, e.g. ```python # Will raise an exception if any Op is run. negative_a = -1.0 * a # beta distribution by definition has a > 0. dist = distributions.beta(negative_a, b, allow_nan_stats=True) dist.mean().eval() ``` """ @deprecation.deprecated( "2019-01-01", "The TensorFlow Distributions library has moved to " "TensorFlow Probability " "(https://github.com/tensorflow/probability). You " "should update all references to use `tfp.distributions` " "instead of `tf.distributions`.", warn_once=True) def __init__(self, dtype, reparameterization_type, validate_args, allow_nan_stats, parameters=None, graph_parents=None, name=None): """Constructs the `Distribution`. **This is a private method for subclass use.** Args: dtype: The type of the event samples. `None` implies no type-enforcement. reparameterization_type: Instance of `ReparameterizationType`. If `distributions.FULLY_REPARAMETERIZED`, this `Distribution` can be reparameterized in terms of some standard distribution with a function whose Jacobian is constant for the support of the standard distribution. If `distributions.NOT_REPARAMETERIZED`, then no such reparameterization is available. validate_args: Python `bool`, default `False`. When `True` distribution parameters are checked for validity despite possibly degrading runtime performance. When `False` invalid inputs may silently render incorrect outputs. allow_nan_stats: Python `bool`, default `True`. When `True`, statistics (e.g., mean, mode, variance) use the value "`NaN`" to indicate the result is undefined. When `False`, an exception is raised if one or more of the statistic's batch members are undefined. parameters: Python `dict` of parameters used to instantiate this `Distribution`. graph_parents: Python `list` of graph prerequisites of this `Distribution`. name: Python `str` name prefixed to Ops created by this class. Default: subclass name. Raises: ValueError: if any member of graph_parents is `None` or not a `Tensor`. """ graph_parents = [] if graph_parents is None else graph_parents for i, t in enumerate(graph_parents): if t is None or not tensor_util.is_tf_type(t): raise ValueError("Graph parent item %d is not a Tensor; %s." % (i, t)) if not name or name[-1] != "/": # `name` is not a name scope non_unique_name = name or type(self).__name__ with ops.name_scope(non_unique_name) as name: pass self._dtype = dtype self._reparameterization_type = reparameterization_type self._allow_nan_stats = allow_nan_stats self._validate_args = validate_args self._parameters = parameters or {} self._graph_parents = graph_parents self._name = name @property def _parameters(self): return self._parameter_dict @_parameters.setter def _parameters(self, value): """Intercept assignments to self._parameters to avoid reference cycles. 
Parameters are often created using locals(), so we need to clean out any references to `self` before assigning it to an attribute. Args: value: A dictionary of parameters to assign to the `_parameters` property. """ if "self" in value: del value["self"] self._parameter_dict = value @classmethod def param_shapes(cls, sample_shape, name="DistributionParamShapes"): """Shapes of parameters given the desired shape of a call to `sample()`. This is a class method that describes what key/value arguments are required to instantiate the given `Distribution` so that a particular shape is returned for that instance's call to `sample()`. Subclasses should override class method `_param_shapes`. Args: sample_shape: `Tensor` or python list/tuple. Desired shape of a call to `sample()`. name: name to prepend ops with. Returns: `dict` of parameter name to `Tensor` shapes. """ with ops.name_scope(name, values=[sample_shape]): return cls._param_shapes(sample_shape) @classmethod def param_static_shapes(cls, sample_shape): """param_shapes with static (i.e. `TensorShape`) shapes. This is a class method that describes what key/value arguments are required to instantiate the given `Distribution` so that a particular shape is returned for that instance's call to `sample()`. Assumes that the sample's shape is known statically. Subclasses should override class method `_param_shapes` to return constant-valued tensors when constant values are fed. Args: sample_shape: `TensorShape` or python list/tuple. Desired shape of a call to `sample()`. Returns: `dict` of parameter name to `TensorShape`. Raises: ValueError: if `sample_shape` is a `TensorShape` and is not fully defined. """ if isinstance(sample_shape, tensor_shape.TensorShape): if not sample_shape.is_fully_defined(): raise ValueError("TensorShape sample_shape must be fully defined") sample_shape = sample_shape.as_list() params = cls.param_shapes(sample_shape) static_params = {} for name, shape in params.items(): static_shape = tensor_util.constant_value(shape) if static_shape is None: raise ValueError( "sample_shape must be a fully-defined TensorShape or list/tuple") static_params[name] = tensor_shape.TensorShape(static_shape) return static_params @staticmethod def _param_shapes(sample_shape): raise NotImplementedError("_param_shapes not implemented") @property def name(self): """Name prepended to all ops created by this `Distribution`.""" return self._name @property def dtype(self): """The `DType` of `Tensor`s handled by this `Distribution`.""" return self._dtype @property def parameters(self): """Dictionary of parameters used to instantiate this `Distribution`.""" # Remove "self", "__class__", or other special variables. These can appear # if the subclass used: # `parameters = dict(locals())`. return {k: v for k, v in self._parameters.items() if not k.startswith("__") and k != "self"} @property def reparameterization_type(self): """Describes how samples from the distribution are reparameterized. Currently this is one of the static instances `distributions.FULLY_REPARAMETERIZED` or `distributions.NOT_REPARAMETERIZED`. Returns: An instance of `ReparameterizationType`. """ return self._reparameterization_type @property def allow_nan_stats(self): """Python `bool` describing behavior when a stat is undefined. Stats return +/- infinity when it makes sense. E.g., the variance of a Cauchy distribution is infinity. 
However, sometimes the statistic is undefined, e.g., if a distribution's pdf does not achieve a maximum within the support of the distribution, the mode is undefined. If the mean is undefined, then by definition the variance is undefined. E.g. the mean for Student's T for df = 1 is undefined (no clear way to say it is either + or - infinity), so the variance = E[(X - mean)**2] is also undefined. Returns: allow_nan_stats: Python `bool`. """ return self._allow_nan_stats @property def validate_args(self): """Python `bool` indicating possibly expensive checks are enabled.""" return self._validate_args def copy(self, **override_parameters_kwargs): """Creates a deep copy of the distribution. Note: the copy distribution may continue to depend on the original initialization arguments. Args: **override_parameters_kwargs: String/value dictionary of initialization arguments to override with new values. Returns: distribution: A new instance of `type(self)` initialized from the union of self.parameters and override_parameters_kwargs, i.e., `dict(self.parameters, **override_parameters_kwargs)`. """ parameters = dict(self.parameters, **override_parameters_kwargs) return type(self)(**parameters) def _batch_shape_tensor(self): raise NotImplementedError( "batch_shape_tensor is not implemented: {}".format(type(self).__name__)) def batch_shape_tensor(self, name="batch_shape_tensor"): """Shape of a single sample from a single event index as a 1-D `Tensor`. The batch dimensions are indexes into independent, non-identical parameterizations of this distribution. Args: name: name to give to the op Returns: batch_shape: `Tensor`. """ with self._name_scope(name): if self.batch_shape.is_fully_defined(): return ops.convert_to_tensor(self.batch_shape.as_list(), dtype=dtypes.int32, name="batch_shape") return self._batch_shape_tensor() def _batch_shape(self): return tensor_shape.TensorShape(None) @property def batch_shape(self): """Shape of a single sample from a single event index as a `TensorShape`. May be partially defined or unknown. The batch dimensions are indexes into independent, non-identical parameterizations of this distribution. Returns: batch_shape: `TensorShape`, possibly unknown. """ return tensor_shape.as_shape(self._batch_shape()) def _event_shape_tensor(self): raise NotImplementedError( "event_shape_tensor is not implemented: {}".format(type(self).__name__)) def event_shape_tensor(self, name="event_shape_tensor"): """Shape of a single sample from a single batch as a 1-D int32 `Tensor`. Args: name: name to give to the op Returns: event_shape: `Tensor`. """ with self._name_scope(name): if self.event_shape.is_fully_defined(): return ops.convert_to_tensor(self.event_shape.as_list(), dtype=dtypes.int32, name="event_shape") return self._event_shape_tensor() def _event_shape(self): return tensor_shape.TensorShape(None) @property def event_shape(self): """Shape of a single sample from a single batch as a `TensorShape`. May be partially defined or unknown. Returns: event_shape: `TensorShape`, possibly unknown. """ return tensor_shape.as_shape(self._event_shape()) def is_scalar_event(self, name="is_scalar_event"): """Indicates that `event_shape == []`. Args: name: Python `str` prepended to names of ops created by this function. Returns: is_scalar_event: `bool` scalar `Tensor`. 
""" with self._name_scope(name): return ops.convert_to_tensor( self._is_scalar_helper(self.event_shape, self.event_shape_tensor), name="is_scalar_event") def is_scalar_batch(self, name="is_scalar_batch"): """Indicates that `batch_shape == []`. Args: name: Python `str` prepended to names of ops created by this function. Returns: is_scalar_batch: `bool` scalar `Tensor`. """ with self._name_scope(name): return ops.convert_to_tensor( self._is_scalar_helper(self.batch_shape, self.batch_shape_tensor), name="is_scalar_batch") def _sample_n(self, n, seed=None): raise NotImplementedError("sample_n is not implemented: {}".format( type(self).__name__)) def _call_sample_n(self, sample_shape, seed, name, **kwargs): with self._name_scope(name, values=[sample_shape]): sample_shape = ops.convert_to_tensor( sample_shape, dtype=dtypes.int32, name="sample_shape") sample_shape, n = self._expand_sample_shape_to_vector( sample_shape, "sample_shape") samples = self._sample_n(n, seed, **kwargs) batch_event_shape = array_ops.shape(samples)[1:] final_shape = array_ops.concat([sample_shape, batch_event_shape], 0) samples = array_ops.reshape(samples, final_shape) samples = self._set_sample_static_shape(samples, sample_shape) return samples def sample(self, sample_shape=(), seed=None, name="sample"): """Generate samples of the specified shape. Note that a call to `sample()` without arguments will generate a single sample. Args: sample_shape: 0D or 1D `int32` `Tensor`. Shape of the generated samples. seed: Python integer seed for RNG name: name to give to the op. Returns: samples: a `Tensor` with prepended dimensions `sample_shape`. """ return self._call_sample_n(sample_shape, seed, name) def _log_prob(self, value): raise NotImplementedError("log_prob is not implemented: {}".format( type(self).__name__)) def _call_log_prob(self, value, name, **kwargs): with self._name_scope(name, values=[value]): value = _convert_to_tensor( value, name="value", preferred_dtype=self.dtype) try: return self._log_prob(value, **kwargs) except NotImplementedError as original_exception: try: return math_ops.log(self._prob(value, **kwargs)) except NotImplementedError: raise original_exception def log_prob(self, value, name="log_prob"): """Log probability density/mass function. Args: value: `float` or `double` `Tensor`. name: Python `str` prepended to names of ops created by this function. Returns: log_prob: a `Tensor` of shape `sample_shape(x) + self.batch_shape` with values of type `self.dtype`. """ return self._call_log_prob(value, name) def _prob(self, value): raise NotImplementedError("prob is not implemented: {}".format( type(self).__name__)) def _call_prob(self, value, name, **kwargs): with self._name_scope(name, values=[value]): value = _convert_to_tensor( value, name="value", preferred_dtype=self.dtype) try: return self._prob(value, **kwargs) except NotImplementedError as original_exception: try: return math_ops.exp(self._log_prob(value, **kwargs)) except NotImplementedError: raise original_exception def prob(self, value, name="prob"): """Probability density/mass function. Args: value: `float` or `double` `Tensor`. name: Python `str` prepended to names of ops created by this function. Returns: prob: a `Tensor` of shape `sample_shape(x) + self.batch_shape` with values of type `self.dtype`. 
""" return self._call_prob(value, name) def _log_cdf(self, value): raise NotImplementedError("log_cdf is not implemented: {}".format( type(self).__name__)) def _call_log_cdf(self, value, name, **kwargs): with self._name_scope(name, values=[value]): value = _convert_to_tensor( value, name="value", preferred_dtype=self.dtype) try: return self._log_cdf(value, **kwargs) except NotImplementedError as original_exception: try: return math_ops.log(self._cdf(value, **kwargs)) except NotImplementedError: raise original_exception def log_cdf(self, value, name="log_cdf"): """Log cumulative distribution function. Given random variable `X`, the cumulative distribution function `cdf` is: ```none log_cdf(x) := Log[ P[X <= x] ] ``` Often, a numerical approximation can be used for `log_cdf(x)` that yields a more accurate answer than simply taking the logarithm of the `cdf` when `x << -1`. Args: value: `float` or `double` `Tensor`. name: Python `str` prepended to names of ops created by this function. Returns: logcdf: a `Tensor` of shape `sample_shape(x) + self.batch_shape` with values of type `self.dtype`. """ return self._call_log_cdf(value, name) def _cdf(self, value): raise NotImplementedError("cdf is not implemented: {}".format( type(self).__name__)) def _call_cdf(self, value, name, **kwargs): with self._name_scope(name, values=[value]): value = _convert_to_tensor( value, name="value", preferred_dtype=self.dtype) try: return self._cdf(value, **kwargs) except NotImplementedError as original_exception: try: return math_ops.exp(self._log_cdf(value, **kwargs)) except NotImplementedError: raise original_exception def cdf(self, value, name="cdf"): """Cumulative distribution function. Given random variable `X`, the cumulative distribution function `cdf` is: ```none cdf(x) := P[X <= x] ``` Args: value: `float` or `double` `Tensor`. name: Python `str` prepended to names of ops created by this function. Returns: cdf: a `Tensor` of shape `sample_shape(x) + self.batch_shape` with values of type `self.dtype`. """ return self._call_cdf(value, name) def _log_survival_function(self, value): raise NotImplementedError( "log_survival_function is not implemented: {}".format( type(self).__name__)) def _call_log_survival_function(self, value, name, **kwargs): with self._name_scope(name, values=[value]): value = _convert_to_tensor( value, name="value", preferred_dtype=self.dtype) try: return self._log_survival_function(value, **kwargs) except NotImplementedError as original_exception: try: return math_ops.log1p(-self.cdf(value, **kwargs)) except NotImplementedError: raise original_exception def log_survival_function(self, value, name="log_survival_function"): """Log survival function. Given random variable `X`, the survival function is defined: ```none log_survival_function(x) = Log[ P[X > x] ] = Log[ 1 - P[X <= x] ] = Log[ 1 - cdf(x) ] ``` Typically, different numerical approximations can be used for the log survival function, which are more accurate than `1 - cdf(x)` when `x >> 1`. Args: value: `float` or `double` `Tensor`. name: Python `str` prepended to names of ops created by this function. Returns: `Tensor` of shape `sample_shape(x) + self.batch_shape` with values of type `self.dtype`. 
""" return self._call_log_survival_function(value, name) def _survival_function(self, value): raise NotImplementedError("survival_function is not implemented: {}".format( type(self).__name__)) def _call_survival_function(self, value, name, **kwargs): with self._name_scope(name, values=[value]): value = _convert_to_tensor( value, name="value", preferred_dtype=self.dtype) try: return self._survival_function(value, **kwargs) except NotImplementedError as original_exception: try: return 1. - self.cdf(value, **kwargs) except NotImplementedError: raise original_exception def survival_function(self, value, name="survival_function"): """Survival function. Given random variable `X`, the survival function is defined: ```none survival_function(x) = P[X > x] = 1 - P[X <= x] = 1 - cdf(x). ``` Args: value: `float` or `double` `Tensor`. name: Python `str` prepended to names of ops created by this function. Returns: `Tensor` of shape `sample_shape(x) + self.batch_shape` with values of type `self.dtype`. """ return self._call_survival_function(value, name) def _entropy(self): raise NotImplementedError("entropy is not implemented: {}".format( type(self).__name__)) def entropy(self, name="entropy"): """Shannon entropy in nats.""" with self._name_scope(name): return self._entropy() def _mean(self): raise NotImplementedError("mean is not implemented: {}".format( type(self).__name__)) def mean(self, name="mean"): """Mean.""" with self._name_scope(name): return self._mean() def _quantile(self, value): raise NotImplementedError("quantile is not implemented: {}".format( type(self).__name__)) def _call_quantile(self, value, name, **kwargs): with self._name_scope(name, values=[value]): value = _convert_to_tensor( value, name="value", preferred_dtype=self.dtype) return self._quantile(value, **kwargs) def quantile(self, value, name="quantile"): """Quantile function. Aka "inverse cdf" or "percent point function". Given random variable `X` and `p in [0, 1]`, the `quantile` is: ```none quantile(p) := x such that P[X <= x] == p ``` Args: value: `float` or `double` `Tensor`. name: Python `str` prepended to names of ops created by this function. Returns: quantile: a `Tensor` of shape `sample_shape(x) + self.batch_shape` with values of type `self.dtype`. """ return self._call_quantile(value, name) def _variance(self): raise NotImplementedError("variance is not implemented: {}".format( type(self).__name__)) def variance(self, name="variance"): """Variance. Variance is defined as, ```none Var = E[(X - E[X])**2] ``` where `X` is the random variable associated with this distribution, `E` denotes expectation, and `Var.shape = batch_shape + event_shape`. Args: name: Python `str` prepended to names of ops created by this function. Returns: variance: Floating-point `Tensor` with shape identical to `batch_shape + event_shape`, i.e., the same shape as `self.mean()`. """ with self._name_scope(name): try: return self._variance() except NotImplementedError as original_exception: try: return math_ops.square(self._stddev()) except NotImplementedError: raise original_exception def _stddev(self): raise NotImplementedError("stddev is not implemented: {}".format( type(self).__name__)) def stddev(self, name="stddev"): """Standard deviation. Standard deviation is defined as, ```none stddev = E[(X - E[X])**2]**0.5 ``` where `X` is the random variable associated with this distribution, `E` denotes expectation, and `stddev.shape = batch_shape + event_shape`. Args: name: Python `str` prepended to names of ops created by this function. 
Returns: stddev: Floating-point `Tensor` with shape identical to `batch_shape + event_shape`, i.e., the same shape as `self.mean()`. """ with self._name_scope(name): try: return self._stddev() except NotImplementedError as original_exception: try: return math_ops.sqrt(self._variance()) except NotImplementedError: raise original_exception def _covariance(self): raise NotImplementedError("covariance is not implemented: {}".format( type(self).__name__)) def covariance(self, name="covariance"): """Covariance. Covariance is (possibly) defined only for non-scalar-event distributions. For example, for a length-`k`, vector-valued distribution, it is calculated as, ```none Cov[i, j] = Covariance(X_i, X_j) = E[(X_i - E[X_i]) (X_j - E[X_j])] ``` where `Cov` is a (batch of) `k x k` matrix, `0 <= (i, j) < k`, and `E` denotes expectation. Alternatively, for non-vector, multivariate distributions (e.g., matrix-valued, Wishart), `Covariance` shall return a (batch of) matrices under some vectorization of the events, i.e., ```none Cov[i, j] = Covariance(Vec(X)_i, Vec(X)_j) = [as above] ``` where `Cov` is a (batch of) `k' x k'` matrices, `0 <= (i, j) < k' = reduce_prod(event_shape)`, and `Vec` is some function mapping indices of this distribution's event dimensions to indices of a length-`k'` vector. Args: name: Python `str` prepended to names of ops created by this function. Returns: covariance: Floating-point `Tensor` with shape `[B1, ..., Bn, k', k']` where the first `n` dimensions are batch coordinates and `k' = reduce_prod(self.event_shape)`. """ with self._name_scope(name): return self._covariance() def _mode(self): raise NotImplementedError("mode is not implemented: {}".format( type(self).__name__)) def mode(self, name="mode"): """Mode.""" with self._name_scope(name): return self._mode() def _cross_entropy(self, other): return kullback_leibler.cross_entropy( self, other, allow_nan_stats=self.allow_nan_stats) def cross_entropy(self, other, name="cross_entropy"): """Computes the (Shannon) cross entropy. Denote this distribution (`self`) by `P` and the `other` distribution by `Q`. Assuming `P, Q` are absolutely continuous with respect to one another and permit densities `p(x) dr(x)` and `q(x) dr(x)`, (Shannon) cross entropy is defined as: ```none H[P, Q] = E_p[-log q(X)] = -int_F p(x) log q(x) dr(x) ``` where `F` denotes the support of the random variable `X ~ P`. Args: other: `tfp.distributions.Distribution` instance. name: Python `str` prepended to names of ops created by this function. Returns: cross_entropy: `self.dtype` `Tensor` with shape `[B1, ..., Bn]` representing `n` different calculations of (Shannon) cross entropy. """ with self._name_scope(name): return self._cross_entropy(other) def _kl_divergence(self, other): return kullback_leibler.kl_divergence( self, other, allow_nan_stats=self.allow_nan_stats) def kl_divergence(self, other, name="kl_divergence"): """Computes the Kullback--Leibler divergence. Denote this distribution (`self`) by `p` and the `other` distribution by `q`. Assuming `p, q` are absolutely continuous with respect to reference measure `r`, the KL divergence is defined as: ```none KL[p, q] = E_p[log(p(X)/q(X))] = -int_F p(x) log q(x) dr(x) + int_F p(x) log p(x) dr(x) = H[p, q] - H[p] ``` where `F` denotes the support of the random variable `X ~ p`, `H[., .]` denotes (Shannon) cross entropy, and `H[.]` denotes (Shannon) entropy. Args: other: `tfp.distributions.Distribution` instance. name: Python `str` prepended to names of ops created by this function.
Returns: kl_divergence: `self.dtype` `Tensor` with shape `[B1, ..., Bn]` representing `n` different calculations of the Kullback-Leibler divergence. """ with self._name_scope(name): return self._kl_divergence(other) def __str__(self): return ("tfp.distributions.{type_name}(" "\"{self_name}\"" "{maybe_batch_shape}" "{maybe_event_shape}" ", dtype={dtype})".format( type_name=type(self).__name__, self_name=self.name, maybe_batch_shape=(", batch_shape={}".format(self.batch_shape) if self.batch_shape.ndims is not None else ""), maybe_event_shape=(", event_shape={}".format(self.event_shape) if self.event_shape.ndims is not None else ""), dtype=self.dtype.name)) def __repr__(self): return ("<tfp.distributions.{type_name} " "'{self_name}'" " batch_shape={batch_shape}" " event_shape={event_shape}" " dtype={dtype}>".format( type_name=type(self).__name__, self_name=self.name, batch_shape=self.batch_shape, event_shape=self.event_shape, dtype=self.dtype.name)) @contextlib.contextmanager def _name_scope(self, name=None, values=None): """Helper function to standardize op scope.""" with ops.name_scope(self.name): with ops.name_scope(name, values=( ([] if values is None else values) + self._graph_parents)) as scope: yield scope def _expand_sample_shape_to_vector(self, x, name): """Helper to `sample` which ensures input is 1D.""" x_static_val = tensor_util.constant_value(x) if x_static_val is None: prod = math_ops.reduce_prod(x) else: prod = np.prod(x_static_val, dtype=x.dtype.as_numpy_dtype()) ndims = x.get_shape().ndims # != sample_ndims if ndims is None: # Maybe expand_dims. ndims = array_ops.rank(x) expanded_shape = util.pick_vector( math_ops.equal(ndims, 0), np.array([1], dtype=np.int32), array_ops.shape(x)) x = array_ops.reshape(x, expanded_shape) elif ndims == 0: # Definitely expand_dims. if x_static_val is not None: x = ops.convert_to_tensor( np.array([x_static_val], dtype=x.dtype.as_numpy_dtype()), name=name) else: x = array_ops.reshape(x, [1]) elif ndims != 1: raise ValueError("Input is neither scalar nor vector.") return x, prod def _set_sample_static_shape(self, x, sample_shape): """Helper to `sample`; sets static shape info.""" # Set shape hints. sample_shape = tensor_shape.TensorShape( tensor_util.constant_value(sample_shape)) ndims = x.get_shape().ndims sample_ndims = sample_shape.ndims batch_ndims = self.batch_shape.ndims event_ndims = self.event_shape.ndims # Infer rank(x). if (ndims is None and sample_ndims is not None and batch_ndims is not None and event_ndims is not None): ndims = sample_ndims + batch_ndims + event_ndims x.set_shape([None] * ndims) # Infer sample shape. if ndims is not None and sample_ndims is not None: shape = sample_shape.concatenate([None]*(ndims - sample_ndims)) x.set_shape(x.get_shape().merge_with(shape)) # Infer event shape. if ndims is not None and event_ndims is not None: shape = tensor_shape.TensorShape( [None]*(ndims - event_ndims)).concatenate(self.event_shape) x.set_shape(x.get_shape().merge_with(shape)) # Infer batch shape. 
if batch_ndims is not None: if ndims is not None: if sample_ndims is None and event_ndims is not None: sample_ndims = ndims - batch_ndims - event_ndims elif event_ndims is None and sample_ndims is not None: event_ndims = ndims - batch_ndims - sample_ndims if sample_ndims is not None and event_ndims is not None: shape = tensor_shape.TensorShape([None]*sample_ndims).concatenate( self.batch_shape).concatenate([None]*event_ndims) x.set_shape(x.get_shape().merge_with(shape)) return x def _is_scalar_helper(self, static_shape, dynamic_shape_fn): """Implementation for `is_scalar_batch` and `is_scalar_event`.""" if static_shape.ndims is not None: return static_shape.ndims == 0 shape = dynamic_shape_fn() if (shape.get_shape().ndims is not None and shape.get_shape().dims[0].value is not None): # If the static_shape is correctly written then we should never execute # this branch. We keep it just in case there's some unimagined corner # case. return shape.get_shape().as_list() == [0] return math_ops.equal(array_ops.shape(shape)[0], 0)
apache-2.0
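A note on the pattern in the record above: `Distribution` never forces a subclass to implement both `prob` and `log_prob` (or both `cdf` and `log_cdf`). Each public method first tries its own underscore hook and, on `NotImplementedError`, falls back to a log/exp transform of the sibling, re-raising the original exception only when both hooks are missing. A minimal, framework-free sketch of that dispatch follows; the `Exponential` subclass here is a hypothetical example for illustration, not part of the TensorFlow source:

```python
import math

class MinimalDistribution:
    # Subclasses override _log_prob and/or _prob; the public wrappers
    # supply the missing one via a log/exp transform of its sibling.
    def _log_prob(self, value):
        raise NotImplementedError
    def _prob(self, value):
        raise NotImplementedError
    def log_prob(self, value):
        try:
            return self._log_prob(value)
        except NotImplementedError as original_exception:
            try:
                return math.log(self._prob(value))
            except NotImplementedError:
                raise original_exception
    def prob(self, value):
        try:
            return self._prob(value)
        except NotImplementedError as original_exception:
            try:
                return math.exp(self._log_prob(value))
            except NotImplementedError:
                raise original_exception

class Exponential(MinimalDistribution):
    def __init__(self, rate):
        self.rate = rate
    def _log_prob(self, value):  # only the log form is implemented
        return math.log(self.rate) - self.rate * value

print(Exponential(rate=2.0).prob(0.5))  # exp(log 2 - 1) ~= 0.7358
```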
Jorge-Rodriguez/ansible
test/units/modules/network/f5/test_bigip_firewall_dos_profile.py
21
3389
# -*- coding: utf-8 -*- # # Copyright: (c) 2018, F5 Networks Inc. # GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import json import pytest import sys if sys.version_info < (2, 7): pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7") from ansible.module_utils.basic import AnsibleModule try: from library.modules.bigip_firewall_dos_profile import ApiParameters from library.modules.bigip_firewall_dos_profile import ModuleParameters from library.modules.bigip_firewall_dos_profile import ModuleManager from library.modules.bigip_firewall_dos_profile import ArgumentSpec # In Ansible 2.8, Ansible changed import paths. from test.units.compat import unittest from test.units.compat.mock import Mock from test.units.compat.mock import patch from test.units.modules.utils import set_module_args except ImportError: from ansible.modules.network.f5.bigip_firewall_dos_profile import ApiParameters from ansible.modules.network.f5.bigip_firewall_dos_profile import ModuleParameters from ansible.modules.network.f5.bigip_firewall_dos_profile import ModuleManager from ansible.modules.network.f5.bigip_firewall_dos_profile import ArgumentSpec # Ansible 2.8 imports from units.compat import unittest from units.compat.mock import Mock from units.compat.mock import patch from units.modules.utils import set_module_args fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures') fixture_data = {} def load_fixture(name): path = os.path.join(fixture_path, name) if path in fixture_data: return fixture_data[path] with open(path) as f: data = f.read() try: data = json.loads(data) except Exception: pass fixture_data[path] = data return data class TestParameters(unittest.TestCase): def test_module_parameters(self): args = dict( name='foo', description='my description', threshold_sensitivity='low', default_whitelist='whitelist1' ) p = ModuleParameters(params=args) assert p.name == 'foo' assert p.description == 'my description' assert p.threshold_sensitivity == 'low' assert p.default_whitelist == '/Common/whitelist1' class TestManager(unittest.TestCase): def setUp(self): self.spec = ArgumentSpec() def test_create(self, *args): set_module_args(dict( name='foo', description='this is a description', threshold_sensitivity='low', default_whitelist='whitelist1', password='password', server='localhost', user='admin' )) module = AnsibleModule( argument_spec=self.spec.argument_spec, supports_check_mode=self.spec.supports_check_mode ) mm = ModuleManager(module=module) # Override methods to force specific logic in the module to happen mm.exists = Mock(side_effect=[False, True]) mm.create_on_device = Mock(return_value=True) results = mm.exec_module() assert results['changed'] is True assert results['description'] == 'this is a description'
gpl-3.0
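The `test_create` case above hinges on `Mock(side_effect=[False, True])`: successive calls to the stub return successive list entries, so the module first sees the profile as absent and then, after `create_on_device`, as present. A tiny standalone sketch of that stubbing idiom, using the standard-library `unittest.mock` rather than the compat shims imported above:

```python
from unittest.mock import Mock

# Each call consumes the next side_effect entry in order.
exists = Mock(side_effect=[False, True])
assert exists() is False   # pre-create check: profile absent
assert exists() is True    # post-create check: profile present
assert exists.call_count == 2
```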
skidzo/sympy
sympy/concrete/expr_with_limits.py
9
15677
from __future__ import print_function, division from sympy.core.add import Add from sympy.core.expr import Expr from sympy.core.mul import Mul from sympy.core.relational import Equality from sympy.sets.sets import Interval from sympy.core.singleton import S from sympy.core.symbol import Symbol from sympy.core.sympify import sympify from sympy.core.compatibility import is_sequence, range from sympy.core.containers import Tuple from sympy.functions.elementary.piecewise import piecewise_fold from sympy.utilities import flatten from sympy.utilities.iterables import sift from sympy.matrices import Matrix from sympy.tensor.indexed import Idx def _process_limits(*symbols): """Process the list of symbols and convert them to canonical limits, storing them as Tuple(symbol, lower, upper). The orientation of the function is also returned when the upper limit is missing so (x, 1, None) becomes (x, None, 1) and the orientation is changed. """ limits = [] orientation = 1 for V in symbols: if isinstance(V, Symbol) or getattr(V, '_diff_wrt', False): if isinstance(V, Idx): if V.lower is None or V.upper is None: limits.append(Tuple(V)) else: limits.append(Tuple(V, V.lower, V.upper)) else: limits.append(Tuple(V)) continue elif is_sequence(V, Tuple): V = sympify(flatten(V)) if isinstance(V[0], (Symbol, Idx)) or getattr(V[0], '_diff_wrt', False): newsymbol = V[0] if len(V) == 2 and isinstance(V[1], Interval): V[1:] = [V[1].start, V[1].end] if len(V) == 3: if V[1] is None and V[2] is not None: nlim = [V[2]] elif V[1] is not None and V[2] is None: orientation *= -1 nlim = [V[1]] elif V[1] is None and V[2] is None: nlim = [] else: nlim = V[1:] limits.append(Tuple(newsymbol, *nlim)) if isinstance(V[0], Idx): if V[0].lower is not None and not bool(nlim[0] >= V[0].lower): raise ValueError("Summation exceeds Idx lower range.") if V[0].upper is not None and not bool(nlim[1] <= V[0].upper): raise ValueError("Summation exceeds Idx upper range.") continue elif len(V) == 1 or (len(V) == 2 and V[1] is None): limits.append(Tuple(newsymbol)) continue elif len(V) == 2: limits.append(Tuple(newsymbol, V[1])) continue raise ValueError('Invalid limits given: %s' % str(symbols)) return limits, orientation class ExprWithLimits(Expr): __slots__ = ['is_commutative'] def __new__(cls, function, *symbols, **assumptions): # Any embedded piecewise functions need to be brought out to the # top level so that integration can go into piecewise mode at the # earliest possible moment. 
function = sympify(function) if hasattr(function, 'func') and function.func is Equality: lhs = function.lhs rhs = function.rhs return Equality(cls(lhs, *symbols, **assumptions), \ cls(rhs, *symbols, **assumptions)) function = piecewise_fold(function) if function is S.NaN: return S.NaN if symbols: limits, orientation = _process_limits(*symbols) else: # symbol not provided -- we can still try to compute a general form free = function.free_symbols if len(free) != 1: raise ValueError( "specify dummy variables for %s" % function) limits, orientation = [Tuple(s) for s in free], 1 # denest any nested calls while cls == type(function): limits = list(function.limits) + limits function = function.function # Only limits with lower and upper bounds are supported; the indefinite form # is not supported if any(len(l) != 3 or None in l for l in limits): raise ValueError('ExprWithLimits requires values for lower and upper bounds.') obj = Expr.__new__(cls, **assumptions) arglist = [function] arglist.extend(limits) obj._args = tuple(arglist) obj.is_commutative = function.is_commutative # limits already checked return obj @property def function(self): """Return the function applied across limits. Examples ======== >>> from sympy import Integral >>> from sympy.abc import x >>> Integral(x**2, (x,)).function x**2 See Also ======== limits, variables, free_symbols """ return self._args[0] @property def limits(self): """Return the limits of expression. Examples ======== >>> from sympy import Integral >>> from sympy.abc import x, i >>> Integral(x**i, (i, 1, 3)).limits ((i, 1, 3),) See Also ======== function, variables, free_symbols """ return self._args[1:] @property def variables(self): """Return a list of the dummy variables >>> from sympy import Sum >>> from sympy.abc import x, i >>> Sum(x**i, (i, 1, 3)).variables [i] See Also ======== function, limits, free_symbols as_dummy : Rename dummy variables transform : Perform mapping on the dummy variable """ return [l[0] for l in self.limits] @property def free_symbols(self): """ This method returns the symbols in the object, excluding those that take on a specific value (i.e. the dummy symbols). Examples ======== >>> from sympy import Sum >>> from sympy.abc import x, y >>> Sum(x, (x, y, 1)).free_symbols {y} """ # don't test for any special values -- nominal free symbols # should be returned, e.g. don't return set() if the # function is zero -- treat it like an unevaluated expression. function, limits = self.function, self.limits isyms = function.free_symbols for xab in limits: if len(xab) == 1: isyms.add(xab[0]) continue # take out the target symbol if xab[0] in isyms: isyms.remove(xab[0]) # add in the new symbols for i in xab[1:]: isyms.update(i.free_symbols) return isyms @property def is_number(self): """Return True if the Sum has no free symbols, else False.""" return not self.free_symbols def as_dummy(self): """ Replace instances of the given dummy variables with explicit dummy counterparts to make clear what are dummy variables and what are real-world symbols in an object. Examples ======== >>> from sympy import Integral >>> from sympy.abc import x, y >>> Integral(x, (x, x, y), (y, x, y)).as_dummy() Integral(_x, (_x, x, _y), (_y, x, y)) If the object supports the "integral at" limit ``(x,)`` it is not treated as a dummy, but the explicit form, ``(x, x)`` of length 2 does treat the variable as a dummy.
>>> Integral(x, x).as_dummy() Integral(x, x) >>> Integral(x, (x, x)).as_dummy() Integral(_x, (_x, x)) If there were no dummies in the original expression, then the symbols which cannot be changed by subs() are clearly seen as those with an underscore prefix. See Also ======== variables : Lists the integration variables transform : Perform mapping on the integration variable """ reps = {} f = self.function limits = list(self.limits) for i in range(-1, -len(limits) - 1, -1): xab = list(limits[i]) if len(xab) == 1: continue x = xab[0] xab[0] = x.as_dummy() for j in range(1, len(xab)): xab[j] = xab[j].subs(reps) reps[x] = xab[0] limits[i] = xab f = f.subs(reps) return self.func(f, *limits) def _eval_interval(self, x, a, b): limits = [(i if i[0] != x else (x, a, b)) for i in self.limits] integrand = self.function return self.func(integrand, *limits) def _eval_subs(self, old, new): """ Perform substitutions over non-dummy variables of an expression with limits. Also, can be used to specify point-evaluation of an abstract antiderivative. Examples ======== >>> from sympy import Sum, oo >>> from sympy.abc import s, n >>> Sum(1/n**s, (n, 1, oo)).subs(s, 2) Sum(n**(-2), (n, 1, oo)) >>> from sympy import Integral >>> from sympy.abc import x, a >>> Integral(a*x**2, x).subs(x, 4) Integral(a*x**2, (x, 4)) See Also ======== variables : Lists the integration variables transform : Perform mapping on the dummy variable for integrals change_index : Perform mapping on the sum and product dummy variables """ from sympy.core.function import AppliedUndef, UndefinedFunction func, limits = self.function, list(self.limits) # If one of the expressions we are replacing is used as a func index # one of two things happens. # - the old variable first appears as a free variable # so we perform all free substitutions before it becomes # a func index. # - the old variable first appears as a func index, in # which case we ignore. See change_index. # Reorder limits to match standard mathematical practice for scoping limits.reverse() if not isinstance(old, Symbol) or \ old.free_symbols.intersection(self.free_symbols): sub_into_func = True for i, xab in enumerate(limits): if 1 == len(xab) and old == xab[0]: xab = (old, old) limits[i] = Tuple(xab[0], *[l._subs(old, new) for l in xab[1:]]) if len(xab[0].free_symbols.intersection(old.free_symbols)) != 0: sub_into_func = False break if isinstance(old, AppliedUndef) or isinstance(old, UndefinedFunction): sy2 = set(self.variables).intersection(set(new.atoms(Symbol))) sy1 = set(self.variables).intersection(set(old.args)) if not sy2.issubset(sy1): raise ValueError( "substitution can not create dummy dependencies") sub_into_func = True if sub_into_func: func = func.subs(old, new) else: # old is a Symbol and a dummy variable of some limit for i, xab in enumerate(limits): if len(xab) == 3: limits[i] = Tuple(xab[0], *[l._subs(old, new) for l in xab[1:]]) if old == xab[0]: break # simplify redundant limits (x, x) to (x, ) for i, xab in enumerate(limits): if len(xab) == 2 and (xab[0] - xab[1]).is_zero: limits[i] = Tuple(xab[0], ) # Reorder limits back to representation-form limits.reverse() return self.func(func, *limits) class AddWithLimits(ExprWithLimits): r"""Represents unevaluated oriented additions. Parent class for Integral and Sum. """ def __new__(cls, function, *symbols, **assumptions): # Any embedded piecewise functions need to be brought out to the # top level so that integration can go into piecewise mode at the # earliest possible moment.
# # This constructor only differs from ExprWithLimits # in the application of the orientation variable. Perhaps merge? function = sympify(function) if hasattr(function, 'func') and function.func is Equality: lhs = function.lhs rhs = function.rhs return Equality(cls(lhs, *symbols, **assumptions), \ cls(rhs, *symbols, **assumptions)) function = piecewise_fold(function) if function is S.NaN: return S.NaN if symbols: limits, orientation = _process_limits(*symbols) else: # symbol not provided -- we can still try to compute a general form free = function.free_symbols if len(free) != 1: raise ValueError( " specify dummy variables for %s. If the integrand contains" " more than one free symbol, an integration variable should" " be supplied explicitly e.g., integrate(f(x, y), x)" % function) limits, orientation = [Tuple(s) for s in free], 1 # denest any nested calls while cls == type(function): limits = list(function.limits) + limits function = function.function obj = Expr.__new__(cls, **assumptions) arglist = [orientation*function] arglist.extend(limits) obj._args = tuple(arglist) obj.is_commutative = function.is_commutative # limits already checked return obj def _eval_adjoint(self): if all([x.is_real for x in flatten(self.limits)]): return self.func(self.function.adjoint(), *self.limits) return None def _eval_conjugate(self): if all([x.is_real for x in flatten(self.limits)]): return self.func(self.function.conjugate(), *self.limits) return None def _eval_transpose(self): if all([x.is_real for x in flatten(self.limits)]): return self.func(self.function.transpose(), *self.limits) return None def _eval_factor(self, **hints): if 1 == len(self.limits): summand = self.function.factor(**hints) if summand.is_Mul: out = sift(summand.args, lambda w: w.is_commutative \ and not set(self.variables) & w.free_symbols) return Mul(*out[True])*self.func(Mul(*out[False]), \ *self.limits) else: summand = self.func(self.function, self.limits[0:-1]).factor() if not summand.has(self.variables[-1]): return self.func(1, [self.limits[-1]]).doit()*summand elif isinstance(summand, Mul): return self.func(summand, self.limits[-1]).factor() return self def _eval_expand_basic(self, **hints): summand = self.function.expand(**hints) if summand.is_Add and summand.is_commutative: return Add(*[self.func(i, *self.limits) for i in summand.args]) elif summand.is_Matrix: return Matrix._new(summand.rows, summand.cols, [self.func(i, *self.limits) for i in summand._mat]) elif summand != self.function: return self.func(summand, *self.limits) return self
bsd-3-clause
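The accessors documented in the record above compose naturally; a short sketch showing how limits normalize into `(symbol, lower, upper)` tuples and how bound (dummy) variables drop out of `free_symbols`:

```python
from sympy import Integral, Sum, oo
from sympy.abc import x, n

expr = Integral(x**2, (x, 0, 1))
print(expr.function)   # x**2
print(expr.limits)     # ((x, 0, 1),)
print(expr.variables)  # [x]

s = Sum(1/n**2, (n, 1, oo))
print(s.free_symbols)  # set(): n is a dummy, so nothing is free
print(s.is_number)     # True
```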
newlawrence/poliastro
src/poliastro/twobody/orbit.py
1
20464
from warnings import warn import numpy as np from astropy import units as u from astropy import time from astropy.coordinates import ( Angle, CartesianRepresentation, CartesianDifferential, get_body_barycentric_posvel, ICRS, GCRS ) from poliastro.constants import J2000 from poliastro.twobody.angles import nu_to_M, E_to_nu from poliastro.twobody.propagation import propagate, mean_motion from poliastro.core.elements import rv2coe from poliastro.core.angles import nu_to_M as nu_to_M_fast from poliastro.twobody import rv from poliastro.twobody import classical from poliastro.twobody import equinoctial from poliastro.bodies import Sun, Earth, Moon from poliastro.frames import get_frame, Planes from ._base import BaseState # flake8: noqa ORBIT_FORMAT = "{r_p:.0f} x {r_a:.0f} x {inc:.1f} ({frame}) orbit around {body} at epoch {epoch} ({scale})" class TimeScaleWarning(UserWarning): pass class Orbit(object): """Position and velocity of a body with respect to an attractor at a given time (epoch). Regardless of how the Orbit is created, the implicit reference system is an inertial one. For the specific case of the Solar System, this can be assumed to be the International Celestial Reference System or ICRS. """ def __init__(self, state, epoch, plane): """Constructor. Parameters ---------- state : BaseState Position and velocity or orbital elements. epoch : ~astropy.time.Time Epoch of the orbit. """ self._state = state # type: BaseState self._epoch = epoch # type: time.Time self._plane = plane self._frame = None @property def state(self): """Position and velocity or orbital elements. """ return self._state @property def epoch(self): """Epoch of the orbit. """ return self._epoch @property def plane(self): """Fundamental plane of the frame. """ return self._plane @property def frame(self): """Reference frame of the orbit. .. versionadded:: 0.11.0 """ if self._frame is None: self._frame = get_frame(self.attractor, self._plane, self.epoch) return self._frame @classmethod @u.quantity_input(r=u.m, v=u.m / u.s) def from_vectors(cls, attractor, r, v, epoch=J2000, plane=Planes.EARTH_EQUATOR): """Return `Orbit` from position and velocity vectors. Parameters ---------- attractor : Body Main attractor. r : ~astropy.units.Quantity Position vector wrt attractor center. v : ~astropy.units.Quantity Velocity vector. epoch : ~astropy.time.Time, optional Epoch, default to J2000. plane : ~poliastro.frames.Planes Fundamental plane of the frame. """ assert np.any(r.value), "Position vector must be non zero" ss = rv.RVState( attractor, r, v) return cls(ss, epoch, plane) @classmethod @u.quantity_input(a=u.m, ecc=u.one, inc=u.rad, raan=u.rad, argp=u.rad, nu=u.rad) def from_classical(cls, attractor, a, ecc, inc, raan, argp, nu, epoch=J2000, plane=Planes.EARTH_EQUATOR): """Return `Orbit` from classical orbital elements. Parameters ---------- attractor : Body Main attractor. a : ~astropy.units.Quantity Semi-major axis. ecc : ~astropy.units.Quantity Eccentricity. inc : ~astropy.units.Quantity Inclination raan : ~astropy.units.Quantity Right ascension of the ascending node. argp : ~astropy.units.Quantity Argument of the pericenter. nu : ~astropy.units.Quantity True anomaly. epoch : ~astropy.time.Time, optional Epoch, default to J2000. plane : ~poliastro.frames.Planes Fundamental plane of the frame. 
""" if ecc == 1.0 * u.one: raise ValueError("For parabolic orbits use Orbit.parabolic instead") if not 0 * u.deg <= inc <= 180 * u.deg: raise ValueError("Inclination must be between 0 and 180 degrees") if ecc > 1 and a > 0: raise ValueError("Hyperbolic orbits have negative semimajor axis") ss = classical.ClassicalState( attractor, a * (1 - ecc ** 2), ecc, inc, raan, argp, nu) return cls(ss, epoch, plane) @classmethod @u.quantity_input(p=u.m, f=u.one, g=u.rad, h=u.rad, k=u.rad, L=u.rad) def from_equinoctial(cls, attractor, p, f, g, h, k, L, epoch=J2000, plane=Planes.EARTH_EQUATOR): """Return `Orbit` from modified equinoctial elements. Parameters ---------- attractor : Body Main attractor. p : ~astropy.units.Quantity Semilatus rectum. f : ~astropy.units.Quantity Second modified equinoctial element. g : ~astropy.units.Quantity Third modified equinoctial element. h : ~astropy.units.Quantity Fourth modified equinoctial element. k : ~astropy.units.Quantity Fifth modified equinoctial element. L : ~astropy.units.Quantity True longitude. epoch : ~astropy.time.Time, optional Epoch, default to J2000. plane : ~poliastro.frames.Planes Fundamental plane of the frame. """ ss = equinoctial.ModifiedEquinoctialState( attractor, p, f, g, h, k, L) return cls(ss, epoch, plane) @classmethod def from_body_ephem(cls, body, epoch=None): """Return osculating `Orbit` of a body at a given time. """ # TODO: https://github.com/poliastro/poliastro/issues/445 if not epoch: epoch = time.Time.now().tdb elif epoch.scale != 'tdb': epoch = epoch.tdb warn("Input time was converted to scale='tdb' with value " "{}. Use Time(..., scale='tdb') instead." .format(epoch.tdb.value), TimeScaleWarning) r, v = get_body_barycentric_posvel(body.name, epoch) if body == Moon: # TODO: The attractor is in fact the Earth-Moon Barycenter icrs_cart = r.with_differentials(v.represent_as(CartesianDifferential)) gcrs_cart = ICRS(icrs_cart).transform_to(GCRS(obstime=epoch)).represent_as(CartesianRepresentation) ss = cls.from_vectors( Earth, gcrs_cart.xyz.to(u.km), gcrs_cart.differentials['s'].d_xyz.to(u.km / u.day), epoch ) else: # TODO: The attractor is not really the Sun, but the Solar System Barycenter ss = cls.from_vectors(Sun, r.xyz.to(u.km), v.xyz.to(u.km / u.day), epoch) ss._frame = ICRS() # Hack! return ss @classmethod @u.quantity_input(alt=u.m, inc=u.rad, raan=u.rad, arglat=u.rad) def circular(cls, attractor, alt, inc=0 * u.deg, raan=0 * u.deg, arglat=0 * u.deg, epoch=J2000, plane=Planes.EARTH_EQUATOR): """Return circular `Orbit`. Parameters ---------- attractor : Body Main attractor. alt : ~astropy.units.Quantity Altitude over surface. inc : ~astropy.units.Quantity, optional Inclination, default to 0 deg (equatorial orbit). raan : ~astropy.units.Quantity, optional Right ascension of the ascending node, default to 0 deg. arglat : ~astropy.units.Quantity, optional Argument of latitude, default to 0 deg. epoch: ~astropy.time.Time, optional Epoch, default to J2000. plane : ~poliastro.frames.Planes Fundamental plane of the frame. """ a = attractor.R + alt ecc = 0 * u.one argp = 0 * u.deg return cls.from_classical(attractor, a, ecc, inc, raan, argp, arglat, epoch, plane) @classmethod @u.quantity_input(p=u.m, inc=u.rad, raan=u.rad, argp=u.rad, nu=u.rad) def parabolic(cls, attractor, p, inc, raan, argp, nu, epoch=J2000, plane=Planes.EARTH_EQUATOR): """Return parabolic `Orbit`. Parameters ---------- attractor : Body Main attractor. p : ~astropy.units.Quantity Semilatus rectum or parameter. inc : ~astropy.units.Quantity, optional Inclination. 
raan : ~astropy.units.Quantity Right ascension of the ascending node. argp : ~astropy.units.Quantity Argument of the pericenter. nu : ~astropy.units.Quantity True anomaly. epoch: ~astropy.time.Time, optional Epoch, default to J2000. plane : ~poliastro.frames.Planes Fundamental plane of the frame. """ ss = classical.ClassicalState( attractor, p, 1.0 * u.one, inc, raan, argp, nu) return cls(ss, epoch, plane) def represent_as(self, representation): """Converts the orbit to a specific representation. .. versionadded:: 0.11.0 Parameters ---------- representation : ~astropy.coordinates.BaseRepresentation Representation object to use. It must be a class, not an instance. Examples -------- >>> from poliastro.examples import iss >>> from astropy.coordinates import CartesianRepresentation, SphericalRepresentation >>> iss.represent_as(CartesianRepresentation) <CartesianRepresentation (x, y, z) in km (859.07256, -4137.20368, 5295.56871) (has differentials w.r.t.: 's')> >>> iss.represent_as(CartesianRepresentation).xyz <Quantity [ 859.07256, -4137.20368, 5295.56871] km> >>> iss.represent_as(CartesianRepresentation).differentials['s'] <CartesianDifferential (d_x, d_y, d_z) in km / s (7.37289205, 2.08223573, 0.43999979)> >>> iss.represent_as(CartesianRepresentation).differentials['s'].d_xyz <Quantity [7.37289205, 2.08223573, 0.43999979] km / s> >>> iss.represent_as(SphericalRepresentation) <SphericalRepresentation (lon, lat, distance) in (rad, rad, km) (4.91712525, 0.89732339, 6774.76995296) (has differentials w.r.t.: 's')> """ # As we do not know the differentials, we first convert to cartesian, # then let the frame represent_as do the rest # TODO: Perhaps this should be public API as well? cartesian = CartesianRepresentation( *self.r, differentials=CartesianDifferential(*self.v) ) # See Orbit._sample for reasoning about the usage of a protected method coords = self.frame._replicate(cartesian, representation_type='cartesian') return coords.represent_as(representation) def to_icrs(self): """Creates a new Orbit object with its coordinates transformed to ICRS. Notice that, strictly speaking, the center of ICRS is the Solar System Barycenter and not the Sun, and therefore these orbits cannot be propagated in the context of the two body problem. Therefore, this function exists merely for practical purposes. .. versionadded:: 0.11.0 """ coords = self.frame.realize_frame( self.represent_as(CartesianRepresentation) ) coords.representation_type = CartesianRepresentation icrs_cart = coords.transform_to(ICRS).represent_as(CartesianRepresentation) # TODO: The attractor is in fact the Solar System Barycenter ss = self.from_vectors( Sun, r=icrs_cart.xyz, v=icrs_cart.differentials['s'].d_xyz, epoch=self.epoch ) ss._frame = ICRS() # Hack! return ss def __str__(self): if self.a > 1e7 * u.km: unit = u.au else: unit = u.km return ORBIT_FORMAT.format( r_p=self.r_p.to(unit).value, r_a=self.r_a.to(unit), inc=self.inc.to(u.deg), frame=self.frame.__class__.__name__, body=self.attractor, epoch=self.epoch, scale=self.epoch.scale.upper(), ) def __repr__(self): return self.__str__() def propagate(self, value, method=mean_motion, rtol=1e-10, **kwargs): """Propagates an orbit. If value is true anomaly, propagate orbit to this anomaly and return the result. Otherwise, if time is provided, propagate this `Orbit` some `time` and return the result. Parameters ---------- value : Multiple options True anomaly values or time values. If given an angle, it will always propagate forward. 
rtol : float, optional Relative tolerance for the propagation algorithm, default to 1e-10. method : function, optional Method used for propagation **kwargs parameters used in perturbation models """ if hasattr(value, "unit") and value.unit in ('rad', 'deg'): p, ecc, inc, raan, argp, _ = rv2coe(self.attractor.k.to(u.km ** 3 / u.s ** 2).value, self.r.to(u.km).value, self.v.to(u.km / u.s).value) # Compute time of flight for correct epoch M = nu_to_M(self.nu, self.ecc) new_M = nu_to_M(value, self.ecc) time_of_flight = Angle(new_M - M).wrap_at(360 * u.deg) / self.n return self.from_classical(self.attractor, p / (1.0 - ecc ** 2) * u.km, ecc * u.one, inc * u.rad, raan * u.rad, argp * u.rad, value, epoch=self.epoch + time_of_flight, plane=self._plane) else: if isinstance(value, time.Time) and not isinstance(value, time.TimeDelta): time_of_flight = value - self.epoch else: time_of_flight = time.TimeDelta(value) return propagate(self, time_of_flight, method=method, rtol=rtol, **kwargs) def sample(self, values=None, method=mean_motion): """Samples an orbit to some specified time values. .. versionadded:: 0.8.0 Parameters ---------- values : Multiple options Number of interval points (default to 100), True anomaly values, Time values. method : function, optional Method used for propagation Returns ------- positions: ~astropy.coordinates.BaseCoordinateFrame Array of x, y, z positions, with proper times as the frame attributes if supported. Notes ----- When specifying a number of points, the initial and final position is present twice inside the result (first and last row). This is more useful for plotting. Examples -------- >>> from astropy import units as u >>> from poliastro.examples import iss >>> iss.sample() # doctest: +ELLIPSIS <GCRS Coordinate ...> >>> iss.sample(10) # doctest: +ELLIPSIS <GCRS Coordinate ...> >>> iss.sample([0, 180] * u.deg) # doctest: +ELLIPSIS <GCRS Coordinate ...> >>> iss.sample([0, 10, 20] * u.minute) # doctest: +ELLIPSIS <GCRS Coordinate ...> >>> iss.sample([iss.epoch + iss.period / 2]) # doctest: +ELLIPSIS <GCRS Coordinate ...> """ if values is None: return self.sample(100, method) elif isinstance(values, int): if self.ecc < 1: # first sample eccentric anomaly, then transform into true anomaly # why sampling eccentric anomaly uniformly to minimize error in the apocenter, see # http://www.dtic.mil/dtic/tr/fulltext/u2/a605040.pdf # Start from pericenter E_values = np.linspace(0, 2 * np.pi, values) * u.rad nu_values = E_to_nu(E_values, self.ecc) else: # Select a sensible limiting value for non-closed orbits # This corresponds to max(r = 3p, r = self.r) # We have to wrap nu in [-180, 180) to compare it with the output of # the arc cosine, which is in the range [0, 180) # Start from -nu_limit wrapped_nu = self.nu if self.nu < 180 * u.deg else self.nu - 360 * u.deg nu_limit = max(np.arccos(-(1 - 1 / 3.) 
/ self.ecc), abs(wrapped_nu)) nu_values = np.linspace(-nu_limit, nu_limit, values) return self.sample(nu_values, method) elif hasattr(values, "unit") and values.unit in ('rad', 'deg'): values = self._generate_time_values(values) elif isinstance(values, time.Time): values = values - self.epoch elif isinstance(values, list): # A list of Times is assumed values = [(value - self.epoch).sec for value in values] * u.s return self._sample(values, method) def _sample(self, time_values, method=mean_motion): positions = method(self, time_values.to(u.s).value) data = CartesianRepresentation(positions[0] * u.km, xyz_axis=1) # If the frame supports obstime, set the time values kwargs = {} if 'obstime' in self.frame.frame_attributes: kwargs['obstime'] = self.epoch + time_values else: warn("Frame {} does not support 'obstime', time values were not returned".format(self.frame.__class__)) # Use of a protected method instead of frame.realize_frame # because the latter does not let the user choose the representation type # in one line despite its parameter names, see # https://github.com/astropy/astropy/issues/7784 return self.frame._replicate(data, representation_type='cartesian', **kwargs) def _generate_time_values(self, nu_vals): # Subtract current anomaly to start from the desired point ecc = self.ecc.value nu = self.nu.to(u.rad).value M_vals = [nu_to_M_fast(nu_val, ecc) - nu_to_M_fast(nu, ecc) for nu_val in nu_vals.to(u.rad).value] * u.rad time_values = (M_vals / self.n).decompose() return time_values def apply_maneuver(self, maneuver, intermediate=False): """Returns resulting `Orbit` after applying maneuver to self. Optionally return intermediate states (default to False). Parameters ---------- maneuver : Maneuver Maneuver to apply. intermediate : bool, optional Return intermediate states, default to False. """ orbit_new = self # Initialize states = [] attractor = self.attractor for delta_t, delta_v in maneuver: if not delta_t == 0 * u.s: orbit_new = orbit_new.propagate(delta_t) r, v = orbit_new.rv() vnew = v + delta_v orbit_new = self.from_vectors(attractor, r, vnew, orbit_new.epoch) states.append(orbit_new) if intermediate: res = states else: res = orbit_new return res # Delegated properties (syntactic sugar) def __getattr__(self, item): if hasattr(self.state, item): def delegated_(self_): return getattr(self_.state, item) # Use class docstring to properly translate properties, see # https://stackoverflow.com/a/38118315/554319 delegated_.__doc__ = getattr(self.state.__class__, item).__doc__ # Transform to a property delegated = property(delegated_) else: raise AttributeError("'{}' object has no attribute '{}'".format(self.__class__, item)) # Bind the attribute setattr(self.__class__, item, delegated) # Return the newly bound attribute return getattr(self, item) def __getstate__(self): return self.state, self.epoch def __setstate__(self, state): self._state = state[0] self._epoch = state[1]
mit
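For orientation, a quick usage sketch against the constructors defined in the record above (this assumes the poliastro ~0.11-era API shown in this file; names and defaults may differ in later releases):

```python
from astropy import units as u
from poliastro.bodies import Earth
from poliastro.twobody.orbit import Orbit

# Circular 500 km orbit via the `circular` classmethod, then
# propagated by a plain time quantity, which Orbit.propagate wraps
# in an astropy TimeDelta internally.
ss = Orbit.circular(Earth, alt=500 * u.km, inc=51.6 * u.deg)
later = ss.propagate(30 * u.min)
print(later.r)      # position vector 30 minutes after epoch
print(later.epoch)  # J2000 + 30 min
```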
benoitsteiner/tensorflow-xsmm
tensorflow/contrib/cloud/python/ops/bigquery_reader_ops_test.py
75
9668
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for BigQueryReader Op.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import json import os import re import socket import threading from six.moves import SimpleHTTPServer from six.moves import socketserver from tensorflow.contrib.cloud.python.ops import bigquery_reader_ops as cloud from tensorflow.core.example import example_pb2 from tensorflow.core.framework import types_pb2 from tensorflow.python.framework import dtypes from tensorflow.python.ops import array_ops from tensorflow.python.ops import data_flow_ops from tensorflow.python.ops import parsing_ops from tensorflow.python.platform import test from tensorflow.python.platform import tf_logging as logging from tensorflow.python.util import compat _PROJECT = "test-project" _DATASET = "test-dataset" _TABLE = "test-table" # List representation of the test rows in the 'test-table' in BigQuery. # The schema for each row is: [int64, string, float]. # The values for rows are generated such that some columns have null values. The # general formula here is: # - The int64 column is present in every row. # - The string column is only available in even rows. # - The float column is only available in every third row. _ROWS = [[0, "s_0", 0.1], [1, None, None], [2, "s_2", None], [3, None, 3.1], [4, "s_4", None], [5, None, None], [6, "s_6", 6.1], [7, None, None], [8, "s_8", None], [9, None, 9.1]] # Schema for 'test-table'. # The schema currently has three columns: int64, string, and float _SCHEMA = { "kind": "bigquery#table", "id": "test-project:test-dataset.test-table", "schema": { "fields": [{ "name": "int64_col", "type": "INTEGER", "mode": "NULLABLE" }, { "name": "string_col", "type": "STRING", "mode": "NULLABLE" }, { "name": "float_col", "type": "FLOAT", "mode": "NULLABLE" }] } } def _ConvertRowToExampleProto(row): """Converts the input row to an Example proto. Args: row: Input Row instance. Returns: An Example proto initialized with row values. """ example = example_pb2.Example() example.features.feature["int64_col"].int64_list.value.append(row[0]) if row[1] is not None: example.features.feature["string_col"].bytes_list.value.append( compat.as_bytes(row[1])) if row[2] is not None: example.features.feature["float_col"].float_list.value.append(row[2]) return example class IPv6TCPServer(socketserver.TCPServer): address_family = socket.AF_INET6 class FakeBigQueryServer(threading.Thread): """Fake http server to return schema and data for sample table.""" def __init__(self, address, port): """Creates a FakeBigQueryServer. Args: address: Server address port: Server port. Pass 0 to automatically pick an empty port. 
""" threading.Thread.__init__(self) self.handler = BigQueryRequestHandler try: self.httpd = socketserver.TCPServer((address, port), self.handler) self.host_port = "{}:{}".format(*self.httpd.server_address) except IOError: self.httpd = IPv6TCPServer((address, port), self.handler) self.host_port = "[{}]:{}".format(*self.httpd.server_address) def run(self): self.httpd.serve_forever() def shutdown(self): self.httpd.shutdown() self.httpd.socket.close() class BigQueryRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): """Responds to BigQuery HTTP requests. Attributes: num_rows: num_rows in the underlying table served by this class. """ num_rows = 0 def do_GET(self): if "data?maxResults=" not in self.path: # This is a schema request. _SCHEMA["numRows"] = self.num_rows response = json.dumps(_SCHEMA) else: # This is a data request. # # Extract max results and start index. max_results = int(re.findall(r"maxResults=(\d+)", self.path)[0]) start_index = int(re.findall(r"startIndex=(\d+)", self.path)[0]) # Send the rows as JSON. rows = [] for row in _ROWS[start_index:start_index + max_results]: row_json = { "f": [{ "v": str(row[0]) }, { "v": str(row[1]) if row[1] is not None else None }, { "v": str(row[2]) if row[2] is not None else None }] } rows.append(row_json) response = json.dumps({ "kind": "bigquery#table", "id": "test-project:test-dataset.test-table", "rows": rows }) self.send_response(200) self.end_headers() self.wfile.write(compat.as_bytes(response)) def _SetUpQueue(reader): """Sets up a queue for a reader.""" queue = data_flow_ops.FIFOQueue(8, [types_pb2.DT_STRING], shapes=()) key, value = reader.read(queue) queue.enqueue_many(reader.partitions()).run() queue.close().run() return key, value class BigQueryReaderOpsTest(test.TestCase): def setUp(self): super(BigQueryReaderOpsTest, self).setUp() self.server = FakeBigQueryServer("localhost", 0) self.server.start() logging.info("server address is %s", self.server.host_port) # An override to bypass the GCP auth token retrieval logic # in google_auth_provider.cc. os.environ["GOOGLE_AUTH_TOKEN_FOR_TESTING"] = "not-used" def tearDown(self): self.server.shutdown() super(BigQueryReaderOpsTest, self).tearDown() def _ReadAndCheckRowsUsingFeatures(self, num_rows): self.server.handler.num_rows = num_rows with self.test_session() as sess: feature_configs = { "int64_col": parsing_ops.FixedLenFeature( [1], dtype=dtypes.int64), "string_col": parsing_ops.FixedLenFeature( [1], dtype=dtypes.string, default_value="s_default"), } reader = cloud.BigQueryReader( project_id=_PROJECT, dataset_id=_DATASET, table_id=_TABLE, num_partitions=4, features=feature_configs, timestamp_millis=1, test_end_point=self.server.host_port) key, value = _SetUpQueue(reader) seen_rows = [] features = parsing_ops.parse_example( array_ops.reshape(value, [1]), feature_configs) for _ in range(num_rows): int_value, str_value = sess.run( [features["int64_col"], features["string_col"]]) # Parse values returned from the session. self.assertEqual(int_value.shape, (1, 1)) self.assertEqual(str_value.shape, (1, 1)) int64_col = int_value[0][0] string_col = str_value[0][0] seen_rows.append(int64_col) # Compare. 
expected_row = _ROWS[int64_col] self.assertEqual(int64_col, expected_row[0]) self.assertEqual( compat.as_str(string_col), ("s_%d" % int64_col) if expected_row[1] else "s_default") self.assertItemsEqual(seen_rows, range(num_rows)) with self.assertRaisesOpError("is closed and has insufficient elements " "\\(requested 1, current size 0\\)"): sess.run([key, value]) def testReadingSingleRowUsingFeatures(self): self._ReadAndCheckRowsUsingFeatures(1) def testReadingMultipleRowsUsingFeatures(self): self._ReadAndCheckRowsUsingFeatures(10) def testReadingMultipleRowsUsingColumns(self): num_rows = 10 self.server.handler.num_rows = num_rows with self.test_session() as sess: reader = cloud.BigQueryReader( project_id=_PROJECT, dataset_id=_DATASET, table_id=_TABLE, num_partitions=4, columns=["int64_col", "float_col", "string_col"], timestamp_millis=1, test_end_point=self.server.host_port) key, value = _SetUpQueue(reader) seen_rows = [] for row_index in range(num_rows): returned_row_id, example_proto = sess.run([key, value]) example = example_pb2.Example() example.ParseFromString(example_proto) self.assertIn("int64_col", example.features.feature) feature = example.features.feature["int64_col"] self.assertEqual(len(feature.int64_list.value), 1) int64_col = feature.int64_list.value[0] seen_rows.append(int64_col) # Create our expected Example. expected_example = example_pb2.Example() expected_example = _ConvertRowToExampleProto(_ROWS[int64_col]) # Compare. self.assertProtoEquals(example, expected_example) self.assertEqual(row_index, int(returned_row_id)) self.assertItemsEqual(seen_rows, range(num_rows)) with self.assertRaisesOpError("is closed and has insufficient elements " "\\(requested 1, current size 0\\)"): sess.run([key, value]) if __name__ == "__main__": test.main()
apache-2.0
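The fixture above relies on a small trick worth calling out: binding a `TCPServer` to port 0 makes the OS assign a free port, which the test then reads back from `server_address`. A standalone sketch of that pattern with the same `six.moves` imports:

```python
import threading
from six.moves import SimpleHTTPServer, socketserver

# Port 0 asks the OS for any free port; server_address reports the
# port actually bound, so parallel test runs never collide.
httpd = socketserver.TCPServer(
    ("localhost", 0), SimpleHTTPServer.SimpleHTTPRequestHandler)
host, port = httpd.server_address[:2]
worker = threading.Thread(target=httpd.serve_forever)
worker.daemon = True
worker.start()
print("serving on %s:%d" % (host, port))
httpd.shutdown()
httpd.socket.close()
```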
ImageEngine/gaffer
python/GafferSceneUI/PointConstraintUI.py
13
2564
########################################################################## # # Copyright (c) 2015, Image Engine Design Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above # copyright notice, this list of conditions and the following # disclaimer. # # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided with # the distribution. # # * Neither the name of John Haddon nor the names of # any other contributors to this software may be used to endorse or # promote products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## import Gaffer import GafferScene Gaffer.Metadata.registerNode( GafferScene.PointConstraint, "description", """ Translates objects so that they are constrained to the world space position of the target. Leaves the scale and orientation of the object untouched. """, plugs = { "offset" : [ "description", """ A world space translation offset applied on top of the target position. """, ], "xEnabled" : [ "description", """ Enables the constraint in the world space x axis. """, ], "yEnabled" : [ "description", """ Enables the constraint in the world space y axis. """, ], "zEnabled" : [ "description", """ Enables the constraint in the world space z axis. """, ], } )
bsd-3-clause
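The module above only registers metadata; reading it back is the other half of the API. A sketch of the query side, assuming a Gaffer release where `Gaffer.Metadata.value` is available (older versions exposed `nodeValue`/`plugValue` instead, so verify against the Gaffer version in use):

```python
import Gaffer
import GafferScene

node = GafferScene.PointConstraint()
# Node-level description registered by registerNode above.
print(Gaffer.Metadata.value(node, "description"))
# Per-plug description, e.g. for the world space offset.
print(Gaffer.Metadata.value(node["offset"], "description"))
```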
veger/ansible
lib/ansible/modules/cloud/rackspace/rax.py
39
33459
#!/usr/bin/python # Copyright: Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: rax short_description: create / delete an instance in Rackspace Public Cloud description: - creates / deletes a Rackspace Public Cloud instance and optionally waits for it to be 'running'. version_added: "1.2" options: auto_increment: description: - Whether or not to increment a single number with the name of the created servers. Only applicable when used with the I(group) attribute or meta key. type: bool default: 'yes' version_added: 1.5 boot_from_volume: description: - Whether or not to boot the instance from a Cloud Block Storage volume. If C(yes) and I(image) is specified a new volume will be created at boot time. I(boot_volume_size) is required with I(image) to create a new volume at boot time. type: bool default: 'no' version_added: 1.9 boot_volume: description: - Cloud Block Storage ID or Name to use as the boot volume of the instance version_added: 1.9 boot_volume_size: description: - Size of the volume to create in Gigabytes. This is only required with I(image) and I(boot_from_volume). default: 100 version_added: 1.9 boot_volume_terminate: description: - Whether the I(boot_volume) or newly created volume from I(image) will be terminated when the server is terminated type: bool default: 'no' version_added: 1.9 config_drive: description: - Attach read-only configuration drive to server as label config-2 type: bool default: 'no' version_added: 1.7 count: description: - number of instances to launch default: 1 version_added: 1.4 count_offset: description: - number count to start at default: 1 version_added: 1.4 disk_config: description: - Disk partitioning strategy choices: - auto - manual version_added: '1.4' default: auto exact_count: description: - Explicitly ensure an exact count of instances, used with state=active/present. If specified as C(yes) and I(count) is less than the servers matched, servers will be deleted to match the count. If the number of matched servers is fewer than specified in I(count) additional servers will be added. type: bool default: 'no' version_added: 1.4 extra_client_args: description: - A hash of key/value pairs to be used when creating the cloudservers client. This is considered an advanced option, use it wisely and with caution. version_added: 1.6 extra_create_args: description: - A hash of key/value pairs to be used when creating a new server. This is considered an advanced option, use it wisely and with caution. version_added: 1.6 files: description: - Files to insert into the instance. remotefilename:localcontent flavor: description: - flavor to use for the instance group: description: - host group to assign to server, is also used for idempotent operations to ensure a specific number of instances version_added: 1.4 image: description: - image to use for the instance. Can be an C(id), C(human_id) or C(name). 
With I(boot_from_volume), a Cloud Block Storage volume will be created with this image instance_ids: description: - list of instance ids, currently only used when state='absent' to remove instances version_added: 1.4 key_name: description: - key pair to use on the instance aliases: - keypair meta: description: - A hash of metadata to associate with the instance name: description: - Name to give the instance networks: description: - The network to attach to the instances. If specified, you must include ALL networks including the public and private interfaces. Can be C(id) or C(label). default: - public - private version_added: 1.4 state: description: - Indicate desired state of the resource choices: - present - absent default: present user_data: description: - Data to be uploaded to the servers config drive. This option implies I(config_drive). Can be a file path or a string version_added: 1.7 wait: description: - wait for the instance to be in state 'running' before returning type: bool default: 'no' wait_timeout: description: - how long before wait gives up, in seconds default: 300 author: - "Jesse Keating (@omgjlk)" - "Matt Martz (@sivel)" notes: - I(exact_count) can be "destructive" if the number of running servers in the I(group) is larger than that specified in I(count). In such a case, the I(state) is effectively set to C(absent) and the extra servers are deleted. In the case of deletion, the returned data structure will have C(action) set to C(delete), and the oldest servers in the group will be deleted. extends_documentation_fragment: rackspace.openstack ''' EXAMPLES = ''' - name: Build a Cloud Server gather_facts: False tasks: - name: Server build request local_action: module: rax credentials: ~/.raxpub name: rax-test1 flavor: 5 image: b11d9567-e412-4255-96b9-bd63ab23bcfe key_name: my_rackspace_key files: /root/test.txt: /home/localuser/test.txt wait: yes state: present networks: - private - public register: rax - name: Build an exact count of cloud servers with incremented names hosts: local gather_facts: False tasks: - name: Server build requests local_action: module: rax credentials: ~/.raxpub name: test%03d.example.org flavor: performance1-1 image: ubuntu-1204-lts-precise-pangolin state: present count: 10 count_offset: 10 exact_count: yes group: test wait: yes register: rax ''' import json import os import re import time try: import pyrax HAS_PYRAX = True except ImportError: HAS_PYRAX = False from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.rax import (FINAL_STATUSES, rax_argument_spec, rax_find_bootable_volume, rax_find_image, rax_find_network, rax_find_volume, rax_required_together, rax_to_dict, setup_rax_module) from ansible.module_utils.six.moves import xrange from ansible.module_utils.six import string_types def rax_find_server_image(module, server, image, boot_volume): if not image and boot_volume: vol = rax_find_bootable_volume(module, pyrax, server, exit=False) if not vol: return None volume_image_metadata = vol.volume_image_metadata vol_image_id = volume_image_metadata.get('image_id') if vol_image_id: server_image = rax_find_image(module, pyrax, vol_image_id, exit=False) if server_image: server.image = dict(id=server_image) # Match image IDs taking care of boot from volume if image and not server.image: vol = rax_find_bootable_volume(module, pyrax, server) volume_image_metadata = vol.volume_image_metadata vol_image_id = volume_image_metadata.get('image_id') if not vol_image_id: return None server_image = rax_find_image(module, pyrax, 
vol_image_id, exit=False) if image != server_image: return None server.image = dict(id=server_image) elif image and server.image['id'] != image: return None return server.image def create(module, names=None, flavor=None, image=None, meta=None, key_name=None, files=None, wait=True, wait_timeout=300, disk_config=None, group=None, nics=None, extra_create_args=None, user_data=None, config_drive=False, existing=None, block_device_mapping_v2=None): names = [] if names is None else names meta = {} if meta is None else meta files = {} if files is None else files nics = [] if nics is None else nics extra_create_args = {} if extra_create_args is None else extra_create_args existing = [] if existing is None else existing block_device_mapping_v2 = [] if block_device_mapping_v2 is None else block_device_mapping_v2 cs = pyrax.cloudservers changed = False if user_data: config_drive = True if user_data and os.path.isfile(os.path.expanduser(user_data)): try: user_data = os.path.expanduser(user_data) f = open(user_data) user_data = f.read() f.close() except Exception as e: module.fail_json(msg='Failed to load %s' % user_data) # Handle the file contents for rpath in files.keys(): lpath = os.path.expanduser(files[rpath]) try: fileobj = open(lpath, 'r') files[rpath] = fileobj.read() fileobj.close() except Exception as e: module.fail_json(msg='Failed to load %s' % lpath) try: servers = [] bdmv2 = block_device_mapping_v2 for name in names: servers.append(cs.servers.create(name=name, image=image, flavor=flavor, meta=meta, key_name=key_name, files=files, nics=nics, disk_config=disk_config, config_drive=config_drive, userdata=user_data, block_device_mapping_v2=bdmv2, **extra_create_args)) except Exception as e: if e.message: msg = str(e.message) else: msg = repr(e) module.fail_json(msg=msg) else: changed = True if wait: end_time = time.time() + wait_timeout infinite = wait_timeout == 0 while infinite or time.time() < end_time: for server in servers: try: server.get() except: server.status = 'ERROR' if not filter(lambda s: s.status not in FINAL_STATUSES, servers): break time.sleep(5) success = [] error = [] timeout = [] for server in servers: try: server.get() except: server.status = 'ERROR' instance = rax_to_dict(server, 'server') if server.status == 'ACTIVE' or not wait: success.append(instance) elif server.status == 'ERROR': error.append(instance) elif wait: timeout.append(instance) untouched = [rax_to_dict(s, 'server') for s in existing] instances = success + untouched results = { 'changed': changed, 'action': 'create', 'instances': instances, 'success': success, 'error': error, 'timeout': timeout, 'instance_ids': { 'instances': [i['id'] for i in instances], 'success': [i['id'] for i in success], 'error': [i['id'] for i in error], 'timeout': [i['id'] for i in timeout] } } if timeout: results['msg'] = 'Timeout waiting for all servers to build' elif error: results['msg'] = 'Failed to build all servers' if 'msg' in results: module.fail_json(**results) else: module.exit_json(**results) def delete(module, instance_ids=None, wait=True, wait_timeout=300, kept=None): instance_ids = [] if instance_ids is None else instance_ids kept = [] if kept is None else kept cs = pyrax.cloudservers changed = False instances = {} servers = [] for instance_id in instance_ids: servers.append(cs.servers.get(instance_id)) for server in servers: try: server.delete() except Exception as e: module.fail_json(msg=e.message) else: changed = True instance = rax_to_dict(server, 'server') instances[instance['id']] = instance # If requested, wait 
for server deletion if wait: end_time = time.time() + wait_timeout infinite = wait_timeout == 0 while infinite or time.time() < end_time: for server in servers: instance_id = server.id try: server.get() except: instances[instance_id]['status'] = 'DELETED' instances[instance_id]['rax_status'] = 'DELETED' if not filter(lambda s: s['status'] not in ('', 'DELETED', 'ERROR'), instances.values()): break time.sleep(5) timeout = filter(lambda s: s['status'] not in ('', 'DELETED', 'ERROR'), instances.values()) error = filter(lambda s: s['status'] in ('ERROR'), instances.values()) success = filter(lambda s: s['status'] in ('', 'DELETED'), instances.values()) instances = [rax_to_dict(s, 'server') for s in kept] results = { 'changed': changed, 'action': 'delete', 'instances': instances, 'success': success, 'error': error, 'timeout': timeout, 'instance_ids': { 'instances': [i['id'] for i in instances], 'success': [i['id'] for i in success], 'error': [i['id'] for i in error], 'timeout': [i['id'] for i in timeout] } } if timeout: results['msg'] = 'Timeout waiting for all servers to delete' elif error: results['msg'] = 'Failed to delete all servers' if 'msg' in results: module.fail_json(**results) else: module.exit_json(**results) def cloudservers(module, state=None, name=None, flavor=None, image=None, meta=None, key_name=None, files=None, wait=True, wait_timeout=300, disk_config=None, count=1, group=None, instance_ids=None, exact_count=False, networks=None, count_offset=0, auto_increment=False, extra_create_args=None, user_data=None, config_drive=False, boot_from_volume=False, boot_volume=None, boot_volume_size=None, boot_volume_terminate=False): meta = {} if meta is None else meta files = {} if files is None else files instance_ids = [] if instance_ids is None else instance_ids networks = [] if networks is None else networks extra_create_args = {} if extra_create_args is None else extra_create_args cs = pyrax.cloudservers cnw = pyrax.cloud_networks if not cnw: module.fail_json(msg='Failed to instantiate client. This ' 'typically indicates an invalid region or an ' 'incorrectly capitalized region name.') if state == 'present' or (state == 'absent' and instance_ids is None): if not boot_from_volume and not boot_volume and not image: module.fail_json(msg='image is required for the "rax" module') for arg, value in dict(name=name, flavor=flavor).items(): if not value: module.fail_json(msg='%s is required for the "rax" module' % arg) if boot_from_volume and not image and not boot_volume: module.fail_json(msg='image or boot_volume are required for the ' '"rax" with boot_from_volume') if boot_from_volume and image and not boot_volume_size: module.fail_json(msg='boot_volume_size is required for the "rax" ' 'module with boot_from_volume and image') if boot_from_volume and image and boot_volume: image = None servers = [] # Add the group meta key if group and 'group' not in meta: meta['group'] = group elif 'group' in meta and group is None: group = meta['group'] # Normalize and ensure all metadata values are strings for k, v in meta.items(): if isinstance(v, list): meta[k] = ','.join(['%s' % i for i in v]) elif isinstance(v, dict): meta[k] = json.dumps(v) elif not isinstance(v, string_types): meta[k] = '%s' % v # When using state=absent with group, the absent block won't match the # names properly. 
Use the exact_count functionality to decrease the count # to the desired level was_absent = False if group is not None and state == 'absent': exact_count = True state = 'present' was_absent = True if image: image = rax_find_image(module, pyrax, image) nics = [] if networks: for network in networks: nics.extend(rax_find_network(module, pyrax, network)) # act on the state if state == 'present': # Idempotent ensurance of a specific count of servers if exact_count is not False: # See if we can find servers that match our options if group is None: module.fail_json(msg='"group" must be provided when using ' '"exact_count"') if auto_increment: numbers = set() # See if the name is a printf like string, if not append # %d to the end try: name % 0 except TypeError as e: if e.message.startswith('not all'): name = '%s%%d' % name else: module.fail_json(msg=e.message) # regex pattern to match printf formatting pattern = re.sub(r'%\d*[sd]', r'(\d+)', name) for server in cs.servers.list(): # Ignore DELETED servers if server.status == 'DELETED': continue if server.metadata.get('group') == group: servers.append(server) match = re.search(pattern, server.name) if match: number = int(match.group(1)) numbers.add(number) number_range = xrange(count_offset, count_offset + count) available_numbers = list(set(number_range) .difference(numbers)) else: # Not auto incrementing for server in cs.servers.list(): # Ignore DELETED servers if server.status == 'DELETED': continue if server.metadata.get('group') == group: servers.append(server) # available_numbers not needed here, we inspect auto_increment # again later # If state was absent but the count was changed, # assume we only wanted to remove that number of instances if was_absent: diff = len(servers) - count if diff < 0: count = 0 else: count = diff if len(servers) > count: # We have more servers than we need, set state='absent' # and delete the extras, this should delete the oldest state = 'absent' kept = servers[:count] del servers[:count] instance_ids = [] for server in servers: instance_ids.append(server.id) delete(module, instance_ids=instance_ids, wait=wait, wait_timeout=wait_timeout, kept=kept) elif len(servers) < count: # we have fewer servers than we need if auto_increment: # auto incrementing server numbers names = [] name_slice = count - len(servers) numbers_to_use = available_numbers[:name_slice] for number in numbers_to_use: names.append(name % number) else: # We are not auto incrementing server numbers, # create a list of 'name' that matches how many we need names = [name] * (count - len(servers)) else: # we have the right number of servers, just return info # about all of the matched servers instances = [] instance_ids = [] for server in servers: instances.append(rax_to_dict(server, 'server')) instance_ids.append(server.id) module.exit_json(changed=False, action=None, instances=instances, success=[], error=[], timeout=[], instance_ids={'instances': instance_ids, 'success': [], 'error': [], 'timeout': []}) else: # not called with exact_count=True if group is not None: if auto_increment: # we are auto incrementing server numbers, but not with # exact_count numbers = set() # See if the name is a printf like string, if not append # %d to the end try: name % 0 except TypeError as e: if e.message.startswith('not all'): name = '%s%%d' % name else: module.fail_json(msg=e.message) # regex pattern to match printf formatting pattern = re.sub(r'%\d*[sd]', r'(\d+)', name) for server in cs.servers.list(): # Ignore DELETED servers if server.status == 'DELETED': 
continue if server.metadata.get('group') == group: servers.append(server) match = re.search(pattern, server.name) if match: number = int(match.group(1)) numbers.add(number) number_range = xrange(count_offset, count_offset + count + len(numbers)) available_numbers = list(set(number_range) .difference(numbers)) names = [] numbers_to_use = available_numbers[:count] for number in numbers_to_use: names.append(name % number) else: # Not auto incrementing names = [name] * count else: # No group was specified, and not using exact_count # Perform more simplistic matching search_opts = { 'name': '^%s$' % name, 'flavor': flavor } servers = [] for server in cs.servers.list(search_opts=search_opts): # Ignore DELETED servers if server.status == 'DELETED': continue if not rax_find_server_image(module, server, image, boot_volume): continue # Ignore servers with non matching metadata if server.metadata != meta: continue servers.append(server) if len(servers) >= count: # We have more servers than were requested, don't do # anything. Not running with exact_count=True, so we assume # more is OK instances = [] for server in servers: instances.append(rax_to_dict(server, 'server')) instance_ids = [i['id'] for i in instances] module.exit_json(changed=False, action=None, instances=instances, success=[], error=[], timeout=[], instance_ids={'instances': instance_ids, 'success': [], 'error': [], 'timeout': []}) # We need more servers to reach out target, create names for # them, we aren't performing auto_increment here names = [name] * (count - len(servers)) block_device_mapping_v2 = [] if boot_from_volume: mapping = { 'boot_index': '0', 'delete_on_termination': boot_volume_terminate, 'destination_type': 'volume', } if image: mapping.update({ 'uuid': image, 'source_type': 'image', 'volume_size': boot_volume_size, }) image = None elif boot_volume: volume = rax_find_volume(module, pyrax, boot_volume) mapping.update({ 'uuid': pyrax.utils.get_id(volume), 'source_type': 'volume', }) block_device_mapping_v2.append(mapping) create(module, names=names, flavor=flavor, image=image, meta=meta, key_name=key_name, files=files, wait=wait, wait_timeout=wait_timeout, disk_config=disk_config, group=group, nics=nics, extra_create_args=extra_create_args, user_data=user_data, config_drive=config_drive, existing=servers, block_device_mapping_v2=block_device_mapping_v2) elif state == 'absent': if instance_ids is None: # We weren't given an explicit list of server IDs to delete # Let's match instead search_opts = { 'name': '^%s$' % name, 'flavor': flavor } for server in cs.servers.list(search_opts=search_opts): # Ignore DELETED servers if server.status == 'DELETED': continue if not rax_find_server_image(module, server, image, boot_volume): continue # Ignore servers with non matching metadata if meta != server.metadata: continue servers.append(server) # Build a list of server IDs to delete instance_ids = [] for server in servers: if len(instance_ids) < count: instance_ids.append(server.id) else: break if not instance_ids: # No server IDs were matched for deletion, or no IDs were # explicitly provided, just exit and don't do anything module.exit_json(changed=False, action=None, instances=[], success=[], error=[], timeout=[], instance_ids={'instances': [], 'success': [], 'error': [], 'timeout': []}) delete(module, instance_ids=instance_ids, wait=wait, wait_timeout=wait_timeout) def main(): argument_spec = rax_argument_spec() argument_spec.update( dict( auto_increment=dict(default=True, type='bool'), boot_from_volume=dict(default=False, 
type='bool'), boot_volume=dict(type='str'), boot_volume_size=dict(type='int', default=100), boot_volume_terminate=dict(type='bool', default=False), config_drive=dict(default=False, type='bool'), count=dict(default=1, type='int'), count_offset=dict(default=1, type='int'), disk_config=dict(choices=['auto', 'manual']), exact_count=dict(default=False, type='bool'), extra_client_args=dict(type='dict', default={}), extra_create_args=dict(type='dict', default={}), files=dict(type='dict', default={}), flavor=dict(), group=dict(), image=dict(), instance_ids=dict(type='list'), key_name=dict(aliases=['keypair']), meta=dict(type='dict', default={}), name=dict(), networks=dict(type='list', default=['public', 'private']), service=dict(), state=dict(default='present', choices=['present', 'absent']), user_data=dict(no_log=True), wait=dict(default=False, type='bool'), wait_timeout=dict(default=300), ) ) module = AnsibleModule( argument_spec=argument_spec, required_together=rax_required_together(), ) if not HAS_PYRAX: module.fail_json(msg='pyrax is required for this module') service = module.params.get('service') if service is not None: module.fail_json(msg='The "service" attribute has been deprecated, ' 'please remove "service: cloudservers" from your ' 'playbook pertaining to the "rax" module') auto_increment = module.params.get('auto_increment') boot_from_volume = module.params.get('boot_from_volume') boot_volume = module.params.get('boot_volume') boot_volume_size = module.params.get('boot_volume_size') boot_volume_terminate = module.params.get('boot_volume_terminate') config_drive = module.params.get('config_drive') count = module.params.get('count') count_offset = module.params.get('count_offset') disk_config = module.params.get('disk_config') if disk_config: disk_config = disk_config.upper() exact_count = module.params.get('exact_count', False) extra_client_args = module.params.get('extra_client_args') extra_create_args = module.params.get('extra_create_args') files = module.params.get('files') flavor = module.params.get('flavor') group = module.params.get('group') image = module.params.get('image') instance_ids = module.params.get('instance_ids') key_name = module.params.get('key_name') meta = module.params.get('meta') name = module.params.get('name') networks = module.params.get('networks') state = module.params.get('state') user_data = module.params.get('user_data') wait = module.params.get('wait') wait_timeout = int(module.params.get('wait_timeout')) setup_rax_module(module, pyrax) if extra_client_args: pyrax.cloudservers = pyrax.connect_to_cloudservers( region=pyrax.cloudservers.client.region_name, **extra_client_args) client = pyrax.cloudservers.client if 'bypass_url' in extra_client_args: client.management_url = extra_client_args['bypass_url'] if pyrax.cloudservers is None: module.fail_json(msg='Failed to instantiate client. 
This ' 'typically indicates an invalid region or an ' 'incorrectly capitalized region name.') cloudservers(module, state=state, name=name, flavor=flavor, image=image, meta=meta, key_name=key_name, files=files, wait=wait, wait_timeout=wait_timeout, disk_config=disk_config, count=count, group=group, instance_ids=instance_ids, exact_count=exact_count, networks=networks, count_offset=count_offset, auto_increment=auto_increment, extra_create_args=extra_create_args, user_data=user_data, config_drive=config_drive, boot_from_volume=boot_from_volume, boot_volume=boot_volume, boot_volume_size=boot_volume_size, boot_volume_terminate=boot_volume_terminate) if __name__ == '__main__': main()
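For reference, a minimal standalone sketch of the printf-style name matching that the auto_increment logic above relies on; the name pattern, the existing server names, and the counts here are illustrative assumptions, not values taken from the module:

import re

name = 'test%03d.example.org'                  # printf-like name, as in EXAMPLES
pattern = re.sub(r'%\d*[sd]', r'(\d+)', name)  # -> 'test(\d+).example.org'

# hypothetical servers already present in the group
existing = ['test010.example.org', 'test011.example.org']
numbers = set()
for server_name in existing:
    match = re.search(pattern, server_name)
    if match:
        numbers.add(int(match.group(1)))

count, count_offset = 4, 10
available = sorted(set(range(count_offset, count_offset + count)) - numbers)
names = [name % n for n in available[:count - len(existing)]]
print(names)  # ['test012.example.org', 'test013.example.org']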
gpl-3.0
maheshakya/scikit-learn
sklearn/datasets/svmlight_format.py
6
14944
"""This module implements a loader and dumper for the svmlight format This format is a text-based format, with one sample per line. It does not store zero valued features hence is suitable for sparse dataset. The first element of each line can be used to store a target variable to predict. This format is used as the default format for both svmlight and the libsvm command line programs. """ # Authors: Mathieu Blondel <mathieu@mblondel.org> # Lars Buitinck <L.J.Buitinck@uva.nl> # Olivier Grisel <olivier.grisel@ensta.org> # License: BSD 3 clause from contextlib import closing import io import os.path import numpy as np import scipy.sparse as sp from ._svmlight_format import _load_svmlight_file from .. import __version__ from ..externals import six from ..externals.six import u, b from ..externals.six.moves import range, zip from ..utils import check_array from ..utils.fixes import frombuffer_empty def load_svmlight_file(f, n_features=None, dtype=np.float64, multilabel=False, zero_based="auto", query_id=False): """Load datasets in the svmlight / libsvm format into sparse CSR matrix This format is a text-based format, with one sample per line. It does not store zero valued features hence is suitable for sparse dataset. The first element of each line can be used to store a target variable to predict. This format is used as the default format for both svmlight and the libsvm command line programs. Parsing a text based source can be expensive. When working on repeatedly on the same dataset, it is recommended to wrap this loader with joblib.Memory.cache to store a memmapped backup of the CSR results of the first call and benefit from the near instantaneous loading of memmapped structures for the subsequent calls. In case the file contains a pairwise preference constraint (known as "qid" in the svmlight format) these are ignored unless the query_id parameter is set to True. These pairwise preference constraints can be used to constraint the combination of samples when using pairwise loss functions (as is the case in some learning to rank problems) so that only pairs with the same query_id value are considered. This implementation is written in Cython and is reasonably fast. However, a faster API-compatible loader is also available at: https://github.com/mblondel/svmlight-loader Parameters ---------- f: {str, file-like, int} (Path to) a file to load. If a path ends in ".gz" or ".bz2", it will be uncompressed on the fly. If an integer is passed, it is assumed to be a file descriptor. A file-like or file descriptor will not be closed by this function. A file-like object must be opened in binary mode. n_features: int or None The number of features to use. If None, it will be inferred. This argument is useful to load several files that are subsets of a bigger sliced dataset: each subset might not have examples of every feature, hence the inferred shape might vary from one slice to another. multilabel: boolean, optional Samples may have several labels each (see http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/multilabel.html) zero_based: boolean or "auto", optional Whether column indices in f are zero-based (True) or one-based (False). If column indices are one-based, they are transformed to zero-based to match Python/NumPy conventions. If set to "auto", a heuristic check is applied to determine this from the file contents. Both kinds of files occur "in the wild", but they are unfortunately not self-identifying. Using "auto" or True should always be safe. 
query_id: boolean, defaults to False If True, will return the query_id array for each file. Returns ------- X: scipy.sparse matrix of shape (n_samples, n_features) y: ndarray of shape (n_samples,), or, in the multilabel a list of tuples of length n_samples. query_id: array of shape (n_samples,) query_id for each sample. Only returned when query_id is set to True. See also -------- load_svmlight_files: similar function for loading multiple files in this format, enforcing the same number of features/columns on all of them. Examples -------- To use joblib.Memory to cache the svmlight file:: from sklearn.externals.joblib import Memory from sklearn.datasets import load_svmlight_file mem = Memory("./mycache") @mem.cache def get_data(): data = load_svmlight_file("mysvmlightfile") return data[0], data[1] X, y = get_data() """ return tuple(load_svmlight_files([f], n_features, dtype, multilabel, zero_based, query_id)) def _gen_open(f): if isinstance(f, int): # file descriptor return io.open(f, "rb", closefd=False) elif not isinstance(f, six.string_types): raise TypeError("expected {str, int, file-like}, got %s" % type(f)) _, ext = os.path.splitext(f) if ext == ".gz": import gzip return gzip.open(f, "rb") elif ext == ".bz2": from bz2 import BZ2File return BZ2File(f, "rb") else: return open(f, "rb") def _open_and_load(f, dtype, multilabel, zero_based, query_id): if hasattr(f, "read"): actual_dtype, data, ind, indptr, labels, query = \ _load_svmlight_file(f, dtype, multilabel, zero_based, query_id) # XXX remove closing when Python 2.7+/3.1+ required else: with closing(_gen_open(f)) as f: actual_dtype, data, ind, indptr, labels, query = \ _load_svmlight_file(f, dtype, multilabel, zero_based, query_id) # convert from array.array, give data the right dtype if not multilabel: labels = frombuffer_empty(labels, np.float64) data = frombuffer_empty(data, actual_dtype) indices = frombuffer_empty(ind, np.intc) indptr = np.frombuffer(indptr, dtype=np.intc) # never empty query = frombuffer_empty(query, np.intc) data = np.asarray(data, dtype=dtype) # no-op for float{32,64} return data, indices, indptr, labels, query def load_svmlight_files(files, n_features=None, dtype=np.float64, multilabel=False, zero_based="auto", query_id=False): """Load dataset from multiple files in SVMlight format This function is equivalent to mapping load_svmlight_file over a list of files, except that the results are concatenated into a single, flat list and the samples vectors are constrained to all have the same number of features. In case the file contains a pairwise preference constraint (known as "qid" in the svmlight format) these are ignored unless the query_id parameter is set to True. These pairwise preference constraints can be used to constraint the combination of samples when using pairwise loss functions (as is the case in some learning to rank problems) so that only pairs with the same query_id value are considered. Parameters ---------- files : iterable over {str, file-like, int} (Paths of) files to load. If a path ends in ".gz" or ".bz2", it will be uncompressed on the fly. If an integer is passed, it is assumed to be a file descriptor. File-likes and file descriptors will not be closed by this function. File-like objects must be opened in binary mode. n_features: int or None The number of features to use. If None, it will be inferred from the maximum column index occurring in any of the files. 
This can be set to a higher value than the actual number of features in any of the input files, but setting it to a lower value will cause an exception to be raised. multilabel: boolean, optional Samples may have several labels each (see http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/multilabel.html) zero_based: boolean or "auto", optional Whether column indices in f are zero-based (True) or one-based (False). If column indices are one-based, they are transformed to zero-based to match Python/NumPy conventions. If set to "auto", a heuristic check is applied to determine this from the file contents. Both kinds of files occur "in the wild", but they are unfortunately not self-identifying. Using "auto" or True should always be safe. query_id: boolean, defaults to False If True, will return the query_id array for each file. Returns ------- [X1, y1, ..., Xn, yn] where each (Xi, yi) pair is the result from load_svmlight_file(files[i]). If query_id is set to True, this will return instead [X1, y1, q1, ..., Xn, yn, qn] where (Xi, yi, qi) is the result from load_svmlight_file(files[i]) Notes ----- When fitting a model to a matrix X_train and evaluating it against a matrix X_test, it is essential that X_train and X_test have the same number of features (X_train.shape[1] == X_test.shape[1]). This may not be the case if you load the files individually with load_svmlight_file. See also -------- load_svmlight_file """ r = [_open_and_load(f, dtype, multilabel, bool(zero_based), bool(query_id)) for f in files] if (zero_based is False or zero_based == "auto" and all(np.min(tmp[1]) > 0 for tmp in r)): for ind in r: indices = ind[1] indices -= 1 n_f = max(ind[1].max() for ind in r) + 1 if n_features is None: n_features = n_f elif n_features < n_f: raise ValueError("n_features was set to {}," " but input file contains {} features" .format(n_features, n_f)) result = [] for data, indices, indptr, y, query_values in r: shape = (indptr.shape[0] - 1, n_features) X = sp.csr_matrix((data, indices, indptr), shape) X.sort_indices() result += X, y if query_id: result.append(query_values) return result def _dump_svmlight(X, y, f, one_based, comment, query_id): is_sp = int(hasattr(X, "tocsr")) if X.dtype.kind == 'i': value_pattern = u("%d:%d") else: value_pattern = u("%d:%.16g") if y.dtype.kind == 'i': line_pattern = u("%d") else: line_pattern = u("%.16g") if query_id is not None: line_pattern += u(" qid:%d") line_pattern += u(" %s\n") if comment: f.write(b("# Generated by dump_svmlight_file from scikit-learn %s\n" % __version__)) f.write(b("# Column indices are %s-based\n" % ["zero", "one"][one_based])) f.write(b("#\n")) f.writelines(b("# %s\n" % line) for line in comment.splitlines()) for i in range(X.shape[0]): if is_sp: span = slice(X.indptr[i], X.indptr[i + 1]) row = zip(X.indices[span], X.data[span]) else: nz = X[i] != 0 row = zip(np.where(nz)[0], X[i, nz]) s = " ".join(value_pattern % (j + one_based, x) for j, x in row) if query_id is not None: feat = (y[i], query_id[i], s) else: feat = (y[i], s) f.write((line_pattern % feat).encode('ascii')) def dump_svmlight_file(X, y, f, zero_based=True, comment=None, query_id=None): """Dump the dataset in svmlight / libsvm file format. This format is a text-based format, with one sample per line. It does not store zero valued features hence is suitable for sparse dataset. The first element of each line can be used to store a target variable to predict. 
Parameters ---------- X : {array-like, sparse matrix}, shape = [n_samples, n_features] Training vectors, where n_samples is the number of samples and n_features is the number of features. y : array-like, shape = [n_samples] Target values. f : string or file-like in binary mode If string, specifies the path that will contain the data. If file-like, data will be written to f. f should be opened in binary mode. zero_based : boolean, optional Whether column indices should be written zero-based (True) or one-based (False). comment : string, optional Comment to insert at the top of the file. This should be either a Unicode string, which will be encoded as UTF-8, or an ASCII byte string. If a comment is given, then it will be preceded by one that identifies the file as having been dumped by scikit-learn. Note that not all tools grok comments in SVMlight files. query_id : array-like, shape = [n_samples] Array containing pairwise preference constraints (qid in svmlight format). """ if comment is not None: # Convert comment string to list of lines in UTF-8. # If a byte string is passed, then check whether it's ASCII; # if a user wants to get fancy, they'll have to decode themselves. # Avoid mention of str and unicode types for Python 3.x compat. if isinstance(comment, bytes): comment.decode("ascii") # just for the exception else: comment = comment.encode("utf-8") if six.b("\0") in comment: raise ValueError("comment string contains NUL byte") y = np.asarray(y) if y.ndim != 1: raise ValueError("expected y of shape (n_samples,), got %r" % (y.shape,)) Xval = check_array(X, accept_sparse='csr') if Xval.shape[0] != y.shape[0]: raise ValueError("X.shape[0] and y.shape[0] should be the same, got" " %r and %r instead." % (Xval.shape[0], y.shape[0])) # We had some issues with CSR matrices with unsorted indices (e.g. #1501), # so sort them here, but first make sure we don't modify the user's X. # TODO We can do this cheaper; sorted_indices copies the whole matrix. if Xval is X and hasattr(Xval, "sorted_indices"): X = Xval.sorted_indices() else: X = Xval if hasattr(X, "sort_indices"): X.sort_indices() if query_id is not None: query_id = np.asarray(query_id) if query_id.shape[0] != y.shape[0]: raise ValueError("expected query_id of shape (n_samples,), got %r" % (query_id.shape,)) one_based = not zero_based if hasattr(f, "write"): _dump_svmlight(X, y, f, one_based, comment, query_id) else: with open(f, "wb") as f: _dump_svmlight(X, y, f, one_based, comment, query_id)
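A quick round-trip sketch of the dump/load API above; the file path is an illustrative choice, and each line of the dumped file has the form `<target> <index>:<value> ...` (plus an optional `qid:<int>` entry when query_id is passed):

import numpy as np
import scipy.sparse as sp
from sklearn.datasets import dump_svmlight_file, load_svmlight_file

X = sp.csr_matrix(np.array([[0.0, 2.0],
                            [3.0, 0.0]]))
y = np.array([1.0, -1.0])

dump_svmlight_file(X, y, "/tmp/tiny.svmlight", zero_based=True)
# /tmp/tiny.svmlight now contains two lines (no header, since no comment
# was passed):
#   1 1:2
#   -1 0:3

X2, y2 = load_svmlight_file("/tmp/tiny.svmlight", zero_based=True)
assert (X2 != X).nnz == 0 and np.array_equal(y, y2)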
bsd-3-clause
dimara/synnefo
snf-admin-app/synnefo_admin/admin/tables.py
8
1393
# Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from eztables.views import DatatablesView
from django.utils.html import escape


def escape_row(row):
    """Escape a whole row using Django's escape function."""
    return [escape(cell) for cell in row]


class AdminJSONView(DatatablesView):

    """Class-based Django view for admin purposes.

    It is based on the DatatablesView class of the django-eztables
    plugin and aims to provide some common functionality for all the
    views that are derived from it.
    """

    def format_data_rows(self, rows):
        if hasattr(self, 'format_data_row'):
            rows = [escape_row(self.format_data_row(row)) for row in rows]
        else:
            rows = [escape_row(row) for row in rows]
        return rows
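A minimal sketch of how a concrete admin view might plug into format_data_rows through the optional format_data_row hook; the field list and the status formatting are illustrative assumptions, not part of this module:

class UserJSONView(AdminJSONView):

    """Hypothetical view serving user rows to a DataTables widget."""

    fields = ('id', 'email', 'is_active')  # eztables-style column declaration

    def format_data_row(self, row):
        # `row` holds one cell per entry in `fields`; every cell returned
        # here is subsequently HTML-escaped by escape_row().
        row = list(row)
        row[2] = 'active' if row[2] else 'suspended'
        return row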
gpl-3.0
afrolov1/nova
nova/tests/virt/libvirt/fakelibvirt.py
1
30826
# Copyright 2010 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from lxml import etree import time import uuid from nova.openstack.common.gettextutils import _ # Allow passing None to the various connect methods # (i.e. allow the client to rely on default URLs) allow_default_uri_connection = True # string indicating the CPU arch node_arch = 'x86_64' # or 'i686' (or whatever else uname -m might return) # memory size in kilobytes node_kB_mem = 4096 # the number of active CPUs node_cpus = 2 # expected CPU frequency node_mhz = 800 # the number of NUMA cell, 1 for unusual NUMA topologies or uniform # memory access; check capabilities XML for the actual NUMA topology node_nodes = 1 # NUMA nodes # number of CPU sockets per node if nodes > 1, total number of CPU # sockets otherwise node_sockets = 1 # number of cores per socket node_cores = 2 # number of threads per core node_threads = 1 # CPU model node_cpu_model = "Penryn" # CPU vendor node_cpu_vendor = "Intel" # Has libvirt connection been used at least once connection_used = False def _reset(): global allow_default_uri_connection allow_default_uri_connection = True # virDomainState VIR_DOMAIN_NOSTATE = 0 VIR_DOMAIN_RUNNING = 1 VIR_DOMAIN_BLOCKED = 2 VIR_DOMAIN_PAUSED = 3 VIR_DOMAIN_SHUTDOWN = 4 VIR_DOMAIN_SHUTOFF = 5 VIR_DOMAIN_CRASHED = 6 VIR_DOMAIN_XML_SECURE = 1 VIR_DOMAIN_EVENT_ID_LIFECYCLE = 0 VIR_DOMAIN_EVENT_DEFINED = 0 VIR_DOMAIN_EVENT_UNDEFINED = 1 VIR_DOMAIN_EVENT_STARTED = 2 VIR_DOMAIN_EVENT_SUSPENDED = 3 VIR_DOMAIN_EVENT_RESUMED = 4 VIR_DOMAIN_EVENT_STOPPED = 5 VIR_DOMAIN_EVENT_SHUTDOWN = 6 VIR_DOMAIN_EVENT_PMSUSPENDED = 7 VIR_DOMAIN_UNDEFINE_MANAGED_SAVE = 1 VIR_DOMAIN_AFFECT_CURRENT = 0 VIR_DOMAIN_AFFECT_LIVE = 1 VIR_DOMAIN_AFFECT_CONFIG = 2 VIR_CPU_COMPARE_ERROR = -1 VIR_CPU_COMPARE_INCOMPATIBLE = 0 VIR_CPU_COMPARE_IDENTICAL = 1 VIR_CPU_COMPARE_SUPERSET = 2 VIR_CRED_USERNAME = 1 VIR_CRED_AUTHNAME = 2 VIR_CRED_LANGUAGE = 3 VIR_CRED_CNONCE = 4 VIR_CRED_PASSPHRASE = 5 VIR_CRED_ECHOPROMPT = 6 VIR_CRED_NOECHOPROMPT = 7 VIR_CRED_REALM = 8 VIR_CRED_EXTERNAL = 9 VIR_MIGRATE_PEER2PEER = 2 VIR_MIGRATE_UNDEFINE_SOURCE = 16 VIR_NODE_CPU_STATS_ALL_CPUS = -1 VIR_DOMAIN_START_PAUSED = 1 # libvirtError enums # (Intentionally different from what's in libvirt. 
We do this to check, # that consumers of the library are using the symbolic names rather than # hardcoding the numerical values) VIR_FROM_QEMU = 100 VIR_FROM_DOMAIN = 200 VIR_FROM_NWFILTER = 330 VIR_FROM_REMOTE = 340 VIR_FROM_RPC = 345 VIR_ERR_NO_SUPPORT = 3 VIR_ERR_XML_DETAIL = 350 VIR_ERR_NO_DOMAIN = 420 VIR_ERR_OPERATION_INVALID = 55 VIR_ERR_OPERATION_TIMEOUT = 68 VIR_ERR_NO_NWFILTER = 620 VIR_ERR_SYSTEM_ERROR = 900 VIR_ERR_INTERNAL_ERROR = 950 # Readonly VIR_CONNECT_RO = 1 # virConnectBaselineCPU flags VIR_CONNECT_BASELINE_CPU_EXPAND_FEATURES = 1 # snapshotCreateXML flags VIR_DOMAIN_SNAPSHOT_CREATE_NO_METADATA = 4 VIR_DOMAIN_SNAPSHOT_CREATE_DISK_ONLY = 16 VIR_DOMAIN_SNAPSHOT_CREATE_REUSE_EXT = 32 VIR_DOMAIN_SNAPSHOT_CREATE_QUIESCE = 64 def _parse_disk_info(element): disk_info = {} disk_info['type'] = element.get('type', 'file') disk_info['device'] = element.get('device', 'disk') driver = element.find('./driver') if driver is not None: disk_info['driver_name'] = driver.get('name') disk_info['driver_type'] = driver.get('type') source = element.find('./source') if source is not None: disk_info['source'] = source.get('file') if not disk_info['source']: disk_info['source'] = source.get('dev') if not disk_info['source']: disk_info['source'] = source.get('path') target = element.find('./target') if target is not None: disk_info['target_dev'] = target.get('dev') disk_info['target_bus'] = target.get('bus') return disk_info class libvirtError(Exception): """This class was copied and slightly modified from `libvirt-python:libvirt-override.py`. Since a test environment will use the real `libvirt-python` version of `libvirtError` if it's installed and not this fake, we need to maintain strict compatibility with the original class, including `__init__` args and instance-attributes. To create a libvirtError instance you should: # Create an unsupported error exception exc = libvirtError('my message') exc.err = (libvirt.VIR_ERR_NO_SUPPORT,) self.err is a tuple of form: (error_code, error_domain, error_message, error_level, str1, str2, str3, int1, int2) Alternatively, you can use the `make_libvirtError` convenience function to allow you to specify these attributes in one shot. 
""" def __init__(self, defmsg, conn=None, dom=None, net=None, pool=None, vol=None): Exception.__init__(self, defmsg) self.err = None def get_error_code(self): if self.err is None: return None return self.err[0] def get_error_domain(self): if self.err is None: return None return self.err[1] def get_error_message(self): if self.err is None: return None return self.err[2] def get_error_level(self): if self.err is None: return None return self.err[3] def get_str1(self): if self.err is None: return None return self.err[4] def get_str2(self): if self.err is None: return None return self.err[5] def get_str3(self): if self.err is None: return None return self.err[6] def get_int1(self): if self.err is None: return None return self.err[7] def get_int2(self): if self.err is None: return None return self.err[8] class NWFilter(object): def __init__(self, connection, xml): self._connection = connection self._xml = xml self._parse_xml(xml) def _parse_xml(self, xml): tree = etree.fromstring(xml) root = tree.find('.') self._name = root.get('name') def undefine(self): self._connection._remove_filter(self) class Domain(object): def __init__(self, connection, xml, running=False, transient=False): self._connection = connection if running: connection._mark_running(self) self._state = running and VIR_DOMAIN_RUNNING or VIR_DOMAIN_SHUTOFF self._transient = transient self._def = self._parse_definition(xml) self._has_saved_state = False self._snapshots = {} self._id = self._connection._id_counter def _parse_definition(self, xml): try: tree = etree.fromstring(xml) except etree.ParseError: raise make_libvirtError( libvirtError, "Invalid XML.", error_code=VIR_ERR_XML_DETAIL, error_domain=VIR_FROM_DOMAIN) definition = {} name = tree.find('./name') if name is not None: definition['name'] = name.text uuid_elem = tree.find('./uuid') if uuid_elem is not None: definition['uuid'] = uuid_elem.text else: definition['uuid'] = str(uuid.uuid4()) vcpu = tree.find('./vcpu') if vcpu is not None: definition['vcpu'] = int(vcpu.text) memory = tree.find('./memory') if memory is not None: definition['memory'] = int(memory.text) os = {} os_type = tree.find('./os/type') if os_type is not None: os['type'] = os_type.text os['arch'] = os_type.get('arch', node_arch) os_kernel = tree.find('./os/kernel') if os_kernel is not None: os['kernel'] = os_kernel.text os_initrd = tree.find('./os/initrd') if os_initrd is not None: os['initrd'] = os_initrd.text os_cmdline = tree.find('./os/cmdline') if os_cmdline is not None: os['cmdline'] = os_cmdline.text os_boot = tree.find('./os/boot') if os_boot is not None: os['boot_dev'] = os_boot.get('dev') definition['os'] = os features = {} acpi = tree.find('./features/acpi') if acpi is not None: features['acpi'] = True definition['features'] = features devices = {} device_nodes = tree.find('./devices') if device_nodes is not None: disks_info = [] disks = device_nodes.findall('./disk') for disk in disks: disks_info += [_parse_disk_info(disk)] devices['disks'] = disks_info nics_info = [] nics = device_nodes.findall('./interface') for nic in nics: nic_info = {} nic_info['type'] = nic.get('type') mac = nic.find('./mac') if mac is not None: nic_info['mac'] = mac.get('address') source = nic.find('./source') if source is not None: if nic_info['type'] == 'network': nic_info['source'] = source.get('network') elif nic_info['type'] == 'bridge': nic_info['source'] = source.get('bridge') nics_info += [nic_info] devices['nics'] = nics_info definition['devices'] = devices return definition def create(self): 
self.createWithFlags(0) def createWithFlags(self, flags): # FIXME: Not handling flags at the moment self._state = VIR_DOMAIN_RUNNING self._connection._mark_running(self) self._has_saved_state = False def isActive(self): return int(self._state == VIR_DOMAIN_RUNNING) def undefine(self): self._connection._undefine(self) def undefineFlags(self, flags): self.undefine() if flags & VIR_DOMAIN_UNDEFINE_MANAGED_SAVE: if self.hasManagedSaveImage(0): self.managedSaveRemove() def destroy(self): self._state = VIR_DOMAIN_SHUTOFF self._connection._mark_not_running(self) def ID(self): return self._id def name(self): return self._def['name'] def UUIDString(self): return self._def['uuid'] def interfaceStats(self, device): return [10000242400, 1234, 0, 2, 213412343233, 34214234, 23, 3] def blockStats(self, device): return [2, 10000242400, 234, 2343424234, 34] def suspend(self): self._state = VIR_DOMAIN_PAUSED def shutdown(self): self._state = VIR_DOMAIN_SHUTDOWN self._connection._mark_not_running(self) def reset(self, flags): # FIXME: Not handling flags at the moment self._state = VIR_DOMAIN_RUNNING self._connection._mark_running(self) def info(self): return [self._state, long(self._def['memory']), long(self._def['memory']), self._def['vcpu'], 123456789L] def migrateToURI(self, desturi, flags, dname, bandwidth): raise make_libvirtError( libvirtError, "Migration always fails for fake libvirt!", error_code=VIR_ERR_INTERNAL_ERROR, error_domain=VIR_FROM_QEMU) def attachDevice(self, xml): disk_info = _parse_disk_info(etree.fromstring(xml)) disk_info['_attached'] = True self._def['devices']['disks'] += [disk_info] return True def attachDeviceFlags(self, xml, flags): if (flags & VIR_DOMAIN_AFFECT_LIVE and self._state != VIR_DOMAIN_RUNNING): raise make_libvirtError( libvirtError, "AFFECT_LIVE only allowed for running domains!", error_code=VIR_ERR_INTERNAL_ERROR, error_domain=VIR_FROM_QEMU) self.attachDevice(xml) def detachDevice(self, xml): disk_info = _parse_disk_info(etree.fromstring(xml)) disk_info['_attached'] = True return disk_info in self._def['devices']['disks'] def detachDeviceFlags(self, xml, _flags): self.detachDevice(xml) def XMLDesc(self, flags): disks = '' for disk in self._def['devices']['disks']: disks += '''<disk type='%(type)s' device='%(device)s'> <driver name='%(driver_name)s' type='%(driver_type)s'/> <source file='%(source)s'/> <target dev='%(target_dev)s' bus='%(target_bus)s'/> <address type='drive' controller='0' bus='0' unit='0'/> </disk>''' % disk nics = '' for nic in self._def['devices']['nics']: nics += '''<interface type='%(type)s'> <mac address='%(mac)s'/> <source %(type)s='%(source)s'/> <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x0'/> </interface>''' % nic return '''<domain type='kvm'> <name>%(name)s</name> <uuid>%(uuid)s</uuid> <memory>%(memory)s</memory> <currentMemory>%(memory)s</currentMemory> <vcpu>%(vcpu)s</vcpu> <os> <type arch='%(arch)s' machine='pc-0.12'>hvm</type> <boot dev='hd'/> </os> <features> <acpi/> <apic/> <pae/> </features> <clock offset='localtime'/> <on_poweroff>destroy</on_poweroff> <on_reboot>restart</on_reboot> <on_crash>restart</on_crash> <devices> <emulator>/usr/bin/kvm</emulator> %(disks)s <controller type='ide' index='0'> <address type='pci' domain='0x0000' bus='0x00' slot='0x01' function='0x1'/> </controller> %(nics)s <serial type='file'> <source path='dummy.log'/> <target port='0'/> </serial> <serial type='pty'> <source pty='/dev/pts/27'/> <target port='1'/> </serial> <console type='file'> <source path='dummy.log'/> <target 
port='0'/> </console> <input type='tablet' bus='usb'/> <input type='mouse' bus='ps2'/> <graphics type='vnc' port='-1' autoport='yes'/> <graphics type='spice' port='-1' autoport='yes'/> <video> <model type='cirrus' vram='9216' heads='1'/> <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x0'/> </video> <memballoon model='virtio'> <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x0'/> </memballoon> </devices> </domain>''' % {'name': self._def['name'], 'uuid': self._def['uuid'], 'memory': self._def['memory'], 'vcpu': self._def['vcpu'], 'arch': self._def['os']['arch'], 'disks': disks, 'nics': nics} def managedSave(self, flags): self._connection._mark_not_running(self) self._has_saved_state = True def managedSaveRemove(self, flags): self._has_saved_state = False def hasManagedSaveImage(self, flags): return int(self._has_saved_state) def resume(self): self._state = VIR_DOMAIN_RUNNING def snapshotCreateXML(self, xml, flags): tree = etree.fromstring(xml) name = tree.find('./name').text snapshot = DomainSnapshot(name, self) self._snapshots[name] = snapshot return snapshot def vcpus(self): vcpus = ([], []) for i in range(0, self._def['vcpu']): vcpus[0].append((i, 1, 120405L, i)) vcpus[1].append((True, True, True, True)) return vcpus def memoryStats(self): return {} def maxMemory(self): return self._def['memory'] def blockJobInfo(self, disk, flags): return {} class DomainSnapshot(object): def __init__(self, name, domain): self._name = name self._domain = domain def delete(self, flags): del self._domain._snapshots[self._name] class Connection(object): def __init__(self, uri=None, readonly=False, version=9007): if not uri or uri == '': if allow_default_uri_connection: uri = 'qemu:///session' else: raise ValueError("URI was None, but fake libvirt is " "configured to not accept this.") uri_whitelist = ['qemu:///system', 'qemu:///session', 'xen:///system', 'uml:///system', 'test:///default'] if uri not in uri_whitelist: raise make_libvirtError( libvirtError, "libvirt error: no connection driver " "available for No connection for URI %s" % uri, error_code=5, error_domain=0) self.readonly = readonly self._uri = uri self._vms = {} self._running_vms = {} self._id_counter = 1 # libvirt reserves 0 for the hypervisor. 
self._nwfilters = {} self._event_callbacks = {} self.fakeLibVersion = version self.fakeVersion = version def _add_filter(self, nwfilter): self._nwfilters[nwfilter._name] = nwfilter def _remove_filter(self, nwfilter): del self._nwfilters[nwfilter._name] def _mark_running(self, dom): self._running_vms[self._id_counter] = dom self._emit_lifecycle(dom, VIR_DOMAIN_EVENT_STARTED, 0) self._id_counter += 1 def _mark_not_running(self, dom): if dom._transient: self._undefine(dom) dom._id = -1 for (k, v) in self._running_vms.iteritems(): if v == dom: del self._running_vms[k] self._emit_lifecycle(dom, VIR_DOMAIN_EVENT_STOPPED, 0) return def _undefine(self, dom): del self._vms[dom.name()] if not dom._transient: self._emit_lifecycle(dom, VIR_DOMAIN_EVENT_UNDEFINED, 0) def getInfo(self): return [node_arch, node_kB_mem, node_cpus, node_mhz, node_nodes, node_sockets, node_cores, node_threads] def numOfDomains(self): return len(self._running_vms) def listDomainsID(self): return self._running_vms.keys() def lookupByID(self, id): if id in self._running_vms: return self._running_vms[id] raise make_libvirtError( libvirtError, 'Domain not found: no domain with matching id %d' % id, error_code=VIR_ERR_NO_DOMAIN, error_domain=VIR_FROM_QEMU) def lookupByName(self, name): if name in self._vms: return self._vms[name] raise make_libvirtError( libvirtError, 'Domain not found: no domain with matching name "%s"' % name, error_code=VIR_ERR_NO_DOMAIN, error_domain=VIR_FROM_QEMU) def _emit_lifecycle(self, dom, event, detail): if VIR_DOMAIN_EVENT_ID_LIFECYCLE not in self._event_callbacks: return cbinfo = self._event_callbacks[VIR_DOMAIN_EVENT_ID_LIFECYCLE] callback = cbinfo[0] opaque = cbinfo[1] callback(self, dom, event, detail, opaque) def defineXML(self, xml): dom = Domain(connection=self, running=False, transient=False, xml=xml) self._vms[dom.name()] = dom self._emit_lifecycle(dom, VIR_DOMAIN_EVENT_DEFINED, 0) return dom def createXML(self, xml, flags): dom = Domain(connection=self, running=True, transient=True, xml=xml) self._vms[dom.name()] = dom self._emit_lifecycle(dom, VIR_DOMAIN_EVENT_STARTED, 0) return dom def getType(self): if self._uri == 'qemu:///system': return 'QEMU' def getLibVersion(self): return self.fakeLibVersion def getVersion(self): return self.fakeVersion def getHostname(self): return 'compute1' def domainEventRegisterAny(self, dom, eventid, callback, opaque): self._event_callbacks[eventid] = [callback, opaque] def registerCloseCallback(self, cb, opaque): pass def getCapabilities(self): """Return spoofed capabilities.""" return '''<capabilities> <host> <uuid>cef19ce0-0ca2-11df-855d-b19fbce37686</uuid> <cpu> <arch>x86_64</arch> <model>Penryn</model> <vendor>Intel</vendor> <topology sockets='1' cores='2' threads='1'/> <feature name='xtpr'/> <feature name='tm2'/> <feature name='est'/> <feature name='vmx'/> <feature name='ds_cpl'/> <feature name='monitor'/> <feature name='pbe'/> <feature name='tm'/> <feature name='ht'/> <feature name='ss'/> <feature name='acpi'/> <feature name='ds'/> <feature name='vme'/> </cpu> <migration_features> <live/> <uri_transports> <uri_transport>tcp</uri_transport> </uri_transports> </migration_features> <secmodel> <model>apparmor</model> <doi>0</doi> </secmodel> </host> <guest> <os_type>hvm</os_type> <arch name='i686'> <wordsize>32</wordsize> <emulator>/usr/bin/qemu</emulator> <machine>pc-0.14</machine> <machine canonical='pc-0.14'>pc</machine> <machine>pc-0.13</machine> <machine>pc-0.12</machine> <machine>pc-0.11</machine> <machine>pc-0.10</machine> <machine>isapc</machine> 
<domain type='qemu'> </domain> <domain type='kvm'> <emulator>/usr/bin/kvm</emulator> <machine>pc-0.14</machine> <machine canonical='pc-0.14'>pc</machine> <machine>pc-0.13</machine> <machine>pc-0.12</machine> <machine>pc-0.11</machine> <machine>pc-0.10</machine> <machine>isapc</machine> </domain> </arch> <features> <cpuselection/> <deviceboot/> <pae/> <nonpae/> <acpi default='on' toggle='yes'/> <apic default='on' toggle='no'/> </features> </guest> <guest> <os_type>hvm</os_type> <arch name='x86_64'> <wordsize>64</wordsize> <emulator>/usr/bin/qemu-system-x86_64</emulator> <machine>pc-0.14</machine> <machine canonical='pc-0.14'>pc</machine> <machine>pc-0.13</machine> <machine>pc-0.12</machine> <machine>pc-0.11</machine> <machine>pc-0.10</machine> <machine>isapc</machine> <domain type='qemu'> </domain> <domain type='kvm'> <emulator>/usr/bin/kvm</emulator> <machine>pc-0.14</machine> <machine canonical='pc-0.14'>pc</machine> <machine>pc-0.13</machine> <machine>pc-0.12</machine> <machine>pc-0.11</machine> <machine>pc-0.10</machine> <machine>isapc</machine> </domain> </arch> <features> <cpuselection/> <deviceboot/> <acpi default='on' toggle='yes'/> <apic default='on' toggle='no'/> </features> </guest> <guest> <os_type>hvm</os_type> <arch name='arm'> <wordsize>32</wordsize> <emulator>/usr/bin/qemu-system-arm</emulator> <machine>integratorcp</machine> <machine>vexpress-a9</machine> <machine>syborg</machine> <machine>musicpal</machine> <machine>mainstone</machine> <machine>n800</machine> <machine>n810</machine> <machine>n900</machine> <machine>cheetah</machine> <machine>sx1</machine> <machine>sx1-v1</machine> <machine>beagle</machine> <machine>beaglexm</machine> <machine>tosa</machine> <machine>akita</machine> <machine>spitz</machine> <machine>borzoi</machine> <machine>terrier</machine> <machine>connex</machine> <machine>verdex</machine> <machine>lm3s811evb</machine> <machine>lm3s6965evb</machine> <machine>realview-eb</machine> <machine>realview-eb-mpcore</machine> <machine>realview-pb-a8</machine> <machine>realview-pbx-a9</machine> <machine>versatilepb</machine> <machine>versatileab</machine> <domain type='qemu'> </domain> </arch> <features> <deviceboot/> </features> </guest> <guest> <os_type>hvm</os_type> <arch name='mips'> <wordsize>32</wordsize> <emulator>/usr/bin/qemu-system-mips</emulator> <machine>malta</machine> <machine>mipssim</machine> <machine>magnum</machine> <machine>pica61</machine> <machine>mips</machine> <domain type='qemu'> </domain> </arch> <features> <deviceboot/> </features> </guest> <guest> <os_type>hvm</os_type> <arch name='mipsel'> <wordsize>32</wordsize> <emulator>/usr/bin/qemu-system-mipsel</emulator> <machine>malta</machine> <machine>mipssim</machine> <machine>magnum</machine> <machine>pica61</machine> <machine>mips</machine> <domain type='qemu'> </domain> </arch> <features> <deviceboot/> </features> </guest> <guest> <os_type>hvm</os_type> <arch name='sparc'> <wordsize>32</wordsize> <emulator>/usr/bin/qemu-system-sparc</emulator> <machine>SS-5</machine> <machine>leon3_generic</machine> <machine>SS-10</machine> <machine>SS-600MP</machine> <machine>SS-20</machine> <machine>Voyager</machine> <machine>LX</machine> <machine>SS-4</machine> <machine>SPARCClassic</machine> <machine>SPARCbook</machine> <machine>SS-1000</machine> <machine>SS-2000</machine> <machine>SS-2</machine> <domain type='qemu'> </domain> </arch> </guest> <guest> <os_type>hvm</os_type> <arch name='ppc'> <wordsize>32</wordsize> <emulator>/usr/bin/qemu-system-ppc</emulator> <machine>g3beige</machine> 
<machine>virtex-ml507</machine> <machine>mpc8544ds</machine> <machine canonical='bamboo-0.13'>bamboo</machine> <machine>bamboo-0.13</machine> <machine>bamboo-0.12</machine> <machine>ref405ep</machine> <machine>taihu</machine> <machine>mac99</machine> <machine>prep</machine> <domain type='qemu'> </domain> </arch> <features> <deviceboot/> </features> </guest> </capabilities>''' def compareCPU(self, xml, flags): tree = etree.fromstring(xml) arch_node = tree.find('./arch') if arch_node is not None: if arch_node.text not in ['x86_64', 'i686']: return VIR_CPU_COMPARE_INCOMPATIBLE model_node = tree.find('./model') if model_node is not None: if model_node.text != node_cpu_model: return VIR_CPU_COMPARE_INCOMPATIBLE vendor_node = tree.find('./vendor') if vendor_node is not None: if vendor_node.text != node_cpu_vendor: return VIR_CPU_COMPARE_INCOMPATIBLE # The rest of the stuff libvirt implements is rather complicated # and I don't think it adds much value to replicate it here. return VIR_CPU_COMPARE_IDENTICAL def getCPUStats(self, cpuNum, flag): if cpuNum < 2: return {'kernel': 5664160000000L, 'idle': 1592705190000000L, 'user': 26728850000000L, 'iowait': 6121490000000L} else: raise make_libvirtError( libvirtError, "invalid argument: Invalid cpu number", error_code=VIR_ERR_INTERNAL_ERROR, error_domain=VIR_FROM_QEMU) def nwfilterLookupByName(self, name): try: return self._nwfilters[name] except KeyError: raise make_libvirtError( libvirtError, "no nwfilter with matching name %s" % name, error_code=VIR_ERR_NO_NWFILTER, error_domain=VIR_FROM_NWFILTER) def nwfilterDefineXML(self, xml): nwfilter = NWFilter(self, xml) self._add_filter(nwfilter) def listDefinedDomains(self): return [] def listDevices(self, cap, flags): return [] def baselineCPU(self, cpu, flag): """Add new libvirt API.""" return """<cpu mode='custom' match='exact'> <model fallback='allow'>Westmere</model> <vendor>Intel</vendor> <feature policy='require' name='aes'/> </cpu>""" def openAuth(uri, auth, flags): if type(auth) != list: raise Exception(_("Expected a list for 'auth' parameter")) if type(auth[0]) != list: raise Exception( _("Expected a function in 'auth[0]' parameter")) if not callable(auth[1]): raise Exception( _("Expected a function in 'auth[1]' parameter")) return Connection(uri, (flags == VIR_CONNECT_RO)) def virEventRunDefaultImpl(): time.sleep(1) def virEventRegisterDefaultImpl(): if connection_used: raise Exception(_("virEventRegisterDefaultImpl() must be \ called before connection is used.")) def registerErrorHandler(handler, ctxt): pass def make_libvirtError(error_class, msg, error_code=None, error_domain=None, error_message=None, error_level=None, str1=None, str2=None, str3=None, int1=None, int2=None): """Convenience function for creating `libvirtError` exceptions which allow you to specify arguments in constructor without having to manipulate the `err` tuple directly. We need to pass in `error_class` to this function because it may be `libvirt.libvirtError` or `fakelibvirt.libvirtError` depending on whether `libvirt-python` is installed. """ exc = error_class(msg) exc.err = (error_code, error_domain, error_message, error_level, str1, str2, str3, int1, int2) return exc virDomain = Domain virConnect = Connection
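A short sketch of how a test might exercise this fake connection; the domain XML below is a minimal illustrative definition, not one generated by Nova:

conn = Connection('qemu:///system')
dom = conn.createXML("""<domain type='kvm'>
                          <name>testvm</name>
                          <memory>131072</memory>
                          <vcpu>1</vcpu>
                        </domain>""", 0)
assert dom.isActive()
assert conn.lookupByName('testvm') is dom

dom.destroy()  # transient domain: destroying it also undefines it
try:
    conn.lookupByName('testvm')
    raise AssertionError('lookup should have failed')
except libvirtError as e:
    assert e.get_error_code() == VIR_ERR_NO_DOMAIN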
apache-2.0
kenshay/ImageScripter
ProgramData/SystemFiles/Python/Lib/site-packages/pip/_vendor/urllib3/connection.py
24
14485
from __future__ import absolute_import import datetime import logging import os import sys import socket from socket import error as SocketError, timeout as SocketTimeout import warnings from .packages import six from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection from .packages.six.moves.http_client import HTTPException # noqa: F401 try: # Compiled with SSL? import ssl BaseSSLError = ssl.SSLError except (ImportError, AttributeError): # Platform-specific: No SSL. ssl = None class BaseSSLError(BaseException): pass try: # Python 3: # Not a no-op, we're adding this to the namespace so it can be imported. ConnectionError = ConnectionError except NameError: # Python 2: class ConnectionError(Exception): pass from .exceptions import ( NewConnectionError, ConnectTimeoutError, SubjectAltNameWarning, SystemTimeWarning, ) from .packages.ssl_match_hostname import match_hostname, CertificateError from .util.ssl_ import ( resolve_cert_reqs, resolve_ssl_version, assert_fingerprint, create_urllib3_context, ssl_wrap_socket ) from .util import connection from ._collections import HTTPHeaderDict log = logging.getLogger(__name__) port_by_scheme = { 'http': 80, 'https': 443, } # When updating RECENT_DATE, move it to within two years of the current date, # and not less than 6 months ago. # Example: if Today is 2018-01-01, then RECENT_DATE should be any date on or # after 2016-01-01 (today - 2 years) AND before 2017-07-01 (today - 6 months) RECENT_DATE = datetime.date(2017, 6, 30) class DummyConnection(object): """Used to detect a failed ConnectionCls import.""" pass class HTTPConnection(_HTTPConnection, object): """ Based on httplib.HTTPConnection but provides an extra constructor backwards-compatibility layer between older and newer Pythons. Additional keyword parameters are used to configure attributes of the connection. Accepted parameters include: - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` - ``source_address``: Set the source address for the current connection. .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x - ``socket_options``: Set specific options on the underlying socket. If not specified, then defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. For example, if you wish to enable TCP Keep Alive in addition to the defaults, you might pass:: HTTPConnection.default_socket_options + [ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), ] Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). """ default_port = port_by_scheme['http'] #: Disable Nagle's algorithm by default. #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] #: Whether this connection verifies the host's certificate. is_verified = False def __init__(self, *args, **kw): if six.PY3: # Python 3 kw.pop('strict', None) # Pre-set source_address in case we have an older Python like 2.6. self.source_address = kw.get('source_address') if sys.version_info < (2, 7): # Python 2.6 # _HTTPConnection on Python 2.6 will balk at this keyword arg, but # not newer versions. We can still use it when creating a # connection though, so we pop it *after* we have saved it as # self.source_address. kw.pop('source_address', None) #: The socket options provided by the user. If no options are #: provided, we use the default options. 
self.socket_options = kw.pop('socket_options', self.default_socket_options) # Superclass also sets self.source_address in Python 2.7+. _HTTPConnection.__init__(self, *args, **kw) @property def host(self): """ Getter method to remove any trailing dots that indicate the hostname is an FQDN. In general, SSL certificates don't include the trailing dot indicating a fully-qualified domain name, and thus, they don't validate properly when checked against a domain name that includes the dot. In addition, some servers may not expect to receive the trailing dot when provided. However, the hostname with trailing dot is critical to DNS resolution; doing a lookup with the trailing dot will properly only resolve the appropriate FQDN, whereas a lookup without a trailing dot will search the system's search domain list. Thus, it's important to keep the original host around for use only in those cases where it's appropriate (i.e., when doing DNS lookup to establish the actual TCP connection across which we're going to send HTTP requests). """ return self._dns_host.rstrip('.') @host.setter def host(self, value): """ Setter for the `host` property. We assume that only urllib3 uses the _dns_host attribute; httplib itself only uses `host`, and it seems reasonable that other libraries follow suit. """ self._dns_host = value def _new_conn(self): """ Establish a socket connection and set nodelay settings on it. :return: New socket connection. """ extra_kw = {} if self.source_address: extra_kw['source_address'] = self.source_address if self.socket_options: extra_kw['socket_options'] = self.socket_options try: conn = connection.create_connection( (self._dns_host, self.port), self.timeout, **extra_kw) except SocketTimeout as e: raise ConnectTimeoutError( self, "Connection to %s timed out. (connect timeout=%s)" % (self.host, self.timeout)) except SocketError as e: raise NewConnectionError( self, "Failed to establish a new connection: %s" % e) return conn def _prepare_conn(self, conn): self.sock = conn # the _tunnel_host attribute was added in python 2.6.3 (via # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do # not have them. if getattr(self, '_tunnel_host', None): # TODO: Fix tunnel so it doesn't depend on self.sock state. 
self._tunnel() # Mark this connection as not reusable self.auto_open = 0 def connect(self): conn = self._new_conn() self._prepare_conn(conn) def request_chunked(self, method, url, body=None, headers=None): """ Alternative to the common request method, which sends the body with chunked encoding and not as one block """ headers = HTTPHeaderDict(headers if headers is not None else {}) skip_accept_encoding = 'accept-encoding' in headers skip_host = 'host' in headers self.putrequest( method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host ) for header, value in headers.items(): self.putheader(header, value) if 'transfer-encoding' not in headers: self.putheader('Transfer-Encoding', 'chunked') self.endheaders() if body is not None: stringish_types = six.string_types + (six.binary_type,) if isinstance(body, stringish_types): body = (body,) for chunk in body: if not chunk: continue if not isinstance(chunk, six.binary_type): chunk = chunk.encode('utf8') len_str = hex(len(chunk))[2:] self.send(len_str.encode('utf-8')) self.send(b'\r\n') self.send(chunk) self.send(b'\r\n') # After the if clause, to always have a closed body self.send(b'0\r\n\r\n') class HTTPSConnection(HTTPConnection): default_port = port_by_scheme['https'] ssl_version = None def __init__(self, host, port=None, key_file=None, cert_file=None, strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, ssl_context=None, **kw): HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw) self.key_file = key_file self.cert_file = cert_file self.ssl_context = ssl_context # Required property for Google AppEngine 1.9.0 which otherwise causes # HTTPS requests to go out as HTTP. (See Issue #356) self._protocol = 'https' def connect(self): conn = self._new_conn() self._prepare_conn(conn) if self.ssl_context is None: self.ssl_context = create_urllib3_context( ssl_version=resolve_ssl_version(None), cert_reqs=resolve_cert_reqs(None), ) self.sock = ssl_wrap_socket( sock=conn, keyfile=self.key_file, certfile=self.cert_file, ssl_context=self.ssl_context, ) class VerifiedHTTPSConnection(HTTPSConnection): """ Based on httplib.HTTPSConnection but wraps the socket with SSL certification. """ cert_reqs = None ca_certs = None ca_cert_dir = None ssl_version = None assert_fingerprint = None def set_cert(self, key_file=None, cert_file=None, cert_reqs=None, ca_certs=None, assert_hostname=None, assert_fingerprint=None, ca_cert_dir=None): """ This method should only be called once, before the connection is used. """ # If cert_reqs is not provided, we can try to guess. If the user gave # us a cert database, we assume they want to use it: otherwise, if # they gave us an SSL Context object we should use whatever is set for # it. if cert_reqs is None: if ca_certs or ca_cert_dir: cert_reqs = 'CERT_REQUIRED' elif self.ssl_context is not None: cert_reqs = self.ssl_context.verify_mode self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint self.ca_certs = ca_certs and os.path.expanduser(ca_certs) self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) def connect(self): # Add certificate verification conn = self._new_conn() hostname = self.host if getattr(self, '_tunnel_host', None): # _tunnel_host was added in Python 2.6.3 # (See: http://hg.python.org/cpython/rev/0f57b30a152f) self.sock = conn # Calls self._set_hostport(), so self.host is # self._tunnel_host below. 
self._tunnel() # Mark this connection as not reusable self.auto_open = 0 # Override the host with the one we're requesting data from. hostname = self._tunnel_host is_time_off = datetime.date.today() < RECENT_DATE if is_time_off: warnings.warn(( 'System time is way off (before {0}). This will probably ' 'lead to SSL verification errors').format(RECENT_DATE), SystemTimeWarning ) # Wrap socket using verification with the root certs in # trusted_root_certs if self.ssl_context is None: self.ssl_context = create_urllib3_context( ssl_version=resolve_ssl_version(self.ssl_version), cert_reqs=resolve_cert_reqs(self.cert_reqs), ) context = self.ssl_context context.verify_mode = resolve_cert_reqs(self.cert_reqs) self.sock = ssl_wrap_socket( sock=conn, keyfile=self.key_file, certfile=self.cert_file, ca_certs=self.ca_certs, ca_cert_dir=self.ca_cert_dir, server_hostname=hostname, ssl_context=context) if self.assert_fingerprint: assert_fingerprint(self.sock.getpeercert(binary_form=True), self.assert_fingerprint) elif context.verify_mode != ssl.CERT_NONE \ and not getattr(context, 'check_hostname', False) \ and self.assert_hostname is not False: # While urllib3 attempts to always turn off hostname matching from # the TLS library, this cannot always be done. So we check whether # the TLS Library still thinks it's matching hostnames. cert = self.sock.getpeercert() if not cert.get('subjectAltName', ()): warnings.warn(( 'Certificate for {0} has no `subjectAltName`, falling back to check for a ' '`commonName` for now. This feature is being removed by major browsers and ' 'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 ' 'for details.)'.format(hostname)), SubjectAltNameWarning ) _match_hostname(cert, self.assert_hostname or hostname) self.is_verified = ( context.verify_mode == ssl.CERT_REQUIRED or self.assert_fingerprint is not None ) def _match_hostname(cert, asserted_hostname): try: match_hostname(cert, asserted_hostname) except CertificateError as e: log.error( 'Certificate did not match expected hostname: %s. ' 'Certificate: %s', asserted_hostname, cert ) # Add cert to exception and reraise so client code can inspect # the cert when catching the exception, if they want to e._peer_cert = cert raise if ssl: # Make a copy for testing. UnverifiedHTTPSConnection = HTTPSConnection HTTPSConnection = VerifiedHTTPSConnection else: HTTPSConnection = DummyConnection
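A minimal usage sketch for the HTTPConnection class above (host, port, and path are placeholders; error handling is omitted):

    from pip._vendor.urllib3.connection import HTTPConnection

    # Open a plain HTTP connection and issue a GET; request() and
    # getresponse() are inherited from the standard library's http.client.
    conn = HTTPConnection(host='example.com', port=80, timeout=10)
    conn.request('GET', '/')
    response = conn.getresponse()
    print(response.status)
    conn.close()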
gpl-3.0
837468220/python-for-android
python3-alpha/extra_modules/pyxmpp2/clientstream.py
46
3571
#
# (C) Copyright 2003-2011 Jacek Konieczny <jajcus@jajcus.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License Version
# 2.1 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
# pylint: disable-msg=W0221
"""Client-Server stream handling.

Normative reference:
  - `RFC 6120 <http://www.ietf.org/rfc/rfc6120.txt>`__
"""

__docformat__ = "restructuredtext en"

from .streambase import StreamBase
from .jid import JID
from .settings import XMPPSettings
from .constants import STANZA_CLIENT_NS

class ClientStream(StreamBase):
    """Handles XMPP-IM c2s stream.

    Both client and server side of the connection is supported.
    """
    # pylint: disable=R0904
    def __init__(self, jid, stanza_route, handlers, settings = None):
        """Initialize the ClientStream object.

        :Parameters:
            - `jid`: local JID.
            - `handlers`: XMPP feature and event handlers
            - `settings`: PyXMPP settings for the stream
        :Types:
            - `jid`: `JID`
            - `settings`: `XMPPSettings`
        """
        if handlers is None:
            handlers = []
        if settings is None:
            settings = XMPPSettings()
        if "resource" not in settings:
            settings["resource"] = jid.resource
        StreamBase.__init__(self, STANZA_CLIENT_NS, stanza_route, handlers,
                            settings)
        self.me = JID(jid.local, jid.domain)

    def initiate(self, transport, to = None):
        """Initiate an XMPP connection over the `transport`.

        :Parameters:
            - `transport`: an XMPP transport instance
            - `to`: peer name (defaults to own jid domain part)
        """
        if to is None:
            to = JID(self.me.domain)
        return StreamBase.initiate(self, transport, to)

    def receive(self, transport, myname = None):
        """Receive an XMPP connection over the `transport`.

        :Parameters:
            - `transport`: an XMPP transport instance
            - `myname`: local stream endpoint name (defaults to own jid
              domain part).
        """
        if myname is None:
            myname = JID(self.me.domain)
        return StreamBase.receive(self, transport, myname)

    def fix_out_stanza(self, stanza):
        """Fix outgoing stanza.

        On a client clear the sender JID. On a server set the sender
        address to the own JID if the address is not set yet."""
        StreamBase.fix_out_stanza(self, stanza)
        if self.initiator:
            if stanza.from_jid:
                stanza.from_jid = None
        else:
            if not stanza.from_jid:
                stanza.from_jid = self.me

    def fix_in_stanza(self, stanza):
        """Fix an incoming stanza.

        On a server replace the sender address with the authorized
        client JID."""
        StreamBase.fix_in_stanza(self, stanza)
        if not self.initiator:
            if stanza.from_jid != self.peer:
                stanza.set_from(self.peer)

# vi: sts=4 et sw=4
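A hedged construction sketch for ClientStream. The JID value, the None stanza route, and the empty handler list are illustrative placeholders; a real application wires in a stanza route and transport from the rest of pyxmpp2:

    from pyxmpp2.jid import JID
    from pyxmpp2.clientstream import ClientStream

    # handlers empty for illustration; settings default to XMPPSettings()
    stream = ClientStream(JID("alice@example.org/work"), None, [])
    # stream.initiate(transport) would then start the c2s stream,
    # with the peer name defaulting to JID("example.org")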
apache-2.0
TeddyDesTodes/aubio
waflib/Tools/g95.py
316
1495
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file

import re
from waflib import Utils
from waflib.Tools import fc, fc_config, fc_scan, ar
from waflib.Configure import conf

@conf
def find_g95(conf):
    fc = conf.find_program('g95', var='FC')
    fc = conf.cmd_to_list(fc)
    conf.get_g95_version(fc)
    conf.env.FC_NAME = 'G95'

@conf
def g95_flags(conf):
    v = conf.env
    v['FCFLAGS_fcshlib'] = ['-fPIC']
    v['FORTRANMODFLAG'] = ['-fmod=', '']
    v['FCFLAGS_DEBUG'] = ['-Werror']

@conf
def g95_modifier_win32(conf):
    fc_config.fortran_modifier_win32(conf)

@conf
def g95_modifier_cygwin(conf):
    fc_config.fortran_modifier_cygwin(conf)

@conf
def g95_modifier_darwin(conf):
    fc_config.fortran_modifier_darwin(conf)

@conf
def g95_modifier_platform(conf):
    dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
    g95_modifier_func = getattr(conf, 'g95_modifier_' + dest_os, None)
    if g95_modifier_func:
        g95_modifier_func()

@conf
def get_g95_version(conf, fc):
    version_re = re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search
    cmd = fc + ['--version']
    out, err = fc_config.getoutput(conf, cmd, stdin=False)
    if out:
        match = version_re(out)
    else:
        match = version_re(err)
    if not match:
        conf.fatal('cannot determine g95 version')
    k = match.groupdict()
    conf.env['FC_VERSION'] = (k['major'], k['minor'])

def configure(conf):
    conf.find_g95()
    conf.find_ar()
    conf.fc_flags()
    conf.fc_add_flags()
    conf.g95_flags()
    conf.g95_modifier_platform()
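In a wscript this tool is normally pulled in through conf.load(), which runs the configure() function above; a minimal sketch (project layout assumed):

    def configure(conf):
        # loads find_g95, g95_flags and the platform modifier above
        conf.load('g95')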
gpl-3.0
bak1an/django
tests/template_tests/syntax_tests/test_named_endblock.py
521
2312
from django.template import TemplateSyntaxError
from django.test import SimpleTestCase

from ..utils import setup


class NamedEndblockTests(SimpleTestCase):

    @setup({'namedendblocks01': '1{% block first %}_{% block second %}'
                                '2{% endblock second %}_{% endblock first %}3'})
    def test_namedendblocks01(self):
        output = self.engine.render_to_string('namedendblocks01')
        self.assertEqual(output, '1_2_3')

    # Unbalanced blocks
    @setup({'namedendblocks02': '1{% block first %}_{% block second %}'
                                '2{% endblock first %}_{% endblock second %}3'})
    def test_namedendblocks02(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('namedendblocks02')

    @setup({'namedendblocks03': '1{% block first %}_{% block second %}'
                                '2{% endblock %}_{% endblock second %}3'})
    def test_namedendblocks03(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('namedendblocks03')

    @setup({'namedendblocks04': '1{% block first %}_{% block second %}'
                                '2{% endblock second %}_{% endblock third %}3'})
    def test_namedendblocks04(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('namedendblocks04')

    @setup({'namedendblocks05': '1{% block first %}_{% block second %}2{% endblock first %}'})
    def test_namedendblocks05(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('namedendblocks05')

    # Mixed named and unnamed endblocks
    @setup({'namedendblocks06': '1{% block first %}_{% block second %}'
                                '2{% endblock %}_{% endblock first %}3'})
    def test_namedendblocks06(self):
        """
        Mixed named and unnamed endblocks
        """
        output = self.engine.render_to_string('namedendblocks06')
        self.assertEqual(output, '1_2_3')

    @setup({'namedendblocks07': '1{% block first %}_{% block second %}'
                                '2{% endblock second %}_{% endblock %}3'})
    def test_namedendblocks07(self):
        output = self.engine.render_to_string('namedendblocks07')
        self.assertEqual(output, '1_2_3')
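The feature under test, in template form: a named endblock must repeat the name of the block it closes. An illustrative template (block names are arbitrary):

    {% block header %}
      {% block title %}Untitled{% endblock title %}
    {% endblock header %}

Swapping the two endblock names, or closing an undefined name, raises TemplateSyntaxError, exactly as the unbalanced-block tests above assert.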
bsd-3-clause
pgielda/vybrid-linux
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py
11088
3246
# Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.

from collections import defaultdict

def autodict():
    return defaultdict(autodict)

flag_fields = autodict()
symbolic_fields = autodict()

def define_flag_field(event_name, field_name, delim):
    flag_fields[event_name][field_name]['delim'] = delim

def define_flag_value(event_name, field_name, value, field_str):
    flag_fields[event_name][field_name]['values'][value] = field_str

def define_symbolic_field(event_name, field_name):
    # nothing to do, really
    pass

def define_symbolic_value(event_name, field_name, value, field_str):
    symbolic_fields[event_name][field_name]['values'][value] = field_str

def flag_str(event_name, field_name, value):
    string = ""

    if flag_fields[event_name][field_name]:
        print_delim = 0
        keys = flag_fields[event_name][field_name]['values'].keys()
        keys.sort()
        for idx in keys:
            if not value and not idx:
                string += flag_fields[event_name][field_name]['values'][idx]
                break
            if idx and (value & idx) == idx:
                if print_delim and flag_fields[event_name][field_name]['delim']:
                    string += " " + flag_fields[event_name][field_name]['delim'] + " "
                string += flag_fields[event_name][field_name]['values'][idx]
                print_delim = 1
                value &= ~idx

    return string

def symbol_str(event_name, field_name, value):
    string = ""

    if symbolic_fields[event_name][field_name]:
        keys = symbolic_fields[event_name][field_name]['values'].keys()
        keys.sort()
        for idx in keys:
            if not value and not idx:
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break
            if (value == idx):
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break

    return string

trace_flags = { 0x00: "NONE", \
                0x01: "IRQS_OFF", \
                0x02: "IRQS_NOSUPPORT", \
                0x04: "NEED_RESCHED", \
                0x08: "HARDIRQ", \
                0x10: "SOFTIRQ" }

def trace_flag_str(value):
    string = ""
    print_delim = 0

    keys = trace_flags.keys()

    for idx in keys:
        if not value and not idx:
            string += "NONE"
            break

        if idx and (value & idx) == idx:
            if print_delim:
                string += " | "
            string += trace_flags[idx]
            print_delim = 1
            value &= ~idx

    return string

def taskState(state):
    states = {
        0 : "R",
        1 : "S",
        2 : "D",
        64: "DEAD"
    }

    if state not in states:
        return "Unknown"

    return states[state]

class EventHeaders:
    def __init__(self, common_cpu, common_secs, common_nsecs,
                 common_pid, common_comm):
        self.cpu = common_cpu
        self.secs = common_secs
        self.nsecs = common_nsecs
        self.pid = common_pid
        self.comm = common_comm

    def ts(self):
        return (self.secs * (10 ** 9)) + self.nsecs

    def ts_format(self):
        return "%d.%d" % (self.secs, int(self.nsecs / 1000))
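A sketch of how the flag helpers above are meant to be used from a perf script; the event and field names below are made up for illustration (Python 2, matching the module):

    define_flag_field("irq:softirq_entry", "vec", "|")
    define_flag_value("irq:softirq_entry", "vec", 0x01, "TIMER")
    define_flag_value("irq:softirq_entry", "vec", 0x02, "NET_TX")

    # decodes a raw bitmask into "TIMER | NET_TX"
    print flag_str("irq:softirq_entry", "vec", 0x03)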
gpl-2.0
mattesno1/Sick-Beard
lib/requests/packages/charade/big5prober.py
206
1726
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import Big5DistributionAnalysis
from .mbcssm import Big5SMModel


class Big5Prober(MultiByteCharSetProber):
    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        self._mCodingSM = CodingStateMachine(Big5SMModel)
        self._mDistributionAnalyzer = Big5DistributionAnalysis()
        self.reset()

    def get_charset_name(self):
        return "Big5"
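A hedged usage sketch: the prober follows the common charade/chardet prober protocol, where feed() consumes raw bytes and get_confidence() reports how likely the input is Big5 (some_bytes is a placeholder byte string):

    prober = Big5Prober()
    prober.feed(some_bytes)
    print(prober.get_charset_name(), prober.get_confidence())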
gpl-3.0
romanoff/google-closure-library
closure/bin/build/depstree.py
39
6152
# Copyright 2009 The Closure Library Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


"""Class to represent a full Closure Library dependency tree.

Offers a queryable tree of dependencies of a given set of sources.  The tree
will also do logical validation to prevent duplicate provides and circular
dependencies.
"""


class DepsTree(object):
  """Represents the set of dependencies between source files."""

  def __init__(self, sources):
    """Initializes the tree with a set of sources.

    Args:
      sources: A set of JavaScript sources.

    Raises:
      MultipleProvideError: A namespace is provided by multiple sources.
      NamespaceNotFoundError: A namespace is required but never provided.
    """

    self._sources = sources
    self._provides_map = dict()

    # Ensure nothing was provided twice.
    for source in sources:
      for provide in source.provides:
        if provide in self._provides_map:
          raise MultipleProvideError(
              provide, [self._provides_map[provide], source])

        self._provides_map[provide] = source

    # Check that all required namespaces are provided.
    for source in sources:
      for require in source.requires:
        if require not in self._provides_map:
          raise NamespaceNotFoundError(require, source)

  def GetDependencies(self, required_namespaces):
    """Get source dependencies, in order, for the given namespaces.

    Args:
      required_namespaces: A string (for one) or list (for one or more) of
        namespaces.

    Returns:
      A list of source objects that provide those namespaces and all
      requirements, in dependency order.

    Raises:
      NamespaceNotFoundError: A namespace is requested but doesn't exist.
      CircularDependencyError: A cycle is detected in the dependency tree.
    """
    if type(required_namespaces) is str:
      required_namespaces = [required_namespaces]

    deps_sources = []
    for namespace in required_namespaces:
      for source in DepsTree._ResolveDependencies(
          namespace, [], self._provides_map, []):
        if source not in deps_sources:
          deps_sources.append(source)

    return deps_sources

  @staticmethod
  def _ResolveDependencies(required_namespace, deps_list, provides_map,
                           traversal_path):
    """Resolve dependencies for Closure source files.

    Follows the dependency tree down and builds a list of sources in
    dependency order.  This function will recursively call itself to fill
    all dependencies below the requested namespaces, and then append its
    sources at the end of the list.

    Args:
      required_namespace: String of required namespace.
      deps_list: List of sources in dependency order.  This function will
        append the required source once all of its dependencies are
        satisfied.
      provides_map: Map from namespace to source that provides it.
      traversal_path: List of namespaces of our path from the root down the
        dependency/recursion tree.  Used to identify cyclical dependencies.
        This is a list used as a stack -- when the function is entered, the
        current namespace is pushed and popped right before returning.
        Each recursive call will check that the current namespace does not
        appear in the list, throwing a CircularDependencyError if it does.

    Returns:
      The given deps_list object filled with sources in dependency order.

    Raises:
      NamespaceNotFoundError: A namespace is requested but doesn't exist.
      CircularDependencyError: A cycle is detected in the dependency tree.
    """

    source = provides_map.get(required_namespace)
    if not source:
      raise NamespaceNotFoundError(required_namespace)

    if required_namespace in traversal_path:
      traversal_path.append(required_namespace)  # do this *after* the test

      # This must be a cycle.
      raise CircularDependencyError(traversal_path)

    traversal_path.append(required_namespace)

    for require in source.requires:
      # Append all other dependencies before we append our own.
      DepsTree._ResolveDependencies(require, deps_list, provides_map,
                                    traversal_path)
    deps_list.append(source)

    traversal_path.pop()

    return deps_list


class BaseDepsTreeError(Exception):
  """Base DepsTree error."""

  def __init__(self):
    Exception.__init__(self)


class CircularDependencyError(BaseDepsTreeError):
  """Raised when a dependency cycle is encountered."""

  def __init__(self, dependency_list):
    BaseDepsTreeError.__init__(self)
    self._dependency_list = dependency_list

  def __str__(self):
    return ('Encountered circular dependency:\n%s\n' %
            '\n'.join(self._dependency_list))


class MultipleProvideError(BaseDepsTreeError):
  """Raised when a namespace is provided more than once."""

  def __init__(self, namespace, sources):
    BaseDepsTreeError.__init__(self)
    self._namespace = namespace
    self._sources = sources

  def __str__(self):
    source_strs = map(str, self._sources)
    return ('Namespace "%s" provided more than once in sources:\n%s\n' %
            (self._namespace, '\n'.join(source_strs)))


class NamespaceNotFoundError(BaseDepsTreeError):
  """Raised when a namespace is requested but not provided."""

  def __init__(self, namespace, source=None):
    BaseDepsTreeError.__init__(self)
    self._namespace = namespace
    self._source = source

  def __str__(self):
    msg = 'Namespace "%s" never provided.' % self._namespace
    if self._source:
      msg += ' Required in %s' % self._source
    return msg
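A self-contained sketch of DepsTree in action; the FakeSource stand-in below only needs the provides/requires attributes the tree actually reads:

    class FakeSource(object):
        # minimal stand-in for a Closure source file
        def __init__(self, name, provides, requires):
            self.name = name
            self.provides = provides
            self.requires = requires

        def __repr__(self):
            return self.name

    base = FakeSource('base.js', ['goog.base'], [])
    app = FakeSource('app.js', ['my.app'], ['goog.base'])
    tree = DepsTree([base, app])
    print(tree.GetDependencies('my.app'))  # [base.js, app.js], dependency order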
apache-2.0
Mappy/mapnik
scons/scons-local-2.3.0/SCons/Tool/sgilink.py
11
2192
"""SCons.Tool.sgilink Tool-specific initialization for the SGI MIPSPro linker on SGI. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Tool.Tool() selection method. """ # # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Tool/sgilink.py 2013/03/03 09:48:35 garyo" import SCons.Util import link linkers = ['CC', 'cc'] def generate(env): """Add Builders and construction variables for MIPSPro to an Environment.""" link.generate(env) env['LINK'] = env.Detect(linkers) or 'cc' env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared') # __RPATH is set to $_RPATH in the platform specification if that # platform supports it. env['RPATHPREFIX'] = '-rpath ' env['RPATHSUFFIX'] = '' env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}' def exists(env): return env.Detect(linkers) # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
lgpl-2.1
j5shi/Thruster
pylibs/test/test_aifc.py
1
14103
from test.test_support import (findfile, TESTFN, unlink, captured_stdout, run_unittest) import unittest from test import audiotests import os import io import sys import struct import aifc class AifcTest(audiotests.AudioWriteTests, audiotests.AudioTestsWithSourceFile): module = aifc close_fd = True test_unseekable_read = None class AifcPCM8Test(AifcTest, unittest.TestCase): sndfilename = 'pluck-pcm8.aiff' sndfilenframes = 3307 nchannels = 2 sampwidth = 1 framerate = 11025 nframes = 48 comptype = 'NONE' compname = 'not compressed' frames = audiotests.fromhex("""\ 02FF 4B00 3104 8008 CB06 4803 BF01 03FE B8FA B4F3 29EB 1AE6 \ EDE4 C6E2 0EE0 EFE0 57E2 FBE8 13EF D8F7 97FB F5FC 08FB DFFB \ 11FA 3EFB BCFC 66FF CF04 4309 C10E 5112 EE17 8216 7F14 8012 \ 490E 520D EF0F CE0F E40C 630A 080A 2B0B 510E 8B11 B60E 440A \ """) class AifcPCM16Test(AifcTest, unittest.TestCase): sndfilename = 'pluck-pcm16.aiff' sndfilenframes = 3307 nchannels = 2 sampwidth = 2 framerate = 11025 nframes = 48 comptype = 'NONE' compname = 'not compressed' frames = audiotests.fromhex("""\ 022EFFEA 4B5D00F6 311804EA 80E10840 CBE106B1 48A903F5 BFE601B2 036CFE7B \ B858FA3E B4B1F34F 299AEBCA 1A5DE6DA EDFAE491 C628E275 0E09E0B5 EF2AE029 \ 5758E271 FB35E83F 1376EF86 D82BF727 9790FB76 F5FAFC0F 0867FB9C DF30FB43 \ 117EFA36 3EE5FB5B BC79FCB1 66D9FF5D CF150412 431D097C C1BA0EC8 512112A1 \ EEE21753 82071665 7FFF1443 8004128F 49A20EAF 52BB0DBA EFB40F60 CE3C0FBF \ E4B30CEC 63430A5C 08C80A20 2BBB0B08 514A0E43 8BCF1139 B6F60EEB 44120A5E \ """) class AifcPCM24Test(AifcTest, unittest.TestCase): sndfilename = 'pluck-pcm24.aiff' sndfilenframes = 3307 nchannels = 2 sampwidth = 3 framerate = 11025 nframes = 48 comptype = 'NONE' compname = 'not compressed' frames = audiotests.fromhex("""\ 022D65FFEB9D 4B5A0F00FA54 3113C304EE2B 80DCD6084303 \ CBDEC006B261 48A99803F2F8 BFE82401B07D 036BFBFE7B5D \ B85756FA3EC9 B4B055F3502B 299830EBCB62 1A5CA7E6D99A \ EDFA3EE491BD C625EBE27884 0E05A9E0B6CF EF2929E02922 \ 5758D8E27067 FB3557E83E16 1377BFEF8402 D82C5BF7272A \ 978F16FB7745 F5F865FC1013 086635FB9C4E DF30FCFB40EE \ 117FE0FA3438 3EE6B8FB5AC3 BC77A3FCB2F4 66D6DAFF5F32 \ CF13B9041275 431D69097A8C C1BB600EC74E 5120B912A2BA \ EEDF641754C0 8207001664B7 7FFFFF14453F 8000001294E6 \ 499C1B0EB3B2 52B73E0DBCA0 EFB2B20F5FD8 CE3CDB0FBE12 \ E4B49C0CEA2D 6344A80A5A7C 08C8FE0A1FFE 2BB9860B0A0E \ 51486F0E44E1 8BCC64113B05 B6F4EC0EEB36 4413170A5B48 \ """) class AifcPCM32Test(AifcTest, unittest.TestCase): sndfilename = 'pluck-pcm32.aiff' sndfilenframes = 3307 nchannels = 2 sampwidth = 4 framerate = 11025 nframes = 48 comptype = 'NONE' compname = 'not compressed' frames = audiotests.fromhex("""\ 022D65BCFFEB9D92 4B5A0F8000FA549C 3113C34004EE2BC0 80DCD680084303E0 \ CBDEC0C006B26140 48A9980003F2F8FC BFE8248001B07D92 036BFB60FE7B5D34 \ B8575600FA3EC920 B4B05500F3502BC0 29983000EBCB6240 1A5CA7A0E6D99A60 \ EDFA3E80E491BD40 C625EB80E27884A0 0E05A9A0E0B6CFE0 EF292940E0292280 \ 5758D800E2706700 FB3557D8E83E1640 1377BF00EF840280 D82C5B80F7272A80 \ 978F1600FB774560 F5F86510FC101364 086635A0FB9C4E20 DF30FC40FB40EE28 \ 117FE0A0FA3438B0 3EE6B840FB5AC3F0 BC77A380FCB2F454 66D6DA80FF5F32B4 \ CF13B980041275B0 431D6980097A8C00 C1BB60000EC74E00 5120B98012A2BAA0 \ EEDF64C01754C060 820700001664B780 7FFFFFFF14453F40 800000001294E6E0 \ 499C1B000EB3B270 52B73E000DBCA020 EFB2B2E00F5FD880 CE3CDB400FBE1270 \ E4B49CC00CEA2D90 6344A8800A5A7CA0 08C8FE800A1FFEE0 2BB986C00B0A0E00 \ 51486F800E44E190 8BCC6480113B0580 B6F4EC000EEB3630 441317800A5B48A0 \ """) class AifcULAWTest(AifcTest, unittest.TestCase): 
sndfilename = 'pluck-ulaw.aifc' sndfilenframes = 3307 nchannels = 2 sampwidth = 2 framerate = 11025 nframes = 48 comptype = 'ulaw' compname = '' frames = audiotests.fromhex("""\ 022CFFE8 497C0104 307C04DC 8284083C CB84069C 497C03DC BE8401AC 036CFE74 \ B684FA24 B684F344 2A7CEC04 19FCE704 EE04E504 C584E204 0E3CE104 EF04DF84 \ 557CE204 FB24E804 12FCEF04 D784F744 9684FB64 F5C4FC24 083CFBA4 DF84FB24 \ 11FCFA24 3E7CFB64 BA84FCB4 657CFF5C CF84041C 417C093C C1840EBC 517C12FC \ EF0416FC 828415FC 7D7C13FC 828412FC 497C0EBC 517C0DBC F0040F3C CD840FFC \ E5040CBC 617C0A3C 08BC0A3C 2C7C0B3C 517C0E3C 8A8410FC B6840EBC 457C0A3C \ """) if sys.byteorder != 'big': frames = audiotests.byteswap2(frames) class AifcMiscTest(audiotests.AudioTests, unittest.TestCase): def test_skipunknown(self): #Issue 2245 #This file contains chunk types aifc doesn't recognize. self.f = aifc.open(findfile('Sine-1000Hz-300ms.aif')) def test_write_markers_values(self): fout = aifc.open(io.BytesIO(), 'wb') self.assertEqual(fout.getmarkers(), None) fout.setmark(1, 0, 'foo1') fout.setmark(1, 1, 'foo2') self.assertEqual(fout.getmark(1), (1, 1, 'foo2')) self.assertEqual(fout.getmarkers(), [(1, 1, 'foo2')]) fout.initfp(None) def test_read_markers(self): fout = self.fout = aifc.open(TESTFN, 'wb') fout.aiff() fout.setparams((1, 1, 1, 1, 'NONE', '')) fout.setmark(1, 0, 'odd') fout.setmark(2, 0, 'even') fout.writeframes('\x00') fout.close() f = self.f = aifc.open(TESTFN, 'rb') self.assertEqual(f.getmarkers(), [(1, 0, 'odd'), (2, 0, 'even')]) self.assertEqual(f.getmark(1), (1, 0, 'odd')) self.assertEqual(f.getmark(2), (2, 0, 'even')) self.assertRaises(aifc.Error, f.getmark, 3) class AIFCLowLevelTest(unittest.TestCase): def test_read_written(self): def read_written(self, what): f = io.BytesIO() getattr(aifc, '_write_' + what)(f, x) f.seek(0) return getattr(aifc, '_read_' + what)(f) for x in (-1, 0, 0.1, 1): self.assertEqual(read_written(x, 'float'), x) for x in (float('NaN'), float('Inf')): self.assertEqual(read_written(x, 'float'), aifc._HUGE_VAL) for x in ('', 'foo', 'a' * 255): self.assertEqual(read_written(x, 'string'), x) for x in (-0x7FFFFFFF, -1, 0, 1, 0x7FFFFFFF): self.assertEqual(read_written(x, 'long'), x) for x in (0, 1, 0xFFFFFFFF): self.assertEqual(read_written(x, 'ulong'), x) for x in (-0x7FFF, -1, 0, 1, 0x7FFF): self.assertEqual(read_written(x, 'short'), x) for x in (0, 1, 0xFFFF): self.assertEqual(read_written(x, 'ushort'), x) def test_read_raises(self): f = io.BytesIO('\x00') self.assertRaises(EOFError, aifc._read_ulong, f) self.assertRaises(EOFError, aifc._read_long, f) self.assertRaises(EOFError, aifc._read_ushort, f) self.assertRaises(EOFError, aifc._read_short, f) def test_write_long_string_raises(self): f = io.BytesIO() with self.assertRaises(ValueError): aifc._write_string(f, 'too long' * 255) def test_wrong_open_mode(self): with self.assertRaises(aifc.Error): aifc.open(TESTFN, 'wrong_mode') def test_read_wrong_form(self): b1 = io.BytesIO('WRNG' + struct.pack('>L', 0)) b2 = io.BytesIO('FORM' + struct.pack('>L', 4) + 'WRNG') self.assertRaises(aifc.Error, aifc.open, b1) self.assertRaises(aifc.Error, aifc.open, b2) def test_read_no_comm_chunk(self): b = io.BytesIO('FORM' + struct.pack('>L', 4) + 'AIFF') self.assertRaises(aifc.Error, aifc.open, b) def test_read_wrong_compression_type(self): b = 'FORM' + struct.pack('>L', 4) + 'AIFC' b += 'COMM' + struct.pack('>LhlhhLL', 23, 0, 0, 0, 0, 0, 0) b += 'WRNG' + struct.pack('B', 0) self.assertRaises(aifc.Error, aifc.open, io.BytesIO(b)) def test_read_wrong_marks(self): b = 'FORM' + 
struct.pack('>L', 4) + 'AIFF' b += 'COMM' + struct.pack('>LhlhhLL', 18, 0, 0, 0, 0, 0, 0) b += 'SSND' + struct.pack('>L', 8) + '\x00' * 8 b += 'MARK' + struct.pack('>LhB', 3, 1, 1) with captured_stdout() as s: f = aifc.open(io.BytesIO(b)) self.assertEqual(s.getvalue(), 'Warning: MARK chunk contains ' 'only 0 markers instead of 1\n') self.assertEqual(f.getmarkers(), None) def test_read_comm_kludge_compname_even(self): b = 'FORM' + struct.pack('>L', 4) + 'AIFC' b += 'COMM' + struct.pack('>LhlhhLL', 18, 0, 0, 0, 0, 0, 0) b += 'NONE' + struct.pack('B', 4) + 'even' + '\x00' b += 'SSND' + struct.pack('>L', 8) + '\x00' * 8 with captured_stdout() as s: f = aifc.open(io.BytesIO(b)) self.assertEqual(s.getvalue(), 'Warning: bad COMM chunk size\n') self.assertEqual(f.getcompname(), 'even') def test_read_comm_kludge_compname_odd(self): b = 'FORM' + struct.pack('>L', 4) + 'AIFC' b += 'COMM' + struct.pack('>LhlhhLL', 18, 0, 0, 0, 0, 0, 0) b += 'NONE' + struct.pack('B', 3) + 'odd' b += 'SSND' + struct.pack('>L', 8) + '\x00' * 8 with captured_stdout() as s: f = aifc.open(io.BytesIO(b)) self.assertEqual(s.getvalue(), 'Warning: bad COMM chunk size\n') self.assertEqual(f.getcompname(), 'odd') def test_write_params_raises(self): fout = aifc.open(io.BytesIO(), 'wb') wrong_params = (0, 0, 0, 0, 'WRNG', '') self.assertRaises(aifc.Error, fout.setparams, wrong_params) self.assertRaises(aifc.Error, fout.getparams) self.assertRaises(aifc.Error, fout.setnchannels, 0) self.assertRaises(aifc.Error, fout.getnchannels) self.assertRaises(aifc.Error, fout.setsampwidth, 0) self.assertRaises(aifc.Error, fout.getsampwidth) self.assertRaises(aifc.Error, fout.setframerate, 0) self.assertRaises(aifc.Error, fout.getframerate) self.assertRaises(aifc.Error, fout.setcomptype, 'WRNG', '') fout.aiff() fout.setnchannels(1) fout.setsampwidth(1) fout.setframerate(1) fout.setnframes(1) fout.writeframes('\x00') self.assertRaises(aifc.Error, fout.setparams, (1, 1, 1, 1, 1, 1)) self.assertRaises(aifc.Error, fout.setnchannels, 1) self.assertRaises(aifc.Error, fout.setsampwidth, 1) self.assertRaises(aifc.Error, fout.setframerate, 1) self.assertRaises(aifc.Error, fout.setnframes, 1) self.assertRaises(aifc.Error, fout.setcomptype, 'NONE', '') self.assertRaises(aifc.Error, fout.aiff) self.assertRaises(aifc.Error, fout.aifc) def test_write_params_singles(self): fout = aifc.open(io.BytesIO(), 'wb') fout.aifc() fout.setnchannels(1) fout.setsampwidth(2) fout.setframerate(3) fout.setnframes(4) fout.setcomptype('NONE', 'name') self.assertEqual(fout.getnchannels(), 1) self.assertEqual(fout.getsampwidth(), 2) self.assertEqual(fout.getframerate(), 3) self.assertEqual(fout.getnframes(), 0) self.assertEqual(fout.tell(), 0) self.assertEqual(fout.getcomptype(), 'NONE') self.assertEqual(fout.getcompname(), 'name') fout.writeframes('\x00' * 4 * fout.getsampwidth() * fout.getnchannels()) self.assertEqual(fout.getnframes(), 4) self.assertEqual(fout.tell(), 4) def test_write_params_bunch(self): fout = aifc.open(io.BytesIO(), 'wb') fout.aifc() p = (1, 2, 3, 4, 'NONE', 'name') fout.setparams(p) self.assertEqual(fout.getparams(), p) fout.initfp(None) def test_write_header_raises(self): fout = aifc.open(io.BytesIO(), 'wb') self.assertRaises(aifc.Error, fout.close) fout = aifc.open(io.BytesIO(), 'wb') fout.setnchannels(1) self.assertRaises(aifc.Error, fout.close) fout = aifc.open(io.BytesIO(), 'wb') fout.setnchannels(1) fout.setsampwidth(1) self.assertRaises(aifc.Error, fout.close) def test_write_header_comptype_raises(self): for comptype in ('ULAW', 'ulaw', 'ALAW', 
'alaw', 'G722'): fout = aifc.open(io.BytesIO(), 'wb') fout.setsampwidth(1) fout.setcomptype(comptype, '') self.assertRaises(aifc.Error, fout.close) fout.initfp(None) def test_write_markers_raises(self): fout = aifc.open(io.BytesIO(), 'wb') self.assertRaises(aifc.Error, fout.setmark, 0, 0, '') self.assertRaises(aifc.Error, fout.setmark, 1, -1, '') self.assertRaises(aifc.Error, fout.setmark, 1, 0, None) self.assertRaises(aifc.Error, fout.getmark, 1) fout.initfp(None) def test_write_aiff_by_extension(self): sampwidth = 2 fout = self.fout = aifc.open(TESTFN + '.aiff', 'wb') fout.setparams((1, sampwidth, 1, 1, 'ULAW', '')) frames = '\x00' * fout.getnchannels() * sampwidth fout.writeframes(frames) fout.close() f = self.f = aifc.open(TESTFN + '.aiff', 'rb') self.assertEqual(f.getcomptype(), 'NONE') f.close() def test_main(): run_unittest(AifcPCM8Test, AifcPCM16Test, AifcPCM16Test, AifcPCM24Test, AifcPCM32Test, AifcULAWTest, AifcMiscTest, AIFCLowLevelTest) if __name__ == "__main__": test_main()
gpl-2.0
pzajda/eloquence
scons-local-2.5.0/SCons/Tool/icc.py
3
2170
"""engine.SCons.Tool.icc Tool-specific initialization for the OS/2 icc compiler. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Tool.Tool() selection method. """ # # Copyright (c) 2001 - 2016 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Tool/icc.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog" import cc def generate(env): """Add Builders and construction variables for the OS/2 to an Environment.""" cc.generate(env) env['CC'] = 'icc' env['CCCOM'] = '$CC $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo$TARGET' env['CXXCOM'] = '$CXX $CXXFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo$TARGET' env['CPPDEFPREFIX'] = '/D' env['CPPDEFSUFFIX'] = '' env['INCPREFIX'] = '/I' env['INCSUFFIX'] = '' env['CFILESUFFIX'] = '.c' env['CXXFILESUFFIX'] = '.cc' def exists(env): return env.Detect('icc') # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
mit
stutivarshney/Bal-Aveksha
WebServer/BalAvekshaEnv/lib/python3.5/site-packages/django/core/management/commands/sqlmigrate.py
115
2847
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS, connections
from django.db.migrations.executor import MigrationExecutor
from django.db.migrations.loader import AmbiguityError


class Command(BaseCommand):
    help = "Prints the SQL statements for the named migration."

    output_transaction = True

    def add_arguments(self, parser):
        parser.add_argument('app_label', help='App label of the application containing the migration.')
        parser.add_argument('migration_name', help='Migration name to print the SQL for.')
        parser.add_argument(
            '--database', default=DEFAULT_DB_ALIAS,
            help='Nominates a database to create SQL for. Defaults to the "default" database.',
        )
        parser.add_argument(
            '--backwards', action='store_true', dest='backwards', default=False,
            help='Creates SQL to unapply the migration, rather than to apply it',
        )

    def execute(self, *args, **options):
        # sqlmigrate doesn't support coloring its output but we need to force
        # no_color=True so that the BEGIN/COMMIT statements added by
        # output_transaction don't get colored either.
        options['no_color'] = True
        return super(Command, self).execute(*args, **options)

    def handle(self, *args, **options):
        # Get the database we're operating from
        connection = connections[options['database']]

        # Load up an executor to get all the migration data
        executor = MigrationExecutor(connection)

        # Resolve command-line arguments into a migration
        app_label, migration_name = options['app_label'], options['migration_name']
        if app_label not in executor.loader.migrated_apps:
            raise CommandError("App '%s' does not have migrations" % app_label)
        try:
            migration = executor.loader.get_migration_by_prefix(app_label, migration_name)
        except AmbiguityError:
            raise CommandError("More than one migration matches '%s' in app '%s'. Please be more specific." % (
                migration_name, app_label))
        except KeyError:
            raise CommandError("Cannot find a migration matching '%s' from app '%s'. Is it in INSTALLED_APPS?" % (
                migration_name, app_label))
        targets = [(app_label, migration.name)]

        # Show begin/end around output only for atomic migrations
        self.output_transaction = migration.atomic

        # Make a plan that represents just the requested migrations and show SQL
        # for it
        plan = [(executor.loader.graph.nodes[targets[0]], options['backwards'])]
        sql_statements = executor.collect_sql(plan)
        return '\n'.join(sql_statements)
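Typical invocations of this command from a project root (the app label, migration name, and database alias are placeholders):

    python manage.py sqlmigrate myapp 0001
    python manage.py sqlmigrate myapp 0001 --backwards --database=replica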
gpl-3.0
Eric-Zhong/odoo
addons/procurement_jit/__init__.py
374
1078
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

import procurement_jit

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
kenshay/ImageScripter
ProgramData/SystemFiles/Python/Lib/site-packages/tests/test__pkce.py
15
1954
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import mock

from oauth2client import _pkce


class PKCETests(unittest.TestCase):

    @mock.patch('oauth2client._pkce.os.urandom')
    def test_verifier(self, fake_urandom):
        canned_randomness = (
            b'\x98\x10D7\xf3\xb7\xaa\xfc\xdd\xd3M\xe2'
            b'\xa3,\x06\xa0\xb0\xa9\xb4\x8f\xcb\xd0'
            b'\xf5\x86N2p\x8c]!W\x9a\xed54\x99\x9d'
            b'\x8dv\\\xa7/\x81\xf3J\x98\xc3\x90\xee'
            b'\xb0\x8c\xb7Zc#\x05M0O\x08\xda\t\x1f\x07'
        )
        fake_urandom.return_value = canned_randomness
        expected = (
            b'mBBEN_O3qvzd003ioywGoLCptI_L0PWGTjJwjF0hV5rt'
            b'NTSZnY12XKcvgfNKmMOQ7rCMt1pjIwVNME8I2gkfBw'
        )
        result = _pkce.code_verifier()
        self.assertEqual(result, expected)

    def test_verifier_too_long(self):
        with self.assertRaises(ValueError) as caught:
            _pkce.code_verifier(97)
        self.assertIn("too long", str(caught.exception))

    def test_verifier_too_short(self):
        with self.assertRaises(ValueError) as caught:
            _pkce.code_verifier(30)
        self.assertIn("too short", str(caught.exception))

    def test_challenge(self):
        result = _pkce.code_challenge(b'SOME_VERIFIER')
        expected = b'6xJCQsjTtS3zjUwd8_ZqH0SyviGHnp5PsHXWKOCqDuI'
        self.assertEqual(result, expected)
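The module under test pairs a random verifier with its S256 challenge; a minimal RFC 7636 flow sketch using the two functions exercised above:

    from oauth2client import _pkce

    verifier = _pkce.code_verifier()            # random, URL-safe, unpadded
    challenge = _pkce.code_challenge(verifier)  # BASE64URL(SHA256(verifier))
    # send `challenge` with the authorization request,
    # then `verifier` with the token request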
gpl-3.0
zsiki/ulyxes
camera/gen_template.py
1
3861
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
"""
    generate template for template matching from the first frame with aruco

    video file name can contain date and time like: pi1_YYYYmmdd_HHMMSS.h264
    output sent to standard output in the form:
        line,date/time,column,row,statistic

    use video_aruco.py --help for command line options
"""
import sys
import datetime
import re
import os.path
import argparse

from aruco_base import ArucoBase

sys.path.append('../pyapi/')

from imagereader import ImageReader
from imagewriter import ImageWriter

class VideoTemplateGen(ArucoBase):
    """ class to scan ArUco code in video image

        :param args: command line arguments from argparse
    """

    def __init__(self, args):
        """ initialize """
        super(VideoTemplateGen, self).__init__(args)
        fn = args.name[0]
        self.rdr = ImageReader(fn, fps=args.fps)
        self.wrt = ImageWriter(os.path.basename(fn), os.path.dirname(fn))
        self.tformat = '%Y-%m-%d %H:%M:%S.%f'
        if re.search('[0-9]_[0-9]{8}_[0-9]{6}', fn):
            l = fn.split('_')
            self.rdr.act = datetime.datetime(int(l[-2][0:4]), int(l[-2][4:6]),
                                             int(l[-2][6:8]), int(l[-1][0:2]),
                                             int(l[-1][2:4]), int(l[-1][4:6]))
        self.calibration = None

    def process(self):
        """ process video frame by frame until a code found

            :returns: exit status 0 -OK
        """
        # process video
        name = "NotFound"
        while True:
            frame, _ = self.rdr.GetNext()   # get next frame
            if frame is None:
                break
            res = self.ProcessImg(frame, self.rdr.ind)
            if res:     # aruco found
                east = res["east"]
                north = res["north"]
                width2 = int(res['width'] * 1.1 / 2)
                height2 = int(res['height'] * 1.1 / 2)
                data = frame[east-width2:east+width2,
                             north-height2:north+height2]
                name = self.wrt.WriteData(data)
                break
        return name

if __name__ == "__main__":
    # set up command line parameters
    parser = argparse.ArgumentParser()
    parser.add_argument('name', metavar='file_name', type=str, nargs=1,
                        help='video file to process or camera ID (e.g. 0)')
    parser.add_argument('-d', '--dict', type=int, default=1,
                        help='marker dictionary id, default=1 (DICT_4X4_100)')
    parser.add_argument('-m', '--calibration', type=str, default=None,
                        help='dummy arg for compatibility')
    parser.add_argument('--debug', type=int, default=0,
                        help='dummy arg for compatibility')
    parser.add_argument('--clip', type=float, default=3.0,
                        help='dummy arg for compatibility')
    parser.add_argument('--tile', type=int, default=8,
                        help='dummy arg for compatibility')
    parser.add_argument('--fast', action="store_true",
                        help='dummy arg for compatibility')
    parser.add_argument('--hist', action="store_true",
                        help='dummy arg for compatibility')
    parser.add_argument('--lchanel', action="store_true",
                        help='dummy arg for compatibility')
    parser.add_argument('-s', '--size', type=float, default=0.28,
                        help='dummy arg for compatibility')
    parser.add_argument('-f', '--fps', type=int, default=None,
                        help='dummy arg for compatibility')
    parser.add_argument('-c', '--code', type=int,
                        help='marker id to search, if not given first found marker is used')
    args = parser.parse_args()      # process parameters
    V_A = VideoTemplateGen(args)
    print(V_A.process())
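Example invocation (the file name is hypothetical; it follows the pi1_YYYYmmdd_HHMMSS.h264 pattern the script parses for a start timestamp):

    ./gen_template.py -c 5 pi1_20200101_120000.h264

The written template image name is printed, or "NotFound" if no marker was detected.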
gpl-2.0
onceuponatimeforever/oh-mainline
vendor/packages/django-celery/djcelery/backends/cache.py
30
2240
"""celery.backends.cache""" from __future__ import absolute_import from datetime import timedelta import django from django.utils.encoding import smart_str from django.core.cache import cache, get_cache from celery import current_app from celery.utils.timeutils import timedelta_seconds from celery.backends.base import KeyValueStoreBackend # CELERY_CACHE_BACKEND overrides the django-global(tm) backend settings. if current_app.conf.CELERY_CACHE_BACKEND: cache = get_cache(current_app.conf.CELERY_CACHE_BACKEND) # noqa class DjangoMemcacheWrapper(object): """Wrapper class to django's memcache backend class, that overrides the :meth:`get` method in order to remove the forcing of unicode strings since it may cause binary or pickled data to break.""" def __init__(self, cache): self.cache = cache def get(self, key, default=None): val = self.cache._cache.get(smart_str(key)) if val is None: return default else: return val def set(self, key, value, timeout=0): self.cache.set(key, value, timeout) # Check if django is using memcache as the cache backend. If so, wrap the # cache object in a DjangoMemcacheWrapper for Django < 1.2 that fixes a bug # with retrieving pickled data. from django.core.cache.backends.base import InvalidCacheBackendError try: from django.core.cache.backends.memcached import CacheClass except InvalidCacheBackendError: pass else: if django.VERSION[0:2] < (1, 2) and isinstance(cache, CacheClass): cache = DjangoMemcacheWrapper(cache) class CacheBackend(KeyValueStoreBackend): """Backend using the Django cache framework to store task metadata.""" def __init__(self, *args, **kwargs): super(CacheBackend, self).__init__(self, *args, **kwargs) expires = kwargs.get("expires", current_app.conf.CELERY_TASK_RESULT_EXPIRES) if isinstance(expires, timedelta): expires = int(timedelta_seconds(expires)) self.expires = expires def get(self, key): return cache.get(key) def set(self, key, value): cache.set(key, value, self.expires) def delete(self, key): cache.delete(key)
agpl-3.0
ELSUru/ADL_LRS
lrs/util/jws.py
4
6963
import base64 import json from binascii import a2b_base64 from Crypto.Hash import SHA256, SHA384, SHA512 from Crypto.PublicKey import RSA from Crypto.Signature import PKCS1_v1_5, PKCS1_PSS from Crypto.Util.asn1 import DerSequence # https://www.dlitz.net/software/pycrypto/api/current/ fixb64padding = lambda s: s if len(s) % 4 == 0 else s + '=' * (4 - (len(s) % 4)) rmb64padding = lambda s: s.rstrip('=') algs = {"RS256": SHA256, "RS384": SHA384, "RS512": SHA512} class JWS(object): """ Class used to represent a JSON Web Signature (JWS). see: http://tools.ietf.org/html/draft-ietf-jose-json-web-signature-08 Only covers the requirements outlined in the Experience API spec. see: https://github.com/adlnet/xAPI-Spec/blob/master/xAPI.md#signature """ def __init__(self, header=None, payload=None, jws=None): """ Init for a JWS object. If you want to create a JWS, pass in the header and payload and call :func:`JWS.create`. If you want to parse and verify a JWS, pass in the JWS and call :func:`JWS.verify`. :param header: JWS Header - Optional :param payload: JWS Payload - Optional :param jws: JSON Web Signature - Optional """ self.header = header if self.header: self._parseheader() self.payload = payload self.jws = jws if self.jws: self._parsejws() def verify(self): """ Verifies the JWS Signature can be verified by the public key. """ # free pass for those who don't use x5c if not self.should_verify: return True try: pubkey = self._cert_to_key(self.headerobj['x5c'][0]) except: raise JWSException("Error importing public key") verifier = PKCS1_v1_5.new(pubkey) res = verifier.verify(self._hash(), self.jwssignature) if not res: verifier = PKCS1_PSS.new(pubkey) res = verifier.verify(self._hash(), self.jwssignature) return res def create(self, privatekey): """ Creates a JWS using the privatekey string to sign. :param privatekey: String format of the private key to use to sign the JWS Signature Input. """ if not self.jws: privkey = RSA.importKey(privatekey) # encode header and payload self.encheader = rmb64padding(base64.urlsafe_b64encode(self.header)) self.encpayload = rmb64padding(base64.urlsafe_b64encode(self.payload)) # hash & sign signer = PKCS1_v1_5.new(privkey) self.jwssignature = signer.sign(self._hash()) # encode signature self.encjwssignature = rmb64padding(base64.urlsafe_b64encode(self.jwssignature)) # join 3 self.jws = '.'.join([self.encheader,self.encpayload,self.encjwssignature]) return self.jws def sha2(self, jwsobj=None, alg=None): """ Hash (SHA256) the JWS according to xAPI attachment rules for the sha2 attribute. Returns the hexdigest value. If a parameter isn't provided, this will use the values provided when creating this jws object. :param jwsobj: The JWS (header.paylaod.signature) to be hashed (optional) :param alg: The hashing algorithm to use ['RS256'(default), 'RS384', 'RS512'] (optional) """ thealg = alg if alg else "RS256" thejws = jwsobj if jwsobj else self.jws return algs[thealg].new(thejws).hexdigest() def validate(self, stmt): """ Validate the incoming Statement against the Statement in the JWS payload. 
:param stmt: String format of the Statement object to be validated """ # free pass for those who don't use x5c if not self.should_verify: return True if type(stmt) != dict: stmtobj = json.loads(stmt) else: stmtobj = stmt atts = stmtobj.pop('attachments', None) if atts: atts = [a for a in atts if a.get('usageType',None) != "http://adlnet.gov/expapi/attachments/signature"] if atts: stmtobj['attachments'] = atts sortedstmt = json.dumps(stmtobj, sort_keys=True) sortedpayload = json.dumps(json.loads(self.payload), sort_keys=True) return sortedstmt == sortedpayload def _parseheader(self): if type(self.header) != dict: try: self.headerobj = json.loads(self.header) except: raise JWSException('JWS header was not valid JSON') else: try: self.headerobj = self.header self.header = json.dumps(self.header) except: raise JWSException('JWS header was not valid JSON') if 'alg' not in self.headerobj: raise JWSException('JWS header did not have an "alg" property') self.should_verify = 'x5c' in self.headerobj if self.should_verify: if type(self.headerobj['x5c']) != list: raise JWSException('x5c property was not an array of certificate value strings') def _parsejws(self): jwsparts = self.jws.split('.') if len(jwsparts) != 3: raise JWSException('The JWS was not formatted correctly - should be encodedheader.encodedpayload.encodedjwssignature') self.encheader = jwsparts[0] self.header = base64.urlsafe_b64decode(fixb64padding(self.encheader)) self._parseheader() self.encpayload = jwsparts[1] self.payload = base64.urlsafe_b64decode(fixb64padding(self.encpayload)) self.encjwssignature = jwsparts[2] self.jwssignature = base64.urlsafe_b64decode(fixb64padding(jwsparts[2])) def _hash(self): return algs[self.headerobj['alg']].new('.'.join([self.encheader,self.encpayload]).encode('ascii')) def _cert_to_key(self, cert): # Convert from PEM to DER if not cert.startswith('-----BEGIN CERTIFICATE-----') and not cert.endswith('-----END CERTIFICATE-----'): cert = "-----BEGIN CERTIFICATE-----\n%s\n-----END CERTIFICATE-----" % cert lines = cert.replace(" ",'').split() der = a2b_base64(''.join(lines[1:-1])) # Extract subjectPublicKeyInfo field from X.509 certificate (see RFC3280) cert = DerSequence() cert.decode(der) tbsCertificate = DerSequence() tbsCertificate.decode(cert[0]) subjectPublicKeyInfo = tbsCertificate[6] # Initialize RSA key return RSA.importKey(subjectPublicKeyInfo) class JWSException(Exception): """Generic exception class.""" def __init__(self, message='JWS error occured.'): self.message = message
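A signing/verification sketch for the JWS class above; the key and statement are placeholders, and note that verify() only checks the signature when the header carries an x5c certificate chain (without x5c, should_verify is False and verify() passes trivially):

    import json

    header = json.dumps({"alg": "RS256"})
    payload = json.dumps({"id": "...", "verb": {"id": "..."}})

    signer = JWS(header=header, payload=payload)
    compact = signer.create(private_key_pem)  # private_key_pem: RSA key string

    received = JWS(jws=compact)
    assert received.verify()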
apache-2.0
gcblue/gcblue
bin/Lib/test/test_signal.py
87
18905
import unittest
from test import test_support
from contextlib import closing
import gc
import pickle
import select
import signal
import subprocess
import traceback
import sys, os, time, errno

if sys.platform in ('os2', 'riscos'):
    raise unittest.SkipTest("Can't test signal on %s" % sys.platform)


class HandlerBCalled(Exception):
    pass


def exit_subprocess():
    """Use os._exit(0) to exit the current subprocess.

    Otherwise, the test catches the SystemExit and continues executing
    in parallel with the original test, so you wind up with an
    exponential number of tests running concurrently.
    """
    os._exit(0)


def ignoring_eintr(__func, *args, **kwargs):
    try:
        return __func(*args, **kwargs)
    except EnvironmentError as e:
        if e.errno != errno.EINTR:
            raise
        return None


@unittest.skipIf(sys.platform == "win32", "Not valid on Windows")
class InterProcessSignalTests(unittest.TestCase):
    MAX_DURATION = 20   # Entire test should last at most 20 sec.

    def setUp(self):
        self.using_gc = gc.isenabled()
        gc.disable()

    def tearDown(self):
        if self.using_gc:
            gc.enable()

    def format_frame(self, frame, limit=None):
        return ''.join(traceback.format_stack(frame, limit=limit))

    def handlerA(self, signum, frame):
        self.a_called = True
        if test_support.verbose:
            print "handlerA invoked from signal %s at:\n%s" % (
                signum, self.format_frame(frame, limit=1))

    def handlerB(self, signum, frame):
        self.b_called = True
        if test_support.verbose:
            print "handlerB invoked from signal %s at:\n%s" % (
                signum, self.format_frame(frame, limit=1))
        raise HandlerBCalled(signum, self.format_frame(frame))

    def wait(self, child):
        """Wait for child to finish, ignoring EINTR."""
        while True:
            try:
                child.wait()
                return
            except OSError as e:
                if e.errno != errno.EINTR:
                    raise

    def run_test(self):
        # Install handlers. This function runs in a sub-process, so we
        # don't worry about re-setting the default handlers.
        signal.signal(signal.SIGHUP, self.handlerA)
        signal.signal(signal.SIGUSR1, self.handlerB)
        signal.signal(signal.SIGUSR2, signal.SIG_IGN)
        signal.signal(signal.SIGALRM, signal.default_int_handler)

        # Variables the signals will modify:
        self.a_called = False
        self.b_called = False

        # Let the sub-processes know who to send signals to.
        pid = os.getpid()
        if test_support.verbose:
            print "test runner's pid is", pid

        child = ignoring_eintr(subprocess.Popen, ['kill', '-HUP', str(pid)])
        if child:
            self.wait(child)
            if not self.a_called:
                time.sleep(1)  # Give the signal time to be delivered.
        self.assertTrue(self.a_called)
        self.assertFalse(self.b_called)
        self.a_called = False

        # Make sure the signal isn't delivered while the previous
        # Popen object is being destroyed, because __del__ swallows
        # exceptions.
        del child
        try:
            child = subprocess.Popen(['kill', '-USR1', str(pid)])
            # This wait should be interrupted by the signal's exception.
            self.wait(child)
            time.sleep(1)  # Give the signal time to be delivered.
            self.fail('HandlerBCalled exception not raised')
        except HandlerBCalled:
            self.assertTrue(self.b_called)
            self.assertFalse(self.a_called)
            if test_support.verbose:
                print "HandlerBCalled exception caught"

        child = ignoring_eintr(subprocess.Popen, ['kill', '-USR2', str(pid)])
        if child:
            self.wait(child)  # Nothing should happen.

        try:
            signal.alarm(1)
            # The race condition in pause doesn't matter in this case,
            # since alarm is going to raise a KeyboardInterrupt, which
            # will skip the call.
            signal.pause()
            # But if another signal arrives before the alarm, pause
            # may return early.
            time.sleep(1)
        except KeyboardInterrupt:
            if test_support.verbose:
                print "KeyboardInterrupt (the alarm() went off)"
        except:
            self.fail("Some other exception woke us from pause: %s" %
                      traceback.format_exc())
        else:
            self.fail("pause returned of its own accord, and the signal"
                      " didn't arrive after another second.")

    # Issue 3864. Unknown if this affects earlier versions of freebsd also.
    @unittest.skipIf(sys.platform == 'freebsd6',
        'inter process signals not reliable (do not mix well with threading) '
        'on freebsd6')
    def test_main(self):
        # This function spawns a child process to insulate the main
        # test-running process from all the signals. It then
        # communicates with that child process over a pipe and
        # re-raises information about any exceptions the child
        # raises. The real work happens in self.run_test().
        os_done_r, os_done_w = os.pipe()
        with closing(os.fdopen(os_done_r)) as done_r, \
             closing(os.fdopen(os_done_w, 'w')) as done_w:
            child = os.fork()
            if child == 0:
                # In the child process; run the test and report results
                # through the pipe.
                try:
                    done_r.close()
                    # Have to close done_w again here because
                    # exit_subprocess() will skip the enclosing with block.
                    with closing(done_w):
                        try:
                            self.run_test()
                        except:
                            pickle.dump(traceback.format_exc(), done_w)
                        else:
                            pickle.dump(None, done_w)
                except:
                    print 'Uh oh, raised from pickle.'
                    traceback.print_exc()
                finally:
                    exit_subprocess()

            done_w.close()
            # Block for up to MAX_DURATION seconds for the test to finish.
            r, w, x = select.select([done_r], [], [], self.MAX_DURATION)
            if done_r in r:
                tb = pickle.load(done_r)
                if tb:
                    self.fail(tb)
            else:
                os.kill(child, signal.SIGKILL)
                self.fail('Test deadlocked after %d seconds.' %
                          self.MAX_DURATION)


@unittest.skipIf(sys.platform == "win32", "Not valid on Windows")
class BasicSignalTests(unittest.TestCase):
    def trivial_signal_handler(self, *args):
        pass

    def test_out_of_range_signal_number_raises_error(self):
        self.assertRaises(ValueError, signal.getsignal, 4242)

        self.assertRaises(ValueError, signal.signal, 4242,
                          self.trivial_signal_handler)

    def test_setting_signal_handler_to_none_raises_error(self):
        self.assertRaises(TypeError, signal.signal,
                          signal.SIGUSR1, None)

    def test_getsignal(self):
        hup = signal.signal(signal.SIGHUP, self.trivial_signal_handler)
        self.assertEqual(signal.getsignal(signal.SIGHUP),
                         self.trivial_signal_handler)
        signal.signal(signal.SIGHUP, hup)
        self.assertEqual(signal.getsignal(signal.SIGHUP), hup)


@unittest.skipUnless(sys.platform == "win32", "Windows specific")
class WindowsSignalTests(unittest.TestCase):
    def test_issue9324(self):
        # Updated for issue #10003, adding SIGBREAK
        handler = lambda x, y: None
        for sig in (signal.SIGABRT, signal.SIGBREAK, signal.SIGFPE,
                    signal.SIGILL, signal.SIGINT, signal.SIGSEGV,
                    signal.SIGTERM):
            # Set and then reset a handler for signals that work on windows
            signal.signal(sig, signal.signal(sig, handler))

        with self.assertRaises(ValueError):
            signal.signal(-1, handler)

        with self.assertRaises(ValueError):
            signal.signal(7, handler)


class WakeupFDTests(unittest.TestCase):

    def test_invalid_fd(self):
        fd = test_support.make_bad_fd()
        self.assertRaises(ValueError, signal.set_wakeup_fd, fd)


@unittest.skipIf(sys.platform == "win32", "Not valid on Windows")
class WakeupSignalTests(unittest.TestCase):
    TIMEOUT_FULL = 10
    TIMEOUT_HALF = 5

    def test_wakeup_fd_early(self):
        import select

        signal.alarm(1)
        before_time = time.time()
        # We attempt to get a signal during the sleep,
        # before select is called
        time.sleep(self.TIMEOUT_FULL)
        mid_time = time.time()
        self.assertTrue(mid_time - before_time < self.TIMEOUT_HALF)
        select.select([self.read], [], [], self.TIMEOUT_FULL)
        after_time = time.time()
        self.assertTrue(after_time - mid_time < self.TIMEOUT_HALF)

    def test_wakeup_fd_during(self):
        import select

        signal.alarm(1)
        before_time = time.time()
        # We attempt to get a signal during the select call
        self.assertRaises(select.error, select.select,
                          [self.read], [], [], self.TIMEOUT_FULL)
        after_time = time.time()
        self.assertTrue(after_time - before_time < self.TIMEOUT_HALF)

    def setUp(self):
        import fcntl

        self.alrm = signal.signal(signal.SIGALRM, lambda x, y: None)
        self.read, self.write = os.pipe()
        flags = fcntl.fcntl(self.write, fcntl.F_GETFL, 0)
        flags = flags | os.O_NONBLOCK
        fcntl.fcntl(self.write, fcntl.F_SETFL, flags)
        self.old_wakeup = signal.set_wakeup_fd(self.write)

    def tearDown(self):
        signal.set_wakeup_fd(self.old_wakeup)
        os.close(self.read)
        os.close(self.write)
        signal.signal(signal.SIGALRM, self.alrm)


@unittest.skipIf(sys.platform == "win32", "Not valid on Windows")
class SiginterruptTest(unittest.TestCase):

    def setUp(self):
        """Install a no-op signal handler that can be set to allow
        interrupts or not, and arrange for the original signal handler to be
        re-installed when the test is finished.
        """
        self.signum = signal.SIGUSR1
        oldhandler = signal.signal(self.signum, lambda x, y: None)
        self.addCleanup(signal.signal, self.signum, oldhandler)

    def readpipe_interrupted(self):
        """Perform a read during which a signal will arrive.  Return True if the
        read is interrupted by the signal and raises an exception.  Return False
        if it returns normally.
        """
        # Create a pipe that can be used for the read.  Also clean it up
        # when the test is over, since nothing else will (but see below for
        # the write end).
        r, w = os.pipe()
        self.addCleanup(os.close, r)

        # Create another process which can send a signal to this one to try
        # to interrupt the read.
        ppid = os.getpid()
        pid = os.fork()

        if pid == 0:
            # Child code: sleep to give the parent enough time to enter the
            # read() call (there's a race here, but it's really tricky to
            # eliminate it); then signal the parent process.  Also, sleep
            # again to make it likely that the signal is delivered to the
            # parent process before the child exits.  If the child exits
            # first, the write end of the pipe will be closed and the test
            # is invalid.
            try:
                time.sleep(0.2)
                os.kill(ppid, self.signum)
                time.sleep(0.2)
            finally:
                # No matter what, just exit as fast as possible now.
                exit_subprocess()
        else:
            # Parent code.
            # Make sure the child is eventually reaped, else it'll be a
            # zombie for the rest of the test suite run.
            self.addCleanup(os.waitpid, pid, 0)

            # Close the write end of the pipe.  The child has a copy, so
            # it's not really closed until the child exits.  We need it to
            # close when the child exits so that in the non-interrupt case
            # the read eventually completes, otherwise we could just close
            # it *after* the test.
            os.close(w)

            # Try the read and report whether it is interrupted or not to
            # the caller.
            try:
                d = os.read(r, 1)
                return False
            except OSError as err:
                if err.errno != errno.EINTR:
                    raise
                return True

    def test_without_siginterrupt(self):
        """If a signal handler is installed and siginterrupt is not called
        at all, when that signal arrives, it interrupts a syscall that's in
        progress.
        """
        i = self.readpipe_interrupted()
        self.assertTrue(i)
        # Arrival of the signal shouldn't have changed anything.
        i = self.readpipe_interrupted()
        self.assertTrue(i)

    def test_siginterrupt_on(self):
        """If a signal handler is installed and siginterrupt is called with
        a true value for the second argument, when that signal arrives, it
        interrupts a syscall that's in progress.
        """
        signal.siginterrupt(self.signum, 1)
        i = self.readpipe_interrupted()
        self.assertTrue(i)
        # Arrival of the signal shouldn't have changed anything.
        i = self.readpipe_interrupted()
        self.assertTrue(i)

    def test_siginterrupt_off(self):
        """If a signal handler is installed and siginterrupt is called with
        a false value for the second argument, when that signal arrives, it
        does not interrupt a syscall that's in progress.
        """
        signal.siginterrupt(self.signum, 0)
        i = self.readpipe_interrupted()
        self.assertFalse(i)
        # Arrival of the signal shouldn't have changed anything.
        i = self.readpipe_interrupted()
        self.assertFalse(i)


@unittest.skipIf(sys.platform == "win32", "Not valid on Windows")
class ItimerTest(unittest.TestCase):
    def setUp(self):
        self.hndl_called = False
        self.hndl_count = 0
        self.itimer = None
        self.old_alarm = signal.signal(signal.SIGALRM, self.sig_alrm)

    def tearDown(self):
        signal.signal(signal.SIGALRM, self.old_alarm)
        if self.itimer is not None:  # test_itimer_exc doesn't change this attr
            # just ensure that itimer is stopped
            signal.setitimer(self.itimer, 0)

    def sig_alrm(self, *args):
        self.hndl_called = True
        if test_support.verbose:
            print("SIGALRM handler invoked", args)

    def sig_vtalrm(self, *args):
        self.hndl_called = True

        if self.hndl_count > 3:
            # it shouldn't be here, because it should have been disabled.
            raise signal.ItimerError("setitimer didn't disable ITIMER_VIRTUAL "
                                     "timer.")
        elif self.hndl_count == 3:
            # disable ITIMER_VIRTUAL, this function shouldn't be called anymore
            signal.setitimer(signal.ITIMER_VIRTUAL, 0)
            if test_support.verbose:
                print("last SIGVTALRM handler call")

        self.hndl_count += 1

        if test_support.verbose:
            print("SIGVTALRM handler invoked", args)

    def sig_prof(self, *args):
        self.hndl_called = True
        signal.setitimer(signal.ITIMER_PROF, 0)

        if test_support.verbose:
            print("SIGPROF handler invoked", args)

    def test_itimer_exc(self):
        # XXX I'm assuming -1 is an invalid itimer, but maybe some platform
        # defines it ?
        self.assertRaises(signal.ItimerError, signal.setitimer, -1, 0)
        # Negative times are treated as zero on some platforms.
        if 0:
            self.assertRaises(signal.ItimerError,
                              signal.setitimer, signal.ITIMER_REAL, -1)

    def test_itimer_real(self):
        self.itimer = signal.ITIMER_REAL
        signal.setitimer(self.itimer, 1.0)
        if test_support.verbose:
            print("\ncall pause()...")
        signal.pause()

        self.assertEqual(self.hndl_called, True)

    # Issue 3864. Unknown if this affects earlier versions of freebsd also.
    @unittest.skipIf(sys.platform in ('freebsd6', 'netbsd5'),
        'itimer not reliable (does not mix well with threading) on some BSDs.')
    def test_itimer_virtual(self):
        self.itimer = signal.ITIMER_VIRTUAL
        signal.signal(signal.SIGVTALRM, self.sig_vtalrm)
        signal.setitimer(self.itimer, 0.3, 0.2)

        start_time = time.time()
        while time.time() - start_time < 60.0:
            # use up some virtual time by doing real work
            _ = pow(12345, 67890, 10000019)
            if signal.getitimer(self.itimer) == (0.0, 0.0):
                break  # sig_vtalrm handler stopped this itimer
        else:  # Issue 8424
            self.skipTest("timeout: likely cause: machine too slow or load too "
                          "high")

        # virtual itimer should be (0.0, 0.0) now
        self.assertEqual(signal.getitimer(self.itimer), (0.0, 0.0))
        # and the handler should have been called
        self.assertEqual(self.hndl_called, True)

    # Issue 3864. Unknown if this affects earlier versions of freebsd also.
    @unittest.skipIf(sys.platform == 'freebsd6',
        'itimer not reliable (does not mix well with threading) on freebsd6')
    def test_itimer_prof(self):
        self.itimer = signal.ITIMER_PROF
        signal.signal(signal.SIGPROF, self.sig_prof)
        signal.setitimer(self.itimer, 0.2, 0.2)

        start_time = time.time()
        while time.time() - start_time < 60.0:
            # do some work
            _ = pow(12345, 67890, 10000019)
            if signal.getitimer(self.itimer) == (0.0, 0.0):
                break  # sig_prof handler stopped this itimer
        else:  # Issue 8424
            self.skipTest("timeout: likely cause: machine too slow or load too "
                          "high")

        # profiling itimer should be (0.0, 0.0) now
        self.assertEqual(signal.getitimer(self.itimer), (0.0, 0.0))
        # and the handler should have been called
        self.assertEqual(self.hndl_called, True)


def test_main():
    test_support.run_unittest(BasicSignalTests, InterProcessSignalTests,
                              WakeupFDTests, WakeupSignalTests,
                              SiginterruptTest, ItimerTest,
                              WindowsSignalTests)


if __name__ == "__main__":
    test_main()
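
# A minimal sketch of the alarm/pause pattern these tests exercise
# (illustrative only; POSIX-only):
#
#     import signal
#     signal.signal(signal.SIGALRM, lambda signum, frame: None)
#     signal.alarm(1)       # deliver SIGALRM in ~1 second
#     signal.pause()        # block until a signal arrives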
bsd-3-clause
LucaVazz/DualisWatcher
notification_services/mail/mail_service.py
1
4662
import logging
import traceback
from getpass import getpass

from notification_services.mail.mail_formater import create_full_welcome_mail, create_full_dualis_diff_mail, \
    create_full_error_mail, create_full_schedule_diff_mail
from notification_services.mail.mail_shooter import MailShooter
from notification_services.notification_service import NotificationService
from version_recorder import CollectionOfChanges


class MailService(NotificationService):
    def interactively_configure(self) -> None:
        do_mail_input = input('Do you want to activate Notifications via mail [y/n]? ')
        while not (do_mail_input == 'y' or do_mail_input == 'n'):
            do_mail_input = input('Unrecognized input. Try again: ')

        if do_mail_input == 'n':
            self.config_helper.remove_property('mail')
        else:
            print('[The following Inputs are not validated!]')

            config_valid = False
            while not config_valid:
                sender = input('E-Mail-Address of the Sender: ')
                server_host = input('Host of the SMTP-Server: ')
                server_port = input('Port of the SMTP-Server: ')
                username = input('Username for the SMTP-Server: ')
                password = getpass('Password for the SMTP-Server [no output]: ')
                target = input('E-Mail-Address of the Target: ')

                print('Testing Mail-Config...')
                welcome_content = create_full_welcome_mail()
                mail_shooter = MailShooter(
                    sender, server_host, int(server_port), username, password
                )
                try:
                    mail_shooter.send(target, 'Hey!', welcome_content[0], welcome_content[1])
                except BaseException as e:
                    print('Error while sending the test mail: %s' % (str(e)))
                else:
                    input(
                        'Please check if you received the Welcome-Mail. If yes, confirm with Return.\n'
                        + 'If not, exit this program ([CTRL]+[C]) and try again later.'
                    )
                    config_valid = True

            raw_send_error_msg = ''
            while raw_send_error_msg not in ['y', 'n']:
                raw_send_error_msg = input('Do you want to also get error reports sent by mail? [y/n] ')
            do_send_error_msg = raw_send_error_msg == 'y'

            mail_cfg = {
                'sender': sender,
                'server_host': server_host,
                'server_port': server_port,
                'username': username,
                'password': password,
                'target': target,
                'send_error_msg': do_send_error_msg
            }
            self.config_helper.set_property('mail', mail_cfg)

    def _send_mail(self, subject, mail_content: (str, {str: str})):
        try:
            mail_cfg = self.config_helper.get_property('mail')
        except ValueError:
            logging.debug('Mail-Notifications not configured, skipping.')
            return  # nothing to send without a mail configuration

        try:
            logging.debug('Sending Notification via Mail...')
            mail_shooter = MailShooter(
                mail_cfg['sender'], mail_cfg['server_host'], int(mail_cfg['server_port']),
                mail_cfg['username'], mail_cfg['password']
            )
            mail_shooter.send(
                mail_cfg['target'], subject, mail_content[0], mail_content[1]
            )
        except BaseException as e:
            error_formatted = traceback.format_exc()
            logging.error('While sending notification:\n%s' % (error_formatted), extra={'exception': e})
            raise e  # to be properly notified via Sentry

    def notify_about_changes_in_results(self, changes: CollectionOfChanges, course_names: {str: str}) -> None:
        mail_content = create_full_dualis_diff_mail(changes, course_names)
        # German subject: "<n> new changes in the module results!"
        self._send_mail('%s neue Änderungen in den Modul-Ergebnissen!' % (changes.diff_count), mail_content)

    def notify_about_changes_in_schedule(self, changes: [str], uid: str):
        mail_content = create_full_schedule_diff_mail(changes, uid)
        # German subject: "<n> new changes in the lecture schedule!"
        self._send_mail('%s neue Änderungen im Vorlesungsplan!' % (len(changes) - 1), mail_content)

    def notify_about_error(self, error_description: str):
        if not self.config_helper.get_property('mail').get('send_error_msg', True):
            return

        mail_content = create_full_error_mail(error_description)
        # German subject: "Error!"
        self._send_mail('Fehler!', mail_content)
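
# A minimal usage sketch (illustrative only; assumes the NotificationService
# base class takes the project's config helper in its constructor):
#
#     service = MailService(config_helper)
#     service.interactively_configure()       # writes the 'mail' config section
#     service.notify_about_error('something broke')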
gpl-3.0
uglyboxer/linear_neuron
net-p3/lib/python3.5/site-packages/matplotlib/backends/qt_compat.py
10
4816
""" A Qt API selector that can be used to switch between PyQt and PySide. """ from __future__ import (absolute_import, division, print_function, unicode_literals) import six import os from matplotlib import rcParams, verbose # Available APIs. QT_API_PYQT = 'PyQt4' # API is not set here; Python 2.x default is V 1 QT_API_PYQTv2 = 'PyQt4v2' # forced to Version 2 API QT_API_PYSIDE = 'PySide' # only supports Version 2 API QT_API_PYQT5 = 'PyQt5' # use PyQt5 API; Version 2 with module shim ETS = dict(pyqt=(QT_API_PYQTv2, 4), pyside=(QT_API_PYSIDE, 4), pyqt5=(QT_API_PYQT5, 5)) # ETS is a dict of env variable to (QT_API, QT_MAJOR_VERSION) # If the ETS QT_API environment variable is set, use it, but only # if the varible if of the same major QT version. Note that # ETS requires the version 2 of PyQt4, which is not the platform # default for Python 2.x. QT_API_ENV = os.environ.get('QT_API') if rcParams['backend'] == 'Qt5Agg': QT_RC_MAJOR_VERSION = 5 else: QT_RC_MAJOR_VERSION = 4 QT_API = None if (QT_API_ENV is not None): try: QT_ENV_MAJOR_VERSION = ETS[QT_API_ENV][1] except KeyError: raise RuntimeError( ('Unrecognized environment variable %r, valid values are:' ' %r, %r or %r' % (QT_API_ENV, 'pyqt', 'pyside', 'pyqt5'))) if QT_ENV_MAJOR_VERSION == QT_RC_MAJOR_VERSION: # Only if backend and env qt major version are # compatible use the env variable. QT_API = ETS[QT_API_ENV][0] if QT_API is None: # No ETS environment or incompatible so use rcParams. if rcParams['backend'] == 'Qt5Agg': QT_API = rcParams['backend.qt5'] else: QT_API = rcParams['backend.qt4'] # We will define an appropriate wrapper for the differing versions # of file dialog. _getSaveFileName = None # Flag to check if sip could be imported _sip_imported = False # Now perform the imports. if QT_API in (QT_API_PYQT, QT_API_PYQTv2, QT_API_PYQT5): try: import sip _sip_imported = True except ImportError: # Try using PySide QT_API = QT_API_PYSIDE if _sip_imported: if QT_API == QT_API_PYQTv2: if QT_API_ENV == 'pyqt': cond = ("Found 'QT_API=pyqt' environment variable. " "Setting PyQt4 API accordingly.\n") else: cond = "PyQt API v2 specified." try: sip.setapi('QString', 2) except: res = 'QString API v2 specification failed. Defaulting to v1.' verbose.report(cond + res, 'helpful') # condition has now been reported, no need to repeat it: cond = "" try: sip.setapi('QVariant', 2) except: res = 'QVariant API v2 specification failed. Defaulting to v1.' verbose.report(cond + res, 'helpful') if QT_API in [QT_API_PYQT, QT_API_PYQTv2]: # PyQt4 API from PyQt4 import QtCore, QtGui try: if sip.getapi("QString") > 1: # Use new getSaveFileNameAndFilter() _getSaveFileName = QtGui.QFileDialog.getSaveFileNameAndFilter else: # Use old getSaveFileName() def _getSaveFileName(*args, **kwargs): return (QtGui.QFileDialog.getSaveFileName(*args, **kwargs), None) except (AttributeError, KeyError): # call to getapi() can fail in older versions of sip def _getSaveFileName(*args, **kwargs): return QtGui.QFileDialog.getSaveFileName(*args, **kwargs), None else: # PyQt5 API from PyQt5 import QtCore, QtGui, QtWidgets _getSaveFileName = QtWidgets.QFileDialog.getSaveFileName # Alias PyQt-specific functions for PySide compatibility. 
QtCore.Signal = QtCore.pyqtSignal try: QtCore.Slot = QtCore.pyqtSlot except AttributeError: # Not a perfect match but works in simple cases QtCore.Slot = QtCore.pyqtSignature QtCore.Property = QtCore.pyqtProperty __version__ = QtCore.PYQT_VERSION_STR else: # try importing pyside from PySide import QtCore, QtGui, __version__, __version_info__ if __version_info__ < (1, 0, 3): raise ImportError( "Matplotlib backend_qt4 and backend_qt4agg require PySide >=1.0.3") _getSaveFileName = QtGui.QFileDialog.getSaveFileName # Apply shim to Qt4 APIs to make them look like Qt5 if QT_API in (QT_API_PYQT, QT_API_PYQTv2, QT_API_PYSIDE): '''Import all used QtGui objects into QtWidgets Here I've opted to simple copy QtGui into QtWidgets as that achieves the same result as copying over the objects, and will continue to work if other objects are used. ''' QtWidgets = QtGui
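
# Downstream code selects the binding through this shim instead of importing
# PyQt/PySide directly (a sketch; QT_API values are the constants above):
#
#     from matplotlib.backends.qt_compat import QtCore, QtWidgets, QT_API
#     print(QT_API)   # e.g. 'PyQt4', 'PyQt4v2', 'PySide' or 'PyQt5'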
mit
skrueger111/zazzie
src/sassie/test_sassie/interface/density_plot/test_smaller1.py
2
3869
'''
    SASSIE: Copyright (C) 2011 Joseph E. Curtis, Ph.D.

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.
'''
import os
import shutil  # needed by tearDown below; was missing from the imports
import string

import sassie.analyze.density_plot.gui_mimic_density_plot as gui_mimic_density_plot
#import gui_mimic_density_plot as gui_mimic_density_plot

import filecmp
from unittest import main
from nose.tools import assert_equals
from mocker import Mocker, MockerTestCase

pdb_data_path = os.path.join(os.path.dirname(os.path.realpath(
    __file__)), '..', '..', 'data', 'pdb_common') + os.path.sep
dcd_data_path = os.path.join(os.path.dirname(os.path.realpath(
    __file__)), '..', '..', 'data', 'dcd_common') + os.path.sep
other_data_path = os.path.join(os.path.dirname(os.path.realpath(
    __file__)), '..', '..', 'data', 'other_common') + os.path.sep
module_data_path = os.path.join(os.path.dirname(os.path.realpath(
    __file__)), '..', '..', 'data', 'interface', 'density_plot') + os.path.sep

paths = {'pdb_data_path': pdb_data_path, 'dcd_data_path': dcd_data_path,
         'other_data_path': other_data_path, 'module_data_path': module_data_path}


class Test_Density_Plot_Filter(MockerTestCase):
    '''
    System integration test for density_plot_filter.py / sassie 1.0

    Test to see whether density_plot_filter catches improper input.

    Inputs tested:

    runname:        project name
    path:           path name for input files
    dcdfile:        input trajectory filename (pdb or dcd)
    pdbfile:        reference pdb name
    xlength:        x boxlength
    ylength:        y boxlength
    zlength:        z boxlength
    gridsp:         grid spacing (angstroms)
    nsegments:      number of segments
    segvariables:   number of flexible regions, high and low residues,
                    basis string, segment name
    save_occupancy: save the unweighted raw cube data ('Y' or 'N')
    equalweights:   use equalweights (1=yes) or weights from file (0=no)
    weightsfile:    filename containing weights per structure

    Use cases tested:
    '''

    def setUp(self):
        gui_mimic_density_plot.test_variables(self, paths)

    def extract_important_path(self, return_error):
        string_error = string.split(return_error[0])
        path_list = string.split(string_error[-1], '..')
        important_path = string.split(path_list[-1], "/")[1:]
        error = os.path.join('..', '..')
        for this_path in important_path:
            error += os.sep + this_path
        return error[:-1]

    def test_21(self):
        '''
        test for unknown error encountered when reading weight file
        '''
        self.equalweights = "0"
        self.weightsfile = os.path.join(
            module_data_path, 'weird_weight_file.txt')
        return_error = gui_mimic_density_plot.run_module(
            self, test_filter=True)

        ''' check for file error '''
        expected_error = ["unable to open and read your weight file : " +
                          os.path.basename(self.weightsfile)]
        assert_equals(return_error, expected_error)

    def tearDown(self):
        if os.path.exists(self.runname):
            shutil.rmtree(self.runname)


if __name__ == '__main__':
    main()
gpl-3.0
escalator2015/linux
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/EventClass.py
4653
3596
# EventClass.py
#
# This is a library defining some event type classes, which could
# be used by other scripts to analyze the perf samples.
#
# Currently there are just a few classes defined as examples:
# PerfEvent is the base class for all perf event samples, PebsEvent
# is a HW-based Intel x86 PEBS event, and users could add more SW/HW
# event classes based on requirements.

import struct

# Event types, user could add more here
EVTYPE_GENERIC  = 0
EVTYPE_PEBS     = 1     # Basic PEBS event
EVTYPE_PEBS_LL  = 2     # PEBS event with load latency info
EVTYPE_IBS      = 3

#
# Currently we don't have a good way to tell the event type other than by
# the size of the raw buffer: a raw PEBS event with load latency data
# is 176 bytes, while a pure PEBS event is 144 bytes.
#
def create_event(name, comm, dso, symbol, raw_buf):
        if (len(raw_buf) == 144):
                event = PebsEvent(name, comm, dso, symbol, raw_buf)
        elif (len(raw_buf) == 176):
                event = PebsNHM(name, comm, dso, symbol, raw_buf)
        else:
                event = PerfEvent(name, comm, dso, symbol, raw_buf)

        return event

class PerfEvent(object):
        event_num = 0

        def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_GENERIC):
                self.name = name
                self.comm = comm
                self.dso = dso
                self.symbol = symbol
                self.raw_buf = raw_buf
                self.ev_type = ev_type
                PerfEvent.event_num += 1

        def show(self):
                print "PMU event: name=%12s, symbol=%24s, comm=%8s, dso=%12s" % (self.name, self.symbol, self.comm, self.dso)

#
# Basic Intel PEBS (Precise Event-based Sampling) event, whose raw buffer
# contains the context info when that event happened: the EFLAGS and
# linear IP info, as well as all the registers.
#
class PebsEvent(PerfEvent):
        pebs_num = 0

        def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS):
                tmp_buf = raw_buf[0:80]
                flags, ip, ax, bx, cx, dx, si, di, bp, sp = struct.unpack('QQQQQQQQQQ', tmp_buf)
                self.flags = flags
                self.ip = ip
                self.ax = ax
                self.bx = bx
                self.cx = cx
                self.dx = dx
                self.si = si
                self.di = di
                self.bp = bp
                self.sp = sp

                PerfEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
                PebsEvent.pebs_num += 1
                del tmp_buf

#
# Intel Nehalem and Westmere support PEBS plus Load Latency info, which lies
# in the four 64-bit words written after the PEBS data:
#       Status: records the IA32_PERF_GLOBAL_STATUS register value
#       DLA:    Data Linear Address (EIP)
#       DSE:    Data Source Encoding, where the latency happens, hit or miss
#               in L1/L2/L3 or IO operations
#       LAT:    the actual latency in cycles
#
class PebsNHM(PebsEvent):
        pebs_nhm_num = 0

        def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS_LL):
                tmp_buf = raw_buf[144:176]
                status, dla, dse, lat = struct.unpack('QQQQ', tmp_buf)
                self.status = status
                self.dla = dla
                self.dse = dse
                self.lat = lat

                PebsEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
                PebsNHM.pebs_nhm_num += 1
                del tmp_buf
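
# A minimal sketch of the size-based dispatch above (illustrative only;
# the 144-byte buffer is synthetic, not real PEBS data):
#
#     fake_pebs = struct.pack('Q' * 18, *range(18))   # 18 * 8 == 144 bytes
#     ev = create_event("cycles", "bash", "bash", "main", fake_pebs)
#     assert isinstance(ev, PebsEvent)
#     ev.show()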
gpl-2.0
grnet/synnefo
snf-cyclades-app/synnefo/app_settings/default/api.py
1
8466
# -*- coding: utf-8 -*-
#
# API configuration
#####################

DEBUG = False

# Top-level URL for deployment. Numerous other URLs depend on this.
CYCLADES_BASE_URL = "https://compute.example.synnefo.org/compute/"

# The API will return HTTP Bad Request if the ?changes-since
# parameter refers to a point in time more than POLL_LIMIT seconds ago.
POLL_LIMIT = 3600

# Astakos groups that have access to '/admin' views.
ADMIN_STATS_PERMITTED_GROUPS = ["admin-stats"]

# Enable/Disable the snapshots feature altogether at the API level.
# If set to False, Cyclades will not expose the '/snapshots' API URL
# of the 'volume' app.
CYCLADES_SNAPSHOTS_ENABLED = True

# Enable/Disable the feature of sharing a resource with the members of the
# project to which it belongs, at the API level.
CYCLADES_SHARED_RESOURCES_ENABLED = False

# Enable/Disable the feature of rescuing a Virtual Machine at the API
# level
RESCUE_ENABLED = False

#
# Network Configuration
#

# CYCLADES_DEFAULT_SERVER_NETWORKS setting contains a list of networks to
# connect a newly created server to, *if the user has not* specified them
# explicitly in the POST /server API call.
# Each member of the list may be a network UUID, a tuple of network UUIDs,
# "SNF:ANY_PUBLIC_IPV4" [any public network with an IPv4 subnet defined],
# "SNF:ANY_PUBLIC_IPV6" [any public network with only an IPv6 subnet defined],
# or "SNF:ANY_PUBLIC" [any public network].
#
# Access control and quota policy are enforced, just as if the user had
# specified the value of CYCLADES_DEFAULT_SERVER_NETWORKS in the content
# of the POST /call, after processing of "SNF:*" directives.
CYCLADES_DEFAULT_SERVER_NETWORKS = []

# This setting contains a list of networks which every new server
# will be forced to connect to, regardless of the contents of the POST
# /servers call, or the value of CYCLADES_DEFAULT_SERVER_NETWORKS.
# Its format is identical to that of CYCLADES_DEFAULT_SERVER_NETWORKS.
#
# WARNING: No access control or quota policy is enforced.
# The server will get all IPv4/IPv6 addresses needed to connect to the
# networks specified in CYCLADES_FORCED_SERVER_NETWORKS, regardless
# of the state of the floating IP pool of the user, and without
# allocating any floating IPs.
CYCLADES_FORCED_SERVER_NETWORKS = []

# Maximum allowed network size for private networks.
MAX_CIDR_BLOCK = 22

# Default settings used by network flavors
DEFAULT_MAC_PREFIX = 'aa:00:0'
DEFAULT_BRIDGE = 'br0'

# Network flavors that users are allowed to create through API requests
# Available flavors are IP_LESS_ROUTED, MAC_FILTERED, PHYSICAL_VLAN
API_ENABLED_NETWORK_FLAVORS = ['MAC_FILTERED']

# Settings for MAC_FILTERED network:
# ------------------------------------------
# All networks of this type are bridged to the same bridge. Isolation between
# networks is achieved by assigning a unique MAC-prefix to each network and
# filtering packets via ebtables.
DEFAULT_MAC_FILTERED_BRIDGE = 'prv0'

# Firewalling. Firewall tags should contain '%d' to be filled with the NIC
# ID.
GANETI_FIREWALL_ENABLED_TAG = 'synnefo:network:%s:protected'
GANETI_FIREWALL_DISABLED_TAG = 'synnefo:network:%s:unprotected'
GANETI_FIREWALL_PROTECTED_TAG = 'synnefo:network:%s:limited'

# The default firewall profile that will be in effect if no tags are defined
DEFAULT_FIREWALL_PROFILE = 'DISABLED'

# Fixed mapping of user VMs to a specific backend.
# e.g. BACKEND_PER_USER = {'example@synnefo.org': 2}
BACKEND_PER_USER = {}

# Encryption key for the instance hostname in the stat graphs URLs. Set it to
# a random string and update the STATS_SECRET_KEY setting in the snf-stats-app
# host (20-snf-stats-app-settings.conf) accordingly.
CYCLADES_STATS_SECRET_KEY = "secret_key"

# URL templates for the stat graphs.
# The API implementation replaces '%s' with the encrypted backend id.
CPU_BAR_GRAPH_URL = 'http://stats.example.synnefo.org/stats/v1.0/cpu-bar/%s'
CPU_TIMESERIES_GRAPH_URL = \
    'http://stats.example.synnefo.org/stats/v1.0/cpu-ts/%s'
NET_BAR_GRAPH_URL = 'http://stats.example.synnefo.org/stats/v1.0/net-bar/%s'
NET_TIMESERIES_GRAPH_URL = \
    'http://stats.example.synnefo.org/stats/v1.0/net-ts/%s'

# Recommended refresh period for server stats
STATS_REFRESH_PERIOD = 60

# The maximum number of file path/content pairs that can be supplied on server
# build
MAX_PERSONALITY = 5

# The maximum size, in bytes, for each personality file
MAX_PERSONALITY_SIZE = 10240

# Authentication URL of the astakos instance to be used for user management
ASTAKOS_AUTH_URL = 'https://accounts.example.synnefo.org/identity/v2.0'

# Tune the size of the Astakos http client connection pool.
# This limits the number of concurrent requests to Astakos.
CYCLADES_ASTAKOSCLIENT_POOLSIZE = 50

# Key for password encryption-decryption. After changing this setting, synnefo
# will be unable to decrypt all existing Backend passwords. You will need to
# store again the new password by using 'snf-manage backend-modify'.
# SECRET_ENCRYPTION_KEY may be up to 32 bytes. Keys bigger than 32 bytes are
# not supported.
SECRET_ENCRYPTION_KEY = "Password Encryption Key"

# Astakos service token
# The token used for astakos service api calls (e.g. api to retrieve user
# email using a user uuid)
CYCLADES_SERVICE_TOKEN = ''

# Template to use to build the FQDN of VMs. The setting will be formatted with
# the id of the VM.
CYCLADES_SERVERS_FQDN = 'snf-%(id)s.vm.example.synnefo.org'

# Description of applied port forwarding rules (DNAT) for Cyclades VMs. This
# setting contains a mapping from the port of each VM to a tuple containing
# the destination IP/hostname and the new port: (host, port). Instead of a
# tuple a python callable object may be used which must return such a tuple.
# The caller will pass to the callable the following positional arguments, in
# the following order:
# * server_id: The ID of the VM in the DB
# * ip_address: The IPv4 address of the public VM NIC
# * fqdn: The FQDN of the VM
# * user: The UUID of the owner of the VM
#
# Here is an example describing the mapping of the SSH port of all VMs to
# the external address 'gate.example.synnefo.org' and port 60000+server_id.
# e.g. iptables -t nat -A prerouting -d gate.example.synnefo.org \
# --dport (61000 + $(VM_ID)) -j DNAT --to-destination $(VM_IP):22
#CYCLADES_PORT_FORWARDING = {
#    22: lambda ip_address, server_id, fqdn, user:
#        ("gate.example.synnefo.org", 61000 + server_id),
#}
CYCLADES_PORT_FORWARDING = {}

# Extra configuration options required for snf-vncauthproxy (>=1.5). Each dict
# of the list describes one vncauthproxy instance.
CYCLADES_VNCAUTHPROXY_OPTS = [
    {
        # These values are required for VNC console support. They should match
        # a user / password configured in the snf-vncauthproxy authentication
        # / users file (/var/lib/vncauthproxy/users).
        'auth_user': 'synnefo',
        'auth_password': 'secret_password',
        # server_address and server_port should reflect the --listen-address
        # and --listen-port options passed to the vncauthproxy daemon
        'server_address': '127.0.0.1',
        'server_port': 24999,
        # Set to True to enable SSL support on the control socket.
        'enable_ssl': False,
        # If you enabled SSL support for snf-vncauthproxy you can optionally
        # provide a path to a CA file and enable strict checking for the
        # server certificate.
        'ca_cert': None,
        'strict': False,
    },
]

# The maximum allowed size (GB) for a Cyclades Volume
CYCLADES_VOLUME_MAX_SIZE = 200

# The maximum allowed metadata items for a Cyclades Volume
CYCLADES_VOLUME_MAX_METADATA = 10

# The volume types that Cyclades allow to be detached
CYCLADES_DETACHABLE_DISK_TEMPLATES = ("ext_archipelago", "ext_vlmc")

# The maximum number of tags allowed for a Cyclades Virtual Machine
CYCLADES_VM_MAX_TAGS = 50

# The maximum allowed metadata items for a Cyclades Virtual Machine
CYCLADES_VM_MAX_METADATA = 10

# Define cache for public stats
PUBLIC_STATS_CACHE = {
    "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
    "LOCATION": "",
    "KEY_PREFIX": "publicstats",
    "TIMEOUT": 300,
}

# Permit users of specific groups to override the flavor allow_create policy
CYCLADES_FLAVOR_OVERRIDE_ALLOW_CREATE = {}

# Define cache for VM password
VM_PASSWORD_CACHE = {
    "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
    "LOCATION": "",
    "KEY_PREFIX": "vmpassword",
    "TIMEOUT": None,
}
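
# A hypothetical override (illustrative only; the UUID is a placeholder)
# showing how the "SNF:*" directives and plain network UUIDs described above
# can be combined:
#
#     CYCLADES_DEFAULT_SERVER_NETWORKS = [
#         "SNF:ANY_PUBLIC_IPV4",                   # one NIC on any public IPv4 network
#         "00000000-0000-0000-0000-000000000000",  # plus one NIC on a private network
#     ]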
gpl-3.0
jnerin/ansible
lib/ansible/plugins/lookup/mongodb.py
34
8869
# (c) 2016, Marcos Diez <marcos@unitron.com.br>
# https://github.com/marcosdiez/
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)

from ansible.module_utils.six import string_types, integer_types

__metaclass__ = type

DOCUMENTATION = '''
author: 'Marcos Diez <marcos (at) unitron.com.br>'
lookup: mongodb
version_added: "2.3"
short_description: lookup info from MongoDB
description:
    - 'The ``MongoDB`` lookup runs the *find()* command on a given *collection* on a given *MongoDB* server.'
    - 'The result is a list of jsons, so slightly different from what PyMongo returns. In particular, *timestamps* are converted to epoch integers.'
options:
    connect_string:
        description:
            - Can be any valid MongoDB connection string, supporting authentication, replica sets, etc.
            - "More info at U(https://docs.mongodb.org/manual/reference/connection-string/)"
        default: "mongodb://localhost/"
    database:
        description:
            - Name of the database against which the query will be made
        required: True
    collection:
        description:
            - Name of the collection against which the query will be made
        required: True
    filter:
        description:
            - Criteria of the output
        type: 'dict'
        default: '{}'
    projection:
        description:
            - Fields you want returned
        type: dict
        default: "{}"
    skip:
        description:
            - How many results should be skipped
        type: integer
    limit:
        description:
            - How many results should be shown
        type: integer
    sort:
        description:
            - Sorting rules. Please notice the constants are replaced by strings.
        type: list
        default: "[]"
notes:
    - "Please check https://api.mongodb.org/python/current/api/pymongo/collection.html?highlight=find#pymongo.collection.Collection.find for more details."
requirements:
    - pymongo >= 2.4 (python library)
'''

EXAMPLES = '''
- hosts: all
  gather_facts: false
  vars:
    mongodb_parameters:
      # mandatory parameters
      database: 'local'
      collection: "startup_log"
      # optional
      connection_string: "mongodb://localhost/"
      extra_connection_parameters: { "ssl": True, "ssl_certfile": "/etc/self_signed_certificate.pem" }
      # optional query parameters; we accept any parameter from the normal mongodb query.
      filter: { "hostname": "batman" }
      projection: { "pid": True, "_id": False, "hostname": True }
      skip: 0
      limit: 1
      sort: [ [ "startTime", "ASCENDING" ], [ "age", "DESCENDING" ] ]
  tasks:
    - debug: msg="Mongo has already started with the following PID [{{ item.pid }}]"
      with_mongodb: "{{ mongodb_parameters }}"
'''

import datetime

try:
    from pymongo import ASCENDING, DESCENDING
    from pymongo.errors import ConnectionFailure
    from pymongo import MongoClient
except ImportError:
    try:  # for older PyMongo 2.2
        from pymongo import Connection as MongoClient
    except ImportError:
        pymongo_found = False
    else:
        pymongo_found = True
else:
    pymongo_found = True

from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase


class LookupModule(LookupBase):

    def _fix_sort_parameter(self, sort_parameter):
        if sort_parameter is None:
            return sort_parameter

        if not isinstance(sort_parameter, list):
            raise AnsibleError(u"Error. Sort parameters must be a list, not [ {0} ]".format(sort_parameter))

        for item in sort_parameter:
            self._convert_sort_string_to_constant(item)

        return sort_parameter

    def _convert_sort_string_to_constant(self, item):
        original_sort_order = item[1]
        sort_order = original_sort_order.upper()
        if sort_order == u"ASCENDING":
            item[1] = ASCENDING
        elif sort_order == u"DESCENDING":
            item[1] = DESCENDING
        # else the user knows what s/he is doing and we won't predict.
        # PyMongo will return an error if necessary

    def convert_mongo_result_to_valid_json(self, result):
        if result is None:
            return result
        if isinstance(result, integer_types + (float, bool)):
            return result
        if isinstance(result, string_types):
            return result
        elif isinstance(result, list):
            new_list = []
            for elem in result:
                new_list.append(self.convert_mongo_result_to_valid_json(elem))
            return new_list
        elif isinstance(result, dict):
            new_dict = {}
            for key in result.keys():
                value = result[key]  # python2 and 3 compatible....
                new_dict[key] = self.convert_mongo_result_to_valid_json(value)
            return new_dict
        elif isinstance(result, datetime.datetime):
            # epoch
            return (result - datetime.datetime(1970, 1, 1)).total_seconds()
        else:
            # failsafe
            return u"{0}".format(result)

    def run(self, terms, variables, **kwargs):
        ret = []
        for term in terms:
            u'''
            Makes a MongoDB query and returns the output as a valid list of json.
            Timestamps are converted to epoch integers/longs.

            Here is a sample playbook that uses it:

-------------------------------------------------------------------------------
- hosts: all
  gather_facts: false

  vars:
    mongodb_parameters:
      # optional parameter, default = "mongodb://localhost/"
      # connection_string: "mongodb://localhost/"

      # mandatory parameters
      database: 'local'
      collection: "startup_log"

      # optional query parameters
      # we accept any parameter from the normal mongodb query.
      # the official documentation is here
      # https://api.mongodb.org/python/current/api/pymongo/collection.html?highlight=find#pymongo.collection.Collection.find
      #   filter: { "hostname": "batman" }
      #   projection: { "pid": True, "_id": False, "hostname": True }
      #   skip: 0
      #   limit: 1
      #   sort: [ [ "startTime", "ASCENDING" ], [ "age", "DESCENDING" ] ]
      #   extra_connection_parameters: { }
      # a dictionary with extra parameters like ssl, ssl_keyfile, maxPoolSize etc...
      # the full list is available here. It varies with the PyMongo version:
      # https://api.mongodb.org/python/current/api/pymongo/mongo_client.html#pymongo.mongo_client.MongoClient

  tasks:
    - debug: msg="Mongo has already started with the following PID [{{ item.pid }}] - full_data {{ item }} "
      with_items:
        - "{{ lookup('mongodb', mongodb_parameters) }}"
-------------------------------------------------------------------------------
            '''
            connection_string = term.get(u'connection_string', u"mongodb://localhost")
            database = term[u"database"]
            collection = term[u'collection']
            extra_connection_parameters = term.get(u'extra_connection_parameters', {})

            if u"extra_connection_parameters" in term:
                del term[u"extra_connection_parameters"]
            if u"connection_string" in term:
                del term[u"connection_string"]
            del term[u"database"]
            del term[u"collection"]

            if u"sort" in term:
                term[u"sort"] = self._fix_sort_parameter(term[u"sort"])

            # all other parameters are sent to mongo, so we are future and past proof

            try:
                client = MongoClient(connection_string, **extra_connection_parameters)
                results = client[database][collection].find(**term)

                for result in results:
                    result = self.convert_mongo_result_to_valid_json(result)
                    ret.append(result)

            except ConnectionFailure as e:
                raise AnsibleError(u'unable to connect to database: %s' % str(e))

        return ret
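
# A minimal sketch of the timestamp rule described above (illustrative only;
# instantiating LookupModule outside of Ansible may require loader/templar
# arguments, depending on the Ansible version):
#
#     import datetime
#     lm = LookupModule()
#     doc = {"ts": datetime.datetime(1970, 1, 2)}
#     assert lm.convert_mongo_result_to_valid_json(doc) == {"ts": 86400.0}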
gpl-3.0
UXE/local-edx
lms/djangoapps/instructor/tests/test_legacy_xss.py
10
2344
""" Tests of various instructor dashboard features that include lists of students """ from django.conf import settings from django.test.client import RequestFactory from django.test.utils import override_settings from markupsafe import escape from xmodule.modulestore.tests.django_utils import TEST_DATA_MOCK_MODULESTORE from student.tests.factories import UserFactory, CourseEnrollmentFactory from edxmako.tests import mako_middleware_process_request from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory from instructor.views import legacy # pylint: disable=missing-docstring @override_settings(MODULESTORE=TEST_DATA_MOCK_MODULESTORE) class TestXss(ModuleStoreTestCase): def setUp(self): self._request_factory = RequestFactory() self._course = CourseFactory.create() self._evil_student = UserFactory.create( email="robot+evil@edx.org", username="evil-robot", profile__name='<span id="evil">Evil Robot</span>', ) self._instructor = UserFactory.create( email="robot+instructor@edx.org", username="instructor", is_staff=True ) CourseEnrollmentFactory.create( user=self._evil_student, course_id=self._course.id ) def _test_action(self, action): """ Test for XSS vulnerability in the given action Build a request with the given action, call the instructor dashboard view, and check that HTML code in a user's name is properly escaped. """ req = self._request_factory.post( "dummy_url", data={"action": action} ) req.user = self._instructor req.session = {} mako_middleware_process_request(req) resp = legacy.instructor_dashboard(req, self._course.id.to_deprecated_string()) respUnicode = resp.content.decode(settings.DEFAULT_CHARSET) self.assertNotIn(self._evil_student.profile.name, respUnicode) self.assertIn(escape(self._evil_student.profile.name), respUnicode) def test_list_enrolled(self): self._test_action("List enrolled students") def test_dump_list_of_enrolled(self): self._test_action("Dump list of enrolled students")
agpl-3.0
DJMedhaug/BizSprint
bizsprint/users/views.py
1
3347
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import absolute_import, unicode_literals

from django.core.urlresolvers import reverse
from django.views.generic import DetailView, ListView, RedirectView, UpdateView

from django.contrib.auth.mixins import LoginRequiredMixin

from bizsprint.users.models import User
from django.http import HttpResponseRedirect
from django.conf import settings
from django.core.mail import send_mail
from django.shortcuts import render
from bizsprint.users.forms import ContactForm, SignUpForm


# Create your views here.
def home(request):
    title = 'Welcome'
    form = SignUpForm(request.POST or None)
    context = {
        "title": title,
        "form": form
    }

    if form.is_valid():
        instance = form.save(commit=False)
        full_name = form.cleaned_data.get("full_name")
        if not full_name:
            full_name = "New full name"
        instance.full_name = full_name
        instance.save()
        context = {
            "title": "Thank you"
        }

    return render(request, "home.html", context)


def contact(request):
    title = 'Contact Us'
    title_align_center = True
    form = ContactForm(request.POST or None)
    if form.is_valid():
        form_email = form.cleaned_data.get("email")
        form_message = form.cleaned_data.get("message")
        form_full_name = form.cleaned_data.get("full_name")
        subject = 'Tiny Spot contact form'
        from_email = settings.EMAIL_HOST_USER
        to_email = [from_email]  # add more emails as a string if needed
        contact_message = "%s: %s via %s" % (
            form_full_name,
            form_message,
            form_email)
        send_mail(subject,
                  contact_message,
                  from_email,
                  to_email,
                  fail_silently=False)
        context = {
            "form": form,
            "title": title,
            "title_align_center": title_align_center,
        }
        return HttpResponseRedirect("/confirmation/")
    context = {
        "form": form,
        "title": title,
    }
    return render(request, "contact.html", context)


class UserDetailView(LoginRequiredMixin, DetailView):
    model = User
    # These next two lines tell the view to index lookups by username
    slug_field = 'username'
    slug_url_kwarg = 'username'


class UserRedirectView(LoginRequiredMixin, RedirectView):
    permanent = False

    def get_redirect_url(self):
        return reverse('users:detail',
                       kwargs={'username': self.request.user.username})


class UserUpdateView(LoginRequiredMixin, UpdateView):

    fields = ['name', ]

    # we already imported User in the view code above, remember?
    model = User

    # send the user back to their own page after a successful update
    def get_success_url(self):
        return reverse('users:detail',
                       kwargs={'username': self.request.user.username})

    def get_object(self):
        # Only get the User record for the user making the request
        return User.objects.get(username=self.request.user.username)


class UserListView(LoginRequiredMixin, ListView):
    model = User
    # These next two lines tell the view to index lookups by username
    slug_field = 'username'
    slug_url_kwarg = 'username'
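
# Hypothetical URL wiring for the views above (illustrative only; the real
# route names and namespace live in the project's urls.py):
#
#     from django.conf.urls import url
#     from . import views
#
#     urlpatterns = [
#         url(r'^$', views.UserListView.as_view(), name='list'),
#         url(r'^~redirect/$', views.UserRedirectView.as_view(), name='redirect'),
#         url(r'^(?P<username>[\w.@+-]+)/$', views.UserDetailView.as_view(), name='detail'),
#     ]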
bsd-3-clause
chrippa/blues
blues/wowza.py
5
1394
""" Wowza Blueprint =============== **Fabric environment:** .. code-block:: yaml blueprints: - blues.wowza """ from fabric.decorators import task from refabric.api import run, info from refabric.context_managers import sudo from refabric.contrib import blueprints from . import debian __all__ = ['start', 'stop', 'restart', 'setup', 'configure'] blueprint = blueprints.get(__name__) start = debian.service_task('WowzaStreamingEngine', 'start') stop = debian.service_task('WowzaStreamingEngine', 'stop') restart = debian.service_task('WowzaStreamingEngine', 'restart') wowza_root ='/usr/local/WowzaMediaServer/' @task def setup(): """ Install and configure Wowza """ install() configure() def install(): with sudo(): info('Downloading wowza') version = blueprint.get('wowza_version', '4.1.2') binary = 'WowzaStreamingEngine-{}.deb.bin'.format(version) version_path = version.replace('.', '-') url = 'http://www.wowza.com/downloads/WowzaStreamingEngine-{}/{}'.format(version_path, binary) run('wget -P /tmp/ {url}'.format(url=url)) debian.chmod('/tmp/{}'.format(binary), '+x') info('Installing wowza') run('/tmp/{}'.format(binary)) @task def configure(): """ Configure Wowza """
mit
pim89/youtube-dl
test/test_utils.py
4
49253
#!/usr/bin/env python
# coding: utf-8

from __future__ import unicode_literals

# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


# Various small unit tests
import io
import json
import xml.etree.ElementTree

from youtube_dl.utils import (
    age_restricted,
    args_to_str,
    encode_base_n,
    clean_html,
    date_from_str,
    DateRange,
    detect_exe_version,
    determine_ext,
    dict_get,
    encode_compat_str,
    encodeFilename,
    escape_rfc3986,
    escape_url,
    extract_attributes,
    ExtractorError,
    find_xpath_attr,
    fix_xml_ampersands,
    get_element_by_class,
    InAdvancePagedList,
    intlist_to_bytes,
    is_html,
    js_to_json,
    limit_length,
    mimetype2ext,
    month_by_name,
    ohdave_rsa_encrypt,
    OnDemandPagedList,
    orderedSet,
    parse_age_limit,
    parse_duration,
    parse_filesize,
    parse_count,
    parse_iso8601,
    read_batch_urls,
    sanitize_filename,
    sanitize_path,
    prepend_extension,
    replace_extension,
    remove_start,
    remove_end,
    remove_quotes,
    shell_quote,
    smuggle_url,
    str_to_int,
    strip_jsonp,
    timeconvert,
    unescapeHTML,
    unified_strdate,
    unified_timestamp,
    unsmuggle_url,
    uppercase_escape,
    lowercase_escape,
    url_basename,
    urlencode_postdata,
    urshift,
    update_url_query,
    version_tuple,
    xpath_with_ns,
    xpath_element,
    xpath_text,
    xpath_attr,
    render_table,
    match_str,
    parse_dfxp_time_expr,
    dfxp2srt,
    cli_option,
    cli_valueless_option,
    cli_bool_option,
    parse_codecs,
)
from youtube_dl.compat import (
    compat_chr,
    compat_etree_fromstring,
    compat_urlparse,
    compat_parse_qs,
)


class TestUtil(unittest.TestCase):
    def test_timeconvert(self):
        self.assertTrue(timeconvert('') is None)
        self.assertTrue(timeconvert('bougrg') is None)

    def test_sanitize_filename(self):
        self.assertEqual(sanitize_filename('abc'), 'abc')
        self.assertEqual(sanitize_filename('abc_d-e'), 'abc_d-e')

        self.assertEqual(sanitize_filename('123'), '123')

        self.assertEqual('abc_de', sanitize_filename('abc/de'))
        self.assertFalse('/' in sanitize_filename('abc/de///'))

        self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de'))
        self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|'))
        self.assertEqual('yes no', sanitize_filename('yes? no'))
        self.assertEqual('this - that', sanitize_filename('this: that'))

        self.assertEqual(sanitize_filename('AT&T'), 'AT&T')
        aumlaut = 'ä'
        self.assertEqual(sanitize_filename(aumlaut), aumlaut)
        tests = '\u043a\u0438\u0440\u0438\u043b\u043b\u0438\u0446\u0430'
        self.assertEqual(sanitize_filename(tests), tests)

        self.assertEqual(
            sanitize_filename('New World record at 0:12:34'),
            'New World record at 0_12_34')

        self.assertEqual(sanitize_filename('--gasdgf'), '_-gasdgf')
        self.assertEqual(sanitize_filename('--gasdgf', is_id=True), '--gasdgf')
        self.assertEqual(sanitize_filename('.gasdgf'), 'gasdgf')
        self.assertEqual(sanitize_filename('.gasdgf', is_id=True), '.gasdgf')

        forbidden = '"\0\\/'
        for fc in forbidden:
            for fbc in forbidden:
                self.assertTrue(fbc not in sanitize_filename(fc))

    def test_sanitize_filename_restricted(self):
        self.assertEqual(sanitize_filename('abc', restricted=True), 'abc')
        self.assertEqual(sanitize_filename('abc_d-e', restricted=True), 'abc_d-e')

        self.assertEqual(sanitize_filename('123', restricted=True), '123')

        self.assertEqual('abc_de', sanitize_filename('abc/de', restricted=True))
        self.assertFalse('/' in sanitize_filename('abc/de///', restricted=True))

        self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de', restricted=True))
        self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|', restricted=True))
        self.assertEqual('yes_no', sanitize_filename('yes? no', restricted=True))
        self.assertEqual('this_-_that', sanitize_filename('this: that', restricted=True))

        tests = 'aäb\u4e2d\u56fd\u7684c'
        self.assertEqual(sanitize_filename(tests, restricted=True), 'aab_c')
        self.assertTrue(sanitize_filename('\xf6', restricted=True) != '')  # No empty filename

        forbidden = '"\0\\/&!: \'\t\n()[]{}$;`^,#'
        for fc in forbidden:
            for fbc in forbidden:
                self.assertTrue(fbc not in sanitize_filename(fc, restricted=True))

        # Handle a common case more neatly
        self.assertEqual(sanitize_filename('\u5927\u58f0\u5e26 - Song', restricted=True), 'Song')
        self.assertEqual(sanitize_filename('\u603b\u7edf: Speech', restricted=True), 'Speech')
        # .. but make sure the file name is never empty
        self.assertTrue(sanitize_filename('-', restricted=True) != '')
        self.assertTrue(sanitize_filename(':', restricted=True) != '')

        self.assertEqual(sanitize_filename(
            'ÂÃÄÀÁÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖŐØŒÙÚÛÜŰÝÞßàáâãäåæçèéêëìíîïðñòóôõöőøœùúûüűýþÿ', restricted=True),
            'AAAAAAAECEEEEIIIIDNOOOOOOOOEUUUUUYPssaaaaaaaeceeeeiiiionooooooooeuuuuuypy')

    def test_sanitize_ids(self):
        self.assertEqual(sanitize_filename('_n_cd26wFpw', is_id=True), '_n_cd26wFpw')
        self.assertEqual(sanitize_filename('_BD_eEpuzXw', is_id=True), '_BD_eEpuzXw')
        self.assertEqual(sanitize_filename('N0Y__7-UOdI', is_id=True), 'N0Y__7-UOdI')

    def test_sanitize_path(self):
        if sys.platform != 'win32':
            return

        self.assertEqual(sanitize_path('abc'), 'abc')
        self.assertEqual(sanitize_path('abc/def'), 'abc\\def')
        self.assertEqual(sanitize_path('abc\\def'), 'abc\\def')
        self.assertEqual(sanitize_path('abc|def'), 'abc#def')
        self.assertEqual(sanitize_path('<>:"|?*'), '#######')
        self.assertEqual(sanitize_path('C:/abc/def'), 'C:\\abc\\def')
        self.assertEqual(sanitize_path('C?:/abc/def'), 'C##\\abc\\def')

        self.assertEqual(sanitize_path('\\\\?\\UNC\\ComputerName\\abc'), '\\\\?\\UNC\\ComputerName\\abc')
        self.assertEqual(sanitize_path('\\\\?\\UNC/ComputerName/abc'), '\\\\?\\UNC\\ComputerName\\abc')

        self.assertEqual(sanitize_path('\\\\?\\C:\\abc'), '\\\\?\\C:\\abc')
        self.assertEqual(sanitize_path('\\\\?\\C:/abc'), '\\\\?\\C:\\abc')
        self.assertEqual(sanitize_path('\\\\?\\C:\\ab?c\\de:f'), '\\\\?\\C:\\ab#c\\de#f')
        self.assertEqual(sanitize_path('\\\\?\\C:\\abc'), '\\\\?\\C:\\abc')

        self.assertEqual(
            sanitize_path('youtube/%(uploader)s/%(autonumber)s-%(title)s-%(upload_date)s.%(ext)s'),
            'youtube\\%(uploader)s\\%(autonumber)s-%(title)s-%(upload_date)s.%(ext)s')

        self.assertEqual(
            sanitize_path('youtube/TheWreckingYard ./00001-Not bad, Especially for Free! (1987 Yamaha 700)-20141116.mp4.part'),
            'youtube\\TheWreckingYard #\\00001-Not bad, Especially for Free! (1987 Yamaha 700)-20141116.mp4.part')
        self.assertEqual(sanitize_path('abc/def...'), 'abc\\def..#')
        self.assertEqual(sanitize_path('abc.../def'), 'abc..#\\def')
        self.assertEqual(sanitize_path('abc.../def...'), 'abc..#\\def..#')

        self.assertEqual(sanitize_path('../abc'), '..\\abc')
        self.assertEqual(sanitize_path('../../abc'), '..\\..\\abc')
        self.assertEqual(sanitize_path('./abc'), 'abc')
        self.assertEqual(sanitize_path('./../abc'), '..\\abc')

    def test_prepend_extension(self):
        self.assertEqual(prepend_extension('abc.ext', 'temp'), 'abc.temp.ext')
        self.assertEqual(prepend_extension('abc.ext', 'temp', 'ext'), 'abc.temp.ext')
        self.assertEqual(prepend_extension('abc.unexpected_ext', 'temp', 'ext'), 'abc.unexpected_ext.temp')
        self.assertEqual(prepend_extension('abc', 'temp'), 'abc.temp')
        self.assertEqual(prepend_extension('.abc', 'temp'), '.abc.temp')
        self.assertEqual(prepend_extension('.abc.ext', 'temp'), '.abc.temp.ext')

    def test_replace_extension(self):
        self.assertEqual(replace_extension('abc.ext', 'temp'), 'abc.temp')
        self.assertEqual(replace_extension('abc.ext', 'temp', 'ext'), 'abc.temp')
        self.assertEqual(replace_extension('abc.unexpected_ext', 'temp', 'ext'), 'abc.unexpected_ext.temp')
        self.assertEqual(replace_extension('abc', 'temp'), 'abc.temp')
        self.assertEqual(replace_extension('.abc', 'temp'), '.abc.temp')
        self.assertEqual(replace_extension('.abc.ext', 'temp'), '.abc.temp')

    def test_remove_start(self):
        self.assertEqual(remove_start(None, 'A - '), None)
        self.assertEqual(remove_start('A - B', 'A - '), 'B')
        self.assertEqual(remove_start('B - A', 'A - '), 'B - A')

    def test_remove_end(self):
        self.assertEqual(remove_end(None, ' - B'), None)
        self.assertEqual(remove_end('A - B', ' - B'), 'A')
        self.assertEqual(remove_end('B - A', ' - B'), 'B - A')

    def test_remove_quotes(self):
        self.assertEqual(remove_quotes(None), None)
        self.assertEqual(remove_quotes('"'), '"')
        self.assertEqual(remove_quotes("'"), "'")
        self.assertEqual(remove_quotes(';'), ';')
        self.assertEqual(remove_quotes('";'), '";')
        self.assertEqual(remove_quotes('""'), '')
        self.assertEqual(remove_quotes('";"'), ';')

    def test_ordered_set(self):
        self.assertEqual(orderedSet([1, 1, 2, 3, 4, 4, 5, 6, 7, 3, 5]), [1, 2, 3, 4, 5, 6, 7])
        self.assertEqual(orderedSet([]), [])
        self.assertEqual(orderedSet([1]), [1])
        # keep the list ordered
        self.assertEqual(orderedSet([135, 1, 1, 1]), [135, 1])

    def test_unescape_html(self):
        self.assertEqual(unescapeHTML('%20;'), '%20;')
        self.assertEqual(unescapeHTML('&#x2F;'), '/')
        self.assertEqual(unescapeHTML('&#47;'), '/')
        self.assertEqual(unescapeHTML('&eacute;'), 'é')
        self.assertEqual(unescapeHTML('&#2013266066;'), '&#2013266066;')
        # HTML5 entities
        self.assertEqual(unescapeHTML('&period;&apos;'), '.\'')

    def test_date_from_str(self):
        self.assertEqual(date_from_str('yesterday'), date_from_str('now-1day'))
        self.assertEqual(date_from_str('now+7day'), date_from_str('now+1week'))
        self.assertEqual(date_from_str('now+14day'), date_from_str('now+2week'))
        self.assertEqual(date_from_str('now+365day'), date_from_str('now+1year'))
        self.assertEqual(date_from_str('now+30day'), date_from_str('now+1month'))

    def test_daterange(self):
        _20century = DateRange("19000101", "20000101")
        self.assertFalse("17890714" in _20century)
        _ac = DateRange("00010101")
        self.assertTrue("19690721" in _ac)
        _firstmilenium = DateRange(end="10000101")
        self.assertTrue("07110427" in _firstmilenium)

    def test_unified_dates(self):
        self.assertEqual(unified_strdate('December 21, 2010'), '20101221')
        self.assertEqual(unified_strdate('8/7/2009'), '20090708')
        self.assertEqual(unified_strdate('Dec 14, 2012'), '20121214')
        self.assertEqual(unified_strdate('2012/10/11 01:56:38 +0000'), '20121011')
        self.assertEqual(unified_strdate('1968 12 10'), '19681210')
        self.assertEqual(unified_strdate('1968-12-10'), '19681210')
        self.assertEqual(unified_strdate('28/01/2014 21:00:00 +0100'), '20140128')
        self.assertEqual(
            unified_strdate('11/26/2014 11:30:00 AM PST', day_first=False),
            '20141126')
        self.assertEqual(
            unified_strdate('2/2/2015 6:47:40 PM', day_first=False),
            '20150202')
        self.assertEqual(unified_strdate('Feb 14th 2016 5:45PM'), '20160214')
        self.assertEqual(unified_strdate('25-09-2014'), '20140925')
        self.assertEqual(unified_strdate('27.02.2016 17:30'), '20160227')
        self.assertEqual(unified_strdate('UNKNOWN DATE FORMAT'), None)
        self.assertEqual(unified_strdate('Feb 7, 2016 at 6:35 pm'), '20160207')

    def test_unified_timestamps(self):
        self.assertEqual(unified_timestamp('December 21, 2010'), 1292889600)
        self.assertEqual(unified_timestamp('8/7/2009'), 1247011200)
        self.assertEqual(unified_timestamp('Dec 14, 2012'), 1355443200)
        self.assertEqual(unified_timestamp('2012/10/11 01:56:38 +0000'), 1349920598)
        self.assertEqual(unified_timestamp('1968 12 10'), -33436800)
        self.assertEqual(unified_timestamp('1968-12-10'), -33436800)
        self.assertEqual(unified_timestamp('28/01/2014 21:00:00 +0100'), 1390939200)
        self.assertEqual(
            unified_timestamp('11/26/2014 11:30:00 AM PST', day_first=False),
            1417001400)
        self.assertEqual(
            unified_timestamp('2/2/2015 6:47:40 PM', day_first=False),
            1422902860)
        self.assertEqual(unified_timestamp('Feb 14th 2016 5:45PM'), 1455471900)
        self.assertEqual(unified_timestamp('25-09-2014'), 1411603200)
        self.assertEqual(unified_timestamp('27.02.2016 17:30'), 1456594200)
        self.assertEqual(unified_timestamp('UNKNOWN DATE FORMAT'), None)
        self.assertEqual(unified_timestamp('May 16, 2016 11:15 PM'), 1463440500)
        self.assertEqual(unified_timestamp('Feb 7, 2016 at 6:35 pm'), 1454870100)

    def test_determine_ext(self):
        self.assertEqual(determine_ext('http://example.com/foo/bar.mp4/?download'), 'mp4')
        self.assertEqual(determine_ext('http://example.com/foo/bar/?download', None), None)
        self.assertEqual(determine_ext('http://example.com/foo/bar.nonext/?download', None), None)
        self.assertEqual(determine_ext('http://example.com/foo/bar/mp4?download', None), None)
        self.assertEqual(determine_ext('http://example.com/foo/bar.m3u8//?download'), 'm3u8')

    def test_find_xpath_attr(self):
        testxml = '''<root>
            <node/>
            <node x="a"/>
            <node x="a" y="c" />
            <node x="b" y="d" />
            <node x="" />
        </root>'''
        doc = compat_etree_fromstring(testxml)

        self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n'), None)
        self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n', 'v'), None)
        self.assertEqual(find_xpath_attr(doc, './/node', 'n'), None)
        self.assertEqual(find_xpath_attr(doc, './/node', 'n', 'v'), None)
        self.assertEqual(find_xpath_attr(doc, './/node', 'x'), doc[1])
        self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'a'), doc[1])
        self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'b'), doc[3])
        self.assertEqual(find_xpath_attr(doc, './/node', 'y'), doc[2])
        self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'c'), doc[2])
        self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'd'), doc[3])
        self.assertEqual(find_xpath_attr(doc, './/node', 'x', ''), doc[4])

    def test_xpath_with_ns(self):
        testxml = '''<root xmlns:media="http://example.com/">
            <media:song>
                <media:author>The Author</media:author>
                <url>http://server.com/download.mp3</url>
            </media:song>
        </root>'''
        doc = 
compat_etree_fromstring(testxml) find = lambda p: doc.find(xpath_with_ns(p, {'media': 'http://example.com/'})) self.assertTrue(find('media:song') is not None) self.assertEqual(find('media:song/media:author').text, 'The Author') self.assertEqual(find('media:song/url').text, 'http://server.com/download.mp3') def test_xpath_element(self): doc = xml.etree.ElementTree.Element('root') div = xml.etree.ElementTree.SubElement(doc, 'div') p = xml.etree.ElementTree.SubElement(div, 'p') p.text = 'Foo' self.assertEqual(xpath_element(doc, 'div/p'), p) self.assertEqual(xpath_element(doc, ['div/p']), p) self.assertEqual(xpath_element(doc, ['div/bar', 'div/p']), p) self.assertEqual(xpath_element(doc, 'div/bar', default='default'), 'default') self.assertEqual(xpath_element(doc, ['div/bar'], default='default'), 'default') self.assertTrue(xpath_element(doc, 'div/bar') is None) self.assertTrue(xpath_element(doc, ['div/bar']) is None) self.assertTrue(xpath_element(doc, ['div/bar'], 'div/baz') is None) self.assertRaises(ExtractorError, xpath_element, doc, 'div/bar', fatal=True) self.assertRaises(ExtractorError, xpath_element, doc, ['div/bar'], fatal=True) self.assertRaises(ExtractorError, xpath_element, doc, ['div/bar', 'div/baz'], fatal=True) def test_xpath_text(self): testxml = '''<root> <div> <p>Foo</p> </div> </root>''' doc = compat_etree_fromstring(testxml) self.assertEqual(xpath_text(doc, 'div/p'), 'Foo') self.assertEqual(xpath_text(doc, 'div/bar', default='default'), 'default') self.assertTrue(xpath_text(doc, 'div/bar') is None) self.assertRaises(ExtractorError, xpath_text, doc, 'div/bar', fatal=True) def test_xpath_attr(self): testxml = '''<root> <div> <p x="a">Foo</p> </div> </root>''' doc = compat_etree_fromstring(testxml) self.assertEqual(xpath_attr(doc, 'div/p', 'x'), 'a') self.assertEqual(xpath_attr(doc, 'div/bar', 'x'), None) self.assertEqual(xpath_attr(doc, 'div/p', 'y'), None) self.assertEqual(xpath_attr(doc, 'div/bar', 'x', default='default'), 'default') self.assertEqual(xpath_attr(doc, 'div/p', 'y', default='default'), 'default') self.assertRaises(ExtractorError, xpath_attr, doc, 'div/bar', 'x', fatal=True) self.assertRaises(ExtractorError, xpath_attr, doc, 'div/p', 'y', fatal=True) def test_smuggle_url(self): data = {"ö": "ö", "abc": [3]} url = 'https://foo.bar/baz?x=y#a' smug_url = smuggle_url(url, data) unsmug_url, unsmug_data = unsmuggle_url(smug_url) self.assertEqual(url, unsmug_url) self.assertEqual(data, unsmug_data) res_url, res_data = unsmuggle_url(url) self.assertEqual(res_url, url) self.assertEqual(res_data, None) smug_url = smuggle_url(url, {'a': 'b'}) smug_smug_url = smuggle_url(smug_url, {'c': 'd'}) res_url, res_data = unsmuggle_url(smug_smug_url) self.assertEqual(res_url, url) self.assertEqual(res_data, {'a': 'b', 'c': 'd'}) def test_shell_quote(self): args = ['ffmpeg', '-i', encodeFilename('ñ€ß\'.mp4')] self.assertEqual(shell_quote(args), """ffmpeg -i 'ñ€ß'"'"'.mp4'""") def test_str_to_int(self): self.assertEqual(str_to_int('123,456'), 123456) self.assertEqual(str_to_int('123.456'), 123456) def test_url_basename(self): self.assertEqual(url_basename('http://foo.de/'), '') self.assertEqual(url_basename('http://foo.de/bar/baz'), 'baz') self.assertEqual(url_basename('http://foo.de/bar/baz?x=y'), 'baz') self.assertEqual(url_basename('http://foo.de/bar/baz#x=y'), 'baz') self.assertEqual(url_basename('http://foo.de/bar/baz/'), 'baz') self.assertEqual( url_basename('http://media.w3.org/2010/05/sintel/trailer.mp4'), 'trailer.mp4') def test_parse_age_limit(self): 
self.assertEqual(parse_age_limit(None), None) self.assertEqual(parse_age_limit(False), None) self.assertEqual(parse_age_limit('invalid'), None) self.assertEqual(parse_age_limit(0), 0) self.assertEqual(parse_age_limit(18), 18) self.assertEqual(parse_age_limit(21), 21) self.assertEqual(parse_age_limit(22), None) self.assertEqual(parse_age_limit('18'), 18) self.assertEqual(parse_age_limit('18+'), 18) self.assertEqual(parse_age_limit('PG-13'), 13) self.assertEqual(parse_age_limit('TV-14'), 14) self.assertEqual(parse_age_limit('TV-MA'), 17) def test_parse_duration(self): self.assertEqual(parse_duration(None), None) self.assertEqual(parse_duration(False), None) self.assertEqual(parse_duration('invalid'), None) self.assertEqual(parse_duration('1'), 1) self.assertEqual(parse_duration('1337:12'), 80232) self.assertEqual(parse_duration('9:12:43'), 33163) self.assertEqual(parse_duration('12:00'), 720) self.assertEqual(parse_duration('00:01:01'), 61) self.assertEqual(parse_duration('x:y'), None) self.assertEqual(parse_duration('3h11m53s'), 11513) self.assertEqual(parse_duration('3h 11m 53s'), 11513) self.assertEqual(parse_duration('3 hours 11 minutes 53 seconds'), 11513) self.assertEqual(parse_duration('3 hours 11 mins 53 secs'), 11513) self.assertEqual(parse_duration('62m45s'), 3765) self.assertEqual(parse_duration('6m59s'), 419) self.assertEqual(parse_duration('49s'), 49) self.assertEqual(parse_duration('0h0m0s'), 0) self.assertEqual(parse_duration('0m0s'), 0) self.assertEqual(parse_duration('0s'), 0) self.assertEqual(parse_duration('01:02:03.05'), 3723.05) self.assertEqual(parse_duration('T30M38S'), 1838) self.assertEqual(parse_duration('5 s'), 5) self.assertEqual(parse_duration('3 min'), 180) self.assertEqual(parse_duration('2.5 hours'), 9000) self.assertEqual(parse_duration('02:03:04'), 7384) self.assertEqual(parse_duration('01:02:03:04'), 93784) self.assertEqual(parse_duration('1 hour 3 minutes'), 3780) self.assertEqual(parse_duration('87 Min.'), 5220) self.assertEqual(parse_duration('PT1H0.040S'), 3600.04) def test_fix_xml_ampersands(self): self.assertEqual( fix_xml_ampersands('"&x=y&z=a'), '"&amp;x=y&amp;z=a') self.assertEqual( fix_xml_ampersands('"&amp;x=y&wrong;&z=a'), '"&amp;x=y&amp;wrong;&amp;z=a') self.assertEqual( fix_xml_ampersands('&amp;&apos;&gt;&lt;&quot;'), '&amp;&apos;&gt;&lt;&quot;') self.assertEqual( fix_xml_ampersands('&#1234;&#x1abC;'), '&#1234;&#x1abC;') self.assertEqual(fix_xml_ampersands('&#&#'), '&amp;#&amp;#') def test_paged_list(self): def testPL(size, pagesize, sliceargs, expected): def get_page(pagenum): firstid = pagenum * pagesize upto = min(size, pagenum * pagesize + pagesize) for i in range(firstid, upto): yield i pl = OnDemandPagedList(get_page, pagesize) got = pl.getslice(*sliceargs) self.assertEqual(got, expected) iapl = InAdvancePagedList(get_page, size // pagesize + 1, pagesize) got = iapl.getslice(*sliceargs) self.assertEqual(got, expected) testPL(5, 2, (), [0, 1, 2, 3, 4]) testPL(5, 2, (1,), [1, 2, 3, 4]) testPL(5, 2, (2,), [2, 3, 4]) testPL(5, 2, (4,), [4]) testPL(5, 2, (0, 3), [0, 1, 2]) testPL(5, 2, (1, 4), [1, 2, 3]) testPL(5, 2, (2, 99), [2, 3, 4]) testPL(5, 2, (20, 99), []) def test_read_batch_urls(self): f = io.StringIO('''\xef\xbb\xbf foo bar\r baz # More after this line\r ; or after this bam''') self.assertEqual(read_batch_urls(f), ['foo', 'bar', 'baz', 'bam']) def test_urlencode_postdata(self): data = urlencode_postdata({'username': 'foo@bar.com', 'password': '1234'}) self.assertTrue(isinstance(data, bytes)) def test_update_url_query(self): def 
query_dict(url): return compat_parse_qs(compat_urlparse.urlparse(url).query) self.assertEqual(query_dict(update_url_query( 'http://example.com/path', {'quality': ['HD'], 'format': ['mp4']})), query_dict('http://example.com/path?quality=HD&format=mp4')) self.assertEqual(query_dict(update_url_query( 'http://example.com/path', {'system': ['LINUX', 'WINDOWS']})), query_dict('http://example.com/path?system=LINUX&system=WINDOWS')) self.assertEqual(query_dict(update_url_query( 'http://example.com/path', {'fields': 'id,formats,subtitles'})), query_dict('http://example.com/path?fields=id,formats,subtitles')) self.assertEqual(query_dict(update_url_query( 'http://example.com/path', {'fields': ('id,formats,subtitles', 'thumbnails')})), query_dict('http://example.com/path?fields=id,formats,subtitles&fields=thumbnails')) self.assertEqual(query_dict(update_url_query( 'http://example.com/path?manifest=f4m', {'manifest': []})), query_dict('http://example.com/path')) self.assertEqual(query_dict(update_url_query( 'http://example.com/path?system=LINUX&system=WINDOWS', {'system': 'LINUX'})), query_dict('http://example.com/path?system=LINUX')) self.assertEqual(query_dict(update_url_query( 'http://example.com/path', {'fields': b'id,formats,subtitles'})), query_dict('http://example.com/path?fields=id,formats,subtitles')) self.assertEqual(query_dict(update_url_query( 'http://example.com/path', {'width': 1080, 'height': 720})), query_dict('http://example.com/path?width=1080&height=720')) self.assertEqual(query_dict(update_url_query( 'http://example.com/path', {'bitrate': 5020.43})), query_dict('http://example.com/path?bitrate=5020.43')) self.assertEqual(query_dict(update_url_query( 'http://example.com/path', {'test': '第二行тест'})), query_dict('http://example.com/path?test=%E7%AC%AC%E4%BA%8C%E8%A1%8C%D1%82%D0%B5%D1%81%D1%82')) def test_dict_get(self): FALSE_VALUES = { 'none': None, 'false': False, 'zero': 0, 'empty_string': '', 'empty_list': [], } d = FALSE_VALUES.copy() d['a'] = 42 self.assertEqual(dict_get(d, 'a'), 42) self.assertEqual(dict_get(d, 'b'), None) self.assertEqual(dict_get(d, 'b', 42), 42) self.assertEqual(dict_get(d, ('a', )), 42) self.assertEqual(dict_get(d, ('b', 'a', )), 42) self.assertEqual(dict_get(d, ('b', 'c', 'a', 'd', )), 42) self.assertEqual(dict_get(d, ('b', 'c', )), None) self.assertEqual(dict_get(d, ('b', 'c', ), 42), 42) for key, false_value in FALSE_VALUES.items(): self.assertEqual(dict_get(d, ('b', 'c', key, )), None) self.assertEqual(dict_get(d, ('b', 'c', key, ), skip_false_values=False), false_value) def test_encode_compat_str(self): self.assertEqual(encode_compat_str(b'\xd1\x82\xd0\xb5\xd1\x81\xd1\x82', 'utf-8'), 'тест') self.assertEqual(encode_compat_str('тест', 'utf-8'), 'тест') def test_parse_iso8601(self): self.assertEqual(parse_iso8601('2014-03-23T23:04:26+0100'), 1395612266) self.assertEqual(parse_iso8601('2014-03-23T22:04:26+0000'), 1395612266) self.assertEqual(parse_iso8601('2014-03-23T22:04:26Z'), 1395612266) self.assertEqual(parse_iso8601('2014-03-23T22:04:26.1234Z'), 1395612266) self.assertEqual(parse_iso8601('2015-09-29T08:27:31.727'), 1443515251) self.assertEqual(parse_iso8601('2015-09-29T08-27-31.727'), None) def test_strip_jsonp(self): stripped = strip_jsonp('cb ([ {"id":"532cb",\n\n\n"x":\n3}\n]\n);') d = json.loads(stripped) self.assertEqual(d, [{"id": "532cb", "x": 3}]) stripped = strip_jsonp('parseMetadata({"STATUS":"OK"})\n\n\n//epc') d = json.loads(stripped) self.assertEqual(d, {'STATUS': 'OK'}) stripped = strip_jsonp('ps.embedHandler({"status": "success"});') d 
= json.loads(stripped) self.assertEqual(d, {'status': 'success'}) def test_uppercase_escape(self): self.assertEqual(uppercase_escape('aä'), 'aä') self.assertEqual(uppercase_escape('\\U0001d550'), '𝕐') def test_lowercase_escape(self): self.assertEqual(lowercase_escape('aä'), 'aä') self.assertEqual(lowercase_escape('\\u0026'), '&') def test_limit_length(self): self.assertEqual(limit_length(None, 12), None) self.assertEqual(limit_length('foo', 12), 'foo') self.assertTrue( limit_length('foo bar baz asd', 12).startswith('foo bar')) self.assertTrue('...' in limit_length('foo bar baz asd', 12)) def test_mimetype2ext(self): self.assertEqual(mimetype2ext(None), None) self.assertEqual(mimetype2ext('video/x-flv'), 'flv') self.assertEqual(mimetype2ext('application/x-mpegURL'), 'm3u8') self.assertEqual(mimetype2ext('text/vtt'), 'vtt') self.assertEqual(mimetype2ext('text/vtt;charset=utf-8'), 'vtt') self.assertEqual(mimetype2ext('text/html; charset=utf-8'), 'html') def test_month_by_name(self): self.assertEqual(month_by_name(None), None) self.assertEqual(month_by_name('December', 'en'), 12) self.assertEqual(month_by_name('décembre', 'fr'), 12) self.assertEqual(month_by_name('December'), 12) self.assertEqual(month_by_name('décembre'), None) self.assertEqual(month_by_name('Unknown', 'unknown'), None) def test_parse_codecs(self): self.assertEqual(parse_codecs(''), {}) self.assertEqual(parse_codecs('avc1.77.30, mp4a.40.2'), { 'vcodec': 'avc1.77.30', 'acodec': 'mp4a.40.2', }) self.assertEqual(parse_codecs('mp4a.40.2'), { 'vcodec': 'none', 'acodec': 'mp4a.40.2', }) self.assertEqual(parse_codecs('mp4a.40.5,avc1.42001e'), { 'vcodec': 'avc1.42001e', 'acodec': 'mp4a.40.5', }) self.assertEqual(parse_codecs('avc3.640028'), { 'vcodec': 'avc3.640028', 'acodec': 'none', }) self.assertEqual(parse_codecs(', h264,,newcodec,aac'), { 'vcodec': 'h264', 'acodec': 'aac', }) def test_escape_rfc3986(self): reserved = "!*'();:@&=+$,/?#[]" unreserved = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~' self.assertEqual(escape_rfc3986(reserved), reserved) self.assertEqual(escape_rfc3986(unreserved), unreserved) self.assertEqual(escape_rfc3986('тест'), '%D1%82%D0%B5%D1%81%D1%82') self.assertEqual(escape_rfc3986('%D1%82%D0%B5%D1%81%D1%82'), '%D1%82%D0%B5%D1%81%D1%82') self.assertEqual(escape_rfc3986('foo bar'), 'foo%20bar') self.assertEqual(escape_rfc3986('foo%20bar'), 'foo%20bar') def test_escape_url(self): self.assertEqual( escape_url('http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavré_FD.mp4'), 'http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavre%CC%81_FD.mp4' ) self.assertEqual( escape_url('http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erklärt/Das-Erste/Video?documentId=22673108&bcastId=5290'), 'http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erkl%C3%A4rt/Das-Erste/Video?documentId=22673108&bcastId=5290' ) self.assertEqual( escape_url('http://тест.рф/фрагмент'), 'http://xn--e1aybc.xn--p1ai/%D1%84%D1%80%D0%B0%D0%B3%D0%BC%D0%B5%D0%BD%D1%82' ) self.assertEqual( escape_url('http://тест.рф/абв?абв=абв#абв'), 'http://xn--e1aybc.xn--p1ai/%D0%B0%D0%B1%D0%B2?%D0%B0%D0%B1%D0%B2=%D0%B0%D0%B1%D0%B2#%D0%B0%D0%B1%D0%B2' ) self.assertEqual(escape_url('http://vimeo.com/56015672#at=0'), 'http://vimeo.com/56015672#at=0') def test_js_to_json_realworld(self): inp = '''{ 'clip':{'provider':'pseudo'} }''' self.assertEqual(js_to_json(inp), '''{ "clip":{"provider":"pseudo"} }''') json.loads(js_to_json(inp)) inp = '''{ 
'playlist':[{'controls':{'all':null}}] }''' self.assertEqual(js_to_json(inp), '''{ "playlist":[{"controls":{"all":null}}] }''') inp = '''"The CW\\'s \\'Crazy Ex-Girlfriend\\'"''' self.assertEqual(js_to_json(inp), '''"The CW's 'Crazy Ex-Girlfriend'"''') inp = '"SAND Number: SAND 2013-7800P\\nPresenter: Tom Russo\\nHabanero Software Training - Xyce Software\\nXyce, Sandia\\u0027s"' json_code = js_to_json(inp) self.assertEqual(json.loads(json_code), json.loads(inp)) inp = '''{ 0:{src:'skipped', type: 'application/dash+xml'}, 1:{src:'skipped', type: 'application/vnd.apple.mpegURL'}, }''' self.assertEqual(js_to_json(inp), '''{ "0":{"src":"skipped", "type": "application/dash+xml"}, "1":{"src":"skipped", "type": "application/vnd.apple.mpegURL"} }''') inp = '''{"foo":101}''' self.assertEqual(js_to_json(inp), '''{"foo":101}''') inp = '''{"duration": "00:01:07"}''' self.assertEqual(js_to_json(inp), '''{"duration": "00:01:07"}''') def test_js_to_json_edgecases(self): on = js_to_json("{abc_def:'1\\'\\\\2\\\\\\'3\"4'}") self.assertEqual(json.loads(on), {"abc_def": "1'\\2\\'3\"4"}) on = js_to_json('{"abc": true}') self.assertEqual(json.loads(on), {'abc': True}) # Ignore JavaScript code as well on = js_to_json('''{ "x": 1, y: "a", z: some.code }''') d = json.loads(on) self.assertEqual(d['x'], 1) self.assertEqual(d['y'], 'a') on = js_to_json('["abc", "def",]') self.assertEqual(json.loads(on), ['abc', 'def']) on = js_to_json('{"abc": "def",}') self.assertEqual(json.loads(on), {'abc': 'def'}) on = js_to_json('{ 0: /* " \n */ ",]" , }') self.assertEqual(json.loads(on), {'0': ',]'}) on = js_to_json(r'["<p>x<\/p>"]') self.assertEqual(json.loads(on), ['<p>x</p>']) on = js_to_json(r'["\xaa"]') self.assertEqual(json.loads(on), ['\u00aa']) on = js_to_json("['a\\\nb']") self.assertEqual(json.loads(on), ['ab']) on = js_to_json('{0xff:0xff}') self.assertEqual(json.loads(on), {'255': 255}) on = js_to_json('{077:077}') self.assertEqual(json.loads(on), {'63': 63}) on = js_to_json('{42:42}') self.assertEqual(json.loads(on), {'42': 42}) def test_extract_attributes(self): self.assertEqual(extract_attributes('<e x="y">'), {'x': 'y'}) self.assertEqual(extract_attributes("<e x='y'>"), {'x': 'y'}) self.assertEqual(extract_attributes('<e x=y>'), {'x': 'y'}) self.assertEqual(extract_attributes('<e x="a \'b\' c">'), {'x': "a 'b' c"}) self.assertEqual(extract_attributes('<e x=\'a "b" c\'>'), {'x': 'a "b" c'}) self.assertEqual(extract_attributes('<e x="&#121;">'), {'x': 'y'}) self.assertEqual(extract_attributes('<e x="&#x79;">'), {'x': 'y'}) self.assertEqual(extract_attributes('<e x="&amp;">'), {'x': '&'}) # XML self.assertEqual(extract_attributes('<e x="&quot;">'), {'x': '"'}) self.assertEqual(extract_attributes('<e x="&pound;">'), {'x': '£'}) # HTML 3.2 self.assertEqual(extract_attributes('<e x="&lambda;">'), {'x': 'λ'}) # HTML 4.0 self.assertEqual(extract_attributes('<e x="&foo">'), {'x': '&foo'}) self.assertEqual(extract_attributes('<e x="\'">'), {'x': "'"}) self.assertEqual(extract_attributes('<e x=\'"\'>'), {'x': '"'}) self.assertEqual(extract_attributes('<e x >'), {'x': None}) self.assertEqual(extract_attributes('<e x=y a>'), {'x': 'y', 'a': None}) self.assertEqual(extract_attributes('<e x= y>'), {'x': 'y'}) self.assertEqual(extract_attributes('<e x=1 y=2 x=3>'), {'y': '2', 'x': '3'}) self.assertEqual(extract_attributes('<e \nx=\ny\n>'), {'x': 'y'}) self.assertEqual(extract_attributes('<e \nx=\n"y"\n>'), {'x': 'y'}) self.assertEqual(extract_attributes("<e \nx=\n'y'\n>"), {'x': 'y'}) self.assertEqual(extract_attributes('<e 
\nx="\ny\n">'), {'x': '\ny\n'}) self.assertEqual(extract_attributes('<e CAPS=x>'), {'caps': 'x'}) # Names lowercased self.assertEqual(extract_attributes('<e x=1 X=2>'), {'x': '2'}) self.assertEqual(extract_attributes('<e X=1 x=2>'), {'x': '2'}) self.assertEqual(extract_attributes('<e _:funny-name1=1>'), {'_:funny-name1': '1'}) self.assertEqual(extract_attributes('<e x="Fáilte 世界 \U0001f600">'), {'x': 'Fáilte 世界 \U0001f600'}) self.assertEqual(extract_attributes('<e x="décompose&#769;">'), {'x': 'décompose\u0301'}) # "Narrow" Python builds don't support unicode code points outside BMP. try: compat_chr(0x10000) supports_outside_bmp = True except ValueError: supports_outside_bmp = False if supports_outside_bmp: self.assertEqual(extract_attributes('<e x="Smile &#128512;!">'), {'x': 'Smile \U0001f600!'}) def test_clean_html(self): self.assertEqual(clean_html('a:\nb'), 'a: b') self.assertEqual(clean_html('a:\n "b"'), 'a: "b"') def test_intlist_to_bytes(self): self.assertEqual( intlist_to_bytes([0, 1, 127, 128, 255]), b'\x00\x01\x7f\x80\xff') def test_args_to_str(self): self.assertEqual( args_to_str(['foo', 'ba/r', '-baz', '2 be', '']), 'foo ba/r -baz \'2 be\' \'\'' ) def test_parse_filesize(self): self.assertEqual(parse_filesize(None), None) self.assertEqual(parse_filesize(''), None) self.assertEqual(parse_filesize('91 B'), 91) self.assertEqual(parse_filesize('foobar'), None) self.assertEqual(parse_filesize('2 MiB'), 2097152) self.assertEqual(parse_filesize('5 GB'), 5000000000) self.assertEqual(parse_filesize('1.2Tb'), 1200000000000) self.assertEqual(parse_filesize('1.2tb'), 1200000000000) self.assertEqual(parse_filesize('1,24 KB'), 1240) self.assertEqual(parse_filesize('1,24 kb'), 1240) self.assertEqual(parse_filesize('8.5 megabytes'), 8500000) def test_parse_count(self): self.assertEqual(parse_count(None), None) self.assertEqual(parse_count(''), None) self.assertEqual(parse_count('0'), 0) self.assertEqual(parse_count('1000'), 1000) self.assertEqual(parse_count('1.000'), 1000) self.assertEqual(parse_count('1.1k'), 1100) self.assertEqual(parse_count('1.1kk'), 1100000) self.assertEqual(parse_count('1.1kk '), 1100000) self.assertEqual(parse_count('1.1kk views'), 1100000) def test_version_tuple(self): self.assertEqual(version_tuple('1'), (1,)) self.assertEqual(version_tuple('10.23.344'), (10, 23, 344)) self.assertEqual(version_tuple('10.1-6'), (10, 1, 6)) # avconv style def test_detect_exe_version(self): self.assertEqual(detect_exe_version('''ffmpeg version 1.2.1 built on May 27 2013 08:37:26 with gcc 4.7 (Debian 4.7.3-4) configuration: --prefix=/usr --extra-'''), '1.2.1') self.assertEqual(detect_exe_version('''ffmpeg version N-63176-g1fb4685 built on May 15 2014 22:09:06 with gcc 4.8.2 (GCC)'''), 'N-63176-g1fb4685') self.assertEqual(detect_exe_version('''X server found. dri2 connection failed! Trying to open render node... Success at /dev/dri/renderD128. 
ffmpeg version 2.4.4 Copyright (c) 2000-2014 the FFmpeg ...'''), '2.4.4') def test_age_restricted(self): self.assertFalse(age_restricted(None, 10)) # unrestricted content self.assertFalse(age_restricted(1, None)) # unrestricted policy self.assertFalse(age_restricted(8, 10)) self.assertTrue(age_restricted(18, 14)) self.assertFalse(age_restricted(18, 18)) def test_is_html(self): self.assertFalse(is_html(b'\x49\x44\x43<html')) self.assertTrue(is_html(b'<!DOCTYPE foo>\xaaa')) self.assertTrue(is_html( # UTF-8 with BOM b'\xef\xbb\xbf<!DOCTYPE foo>\xaaa')) self.assertTrue(is_html( # UTF-16-LE b'\xff\xfe<\x00h\x00t\x00m\x00l\x00>\x00\xe4\x00' )) self.assertTrue(is_html( # UTF-16-BE b'\xfe\xff\x00<\x00h\x00t\x00m\x00l\x00>\x00\xe4' )) self.assertTrue(is_html( # UTF-32-BE b'\x00\x00\xFE\xFF\x00\x00\x00<\x00\x00\x00h\x00\x00\x00t\x00\x00\x00m\x00\x00\x00l\x00\x00\x00>\x00\x00\x00\xe4')) self.assertTrue(is_html( # UTF-32-LE b'\xFF\xFE\x00\x00<\x00\x00\x00h\x00\x00\x00t\x00\x00\x00m\x00\x00\x00l\x00\x00\x00>\x00\x00\x00\xe4\x00\x00\x00')) def test_render_table(self): self.assertEqual( render_table( ['a', 'bcd'], [[123, 4], [9999, 51]]), 'a bcd\n' '123 4\n' '9999 51') def test_match_str(self): self.assertRaises(ValueError, match_str, 'xy>foobar', {}) self.assertFalse(match_str('xy', {'x': 1200})) self.assertTrue(match_str('!xy', {'x': 1200})) self.assertTrue(match_str('x', {'x': 1200})) self.assertFalse(match_str('!x', {'x': 1200})) self.assertTrue(match_str('x', {'x': 0})) self.assertFalse(match_str('x>0', {'x': 0})) self.assertFalse(match_str('x>0', {})) self.assertTrue(match_str('x>?0', {})) self.assertTrue(match_str('x>1K', {'x': 1200})) self.assertFalse(match_str('x>2K', {'x': 1200})) self.assertTrue(match_str('x>=1200 & x < 1300', {'x': 1200})) self.assertFalse(match_str('x>=1100 & x < 1200', {'x': 1200})) self.assertFalse(match_str('y=a212', {'y': 'foobar42'})) self.assertTrue(match_str('y=foobar42', {'y': 'foobar42'})) self.assertFalse(match_str('y!=foobar42', {'y': 'foobar42'})) self.assertTrue(match_str('y!=foobar2', {'y': 'foobar42'})) self.assertFalse(match_str( 'like_count > 100 & dislike_count <? 50 & description', {'like_count': 90, 'description': 'foo'})) self.assertTrue(match_str( 'like_count > 100 & dislike_count <? 50 & description', {'like_count': 190, 'description': 'foo'})) self.assertFalse(match_str( 'like_count > 100 & dislike_count <? 50 & description', {'like_count': 190, 'dislike_count': 60, 'description': 'foo'})) self.assertFalse(match_str( 'like_count > 100 & dislike_count <? 
50 & description', {'like_count': 190, 'dislike_count': 10})) def test_parse_dfxp_time_expr(self): self.assertEqual(parse_dfxp_time_expr(None), None) self.assertEqual(parse_dfxp_time_expr(''), None) self.assertEqual(parse_dfxp_time_expr('0.1'), 0.1) self.assertEqual(parse_dfxp_time_expr('0.1s'), 0.1) self.assertEqual(parse_dfxp_time_expr('00:00:01'), 1.0) self.assertEqual(parse_dfxp_time_expr('00:00:01.100'), 1.1) self.assertEqual(parse_dfxp_time_expr('00:00:01:100'), 1.1) def test_dfxp2srt(self): dfxp_data = '''<?xml version="1.0" encoding="UTF-8"?> <tt xmlns="http://www.w3.org/ns/ttml" xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter"> <body> <div xml:lang="en"> <p begin="0" end="1">The following line contains Chinese characters and special symbols</p> <p begin="1" end="2">第二行<br/>♪♪</p> <p begin="2" dur="1"><span>Third<br/>Line</span></p> <p begin="3" end="-1">Lines with invalid timestamps are ignored</p> <p begin="-1" end="-1">Ignore, two</p> <p begin="3" dur="-1">Ignored, three</p> </div> </body> </tt>''' srt_data = '''1 00:00:00,000 --> 00:00:01,000 The following line contains Chinese characters and special symbols 2 00:00:01,000 --> 00:00:02,000 第二行 ♪♪ 3 00:00:02,000 --> 00:00:03,000 Third Line ''' self.assertEqual(dfxp2srt(dfxp_data), srt_data) dfxp_data_no_default_namespace = '''<?xml version="1.0" encoding="UTF-8"?> <tt xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter"> <body> <div xml:lang="en"> <p begin="0" end="1">The first line</p> </div> </body> </tt>''' srt_data = '''1 00:00:00,000 --> 00:00:01,000 The first line ''' self.assertEqual(dfxp2srt(dfxp_data_no_default_namespace), srt_data) def test_cli_option(self): self.assertEqual(cli_option({'proxy': '127.0.0.1:3128'}, '--proxy', 'proxy'), ['--proxy', '127.0.0.1:3128']) self.assertEqual(cli_option({'proxy': None}, '--proxy', 'proxy'), []) self.assertEqual(cli_option({}, '--proxy', 'proxy'), []) self.assertEqual(cli_option({'retries': 10}, '--retries', 'retries'), ['--retries', '10']) def test_cli_valueless_option(self): self.assertEqual(cli_valueless_option( {'downloader': 'external'}, '--external-downloader', 'downloader', 'external'), ['--external-downloader']) self.assertEqual(cli_valueless_option( {'downloader': 'internal'}, '--external-downloader', 'downloader', 'external'), []) self.assertEqual(cli_valueless_option( {'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate'), ['--no-check-certificate']) self.assertEqual(cli_valueless_option( {'nocheckcertificate': False}, '--no-check-certificate', 'nocheckcertificate'), []) self.assertEqual(cli_valueless_option( {'checkcertificate': True}, '--no-check-certificate', 'checkcertificate', False), []) self.assertEqual(cli_valueless_option( {'checkcertificate': False}, '--no-check-certificate', 'checkcertificate', False), ['--no-check-certificate']) def test_cli_bool_option(self): self.assertEqual( cli_bool_option( {'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate'), ['--no-check-certificate', 'true']) self.assertEqual( cli_bool_option( {'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate', separator='='), ['--no-check-certificate=true']) self.assertEqual( cli_bool_option( {'nocheckcertificate': True}, '--check-certificate', 'nocheckcertificate', 'false', 'true'), ['--check-certificate', 'false']) self.assertEqual( cli_bool_option( {'nocheckcertificate': True}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='), ['--check-certificate=false']) self.assertEqual( 
cli_bool_option( {'nocheckcertificate': False}, '--check-certificate', 'nocheckcertificate', 'false', 'true'), ['--check-certificate', 'true']) self.assertEqual( cli_bool_option( {'nocheckcertificate': False}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='), ['--check-certificate=true']) def test_ohdave_rsa_encrypt(self): N = 0xab86b6371b5318aaa1d3c9e612a9f1264f372323c8c0f19875b5fc3b3fd3afcc1e5bec527aa94bfa85bffc157e4245aebda05389a5357b75115ac94f074aefcd e = 65537 self.assertEqual( ohdave_rsa_encrypt(b'aa111222', e, N), '726664bd9a23fd0c70f9f1b84aab5e3905ce1e45a584e9cbcf9bcc7510338fc1986d6c599ff990d923aa43c51c0d9013cd572e13bc58f4ae48f2ed8c0b0ba881') def test_encode_base_n(self): self.assertEqual(encode_base_n(0, 30), '0') self.assertEqual(encode_base_n(80, 30), '2k') custom_table = '9876543210ZYXWVUTSRQPONMLKJIHGFEDCBA' self.assertEqual(encode_base_n(0, 30, custom_table), '9') self.assertEqual(encode_base_n(80, 30, custom_table), '7P') self.assertRaises(ValueError, encode_base_n, 0, 70) self.assertRaises(ValueError, encode_base_n, 0, 60, custom_table) def test_urshift(self): self.assertEqual(urshift(3, 1), 1) self.assertEqual(urshift(-3, 1), 2147483646) def test_get_element_by_class(self): html = ''' <span class="foo bar">nice</span> ''' self.assertEqual(get_element_by_class('foo', html), 'nice') self.assertEqual(get_element_by_class('no-such-class', html), None) if __name__ == '__main__': unittest.main()
unlicense
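The test_paged_list cases in the file above exercise slicing over lazily generated pages (OnDemandPagedList and InAdvancePagedList). As a rough sketch of the pattern being tested, and not youtube-dl's actual implementation, a minimal on-demand pager could look like this:

# Minimal sketch of the paged-list idea exercised by testPL above:
# pages are produced on demand and a slice is materialized from them.
def paged_slice(get_page, pagesize, start=0, end=None):
    """Collect items [start:end) from a page-producing callable."""
    out = []
    pagenum = start // pagesize
    while end is None or pagenum * pagesize < end:
        page = list(get_page(pagenum))
        if not page:  # ran past the last page
            break
        for i, item in enumerate(page):
            absolute = pagenum * pagesize + i
            if absolute < start:
                continue
            if end is not None and absolute >= end:
                return out
            out.append(item)
        pagenum += 1
    return out

# e.g. paged_slice(lambda n: range(n * 2, min(5, n * 2 + 2)), 2, 1, 4) -> [1, 2, 3]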
mattseymour/django
tests/test_client/views.py
26
10894
from urllib.parse import urlencode from xml.dom.minidom import parseString from django.contrib.auth.decorators import login_required, permission_required from django.core import mail from django.forms import fields from django.forms.forms import Form, ValidationError from django.forms.formsets import BaseFormSet, formset_factory from django.http import ( HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound, HttpResponseRedirect, ) from django.shortcuts import render from django.template import Context, Template from django.test import Client from django.utils.decorators import method_decorator def get_view(request): "A simple view that expects a GET request, and returns a rendered template" t = Template('This is a test. {{ var }} is the value.', name='GET Template') c = Context({'var': request.GET.get('var', 42)}) return HttpResponse(t.render(c)) def trace_view(request): """ A simple view that expects a TRACE request and echoes its status line. TRACE requests should not have an entity; the view will return a 400 status response if it is present. """ if request.method.upper() != "TRACE": return HttpResponseNotAllowed("TRACE") elif request.body: return HttpResponseBadRequest("TRACE requests MUST NOT include an entity") else: protocol = request.META["SERVER_PROTOCOL"] t = Template( '{{ method }} {{ uri }} {{ version }}', name="TRACE Template", ) c = Context({ 'method': request.method, 'uri': request.path, 'version': protocol, }) return HttpResponse(t.render(c)) def post_view(request): """A view that expects a POST, and returns a different template depending on whether any POST data is available """ if request.method == 'POST': if request.POST: t = Template('Data received: {{ data }} is the value.', name='POST Template') c = Context({'data': request.POST['value']}) else: t = Template('Viewing POST page.', name='Empty POST Template') c = Context() else: t = Template('Viewing GET page.', name='Empty GET Template') c = Context() return HttpResponse(t.render(c)) def view_with_header(request): "A view that has a custom header" response = HttpResponse() response['X-DJANGO-TEST'] = 'Slartibartfast' return response def raw_post_view(request): """A view which expects raw XML to be posted and returns content extracted from the XML""" if request.method == 'POST': root = parseString(request.body) first_book = root.firstChild.firstChild title, author = [n.firstChild.nodeValue for n in first_book.childNodes] t = Template("{{ title }} - {{ author }}", name="Book template") c = Context({"title": title, "author": author}) else: t = Template("GET request.", name="Book GET template") c = Context() return HttpResponse(t.render(c)) def redirect_view(request): "A view that redirects all requests to the GET view" if request.GET: query = '?' + urlencode(request.GET, True) else: query = '' return HttpResponseRedirect('/get_view/' + query) def view_with_secure(request): "A view that indicates if the request was secure" response = HttpResponse() response.test_was_secure_request = request.is_secure() response.test_server_port = request.META.get('SERVER_PORT', 80) return response def double_redirect_view(request): "A view that redirects all requests to a redirection view" return HttpResponseRedirect('/permanent_redirect_view/') def bad_view(request): "A view that returns a 404 with some error content" return HttpResponseNotFound('Not found!. 
This page contains some MAGIC content') TestChoices = ( ('a', 'First Choice'), ('b', 'Second Choice'), ('c', 'Third Choice'), ('d', 'Fourth Choice'), ('e', 'Fifth Choice') ) class TestForm(Form): text = fields.CharField() email = fields.EmailField() value = fields.IntegerField() single = fields.ChoiceField(choices=TestChoices) multi = fields.MultipleChoiceField(choices=TestChoices) def clean(self): cleaned_data = self.cleaned_data if cleaned_data.get("text") == "Raise non-field error": raise ValidationError("Non-field error.") return cleaned_data def form_view(request): "A view that tests a simple form" if request.method == 'POST': form = TestForm(request.POST) if form.is_valid(): t = Template('Valid POST data.', name='Valid POST Template') c = Context() else: t = Template('Invalid POST data. {{ form.errors }}', name='Invalid POST Template') c = Context({'form': form}) else: form = TestForm(request.GET) t = Template('Viewing base form. {{ form }}.', name='Form GET Template') c = Context({'form': form}) return HttpResponse(t.render(c)) def form_view_with_template(request): "A view that tests a simple form" if request.method == 'POST': form = TestForm(request.POST) if form.is_valid(): message = 'POST data OK' else: message = 'POST data has errors' else: form = TestForm() message = 'GET form page' return render(request, 'form_view.html', { 'form': form, 'message': message, }) class BaseTestFormSet(BaseFormSet): def clean(self): """No two email addresses are the same.""" if any(self.errors): # Don't bother validating the formset unless each form is valid return emails = [] for i in range(0, self.total_form_count()): form = self.forms[i] email = form.cleaned_data['email'] if email in emails: raise ValidationError( "Forms in a set must have distinct email addresses." ) emails.append(email) TestFormSet = formset_factory(TestForm, BaseTestFormSet) def formset_view(request): "A view that tests a simple formset" if request.method == 'POST': formset = TestFormSet(request.POST) if formset.is_valid(): t = Template('Valid POST data.', name='Valid POST Template') c = Context() else: t = Template('Invalid POST data. {{ my_formset.errors }}', name='Invalid POST Template') c = Context({'my_formset': formset}) else: formset = TestForm(request.GET) t = Template('Viewing base formset. {{ my_formset }}.', name='Formset GET Template') c = Context({'my_formset': formset}) return HttpResponse(t.render(c)) @login_required def login_protected_view(request): "A simple view that is login protected." t = Template('This is a login protected test. Username is {{ user.username }}.', name='Login Template') c = Context({'user': request.user}) return HttpResponse(t.render(c)) @login_required(redirect_field_name='redirect_to') def login_protected_view_changed_redirect(request): "A simple view that is login protected with a custom redirect field set" t = Template('This is a login protected test. Username is {{ user.username }}.', name='Login Template') c = Context({'user': request.user}) return HttpResponse(t.render(c)) def _permission_protected_view(request): "A simple view that is permission protected." t = Template('This is a permission protected test. ' 'Username is {{ user.username }}. 
' 'Permissions are {{ user.get_all_permissions }}.', name='Permissions Template') c = Context({'user': request.user}) return HttpResponse(t.render(c)) permission_protected_view = permission_required('permission_not_granted')(_permission_protected_view) permission_protected_view_exception = ( permission_required('permission_not_granted', raise_exception=True)(_permission_protected_view) ) class _ViewManager: @method_decorator(login_required) def login_protected_view(self, request): t = Template('This is a login protected test using a method. ' 'Username is {{ user.username }}.', name='Login Method Template') c = Context({'user': request.user}) return HttpResponse(t.render(c)) @method_decorator(permission_required('permission_not_granted')) def permission_protected_view(self, request): t = Template('This is a permission protected test using a method. ' 'Username is {{ user.username }}. ' 'Permissions are {{ user.get_all_permissions }}.', name='Permissions Template') c = Context({'user': request.user}) return HttpResponse(t.render(c)) _view_manager = _ViewManager() login_protected_method_view = _view_manager.login_protected_view permission_protected_method_view = _view_manager.permission_protected_view def session_view(request): "A view that modifies the session" request.session['tobacconist'] = 'hovercraft' t = Template('This is a view that modifies the session.', name='Session Modifying View Template') c = Context() return HttpResponse(t.render(c)) def broken_view(request): """A view which just raises an exception, simulating a broken view.""" raise KeyError("Oops! Looks like you wrote some bad code.") def mail_sending_view(request): mail.EmailMessage( "Test message", "This is a test email", "from@example.com", ['first@example.com', 'second@example.com']).send() return HttpResponse("Mail sent") def mass_mail_sending_view(request): m1 = mail.EmailMessage( 'First Test message', 'This is the first test email', 'from@example.com', ['first@example.com', 'second@example.com']) m2 = mail.EmailMessage( 'Second Test message', 'This is the second test email', 'from@example.com', ['second@example.com', 'third@example.com']) c = mail.get_connection() c.send_messages([m1, m2]) return HttpResponse("Mail sent") def nesting_exception_view(request): """ A view that uses a nested client to call another view and then raises an exception. """ client = Client() client.get('/get_view/') raise Exception('exception message') def django_project_redirect(request): return HttpResponseRedirect('https://www.djangoproject.com/') def upload_view(request): """Prints keys of request.FILES to the response.""" return HttpResponse(', '.join(request.FILES.keys()))
bsd-3-clause
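The views in the file above are fixtures for Django's test client. A minimal sketch of how one of them is typically driven, assuming a URLconf entry mapping '/post_view/' to post_view (the URLconf itself is not part of this file):

# Hypothetical test exercising post_view through the Django test client.
from django.test import Client, TestCase

class PostViewExample(TestCase):
    def test_post_with_data(self):
        client = Client()
        response = client.post('/post_view/', {'value': 37})
        self.assertEqual(response.status_code, 200)
        # post_view renders 'Data received: {{ data }} is the value.'
        self.assertContains(response, 'Data received: 37 is the value.')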
ammarkhann/FinalSeniorCode
lib/python2.7/site-packages/requests/packages/chardet/big5prober.py
2931
1684
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .mbcharsetprober import MultiByteCharSetProber from .codingstatemachine import CodingStateMachine from .chardistribution import Big5DistributionAnalysis from .mbcssm import Big5SMModel class Big5Prober(MultiByteCharSetProber): def __init__(self): MultiByteCharSetProber.__init__(self) self._mCodingSM = CodingStateMachine(Big5SMModel) self._mDistributionAnalyzer = Big5DistributionAnalysis() self.reset() def get_charset_name(self): return "Big5"
mit
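Big5Prober is normally reached indirectly through chardet's top-level API rather than instantiated by hand. A minimal usage sketch with the standalone chardet package's public detect helper (Python 3 syntax; the vendored module above also targets Python 2):

# Detecting Big5-encoded bytes via chardet's public API.
import chardet

big5_bytes = '繁體中文'.encode('big5')  # sample text encoded as Big5
result = chardet.detect(big5_bytes)
# result is a dict along the lines of {'encoding': 'Big5', 'confidence': 0.99, ...}
print(result['encoding'], result['confidence'])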
ticosax/django
django/core/checks/model_checks.py
525
2390
# -*- coding: utf-8 -*- from __future__ import unicode_literals import inspect import types from django.apps import apps from django.core.checks import Error, Tags, register @register(Tags.models) def check_all_models(app_configs=None, **kwargs): errors = [] for model in apps.get_models(): if app_configs is None or model._meta.app_config in app_configs: if not inspect.ismethod(model.check): errors.append( Error( "The '%s.check()' class method is " "currently overridden by %r." % ( model.__name__, model.check), hint=None, obj=model, id='models.E020' ) ) else: errors.extend(model.check(**kwargs)) return errors @register(Tags.models, Tags.signals) def check_model_signals(app_configs=None, **kwargs): """ Ensure lazily referenced model signals senders are installed. """ # Avoid circular import from django.db import models errors = [] for name in dir(models.signals): obj = getattr(models.signals, name) if isinstance(obj, models.signals.ModelSignal): for reference, receivers in obj.unresolved_references.items(): for receiver, _, _ in receivers: # The receiver is either a function or an instance of class # defining a `__call__` method. if isinstance(receiver, types.FunctionType): description = "The '%s' function" % receiver.__name__ else: description = "An instance of the '%s' class" % receiver.__class__.__name__ errors.append( Error( "%s was connected to the '%s' signal " "with a lazy reference to the '%s' sender, " "which has not been installed." % ( description, name, '.'.join(reference) ), obj=receiver.__module__, hint=None, id='signals.E001' ) ) return errors
bsd-3-clause
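The @register(Tags.models) pattern above is the same public API applications use to plug in their own system checks. A minimal sketch of a custom check registered the same way (the check id and the rule itself are hypothetical):

# Sketch of an application-level system check using the same registration API.
from django.apps import apps
from django.core.checks import Error, Tags, register

@register(Tags.models)
def check_model_names_are_capitalized(app_configs=None, **kwargs):
    errors = []
    for model in apps.get_models():
        if not model.__name__[0].isupper():
            errors.append(Error(
                "Model name %r should start with a capital letter." % model.__name__,
                obj=model,
                id='myapp.E001',  # hypothetical check id
            ))
    return errors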
benthomasson/ansible
lib/ansible/modules/notification/nexmo.py
8
3589
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2014, Matt Martz <matt@sivel.net> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = """ module: nexmo short_description: Send a SMS via nexmo description: - Send a SMS message via nexmo version_added: 1.6 author: "Matt Martz (@sivel)" options: api_key: description: - Nexmo API Key required: true api_secret: description: - Nexmo API Secret required: true src: description: - Nexmo Number to send from required: true dest: description: - Phone number(s) to send SMS message to required: true msg: description: - Message text to send. Messages longer than 160 characters will be split into multiple messages required: true validate_certs: description: - If C(no), SSL certificates will not be validated. This should only be used on personally controlled sites using self-signed certificates. required: false default: 'yes' choices: - 'yes' - 'no' """ EXAMPLES = """ - name: Send notification message via Nexmo nexmo: api_key: 640c8a53 api_secret: 0ce239a6 src: 12345678901 dest: - 10987654321 - 16789012345 msg: '{{ inventory_hostname }} completed' delegate_to: localhost """ import json from ansible.module_utils.six.moves.urllib.parse import urlencode from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.urls import fetch_url, url_argument_spec NEXMO_API = 'https://rest.nexmo.com/sms/json' def send_msg(module): failed = list() responses = dict() msg = { 'api_key': module.params.get('api_key'), 'api_secret': module.params.get('api_secret'), 'from': module.params.get('src'), 'text': module.params.get('msg') } for number in module.params.get('dest'): msg['to'] = number url = "%s?%s" % (NEXMO_API, urlencode(msg)) headers = dict(Accept='application/json') response, info = fetch_url(module, url, headers=headers) if info['status'] != 200: failed.append(number) responses[number] = dict(failed=True) try: responses[number] = json.load(response) except Exception: failed.append(number) responses[number] = dict(failed=True) else: for message in responses[number]['messages']: if int(message['status']) != 0: failed.append(number) responses[number] = dict(failed=True, **responses[number]) if failed: msg = 'One or more messages failed to send' else: msg = '' module.exit_json(failed=bool(failed), msg=msg, changed=False, responses=responses) def main(): argument_spec = url_argument_spec() argument_spec.update( dict( api_key=dict(required=True, no_log=True), api_secret=dict(required=True, no_log=True), src=dict(required=True, type='int'), dest=dict(required=True, type='list'), msg=dict(required=True), ), ) module = AnsibleModule( argument_spec=argument_spec ) send_msg(module) if __name__ == '__main__': main()
gpl-3.0
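For orientation, the request send_msg builds per destination boils down to a GET against NEXMO_API with the message fields URL-encoded into the query string. A standalone sketch with placeholder credentials (the real module routes this through fetch_url so validate_certs and proxy settings are honoured):

# Standalone sketch of the request shape send_msg constructs (placeholder values).
try:
    from urllib.parse import urlencode  # Python 3
except ImportError:
    from urllib import urlencode  # Python 2

NEXMO_API = 'https://rest.nexmo.com/sms/json'
msg = {
    'api_key': 'YOUR_KEY',        # placeholder
    'api_secret': 'YOUR_SECRET',  # placeholder
    'from': '12345678901',
    'text': 'deployment completed',
    'to': '10987654321',
}
url = '%s?%s' % (NEXMO_API, urlencode(msg))
# The module fetches this URL and treats messages[n]['status'] == 0 as success.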
mpasternak/pyglet-fix-issue-552
examples/tablet.py
29
1403
#!/usr/bin/python # $Id:$ import pyglet window = pyglet.window.Window() tablets = pyglet.input.get_tablets() canvases = [] if tablets: print 'Tablets:' for i, tablet in enumerate(tablets): print ' (%d) %s' % (i + 1, tablet.name) print 'Press number key to open corresponding tablet device.' else: print 'No tablets found.' @window.event def on_text(text): try: index = int(text) - 1 except ValueError: return if not (0 <= index < len(tablets)): return name = tablets[index].name try: canvas = tablets[index].open(window) except pyglet.input.DeviceException: print 'Failed to open tablet %d on window' % index return print 'Opened %s' % name @canvas.event def on_enter(cursor): print '%s: on_enter(%r)' % (name, cursor) @canvas.event def on_leave(cursor): print '%s: on_leave(%r)' % (name, cursor) @canvas.event def on_motion(cursor, x, y, pressure): print '%s: on_motion(%r, %r, %r, %r)' % (name, cursor, x, y, pressure) @window.event def on_mouse_press(x, y, button, modifiers): print 'on_mouse_press(%r, %r, %r, %r)' % (x, y, button, modifiers) @window.event def on_mouse_release(x, y, button, modifiers): print 'on_mouse_release(%r, %r, %r, %r)' % (x, y, button, modifiers) pyglet.app.run()
bsd-3-clause
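The tablet example leans on pyglet's decorator-based event dispatch (@window.event / @canvas.event). A stripped-down sketch of that handler style, kept in Python 2 to match the example above:

# Minimal pyglet event-handler sketch in the same decorator style (Python 2).
import pyglet

window = pyglet.window.Window()

@window.event
def on_key_press(symbol, modifiers):
    print 'on_key_press(%r, %r)' % (symbol, modifiers)

@window.event
def on_draw():
    window.clear()

pyglet.app.run()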
Kongsea/tensorflow
tensorflow/tools/docs/generate_1_0.py
55
3183
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Generate docs for the TensorFlow Python API.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import sys import tensorflow as tf from tensorflow.python import debug as tf_debug from tensorflow.python.util import tf_inspect from tensorflow.tools.docs import generate_lib if __name__ == '__main__': doc_generator = generate_lib.DocGenerator() doc_generator.add_output_dir_argument() doc_generator.add_src_dir_argument() # This doc generator works on the TensorFlow codebase. Since this script lives # at tensorflow/tools/docs, and all code is defined somewhere inside # tensorflow/, we can compute the base directory (two levels up), which is # valid unless we're trying to apply this to a different code base, or are # moving the script around. script_dir = os.path.dirname(tf_inspect.getfile(tf_inspect.currentframe())) default_base_dir = os.path.join(script_dir, '..', '..') doc_generator.add_base_dir_argument(default_base_dir) flags = doc_generator.parse_known_args() # tf_debug is not imported with tf, it's a separate module altogether doc_generator.set_py_modules([('tf', tf), ('tfdbg', tf_debug)]) doc_generator.set_do_not_descend_map({ 'tf': ['cli', 'lib', 'wrappers'], 'tf.contrib': [ 'compiler', 'factorization', 'grid_rnn', 'labeled_tensor', 'ndlstm', 'quantization', 'session_bundle', 'slim', 'solvers', 'specs', 'tensor_forest', 'tensorboard', 'testing', 'training', 'tfprof', ], 'tf.contrib.bayesflow': [ 'entropy', 'monte_carlo', 'special_math', 'stochastic_gradient_estimators', 'stochastic_graph', 'stochastic_tensor', 'stochastic_variables', 'variational_inference' ], 'tf.contrib.distributions': ['bijector'], 'tf.contrib.ffmpeg': ['ffmpeg_ops'], 'tf.contrib.graph_editor': [ 'edit', 'match', 'reroute', 'subgraph', 'transform', 'select', 'util' ], 'tf.contrib.layers': ['feature_column', 'summaries'], 'tf.contrib.learn': [ 'datasets', 'head', 'graph_actions', 'io', 'models', 'monitors', 'ops', 'preprocessing', 'utils', ], 'tf.contrib.util': ['loader'], }) sys.exit(doc_generator.build(flags))
apache-2.0
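The generator above is configured entirely through method calls before build(). A pared-down sketch of the same setup for the core module only; the CLI flag spellings registered by add_output_dir_argument / add_src_dir_argument are inferred from the helper names and should be treated as an assumption:

# Pared-down doc-generation sketch mirroring the configuration above.
import sys

import tensorflow as tf

from tensorflow.tools.docs import generate_lib

doc_generator = generate_lib.DocGenerator()
doc_generator.add_output_dir_argument()  # presumably registers --output_dir (assumption)
doc_generator.add_src_dir_argument()     # presumably registers --src_dir (assumption)
flags = doc_generator.parse_known_args()

doc_generator.set_py_modules([('tf', tf)])  # document only the core namespace
doc_generator.set_do_not_descend_map({'tf': ['cli', 'lib', 'wrappers']})

sys.exit(doc_generator.build(flags))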
MalloyPower/parsing-python
front-end/testsuite-python-lib/Python-3.2/Lib/test/test_http_cookiejar.py
3
69589
"""Tests for http/cookiejar.py.""" import os import re import test.support import time import unittest import urllib.request from http.cookiejar import (time2isoz, http2time, time2netscape, parse_ns_headers, join_header_words, split_header_words, Cookie, CookieJar, DefaultCookiePolicy, LWPCookieJar, MozillaCookieJar, LoadError, lwp_cookie_str, DEFAULT_HTTP_PORT, escape_path, reach, is_HDN, domain_match, user_domain_match, request_path, request_port, request_host) class DateTimeTests(unittest.TestCase): def test_time2isoz(self): base = 1019227000 day = 24*3600 self.assertEqual(time2isoz(base), "2002-04-19 14:36:40Z") self.assertEqual(time2isoz(base+day), "2002-04-20 14:36:40Z") self.assertEqual(time2isoz(base+2*day), "2002-04-21 14:36:40Z") self.assertEqual(time2isoz(base+3*day), "2002-04-22 14:36:40Z") az = time2isoz() bz = time2isoz(500000) for text in (az, bz): self.assertTrue(re.search(r"^\d{4}-\d\d-\d\d \d\d:\d\d:\d\dZ$", text), "bad time2isoz format: %s %s" % (az, bz)) def test_http2time(self): def parse_date(text): return time.gmtime(http2time(text))[:6] self.assertEqual(parse_date("01 Jan 2001"), (2001, 1, 1, 0, 0, 0.0)) # this test will break around year 2070 self.assertEqual(parse_date("03-Feb-20"), (2020, 2, 3, 0, 0, 0.0)) # this test will break around year 2048 self.assertEqual(parse_date("03-Feb-98"), (1998, 2, 3, 0, 0, 0.0)) def test_http2time_formats(self): # test http2time for supported dates. Test cases with 2 digit year # will probably break in year 2044. tests = [ 'Thu, 03 Feb 1994 00:00:00 GMT', # proposed new HTTP format 'Thursday, 03-Feb-94 00:00:00 GMT', # old rfc850 HTTP format 'Thursday, 03-Feb-1994 00:00:00 GMT', # broken rfc850 HTTP format '03 Feb 1994 00:00:00 GMT', # HTTP format (no weekday) '03-Feb-94 00:00:00 GMT', # old rfc850 (no weekday) '03-Feb-1994 00:00:00 GMT', # broken rfc850 (no weekday) '03-Feb-1994 00:00 GMT', # broken rfc850 (no weekday, no seconds) '03-Feb-1994 00:00', # broken rfc850 (no weekday, no seconds, no tz) '03-Feb-94', # old rfc850 HTTP format (no weekday, no time) '03-Feb-1994', # broken rfc850 HTTP format (no weekday, no time) '03 Feb 1994', # proposed new HTTP format (no weekday, no time) # A few tests with extra space at various places ' 03 Feb 1994 0:00 ', ' 03-Feb-1994 ', ] test_t = 760233600 # assume broken POSIX counting of seconds result = time2isoz(test_t) expected = "1994-02-03 00:00:00Z" self.assertEqual(result, expected, "%s => '%s' (%s)" % (test_t, result, expected)) for s in tests: t = http2time(s) t2 = http2time(s.lower()) t3 = http2time(s.upper()) self.assertTrue(t == t2 == t3 == test_t, "'%s' => %s, %s, %s (%s)" % (s, t, t2, t3, test_t)) def test_http2time_garbage(self): for test in [ '', 'Garbage', 'Mandag 16. 
September 1996', '01-00-1980', '01-13-1980', '00-01-1980', '32-01-1980', '01-01-1980 25:00:00', '01-01-1980 00:61:00', '01-01-1980 00:00:62', ]: self.assertTrue(http2time(test) is None, "http2time(%s) is not None\n" "http2time(test) %s" % (test, http2time(test)) ) class HeaderTests(unittest.TestCase): def test_parse_ns_headers(self): # quotes should be stripped expected = [[('foo', 'bar'), ('expires', 2209069412), ('version', '0')]] for hdr in [ 'foo=bar; expires=01 Jan 2040 22:23:32 GMT', 'foo=bar; expires="01 Jan 2040 22:23:32 GMT"', ]: self.assertEqual(parse_ns_headers([hdr]), expected) def test_parse_ns_headers_version(self): # quotes should be stripped expected = [[('foo', 'bar'), ('version', '1')]] for hdr in [ 'foo=bar; version="1"', 'foo=bar; Version="1"', ]: self.assertEqual(parse_ns_headers([hdr]), expected) def test_parse_ns_headers_special_names(self): # names such as 'expires' are not special in first name=value pair # of Set-Cookie: header # Cookie with name 'expires' hdr = 'expires=01 Jan 2040 22:23:32 GMT' expected = [[("expires", "01 Jan 2040 22:23:32 GMT"), ("version", "0")]] self.assertEqual(parse_ns_headers([hdr]), expected) def test_join_header_words(self): joined = join_header_words([[("foo", None), ("bar", "baz")]]) self.assertEqual(joined, "foo; bar=baz") self.assertEqual(join_header_words([[]]), "") def test_split_header_words(self): tests = [ ("foo", [[("foo", None)]]), ("foo=bar", [[("foo", "bar")]]), (" foo ", [[("foo", None)]]), (" foo= ", [[("foo", "")]]), (" foo=", [[("foo", "")]]), (" foo= ; ", [[("foo", "")]]), (" foo= ; bar= baz ", [[("foo", ""), ("bar", "baz")]]), ("foo=bar bar=baz", [[("foo", "bar"), ("bar", "baz")]]), # doesn't really matter if this next fails, but it works ATM ("foo= bar=baz", [[("foo", "bar=baz")]]), ("foo=bar;bar=baz", [[("foo", "bar"), ("bar", "baz")]]), ('foo bar baz', [[("foo", None), ("bar", None), ("baz", None)]]), ("a, b, c", [[("a", None)], [("b", None)], [("c", None)]]), (r'foo; bar=baz, spam=, foo="\,\;\"", bar= ', [[("foo", None), ("bar", "baz")], [("spam", "")], [("foo", ',;"')], [("bar", "")]]), ] for arg, expect in tests: try: result = split_header_words([arg]) except: import traceback, io f = io.StringIO() traceback.print_exc(None, f) result = "(error -- traceback follows)\n\n%s" % f.getvalue() self.assertEqual(result, expect, """ When parsing: '%s' Expected: '%s' Got: '%s' """ % (arg, expect, result)) def test_roundtrip(self): tests = [ ("foo", "foo"), ("foo=bar", "foo=bar"), (" foo ", "foo"), ("foo=", 'foo=""'), ("foo=bar bar=baz", "foo=bar; bar=baz"), ("foo=bar;bar=baz", "foo=bar; bar=baz"), ('foo bar baz', "foo; bar; baz"), (r'foo="\"" bar="\\"', r'foo="\""; bar="\\"'), ('foo,,,bar', 'foo, bar'), ('foo=bar,bar=baz', 'foo=bar, bar=baz'), ('text/html; charset=iso-8859-1', 'text/html; charset="iso-8859-1"'), ('foo="bar"; port="80,81"; discard, bar=baz', 'foo=bar; port="80,81"; discard, bar=baz'), (r'Basic realm="\"foo\\\\bar\""', r'Basic; realm="\"foo\\\\bar\""') ] for arg, expect in tests: input = split_header_words([arg]) res = join_header_words(input) self.assertEqual(res, expect, """ When parsing: '%s' Expected: '%s' Got: '%s' Input was: '%s' """ % (arg, expect, res, input)) class FakeResponse: def __init__(self, headers=[], url=None): """ headers: list of RFC822-style 'Key: value' strings """ import email self._headers = email.message_from_string("\n".join(headers)) self._url = url def info(self): return self._headers def interact_2965(cookiejar, url, *set_cookie_hdrs): return _interact(cookiejar, url, 
set_cookie_hdrs, "Set-Cookie2") def interact_netscape(cookiejar, url, *set_cookie_hdrs): return _interact(cookiejar, url, set_cookie_hdrs, "Set-Cookie") def _interact(cookiejar, url, set_cookie_hdrs, hdr_name): """Perform a single request / response cycle, returning Cookie: header.""" req = urllib.request.Request(url) cookiejar.add_cookie_header(req) cookie_hdr = req.get_header("Cookie", "") headers = [] for hdr in set_cookie_hdrs: headers.append("%s: %s" % (hdr_name, hdr)) res = FakeResponse(headers, url) cookiejar.extract_cookies(res, req) return cookie_hdr class FileCookieJarTests(unittest.TestCase): def test_lwp_valueless_cookie(self): # cookies with no value should be saved and loaded consistently filename = test.support.TESTFN c = LWPCookieJar() interact_netscape(c, "http://www.acme.com/", 'boo') self.assertEqual(c._cookies["www.acme.com"]["/"]["boo"].value, None) try: c.save(filename, ignore_discard=True) c = LWPCookieJar() c.load(filename, ignore_discard=True) finally: try: os.unlink(filename) except OSError: pass self.assertEqual(c._cookies["www.acme.com"]["/"]["boo"].value, None) def test_bad_magic(self): # IOErrors (eg. file doesn't exist) are allowed to propagate filename = test.support.TESTFN for cookiejar_class in LWPCookieJar, MozillaCookieJar: c = cookiejar_class() try: c.load(filename="for this test to work, a file with this " "filename should not exist") except IOError as exc: # exactly IOError, not LoadError self.assertEqual(exc.__class__, IOError) else: self.fail("expected IOError for invalid filename") # Invalid contents of cookies file (eg. bad magic string) # causes a LoadError. try: with open(filename, "w") as f: f.write("oops\n") for cookiejar_class in LWPCookieJar, MozillaCookieJar: c = cookiejar_class() self.assertRaises(LoadError, c.load, filename) finally: try: os.unlink(filename) except OSError: pass class CookieTests(unittest.TestCase): # XXX # Get rid of string comparisons where not actually testing str / repr. # .clear() etc. # IP addresses like 50 (single number, no dot) and domain-matching # functions (and is_HDN)? See draft RFC 2965 errata. # Strictness switches # is_third_party() # unverifiability / third-party blocking # Netscape cookies work the same as RFC 2965 with regard to port. # Set-Cookie with negative max age. # If turn RFC 2965 handling off, Set-Cookie2 cookies should not clobber # Set-Cookie cookies. # Cookie2 should be sent if *any* cookies are not V1 (ie. V0 OR V2 etc.). # Cookies (V1 and V0) with no expiry date should be set to be discarded. # RFC 2965 Quoting: # Should accept unquoted cookie-attribute values? check errata draft. # Which are required on the way in and out? # Should always return quoted cookie-attribute values? # Proper testing of when RFC 2965 clobbers Netscape (waiting for errata). # Path-match on return (same for V0 and V1). # RFC 2965 acceptance and returning rules # Set-Cookie2 without version attribute is rejected. # Netscape peculiarities list from Ronald Tschalar. # The first two still need tests, the rest are covered. ## - Quoting: only quotes around the expires value are recognized as such ## (and yes, some folks quote the expires value); quotes around any other ## value are treated as part of the value. ## - White space: white space around names and values is ignored ## - Default path: if no path parameter is given, the path defaults to the ## path in the request-uri up to, but not including, the last '/'. Note ## that this is entirely different from what the spec says. 
## - Commas and other delimiters: Netscape just parses until the next ';'. ## This means it will allow commas etc inside values (and yes, both ## commas and equals commonly appear in the cookie value). This also ## means that if you fold multiple Set-Cookie header fields into one, ## comma-separated list, it'll be a headache to parse (at least my head ## starts hurting every time I think of that code). ## - Expires: You'll get all sorts of date formats in the expires, ## including empty expires attributes ("expires="). Be as flexible as you ## can, and certainly don't expect the weekday to be there; if you can't ## parse it, just ignore it and pretend it's a session cookie. ## - Domain-matching: Netscape uses the 2-dot rule for _all_ domains, not ## just the 7 special TLD's listed in their spec. And folks rely on ## that... def test_domain_return_ok(self): # test optimization: .domain_return_ok() should filter out most # domains in the CookieJar before we try to access them (because that # may require disk access -- in particular, with MSIECookieJar) # This is only a rough check for performance reasons, so it's not too # critical as long as it's sufficiently liberal. pol = DefaultCookiePolicy() for url, domain, ok in [ ("http://foo.bar.com/", "blah.com", False), ("http://foo.bar.com/", "rhubarb.blah.com", False), ("http://foo.bar.com/", "rhubarb.foo.bar.com", False), ("http://foo.bar.com/", ".foo.bar.com", True), ("http://foo.bar.com/", "foo.bar.com", True), ("http://foo.bar.com/", ".bar.com", True), ("http://foo.bar.com/", "com", True), ("http://foo.com/", "rhubarb.foo.com", False), ("http://foo.com/", ".foo.com", True), ("http://foo.com/", "foo.com", True), ("http://foo.com/", "com", True), ("http://foo/", "rhubarb.foo", False), ("http://foo/", ".foo", True), ("http://foo/", "foo", True), ("http://foo/", "foo.local", True), ("http://foo/", ".local", True), ]: request = urllib.request.Request(url) r = pol.domain_return_ok(domain, request) if ok: self.assertTrue(r) else: self.assertTrue(not r) def test_missing_value(self): # missing = sign in Cookie: header is regarded by Mozilla as a missing # name, and by http.cookiejar as a missing value filename = test.support.TESTFN c = MozillaCookieJar(filename) interact_netscape(c, "http://www.acme.com/", 'eggs') interact_netscape(c, "http://www.acme.com/", '"spam"; path=/foo/') cookie = c._cookies["www.acme.com"]["/"]["eggs"] self.assertTrue(cookie.value is None) self.assertEqual(cookie.name, "eggs") cookie = c._cookies["www.acme.com"]['/foo/']['"spam"'] self.assertTrue(cookie.value is None) self.assertEqual(cookie.name, '"spam"') self.assertEqual(lwp_cookie_str(cookie), ( r'"spam"; path="/foo/"; domain="www.acme.com"; ' 'path_spec; discard; version=0')) old_str = repr(c) c.save(ignore_expires=True, ignore_discard=True) try: c = MozillaCookieJar(filename) c.revert(ignore_expires=True, ignore_discard=True) finally: os.unlink(c.filename) # cookies unchanged apart from lost info re. 
whether path was specified self.assertEqual( repr(c), re.sub("path_specified=%s" % True, "path_specified=%s" % False, old_str) ) self.assertEqual(interact_netscape(c, "http://www.acme.com/foo/"), '"spam"; eggs') def test_rfc2109_handling(self): # RFC 2109 cookies are handled as RFC 2965 or Netscape cookies, # dependent on policy settings for rfc2109_as_netscape, rfc2965, version in [ # default according to rfc2965 if not explicitly specified (None, False, 0), (None, True, 1), # explicit rfc2109_as_netscape (False, False, None), # version None here means no cookie stored (False, True, 1), (True, False, 0), (True, True, 0), ]: policy = DefaultCookiePolicy( rfc2109_as_netscape=rfc2109_as_netscape, rfc2965=rfc2965) c = CookieJar(policy) interact_netscape(c, "http://www.example.com/", "ni=ni; Version=1") try: cookie = c._cookies["www.example.com"]["/"]["ni"] except KeyError: self.assertTrue(version is None) # didn't expect a stored cookie else: self.assertEqual(cookie.version, version) # 2965 cookies are unaffected interact_2965(c, "http://www.example.com/", "foo=bar; Version=1") if rfc2965: cookie2965 = c._cookies["www.example.com"]["/"]["foo"] self.assertEqual(cookie2965.version, 1) def test_ns_parser(self): c = CookieJar() interact_netscape(c, "http://www.acme.com/", 'spam=eggs; DoMain=.acme.com; port; blArgh="feep"') interact_netscape(c, "http://www.acme.com/", 'ni=ni; port=80,8080') interact_netscape(c, "http://www.acme.com:80/", 'nini=ni') interact_netscape(c, "http://www.acme.com:80/", 'foo=bar; expires=') interact_netscape(c, "http://www.acme.com:80/", 'spam=eggs; ' 'expires="Foo Bar 25 33:22:11 3022"') cookie = c._cookies[".acme.com"]["/"]["spam"] self.assertEqual(cookie.domain, ".acme.com") self.assertTrue(cookie.domain_specified) self.assertEqual(cookie.port, DEFAULT_HTTP_PORT) self.assertTrue(not cookie.port_specified) # case is preserved self.assertTrue(cookie.has_nonstandard_attr("blArgh") and not cookie.has_nonstandard_attr("blargh")) cookie = c._cookies["www.acme.com"]["/"]["ni"] self.assertEqual(cookie.domain, "www.acme.com") self.assertTrue(not cookie.domain_specified) self.assertEqual(cookie.port, "80,8080") self.assertTrue(cookie.port_specified) cookie = c._cookies["www.acme.com"]["/"]["nini"] self.assertTrue(cookie.port is None) self.assertTrue(not cookie.port_specified) # invalid expires should not cause cookie to be dropped foo = c._cookies["www.acme.com"]["/"]["foo"] spam = c._cookies["www.acme.com"]["/"]["spam"] self.assertTrue(foo.expires is None) self.assertTrue(spam.expires is None) def test_ns_parser_special_names(self): # names such as 'expires' are not special in first name=value pair # of Set-Cookie: header c = CookieJar() interact_netscape(c, "http://www.acme.com/", 'expires=eggs') interact_netscape(c, "http://www.acme.com/", 'version=eggs; spam=eggs') cookies = c._cookies["www.acme.com"]["/"] self.assertIn('expires', cookies) self.assertIn('version', cookies) def test_expires(self): # if expires is in future, keep cookie... c = CookieJar() future = time2netscape(time.time()+3600) interact_netscape(c, "http://www.acme.com/", 'spam="bar"; expires=%s' % future) self.assertEqual(len(c), 1) now = time2netscape(time.time()-1) # ... 
and if in past or present, discard it interact_netscape(c, "http://www.acme.com/", 'foo="eggs"; expires=%s' % now) h = interact_netscape(c, "http://www.acme.com/") self.assertEqual(len(c), 1) self.assertIn('spam="bar"', h) self.assertNotIn("foo", h) # max-age takes precedence over expires, and zero max-age is a request to # delete both new cookie and any old matching cookie interact_netscape(c, "http://www.acme.com/", 'eggs="bar"; expires=%s' % future) interact_netscape(c, "http://www.acme.com/", 'bar="bar"; expires=%s' % future) self.assertEqual(len(c), 3) interact_netscape(c, "http://www.acme.com/", 'eggs="bar"; ' 'expires=%s; max-age=0' % future) interact_netscape(c, "http://www.acme.com/", 'bar="bar"; ' 'max-age=0; expires=%s' % future) h = interact_netscape(c, "http://www.acme.com/") self.assertEqual(len(c), 1) # test expiry at end of session for cookies with no expires attribute interact_netscape(c, "http://www.rhubarb.net/", 'whum="fizz"') self.assertEqual(len(c), 2) c.clear_session_cookies() self.assertEqual(len(c), 1) self.assertIn('spam="bar"', h) # XXX RFC 2965 expiry rules (some apply to V0 too) def test_default_path(self): # RFC 2965 pol = DefaultCookiePolicy(rfc2965=True) c = CookieJar(pol) interact_2965(c, "http://www.acme.com/", 'spam="bar"; Version="1"') self.assertIn("/", c._cookies["www.acme.com"]) c = CookieJar(pol) interact_2965(c, "http://www.acme.com/blah", 'eggs="bar"; Version="1"') self.assertIn("/", c._cookies["www.acme.com"]) c = CookieJar(pol) interact_2965(c, "http://www.acme.com/blah/rhubarb", 'eggs="bar"; Version="1"') self.assertIn("/blah/", c._cookies["www.acme.com"]) c = CookieJar(pol) interact_2965(c, "http://www.acme.com/blah/rhubarb/", 'eggs="bar"; Version="1"') self.assertIn("/blah/rhubarb/", c._cookies["www.acme.com"]) # Netscape c = CookieJar() interact_netscape(c, "http://www.acme.com/", 'spam="bar"') self.assertIn("/", c._cookies["www.acme.com"]) c = CookieJar() interact_netscape(c, "http://www.acme.com/blah", 'eggs="bar"') self.assertIn("/", c._cookies["www.acme.com"]) c = CookieJar() interact_netscape(c, "http://www.acme.com/blah/rhubarb", 'eggs="bar"') self.assertIn("/blah", c._cookies["www.acme.com"]) c = CookieJar() interact_netscape(c, "http://www.acme.com/blah/rhubarb/", 'eggs="bar"') self.assertIn("/blah/rhubarb", c._cookies["www.acme.com"]) def test_default_path_with_query(self): cj = CookieJar() uri = "http://example.com/?spam/eggs" value = 'eggs="bar"' interact_netscape(cj, uri, value) # Default path does not include query, so is "/", not "/?spam". self.assertIn("/", cj._cookies["example.com"]) # Cookie is sent back to the same URI. 
self.assertEqual(interact_netscape(cj, uri), value) def test_escape_path(self): cases = [ # quoted safe ("/foo%2f/bar", "/foo%2F/bar"), ("/foo%2F/bar", "/foo%2F/bar"), # quoted % ("/foo%%/bar", "/foo%%/bar"), # quoted unsafe ("/fo%19o/bar", "/fo%19o/bar"), ("/fo%7do/bar", "/fo%7Do/bar"), # unquoted safe ("/foo/bar&", "/foo/bar&"), ("/foo//bar", "/foo//bar"), ("\176/foo/bar", "\176/foo/bar"), # unquoted unsafe ("/foo\031/bar", "/foo%19/bar"), ("/\175foo/bar", "/%7Dfoo/bar"), # unicode, latin-1 range ("/foo/bar\u00fc", "/foo/bar%C3%BC"), # UTF-8 encoded # unicode ("/foo/bar\uabcd", "/foo/bar%EA%AF%8D"), # UTF-8 encoded ] for arg, result in cases: self.assertEqual(escape_path(arg), result) def test_request_path(self): # with parameters req = urllib.request.Request( "http://www.example.com/rheum/rhaponticum;" "foo=bar;sing=song?apples=pears&spam=eggs#ni") self.assertEqual(request_path(req), "/rheum/rhaponticum;foo=bar;sing=song") # without parameters req = urllib.request.Request( "http://www.example.com/rheum/rhaponticum?" "apples=pears&spam=eggs#ni") self.assertEqual(request_path(req), "/rheum/rhaponticum") # missing final slash req = urllib.request.Request("http://www.example.com") self.assertEqual(request_path(req), "/") def test_request_port(self): req = urllib.request.Request("http://www.acme.com:1234/", headers={"Host": "www.acme.com:4321"}) self.assertEqual(request_port(req), "1234") req = urllib.request.Request("http://www.acme.com/", headers={"Host": "www.acme.com:4321"}) self.assertEqual(request_port(req), DEFAULT_HTTP_PORT) def test_request_host(self): # this request is illegal (RFC2616, 14.2.3) req = urllib.request.Request("http://1.1.1.1/", headers={"Host": "www.acme.com:80"}) # libwww-perl wants this response, but that seems wrong (RFC 2616, # section 5.2, point 1., and RFC 2965 section 1, paragraph 3) #self.assertEqual(request_host(req), "www.acme.com") self.assertEqual(request_host(req), "1.1.1.1") req = urllib.request.Request("http://www.acme.com/", headers={"Host": "irrelevant.com"}) self.assertEqual(request_host(req), "www.acme.com") # port shouldn't be in request-host req = urllib.request.Request("http://www.acme.com:2345/resource.html", headers={"Host": "www.acme.com:5432"}) self.assertEqual(request_host(req), "www.acme.com") def test_is_HDN(self): self.assertTrue(is_HDN("foo.bar.com")) self.assertTrue(is_HDN("1foo2.3bar4.5com")) self.assertTrue(not is_HDN("192.168.1.1")) self.assertTrue(not is_HDN("")) self.assertTrue(not is_HDN(".")) self.assertTrue(not is_HDN(".foo.bar.com")) self.assertTrue(not is_HDN("..foo")) self.assertTrue(not is_HDN("foo.")) def test_reach(self): self.assertEqual(reach("www.acme.com"), ".acme.com") self.assertEqual(reach("acme.com"), "acme.com") self.assertEqual(reach("acme.local"), ".local") self.assertEqual(reach(".local"), ".local") self.assertEqual(reach(".com"), ".com") self.assertEqual(reach("."), ".") self.assertEqual(reach(""), "") self.assertEqual(reach("192.168.0.1"), "192.168.0.1") def test_domain_match(self): self.assertTrue(domain_match("192.168.1.1", "192.168.1.1")) self.assertTrue(not domain_match("192.168.1.1", ".168.1.1")) self.assertTrue(domain_match("x.y.com", "x.Y.com")) self.assertTrue(domain_match("x.y.com", ".Y.com")) self.assertTrue(not domain_match("x.y.com", "Y.com")) self.assertTrue(domain_match("a.b.c.com", ".c.com")) self.assertTrue(not domain_match(".c.com", "a.b.c.com")) self.assertTrue(domain_match("example.local", ".local")) self.assertTrue(not domain_match("blah.blah", "")) self.assertTrue(not domain_match("", 
".rhubarb.rhubarb")) self.assertTrue(domain_match("", "")) self.assertTrue(user_domain_match("acme.com", "acme.com")) self.assertTrue(not user_domain_match("acme.com", ".acme.com")) self.assertTrue(user_domain_match("rhubarb.acme.com", ".acme.com")) self.assertTrue(user_domain_match("www.rhubarb.acme.com", ".acme.com")) self.assertTrue(user_domain_match("x.y.com", "x.Y.com")) self.assertTrue(user_domain_match("x.y.com", ".Y.com")) self.assertTrue(not user_domain_match("x.y.com", "Y.com")) self.assertTrue(user_domain_match("y.com", "Y.com")) self.assertTrue(not user_domain_match(".y.com", "Y.com")) self.assertTrue(user_domain_match(".y.com", ".Y.com")) self.assertTrue(user_domain_match("x.y.com", ".com")) self.assertTrue(not user_domain_match("x.y.com", "com")) self.assertTrue(not user_domain_match("x.y.com", "m")) self.assertTrue(not user_domain_match("x.y.com", ".m")) self.assertTrue(not user_domain_match("x.y.com", "")) self.assertTrue(not user_domain_match("x.y.com", ".")) self.assertTrue(user_domain_match("192.168.1.1", "192.168.1.1")) # not both HDNs, so must string-compare equal to match self.assertTrue(not user_domain_match("192.168.1.1", ".168.1.1")) self.assertTrue(not user_domain_match("192.168.1.1", ".")) # empty string is a special case self.assertTrue(not user_domain_match("192.168.1.1", "")) def test_wrong_domain(self): # Cookies whose effective request-host name does not domain-match the # domain are rejected. # XXX far from complete c = CookieJar() interact_2965(c, "http://www.nasty.com/", 'foo=bar; domain=friendly.org; Version="1"') self.assertEqual(len(c), 0) def test_strict_domain(self): # Cookies whose domain is a country-code tld like .co.uk should # not be set if CookiePolicy.strict_domain is true. cp = DefaultCookiePolicy(strict_domain=True) cj = CookieJar(policy=cp) interact_netscape(cj, "http://example.co.uk/", 'no=problemo') interact_netscape(cj, "http://example.co.uk/", 'okey=dokey; Domain=.example.co.uk') self.assertEqual(len(cj), 2) for pseudo_tld in [".co.uk", ".org.za", ".tx.us", ".name.us"]: interact_netscape(cj, "http://example.%s/" % pseudo_tld, 'spam=eggs; Domain=.co.uk') self.assertEqual(len(cj), 2) def test_two_component_domain_ns(self): # Netscape: .www.bar.com, www.bar.com, .bar.com, bar.com, no domain # should all get accepted, as should .acme.com, acme.com and no domain # for 2-component domains like acme.com. c = CookieJar() # two-component V0 domain is OK interact_netscape(c, "http://foo.net/", 'ns=bar') self.assertEqual(len(c), 1) self.assertEqual(c._cookies["foo.net"]["/"]["ns"].value, "bar") self.assertEqual(interact_netscape(c, "http://foo.net/"), "ns=bar") # *will* be returned to any other domain (unlike RFC 2965)... self.assertEqual(interact_netscape(c, "http://www.foo.net/"), "ns=bar") # ...unless requested otherwise pol = DefaultCookiePolicy( strict_ns_domain=DefaultCookiePolicy.DomainStrictNonDomain) c.set_policy(pol) self.assertEqual(interact_netscape(c, "http://www.foo.net/"), "") # unlike RFC 2965, even explicit two-component domain is OK, # because .foo.net matches foo.net interact_netscape(c, "http://foo.net/foo/", 'spam1=eggs; domain=foo.net') # even if starts with a dot -- in NS rules, .foo.net matches foo.net! 
interact_netscape(c, "http://foo.net/foo/bar/", 'spam2=eggs; domain=.foo.net') self.assertEqual(len(c), 3) self.assertEqual(c._cookies[".foo.net"]["/foo"]["spam1"].value, "eggs") self.assertEqual(c._cookies[".foo.net"]["/foo/bar"]["spam2"].value, "eggs") self.assertEqual(interact_netscape(c, "http://foo.net/foo/bar/"), "spam2=eggs; spam1=eggs; ns=bar") # top-level domain is too general interact_netscape(c, "http://foo.net/", 'nini="ni"; domain=.net') self.assertEqual(len(c), 3) ## # Netscape protocol doesn't allow non-special top level domains (such ## # as co.uk) in the domain attribute unless there are at least three ## # dots in it. # Oh yes it does! Real implementations don't check this, and real # cookies (of course) rely on that behaviour. interact_netscape(c, "http://foo.co.uk", 'nasty=trick; domain=.co.uk') ## self.assertEqual(len(c), 2) self.assertEqual(len(c), 4) def test_two_component_domain_rfc2965(self): pol = DefaultCookiePolicy(rfc2965=True) c = CookieJar(pol) # two-component V1 domain is OK interact_2965(c, "http://foo.net/", 'foo=bar; Version="1"') self.assertEqual(len(c), 1) self.assertEqual(c._cookies["foo.net"]["/"]["foo"].value, "bar") self.assertEqual(interact_2965(c, "http://foo.net/"), "$Version=1; foo=bar") # won't be returned to any other domain (because domain was implied) self.assertEqual(interact_2965(c, "http://www.foo.net/"), "") # unless domain is given explicitly, because then it must be # rewritten to start with a dot: foo.net --> .foo.net, which does # not domain-match foo.net interact_2965(c, "http://foo.net/foo", 'spam=eggs; domain=foo.net; path=/foo; Version="1"') self.assertEqual(len(c), 1) self.assertEqual(interact_2965(c, "http://foo.net/foo"), "$Version=1; foo=bar") # explicit foo.net from three-component domain www.foo.net *does* get # set, because .foo.net domain-matches .foo.net interact_2965(c, "http://www.foo.net/foo/", 'spam=eggs; domain=foo.net; Version="1"') self.assertEqual(c._cookies[".foo.net"]["/foo/"]["spam"].value, "eggs") self.assertEqual(len(c), 2) self.assertEqual(interact_2965(c, "http://foo.net/foo/"), "$Version=1; foo=bar") self.assertEqual(interact_2965(c, "http://www.foo.net/foo/"), '$Version=1; spam=eggs; $Domain="foo.net"') # top-level domain is too general interact_2965(c, "http://foo.net/", 'ni="ni"; domain=".net"; Version="1"') self.assertEqual(len(c), 2) # RFC 2965 doesn't require blocking this interact_2965(c, "http://foo.co.uk/", 'nasty=trick; domain=.co.uk; Version="1"') self.assertEqual(len(c), 3) def test_domain_allow(self): c = CookieJar(policy=DefaultCookiePolicy( blocked_domains=["acme.com"], allowed_domains=["www.acme.com"])) req = urllib.request.Request("http://acme.com/") headers = ["Set-Cookie: CUSTOMER=WILE_E_COYOTE; path=/"] res = FakeResponse(headers, "http://acme.com/") c.extract_cookies(res, req) self.assertEqual(len(c), 0) req = urllib.request.Request("http://www.acme.com/") res = FakeResponse(headers, "http://www.acme.com/") c.extract_cookies(res, req) self.assertEqual(len(c), 1) req = urllib.request.Request("http://www.coyote.com/") res = FakeResponse(headers, "http://www.coyote.com/") c.extract_cookies(res, req) self.assertEqual(len(c), 1) # set a cookie with non-allowed domain... req = urllib.request.Request("http://www.coyote.com/") res = FakeResponse(headers, "http://www.coyote.com/") cookies = c.make_cookies(res, req) c.set_cookie(cookies[0]) self.assertEqual(len(c), 2) # ... 
and check it doesn't get returned c.add_cookie_header(req) self.assertTrue(not req.has_header("Cookie")) def test_domain_block(self): pol = DefaultCookiePolicy( rfc2965=True, blocked_domains=[".acme.com"]) c = CookieJar(policy=pol) headers = ["Set-Cookie: CUSTOMER=WILE_E_COYOTE; path=/"] req = urllib.request.Request("http://www.acme.com/") res = FakeResponse(headers, "http://www.acme.com/") c.extract_cookies(res, req) self.assertEqual(len(c), 0) p = pol.set_blocked_domains(["acme.com"]) c.extract_cookies(res, req) self.assertEqual(len(c), 1) c.clear() req = urllib.request.Request("http://www.roadrunner.net/") res = FakeResponse(headers, "http://www.roadrunner.net/") c.extract_cookies(res, req) self.assertEqual(len(c), 1) req = urllib.request.Request("http://www.roadrunner.net/") c.add_cookie_header(req) self.assertTrue((req.has_header("Cookie") and req.has_header("Cookie2"))) c.clear() pol.set_blocked_domains([".acme.com"]) c.extract_cookies(res, req) self.assertEqual(len(c), 1) # set a cookie with blocked domain... req = urllib.request.Request("http://www.acme.com/") res = FakeResponse(headers, "http://www.acme.com/") cookies = c.make_cookies(res, req) c.set_cookie(cookies[0]) self.assertEqual(len(c), 2) # ... and check it doesn't get returned c.add_cookie_header(req) self.assertTrue(not req.has_header("Cookie")) def test_secure(self): for ns in True, False: for whitespace in " ", "": c = CookieJar() if ns: pol = DefaultCookiePolicy(rfc2965=False) int = interact_netscape vs = "" else: pol = DefaultCookiePolicy(rfc2965=True) int = interact_2965 vs = "; Version=1" c.set_policy(pol) url = "http://www.acme.com/" int(c, url, "foo1=bar%s%s" % (vs, whitespace)) int(c, url, "foo2=bar%s; secure%s" % (vs, whitespace)) self.assertTrue( not c._cookies["www.acme.com"]["/"]["foo1"].secure, "non-secure cookie registered secure") self.assertTrue( c._cookies["www.acme.com"]["/"]["foo2"].secure, "secure cookie registered non-secure") def test_quote_cookie_value(self): c = CookieJar(policy=DefaultCookiePolicy(rfc2965=True)) interact_2965(c, "http://www.acme.com/", r'foo=\b"a"r; Version=1') h = interact_2965(c, "http://www.acme.com/") self.assertEqual(h, r'$Version=1; foo=\\b\"a\"r') def test_missing_final_slash(self): # Missing slash from request URL's abs_path should be assumed present. 
url = "http://www.acme.com" c = CookieJar(DefaultCookiePolicy(rfc2965=True)) interact_2965(c, url, "foo=bar; Version=1") req = urllib.request.Request(url) self.assertEqual(len(c), 1) c.add_cookie_header(req) self.assertTrue(req.has_header("Cookie")) def test_domain_mirror(self): pol = DefaultCookiePolicy(rfc2965=True) c = CookieJar(pol) url = "http://foo.bar.com/" interact_2965(c, url, "spam=eggs; Version=1") h = interact_2965(c, url) self.assertNotIn("Domain", h, "absent domain returned with domain present") c = CookieJar(pol) url = "http://foo.bar.com/" interact_2965(c, url, 'spam=eggs; Version=1; Domain=.bar.com') h = interact_2965(c, url) self.assertIn('$Domain=".bar.com"', h, "domain not returned") c = CookieJar(pol) url = "http://foo.bar.com/" # note missing initial dot in Domain interact_2965(c, url, 'spam=eggs; Version=1; Domain=bar.com') h = interact_2965(c, url) self.assertIn('$Domain="bar.com"', h, "domain not returned") def test_path_mirror(self): pol = DefaultCookiePolicy(rfc2965=True) c = CookieJar(pol) url = "http://foo.bar.com/" interact_2965(c, url, "spam=eggs; Version=1") h = interact_2965(c, url) self.assertNotIn("Path", h, "absent path returned with path present") c = CookieJar(pol) url = "http://foo.bar.com/" interact_2965(c, url, 'spam=eggs; Version=1; Path=/') h = interact_2965(c, url) self.assertIn('$Path="/"', h, "path not returned") def test_port_mirror(self): pol = DefaultCookiePolicy(rfc2965=True) c = CookieJar(pol) url = "http://foo.bar.com/" interact_2965(c, url, "spam=eggs; Version=1") h = interact_2965(c, url) self.assertNotIn("Port", h, "absent port returned with port present") c = CookieJar(pol) url = "http://foo.bar.com/" interact_2965(c, url, "spam=eggs; Version=1; Port") h = interact_2965(c, url) self.assertTrue(re.search("\$Port([^=]|$)", h), "port with no value not returned with no value") c = CookieJar(pol) url = "http://foo.bar.com/" interact_2965(c, url, 'spam=eggs; Version=1; Port="80"') h = interact_2965(c, url) self.assertIn('$Port="80"', h, "port with single value not returned with single value") c = CookieJar(pol) url = "http://foo.bar.com/" interact_2965(c, url, 'spam=eggs; Version=1; Port="80,8080"') h = interact_2965(c, url) self.assertIn('$Port="80,8080"', h, "port with multiple values not returned with multiple " "values") def test_no_return_comment(self): c = CookieJar(DefaultCookiePolicy(rfc2965=True)) url = "http://foo.bar.com/" interact_2965(c, url, 'spam=eggs; Version=1; ' 'Comment="does anybody read these?"; ' 'CommentURL="http://foo.bar.net/comment.html"') h = interact_2965(c, url) self.assertTrue( "Comment" not in h, "Comment or CommentURL cookie-attributes returned to server") def test_Cookie_iterator(self): cs = CookieJar(DefaultCookiePolicy(rfc2965=True)) # add some random cookies interact_2965(cs, "http://blah.spam.org/", 'foo=eggs; Version=1; ' 'Comment="does anybody read these?"; ' 'CommentURL="http://foo.bar.net/comment.html"') interact_netscape(cs, "http://www.acme.com/blah/", "spam=bar; secure") interact_2965(cs, "http://www.acme.com/blah/", "foo=bar; secure; Version=1") interact_2965(cs, "http://www.acme.com/blah/", "foo=bar; path=/; Version=1") interact_2965(cs, "http://www.sol.no", r'bang=wallop; version=1; domain=".sol.no"; ' r'port="90,100, 80,8080"; ' r'max-age=100; Comment = "Just kidding! 
(\"|\\\\) "') versions = [1, 1, 1, 0, 1] names = ["bang", "foo", "foo", "spam", "foo"] domains = [".sol.no", "blah.spam.org", "www.acme.com", "www.acme.com", "www.acme.com"] paths = ["/", "/", "/", "/blah", "/blah/"] for i in range(4): i = 0 for c in cs: self.assertTrue(isinstance(c, Cookie)) self.assertEqual(c.version, versions[i]) self.assertEqual(c.name, names[i]) self.assertEqual(c.domain, domains[i]) self.assertEqual(c.path, paths[i]) i = i + 1 def test_parse_ns_headers(self): # missing domain value (invalid cookie) self.assertEqual( parse_ns_headers(["foo=bar; path=/; domain"]), [[("foo", "bar"), ("path", "/"), ("domain", None), ("version", "0")]] ) # invalid expires value self.assertEqual( parse_ns_headers(["foo=bar; expires=Foo Bar 12 33:22:11 2000"]), [[("foo", "bar"), ("expires", None), ("version", "0")]] ) # missing cookie value (valid cookie) self.assertEqual( parse_ns_headers(["foo"]), [[("foo", None), ("version", "0")]] ) # shouldn't add version if header is empty self.assertEqual(parse_ns_headers([""]), []) def test_bad_cookie_header(self): def cookiejar_from_cookie_headers(headers): c = CookieJar() req = urllib.request.Request("http://www.example.com/") r = FakeResponse(headers, "http://www.example.com/") c.extract_cookies(r, req) return c # none of these bad headers should cause an exception to be raised for headers in [ ["Set-Cookie: "], # actually, nothing wrong with this ["Set-Cookie2: "], # ditto # missing domain value ["Set-Cookie2: a=foo; path=/; Version=1; domain"], # bad max-age ["Set-Cookie: b=foo; max-age=oops"], # bad version ["Set-Cookie: b=foo; version=spam"], ]: c = cookiejar_from_cookie_headers(headers) # these bad cookies shouldn't be set self.assertEqual(len(c), 0) # cookie with invalid expires is treated as session cookie headers = ["Set-Cookie: c=foo; expires=Foo Bar 12 33:22:11 2000"] c = cookiejar_from_cookie_headers(headers) cookie = c._cookies["www.example.com"]["/"]["c"] self.assertTrue(cookie.expires is None) class LWPCookieTests(unittest.TestCase): # Tests taken from libwww-perl, with a few modifications and additions. def test_netscape_example_1(self): #------------------------------------------------------------------- # First we check that it works for the original example at # http://www.netscape.com/newsref/std/cookie_spec.html # Client requests a document, and receives in the response: # # Set-Cookie: CUSTOMER=WILE_E_COYOTE; path=/; expires=Wednesday, 09-Nov-99 23:12:40 GMT # # When client requests a URL in path "/" on this server, it sends: # # Cookie: CUSTOMER=WILE_E_COYOTE # # Client requests a document, and receives in the response: # # Set-Cookie: PART_NUMBER=ROCKET_LAUNCHER_0001; path=/ # # When client requests a URL in path "/" on this server, it sends: # # Cookie: CUSTOMER=WILE_E_COYOTE; PART_NUMBER=ROCKET_LAUNCHER_0001 # # Client receives: # # Set-Cookie: SHIPPING=FEDEX; path=/fo # # When client requests a URL in path "/" on this server, it sends: # # Cookie: CUSTOMER=WILE_E_COYOTE; PART_NUMBER=ROCKET_LAUNCHER_0001 # # When client requests a URL in path "/foo" on this server, it sends: # # Cookie: CUSTOMER=WILE_E_COYOTE; PART_NUMBER=ROCKET_LAUNCHER_0001; SHIPPING=FEDEX # # The last Cookie is buggy, because both specifications say that the # most specific cookie must be sent first. SHIPPING=FEDEX is the # most specific and should thus be first. 
year_plus_one = time.localtime()[0] + 1 headers = [] c = CookieJar(DefaultCookiePolicy(rfc2965 = True)) #req = urllib.request.Request("http://1.1.1.1/", # headers={"Host": "www.acme.com:80"}) req = urllib.request.Request("http://www.acme.com:80/", headers={"Host": "www.acme.com:80"}) headers.append( "Set-Cookie: CUSTOMER=WILE_E_COYOTE; path=/ ; " "expires=Wednesday, 09-Nov-%d 23:12:40 GMT" % year_plus_one) res = FakeResponse(headers, "http://www.acme.com/") c.extract_cookies(res, req) req = urllib.request.Request("http://www.acme.com/") c.add_cookie_header(req) self.assertEqual(req.get_header("Cookie"), "CUSTOMER=WILE_E_COYOTE") self.assertEqual(req.get_header("Cookie2"), '$Version="1"') headers.append("Set-Cookie: PART_NUMBER=ROCKET_LAUNCHER_0001; path=/") res = FakeResponse(headers, "http://www.acme.com/") c.extract_cookies(res, req) req = urllib.request.Request("http://www.acme.com/foo/bar") c.add_cookie_header(req) h = req.get_header("Cookie") self.assertIn("PART_NUMBER=ROCKET_LAUNCHER_0001", h) self.assertIn("CUSTOMER=WILE_E_COYOTE", h) headers.append('Set-Cookie: SHIPPING=FEDEX; path=/foo') res = FakeResponse(headers, "http://www.acme.com") c.extract_cookies(res, req) req = urllib.request.Request("http://www.acme.com/") c.add_cookie_header(req) h = req.get_header("Cookie") self.assertIn("PART_NUMBER=ROCKET_LAUNCHER_0001", h) self.assertIn("CUSTOMER=WILE_E_COYOTE", h) self.assertNotIn("SHIPPING=FEDEX", h) req = urllib.request.Request("http://www.acme.com/foo/") c.add_cookie_header(req) h = req.get_header("Cookie") self.assertIn("PART_NUMBER=ROCKET_LAUNCHER_0001", h) self.assertIn("CUSTOMER=WILE_E_COYOTE", h) self.assertTrue(h.startswith("SHIPPING=FEDEX;")) def test_netscape_example_2(self): # Second Example transaction sequence: # # Assume all mappings from above have been cleared. # # Client receives: # # Set-Cookie: PART_NUMBER=ROCKET_LAUNCHER_0001; path=/ # # When client requests a URL in path "/" on this server, it sends: # # Cookie: PART_NUMBER=ROCKET_LAUNCHER_0001 # # Client receives: # # Set-Cookie: PART_NUMBER=RIDING_ROCKET_0023; path=/ammo # # When client requests a URL in path "/ammo" on this server, it sends: # # Cookie: PART_NUMBER=RIDING_ROCKET_0023; PART_NUMBER=ROCKET_LAUNCHER_0001 # # NOTE: There are two name/value pairs named "PART_NUMBER" due to # the inheritance of the "/" mapping in addition to the "/ammo" mapping. c = CookieJar() headers = [] req = urllib.request.Request("http://www.acme.com/") headers.append("Set-Cookie: PART_NUMBER=ROCKET_LAUNCHER_0001; path=/") res = FakeResponse(headers, "http://www.acme.com/") c.extract_cookies(res, req) req = urllib.request.Request("http://www.acme.com/") c.add_cookie_header(req) self.assertEqual(req.get_header("Cookie"), "PART_NUMBER=ROCKET_LAUNCHER_0001") headers.append( "Set-Cookie: PART_NUMBER=RIDING_ROCKET_0023; path=/ammo") res = FakeResponse(headers, "http://www.acme.com/") c.extract_cookies(res, req) req = urllib.request.Request("http://www.acme.com/ammo") c.add_cookie_header(req) self.assertTrue(re.search(r"PART_NUMBER=RIDING_ROCKET_0023;\s*" "PART_NUMBER=ROCKET_LAUNCHER_0001", req.get_header("Cookie"))) def test_ietf_example_1(self): #------------------------------------------------------------------- # Then we test with the examples from draft-ietf-http-state-man-mec-03.txt # # 5. EXAMPLES c = CookieJar(DefaultCookiePolicy(rfc2965=True)) # # 5.1 Example 1 # # Most detail of request and response headers has been omitted. Assume # the user agent has no stored cookies. # # 1. 
User Agent -> Server # # POST /acme/login HTTP/1.1 # [form data] # # User identifies self via a form. # # 2. Server -> User Agent # # HTTP/1.1 200 OK # Set-Cookie2: Customer="WILE_E_COYOTE"; Version="1"; Path="/acme" # # Cookie reflects user's identity. cookie = interact_2965( c, 'http://www.acme.com/acme/login', 'Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"') self.assertTrue(not cookie) # # 3. User Agent -> Server # # POST /acme/pickitem HTTP/1.1 # Cookie: $Version="1"; Customer="WILE_E_COYOTE"; $Path="/acme" # [form data] # # User selects an item for ``shopping basket.'' # # 4. Server -> User Agent # # HTTP/1.1 200 OK # Set-Cookie2: Part_Number="Rocket_Launcher_0001"; Version="1"; # Path="/acme" # # Shopping basket contains an item. cookie = interact_2965(c, 'http://www.acme.com/acme/pickitem', 'Part_Number="Rocket_Launcher_0001"; ' 'Version="1"; Path="/acme"'); self.assertTrue(re.search( r'^\$Version="?1"?; Customer="?WILE_E_COYOTE"?; \$Path="/acme"$', cookie)) # # 5. User Agent -> Server # # POST /acme/shipping HTTP/1.1 # Cookie: $Version="1"; # Customer="WILE_E_COYOTE"; $Path="/acme"; # Part_Number="Rocket_Launcher_0001"; $Path="/acme" # [form data] # # User selects shipping method from form. # # 6. Server -> User Agent # # HTTP/1.1 200 OK # Set-Cookie2: Shipping="FedEx"; Version="1"; Path="/acme" # # New cookie reflects shipping method. cookie = interact_2965(c, "http://www.acme.com/acme/shipping", 'Shipping="FedEx"; Version="1"; Path="/acme"') self.assertTrue(re.search(r'^\$Version="?1"?;', cookie)) self.assertTrue(re.search(r'Part_Number="?Rocket_Launcher_0001"?;' '\s*\$Path="\/acme"', cookie)) self.assertTrue(re.search(r'Customer="?WILE_E_COYOTE"?;\s*\$Path="\/acme"', cookie)) # # 7. User Agent -> Server # # POST /acme/process HTTP/1.1 # Cookie: $Version="1"; # Customer="WILE_E_COYOTE"; $Path="/acme"; # Part_Number="Rocket_Launcher_0001"; $Path="/acme"; # Shipping="FedEx"; $Path="/acme" # [form data] # # User chooses to process order. # # 8. Server -> User Agent # # HTTP/1.1 200 OK # # Transaction is complete. cookie = interact_2965(c, "http://www.acme.com/acme/process") self.assertTrue( re.search(r'Shipping="?FedEx"?;\s*\$Path="\/acme"', cookie) and "WILE_E_COYOTE" in cookie) # # The user agent makes a series of requests on the origin server, after # each of which it receives a new cookie. All the cookies have the same # Path attribute and (default) domain. Because the request URLs all have # /acme as a prefix, and that matches the Path attribute, each request # contains all the cookies received so far. def test_ietf_example_2(self): # 5.2 Example 2 # # This example illustrates the effect of the Path attribute. All detail # of request and response headers has been omitted. Assume the user agent # has no stored cookies. c = CookieJar(DefaultCookiePolicy(rfc2965=True)) # Imagine the user agent has received, in response to earlier requests, # the response headers # # Set-Cookie2: Part_Number="Rocket_Launcher_0001"; Version="1"; # Path="/acme" # # and # # Set-Cookie2: Part_Number="Riding_Rocket_0023"; Version="1"; # Path="/acme/ammo" interact_2965( c, "http://www.acme.com/acme/ammo/specific", 'Part_Number="Rocket_Launcher_0001"; Version="1"; Path="/acme"', 'Part_Number="Riding_Rocket_0023"; Version="1"; Path="/acme/ammo"') # A subsequent request by the user agent to the (same) server for URLs of # the form /acme/ammo/... 
would include the following request header: # # Cookie: $Version="1"; # Part_Number="Riding_Rocket_0023"; $Path="/acme/ammo"; # Part_Number="Rocket_Launcher_0001"; $Path="/acme" # # Note that the NAME=VALUE pair for the cookie with the more specific Path # attribute, /acme/ammo, comes before the one with the less specific Path # attribute, /acme. Further note that the same cookie name appears more # than once. cookie = interact_2965(c, "http://www.acme.com/acme/ammo/...") self.assertTrue( re.search(r"Riding_Rocket_0023.*Rocket_Launcher_0001", cookie)) # A subsequent request by the user agent to the (same) server for a URL of # the form /acme/parts/ would include the following request header: # # Cookie: $Version="1"; Part_Number="Rocket_Launcher_0001"; $Path="/acme" # # Here, the second cookie's Path attribute /acme/ammo is not a prefix of # the request URL, /acme/parts/, so the cookie does not get forwarded to # the server. cookie = interact_2965(c, "http://www.acme.com/acme/parts/") self.assertIn("Rocket_Launcher_0001", cookie) self.assertNotIn("Riding_Rocket_0023", cookie) def test_rejection(self): # Test rejection of Set-Cookie2 responses based on domain, path, port. pol = DefaultCookiePolicy(rfc2965=True) c = LWPCookieJar(policy=pol) max_age = "max-age=3600" # illegal domain (no embedded dots) cookie = interact_2965(c, "http://www.acme.com", 'foo=bar; domain=".com"; version=1') self.assertTrue(not c) # legal domain cookie = interact_2965(c, "http://www.acme.com", 'ping=pong; domain="acme.com"; version=1') self.assertEqual(len(c), 1) # illegal domain (host prefix "www.a" contains a dot) cookie = interact_2965(c, "http://www.a.acme.com", 'whiz=bang; domain="acme.com"; version=1') self.assertEqual(len(c), 1) # legal domain cookie = interact_2965(c, "http://www.a.acme.com", 'wow=flutter; domain=".a.acme.com"; version=1') self.assertEqual(len(c), 2) # can't partially match an IP-address cookie = interact_2965(c, "http://125.125.125.125", 'zzzz=ping; domain="125.125.125"; version=1') self.assertEqual(len(c), 2) # illegal path (must be prefix of request path) cookie = interact_2965(c, "http://www.sol.no", 'blah=rhubarb; domain=".sol.no"; path="/foo"; ' 'version=1') self.assertEqual(len(c), 2) # legal path cookie = interact_2965(c, "http://www.sol.no/foo/bar", 'bing=bong; domain=".sol.no"; path="/foo"; ' 'version=1') self.assertEqual(len(c), 3) # illegal port (request-port not in list) cookie = interact_2965(c, "http://www.sol.no", 'whiz=ffft; domain=".sol.no"; port="90,100"; ' 'version=1') self.assertEqual(len(c), 3) # legal port cookie = interact_2965( c, "http://www.sol.no", r'bang=wallop; version=1; domain=".sol.no"; ' r'port="90,100, 80,8080"; ' r'max-age=100; Comment = "Just kidding! 
(\"|\\\\) "') self.assertEqual(len(c), 4) # port attribute without any value (current port) cookie = interact_2965(c, "http://www.sol.no", 'foo9=bar; version=1; domain=".sol.no"; port; ' 'max-age=100;') self.assertEqual(len(c), 5) # encoded path # LWP has this test, but unescaping allowed path characters seems # like a bad idea, so I think this should fail: ## cookie = interact_2965(c, "http://www.sol.no/foo/", ## r'foo8=bar; version=1; path="/%66oo"') # but this is OK, because '<' is not an allowed HTTP URL path # character: cookie = interact_2965(c, "http://www.sol.no/<oo/", r'foo8=bar; version=1; path="/%3coo"') self.assertEqual(len(c), 6) # save and restore filename = test.support.TESTFN try: c.save(filename, ignore_discard=True) old = repr(c) c = LWPCookieJar(policy=pol) c.load(filename, ignore_discard=True) finally: try: os.unlink(filename) except OSError: pass self.assertEqual(old, repr(c)) def test_url_encoding(self): # Try some URL encodings of the PATHs. # (the behaviour here has changed from libwww-perl) c = CookieJar(DefaultCookiePolicy(rfc2965=True)) interact_2965(c, "http://www.acme.com/foo%2f%25/" "%3c%3c%0Anew%C3%A5/%C3%A5", "foo = bar; version = 1") cookie = interact_2965( c, "http://www.acme.com/foo%2f%25/<<%0anew\345/\346\370\345", 'bar=baz; path="/foo/"; version=1'); version_re = re.compile(r'^\$version=\"?1\"?', re.I) self.assertIn("foo=bar", cookie) self.assertTrue(version_re.search(cookie)) cookie = interact_2965( c, "http://www.acme.com/foo/%25/<<%0anew\345/\346\370\345") self.assertTrue(not cookie) # unicode URL doesn't raise exception cookie = interact_2965(c, "http://www.acme.com/\xfc") def test_mozilla(self): # Save / load Mozilla/Netscape cookie file format. year_plus_one = time.localtime()[0] + 1 filename = test.support.TESTFN c = MozillaCookieJar(filename, policy=DefaultCookiePolicy(rfc2965=True)) interact_2965(c, "http://www.acme.com/", "foo1=bar; max-age=100; Version=1") interact_2965(c, "http://www.acme.com/", 'foo2=bar; port="80"; max-age=100; Discard; Version=1') interact_2965(c, "http://www.acme.com/", "foo3=bar; secure; Version=1") expires = "expires=09-Nov-%d 23:12:40 GMT" % (year_plus_one,) interact_netscape(c, "http://www.foo.com/", "fooa=bar; %s" % expires) interact_netscape(c, "http://www.foo.com/", "foob=bar; Domain=.foo.com; %s" % expires) interact_netscape(c, "http://www.foo.com/", "fooc=bar; Domain=www.foo.com; %s" % expires) def save_and_restore(cj, ignore_discard): try: cj.save(ignore_discard=ignore_discard) new_c = MozillaCookieJar(filename, DefaultCookiePolicy(rfc2965=True)) new_c.load(ignore_discard=ignore_discard) finally: try: os.unlink(filename) except OSError: pass return new_c new_c = save_and_restore(c, True) self.assertEqual(len(new_c), 6) # none discarded self.assertIn("name='foo1', value='bar'", repr(new_c)) new_c = save_and_restore(c, False) self.assertEqual(len(new_c), 4) # 2 of them discarded on save self.assertIn("name='foo1', value='bar'", repr(new_c)) def test_netscape_misc(self): # Some additional Netscape cookies tests. c = CookieJar() headers = [] req = urllib.request.Request("http://foo.bar.acme.com/foo") # Netscape allows a host part that contains dots headers.append("Set-Cookie: Customer=WILE_E_COYOTE; domain=.acme.com") res = FakeResponse(headers, "http://www.acme.com/foo") c.extract_cookies(res, req) # and that the domain is the same as the host without adding a leading # dot to the domain. Should not quote even if strange chars are used # in the cookie value. 
headers.append("Set-Cookie: PART_NUMBER=3,4; domain=foo.bar.acme.com") res = FakeResponse(headers, "http://www.acme.com/foo") c.extract_cookies(res, req) req = urllib.request.Request("http://foo.bar.acme.com/foo") c.add_cookie_header(req) self.assertIn("PART_NUMBER=3,4", req.get_header("Cookie")) self.assertIn("Customer=WILE_E_COYOTE",req.get_header("Cookie")) def test_intranet_domains_2965(self): # Test handling of local intranet hostnames without a dot. c = CookieJar(DefaultCookiePolicy(rfc2965=True)) interact_2965(c, "http://example/", "foo1=bar; PORT; Discard; Version=1;") cookie = interact_2965(c, "http://example/", 'foo2=bar; domain=".local"; Version=1') self.assertIn("foo1=bar", cookie) interact_2965(c, "http://example/", 'foo3=bar; Version=1') cookie = interact_2965(c, "http://example/") self.assertIn("foo2=bar", cookie) self.assertEqual(len(c), 3) def test_intranet_domains_ns(self): c = CookieJar(DefaultCookiePolicy(rfc2965 = False)) interact_netscape(c, "http://example/", "foo1=bar") cookie = interact_netscape(c, "http://example/", 'foo2=bar; domain=.local') self.assertEqual(len(c), 2) self.assertIn("foo1=bar", cookie) cookie = interact_netscape(c, "http://example/") self.assertIn("foo2=bar", cookie) self.assertEqual(len(c), 2) def test_empty_path(self): # Test for empty path # Broken web-server ORION/1.3.38 returns to the client response like # # Set-Cookie: JSESSIONID=ABCDERANDOM123; Path= # # ie. with Path set to nothing. # In this case, extract_cookies() must set cookie to / (root) c = CookieJar(DefaultCookiePolicy(rfc2965 = True)) headers = [] req = urllib.request.Request("http://www.ants.com/") headers.append("Set-Cookie: JSESSIONID=ABCDERANDOM123; Path=") res = FakeResponse(headers, "http://www.ants.com/") c.extract_cookies(res, req) req = urllib.request.Request("http://www.ants.com/") c.add_cookie_header(req) self.assertEqual(req.get_header("Cookie"), "JSESSIONID=ABCDERANDOM123") self.assertEqual(req.get_header("Cookie2"), '$Version="1"') # missing path in the request URI req = urllib.request.Request("http://www.ants.com:8080") c.add_cookie_header(req) self.assertEqual(req.get_header("Cookie"), "JSESSIONID=ABCDERANDOM123") self.assertEqual(req.get_header("Cookie2"), '$Version="1"') def test_session_cookies(self): year_plus_one = time.localtime()[0] + 1 # Check session cookies are deleted properly by # CookieJar.clear_session_cookies method req = urllib.request.Request('http://www.perlmeister.com/scripts') headers = [] headers.append("Set-Cookie: s1=session;Path=/scripts") headers.append("Set-Cookie: p1=perm; Domain=.perlmeister.com;" "Path=/;expires=Fri, 02-Feb-%d 23:24:20 GMT" % year_plus_one) headers.append("Set-Cookie: p2=perm;Path=/;expires=Fri, " "02-Feb-%d 23:24:20 GMT" % year_plus_one) headers.append("Set-Cookie: s2=session;Path=/scripts;" "Domain=.perlmeister.com") headers.append('Set-Cookie2: s3=session;Version=1;Discard;Path="/"') res = FakeResponse(headers, 'http://www.perlmeister.com/scripts') c = CookieJar() c.extract_cookies(res, req) # How many session/permanent cookies do we have? counter = {"session_after": 0, "perm_after": 0, "session_before": 0, "perm_before": 0} for cookie in c: key = "%s_before" % cookie.value counter[key] = counter[key] + 1 c.clear_session_cookies() # How many now? 
for cookie in c: key = "%s_after" % cookie.value counter[key] = counter[key] + 1 self.assertTrue(not ( # a permanent cookie got lost accidentally counter["perm_after"] != counter["perm_before"] or # a session cookie hasn't been cleared counter["session_after"] != 0 or # we didn't have session cookies in the first place counter["session_before"] == 0)) def test_main(verbose=None): test.support.run_unittest( DateTimeTests, HeaderTests, CookieTests, FileCookieJarTests, LWPCookieTests, ) if __name__ == "__main__": test_main(verbose=True)
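# A minimal sketch of the request/response cycle that _interact() above
# drives, using the same stdlib APIs (CookieJar, DefaultCookiePolicy,
# urllib.request.Request) plus this file's FakeResponse helper; the URL and
# the sid=abc123 cookie are illustrative values, not taken from the tests.
def demo_cookie_roundtrip():
    jar = CookieJar(DefaultCookiePolicy(rfc2965=True))
    req = urllib.request.Request("http://www.example.com/")
    res = FakeResponse(["Set-Cookie: sid=abc123; path=/"],
                       "http://www.example.com/")
    jar.extract_cookies(res, req)     # server response -> cookies stored
    req2 = urllib.request.Request("http://www.example.com/page")
    jar.add_cookie_header(req2)       # stored cookies -> Cookie: header
    return req2.get_header("Cookie")  # 'sid=abc123'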
mit
fzalkow/scikit-learn
sklearn/linear_model/randomized_l1.py
95
23365
""" Randomized Lasso/Logistic: feature selection based on Lasso and sparse Logistic Regression """ # Author: Gael Varoquaux, Alexandre Gramfort # # License: BSD 3 clause import itertools from abc import ABCMeta, abstractmethod import warnings import numpy as np from scipy.sparse import issparse from scipy import sparse from scipy.interpolate import interp1d from .base import center_data from ..base import BaseEstimator, TransformerMixin from ..externals import six from ..externals.joblib import Memory, Parallel, delayed from ..utils import (as_float_array, check_random_state, check_X_y, check_array, safe_mask, ConvergenceWarning) from ..utils.validation import check_is_fitted from .least_angle import lars_path, LassoLarsIC from .logistic import LogisticRegression ############################################################################### # Randomized linear model: feature selection def _resample_model(estimator_func, X, y, scaling=.5, n_resampling=200, n_jobs=1, verbose=False, pre_dispatch='3*n_jobs', random_state=None, sample_fraction=.75, **params): random_state = check_random_state(random_state) # We are generating 1 - weights, and not weights n_samples, n_features = X.shape if not (0 < scaling < 1): raise ValueError( "'scaling' should be between 0 and 1. Got %r instead." % scaling) scaling = 1. - scaling scores_ = 0.0 for active_set in Parallel(n_jobs=n_jobs, verbose=verbose, pre_dispatch=pre_dispatch)( delayed(estimator_func)( X, y, weights=scaling * random_state.random_integers( 0, 1, size=(n_features,)), mask=(random_state.rand(n_samples) < sample_fraction), verbose=max(0, verbose - 1), **params) for _ in range(n_resampling)): scores_ += active_set scores_ /= n_resampling return scores_ class BaseRandomizedLinearModel(six.with_metaclass(ABCMeta, BaseEstimator, TransformerMixin)): """Base class to implement randomized linear models for feature selection This implements the strategy by Meinshausen and Buhlman: stability selection with randomized sampling, and random re-weighting of the penalty. """ @abstractmethod def __init__(self): pass _center_data = staticmethod(center_data) def fit(self, X, y): """Fit the model using X, y as training data. Parameters ---------- X : array-like, sparse matrix shape = [n_samples, n_features] Training data. y : array-like, shape = [n_samples] Target values. Returns ------- self : object Returns an instance of self. 
""" X, y = check_X_y(X, y, ['csr', 'csc', 'coo'], y_numeric=True) X = as_float_array(X, copy=False) n_samples, n_features = X.shape X, y, X_mean, y_mean, X_std = self._center_data(X, y, self.fit_intercept, self.normalize) estimator_func, params = self._make_estimator_and_params(X, y) memory = self.memory if isinstance(memory, six.string_types): memory = Memory(cachedir=memory) scores_ = memory.cache( _resample_model, ignore=['verbose', 'n_jobs', 'pre_dispatch'] )( estimator_func, X, y, scaling=self.scaling, n_resampling=self.n_resampling, n_jobs=self.n_jobs, verbose=self.verbose, pre_dispatch=self.pre_dispatch, random_state=self.random_state, sample_fraction=self.sample_fraction, **params) if scores_.ndim == 1: scores_ = scores_[:, np.newaxis] self.all_scores_ = scores_ self.scores_ = np.max(self.all_scores_, axis=1) return self def _make_estimator_and_params(self, X, y): """Return the parameters passed to the estimator""" raise NotImplementedError def get_support(self, indices=False): """Return a mask, or list, of the features/indices selected.""" check_is_fitted(self, 'scores_') mask = self.scores_ > self.selection_threshold return mask if not indices else np.where(mask)[0] # XXX: the two function below are copy/pasted from feature_selection, # Should we add an intermediate base class? def transform(self, X): """Transform a new matrix using the selected features""" mask = self.get_support() X = check_array(X) if len(mask) != X.shape[1]: raise ValueError("X has a different shape than during fitting.") return check_array(X)[:, safe_mask(X, mask)] def inverse_transform(self, X): """Transform a new matrix using the selected features""" support = self.get_support() if X.ndim == 1: X = X[None, :] Xt = np.zeros((X.shape[0], support.size)) Xt[:, support] = X return Xt ############################################################################### # Randomized lasso: regression settings def _randomized_lasso(X, y, weights, mask, alpha=1., verbose=False, precompute=False, eps=np.finfo(np.float).eps, max_iter=500): X = X[safe_mask(X, mask)] y = y[mask] # Center X and y to avoid fit the intercept X -= X.mean(axis=0) y -= y.mean() alpha = np.atleast_1d(np.asarray(alpha, dtype=np.float)) X = (1 - weights) * X with warnings.catch_warnings(): warnings.simplefilter('ignore', ConvergenceWarning) alphas_, _, coef_ = lars_path(X, y, Gram=precompute, copy_X=False, copy_Gram=False, alpha_min=np.min(alpha), method='lasso', verbose=verbose, max_iter=max_iter, eps=eps) if len(alpha) > 1: if len(alphas_) > 1: # np.min(alpha) < alpha_min interpolator = interp1d(alphas_[::-1], coef_[:, ::-1], bounds_error=False, fill_value=0.) scores = (interpolator(alpha) != 0.0) else: scores = np.zeros((X.shape[1], len(alpha)), dtype=np.bool) else: scores = coef_[:, -1] != 0.0 return scores class RandomizedLasso(BaseRandomizedLinearModel): """Randomized Lasso. Randomized Lasso works by resampling the train data and computing a Lasso on each resampling. In short, the features selected more often are good features. It is also known as stability selection. Read more in the :ref:`User Guide <randomized_l1>`. Parameters ---------- alpha : float, 'aic', or 'bic', optional The regularization parameter alpha parameter in the Lasso. Warning: this is not the alpha parameter in the stability selection article which is scaling. scaling : float, optional The alpha parameter in the stability selection article used to randomly scale the features. Should be between 0 and 1. 
sample_fraction : float, optional The fraction of samples to be used in each randomized design. Should be between 0 and 1. If 1, all samples are used. n_resampling : int, optional Number of randomized models. selection_threshold: float, optional The score above which features should be selected. fit_intercept : boolean, optional whether to calculate the intercept for this model. If set to false, no intercept will be used in calculations (e.g. data is expected to be already centered). verbose : boolean or integer, optional Sets the verbosity amount normalize : boolean, optional, default True If True, the regressors X will be normalized before regression. precompute : True | False | 'auto' Whether to use a precomputed Gram matrix to speed up calculations. If set to 'auto' let us decide. The Gram matrix can also be passed as argument. max_iter : integer, optional Maximum number of iterations to perform in the Lars algorithm. eps : float, optional The machine-precision regularization in the computation of the Cholesky diagonal factors. Increase this for very ill-conditioned systems. Unlike the 'tol' parameter in some iterative optimization-based algorithms, this parameter does not control the tolerance of the optimization. n_jobs : integer, optional Number of CPUs to use during the resampling. If '-1', use all the CPUs random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. pre_dispatch : int, or string, optional Controls the number of jobs that get dispatched during parallel execution. Reducing this number can be useful to avoid an explosion of memory consumption when more jobs get dispatched than CPUs can process. This parameter can be: - None, in which case all the jobs are immediately created and spawned. Use this for lightweight and fast-running jobs, to avoid delays due to on-demand spawning of the jobs - An int, giving the exact number of total jobs that are spawned - A string, giving an expression as a function of n_jobs, as in '2*n_jobs' memory : Instance of joblib.Memory or string Used for internal caching. By default, no caching is done. If a string is given, it is the path to the caching directory. Attributes ---------- scores_ : array, shape = [n_features] Feature scores between 0 and 1. all_scores_ : array, shape = [n_features, n_reg_parameter] Feature scores between 0 and 1 for all values of the regularization \ parameter. The reference article suggests ``scores_`` is the max of \ ``all_scores_``. Examples -------- >>> from sklearn.linear_model import RandomizedLasso >>> randomized_lasso = RandomizedLasso() Notes ----- See examples/linear_model/plot_sparse_recovery.py for an example. 
References ---------- Stability selection Nicolai Meinshausen, Peter Buhlmann Journal of the Royal Statistical Society: Series B Volume 72, Issue 4, pages 417-473, September 2010 DOI: 10.1111/j.1467-9868.2010.00740.x See also -------- RandomizedLogisticRegression, LogisticRegression """ def __init__(self, alpha='aic', scaling=.5, sample_fraction=.75, n_resampling=200, selection_threshold=.25, fit_intercept=True, verbose=False, normalize=True, precompute='auto', max_iter=500, eps=np.finfo(np.float).eps, random_state=None, n_jobs=1, pre_dispatch='3*n_jobs', memory=Memory(cachedir=None, verbose=0)): self.alpha = alpha self.scaling = scaling self.sample_fraction = sample_fraction self.n_resampling = n_resampling self.fit_intercept = fit_intercept self.max_iter = max_iter self.verbose = verbose self.normalize = normalize self.precompute = precompute self.eps = eps self.random_state = random_state self.n_jobs = n_jobs self.selection_threshold = selection_threshold self.pre_dispatch = pre_dispatch self.memory = memory def _make_estimator_and_params(self, X, y): assert self.precompute in (True, False, None, 'auto') alpha = self.alpha if alpha in ('aic', 'bic'): model = LassoLarsIC(precompute=self.precompute, criterion=self.alpha, max_iter=self.max_iter, eps=self.eps) model.fit(X, y) self.alpha_ = alpha = model.alpha_ return _randomized_lasso, dict(alpha=alpha, max_iter=self.max_iter, eps=self.eps, precompute=self.precompute) ############################################################################### # Randomized logistic: classification settings def _randomized_logistic(X, y, weights, mask, C=1., verbose=False, fit_intercept=True, tol=1e-3): X = X[safe_mask(X, mask)] y = y[mask] if issparse(X): size = len(weights) weight_dia = sparse.dia_matrix((1 - weights, 0), (size, size)) X = X * weight_dia else: X *= (1 - weights) C = np.atleast_1d(np.asarray(C, dtype=np.float)) scores = np.zeros((X.shape[1], len(C)), dtype=np.bool) for this_C, this_scores in zip(C, scores.T): # XXX : would be great to do it with a warm_start ... clf = LogisticRegression(C=this_C, tol=tol, penalty='l1', dual=False, fit_intercept=fit_intercept) clf.fit(X, y) this_scores[:] = np.any( np.abs(clf.coef_) > 10 * np.finfo(np.float).eps, axis=0) return scores class RandomizedLogisticRegression(BaseRandomizedLinearModel): """Randomized Logistic Regression Randomized Regression works by resampling the train data and computing a LogisticRegression on each resampling. In short, the features selected more often are good features. It is also known as stability selection. Read more in the :ref:`User Guide <randomized_l1>`. Parameters ---------- C : float, optional, default=1 The regularization parameter C in the LogisticRegression. scaling : float, optional, default=0.5 The alpha parameter in the stability selection article used to randomly scale the features. Should be between 0 and 1. sample_fraction : float, optional, default=0.75 The fraction of samples to be used in each randomized design. Should be between 0 and 1. If 1, all samples are used. n_resampling : int, optional, default=200 Number of randomized models. selection_threshold : float, optional, default=0.25 The score above which features should be selected. fit_intercept : boolean, optional, default=True whether to calculate the intercept for this model. If set to false, no intercept will be used in calculations (e.g. data is expected to be already centered). 
verbose : boolean or integer, optional Sets the verbosity amount normalize : boolean, optional, default=True If True, the regressors X will be normalized before regression. tol : float, optional, default=1e-3 tolerance for stopping criteria of LogisticRegression n_jobs : integer, optional Number of CPUs to use during the resampling. If '-1', use all the CPUs random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. pre_dispatch : int, or string, optional Controls the number of jobs that get dispatched during parallel execution. Reducing this number can be useful to avoid an explosion of memory consumption when more jobs get dispatched than CPUs can process. This parameter can be: - None, in which case all the jobs are immediately created and spawned. Use this for lightweight and fast-running jobs, to avoid delays due to on-demand spawning of the jobs - An int, giving the exact number of total jobs that are spawned - A string, giving an expression as a function of n_jobs, as in '2*n_jobs' memory : Instance of joblib.Memory or string Used for internal caching. By default, no caching is done. If a string is given, it is the path to the caching directory. Attributes ---------- scores_ : array, shape = [n_features] Feature scores between 0 and 1. all_scores_ : array, shape = [n_features, n_reg_parameter] Feature scores between 0 and 1 for all values of the regularization \ parameter. The reference article suggests ``scores_`` is the max \ of ``all_scores_``. Examples -------- >>> from sklearn.linear_model import RandomizedLogisticRegression >>> randomized_logistic = RandomizedLogisticRegression() Notes ----- See examples/linear_model/plot_sparse_recovery.py for an example. 
    References
    ----------
    Stability selection
    Nicolai Meinshausen, Peter Buhlmann
    Journal of the Royal Statistical Society: Series B
    Volume 72, Issue 4, pages 417-473, September 2010
    DOI: 10.1111/j.1467-9868.2010.00740.x

    See also
    --------
    RandomizedLasso, Lasso, ElasticNet
    """
    def __init__(self, C=1, scaling=.5, sample_fraction=.75,
                 n_resampling=200, selection_threshold=.25, tol=1e-3,
                 fit_intercept=True, verbose=False, normalize=True,
                 random_state=None, n_jobs=1, pre_dispatch='3*n_jobs',
                 memory=Memory(cachedir=None, verbose=0)):
        self.C = C
        self.scaling = scaling
        self.sample_fraction = sample_fraction
        self.n_resampling = n_resampling
        self.fit_intercept = fit_intercept
        self.verbose = verbose
        self.normalize = normalize
        self.tol = tol
        self.random_state = random_state
        self.n_jobs = n_jobs
        self.selection_threshold = selection_threshold
        self.pre_dispatch = pre_dispatch
        self.memory = memory

    def _make_estimator_and_params(self, X, y):
        params = dict(C=self.C, tol=self.tol,
                      fit_intercept=self.fit_intercept)
        return _randomized_logistic, params

    def _center_data(self, X, y, fit_intercept, normalize=False):
        """Center the data in X but not in y"""
        X, _, Xmean, _, X_std = center_data(X, y, fit_intercept,
                                            normalize=normalize)
        return X, y, Xmean, y, X_std


###############################################################################
# Stability paths

def _lasso_stability_path(X, y, mask, weights, eps):
    "Inner loop of lasso_stability_path"
    X = X * weights[np.newaxis, :]
    X = X[safe_mask(X, mask), :]
    y = y[mask]

    alpha_max = np.max(np.abs(np.dot(X.T, y))) / X.shape[0]
    alpha_min = eps * alpha_max  # set for early stopping in path
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', ConvergenceWarning)
        alphas, _, coefs = lars_path(X, y, method='lasso', verbose=False,
                                     alpha_min=alpha_min)
    # Scale alpha by alpha_max
    alphas /= alphas[0]
    # Sort alphas in ascending order
    alphas = alphas[::-1]
    coefs = coefs[:, ::-1]
    # Get rid of the alphas that are too small
    mask = alphas >= eps
    # We also want to keep the first one: it should be close to the OLS
    # solution
    mask[0] = True
    alphas = alphas[mask]
    coefs = coefs[:, mask]
    return alphas, coefs


def lasso_stability_path(X, y, scaling=0.5, random_state=None,
                         n_resampling=200, n_grid=100,
                         sample_fraction=0.75,
                         eps=4 * np.finfo(np.float).eps, n_jobs=1,
                         verbose=False):
    """Stability path based on randomized Lasso estimates

    Read more in the :ref:`User Guide <randomized_l1>`.

    Parameters
    ----------
    X : array-like, shape = [n_samples, n_features]
        training data.

    y : array-like, shape = [n_samples]
        target values.

    scaling : float, optional, default=0.5
        The alpha parameter in the stability selection article used to
        randomly scale the features. Should be between 0 and 1.

    random_state : integer or numpy.random.RandomState, optional
        The generator used to randomize the design.

    n_resampling : int, optional, default=200
        Number of randomized models.

    n_grid : int, optional, default=100
        Number of grid points. The path is linearly reinterpolated on a grid
        between 0 and 1 before computing the scores.

    sample_fraction : float, optional, default=0.75
        The fraction of samples to be used in each randomized design.
        Should be between 0 and 1. If 1, all samples are used.

    eps : float, optional
        Smallest value of alpha / alpha_max considered

    n_jobs : integer, optional
        Number of CPUs to use during the resampling.
If '-1', use all the CPUs verbose : boolean or integer, optional Sets the verbosity amount Returns ------- alphas_grid : array, shape ~ [n_grid] The grid points between 0 and 1: alpha/alpha_max scores_path : array, shape = [n_features, n_grid] The scores for each feature along the path. Notes ----- See examples/linear_model/plot_sparse_recovery.py for an example. """ rng = check_random_state(random_state) if not (0 < scaling < 1): raise ValueError("Parameter 'scaling' should be between 0 and 1." " Got %r instead." % scaling) n_samples, n_features = X.shape paths = Parallel(n_jobs=n_jobs, verbose=verbose)( delayed(_lasso_stability_path)( X, y, mask=rng.rand(n_samples) < sample_fraction, weights=1. - scaling * rng.random_integers(0, 1, size=(n_features,)), eps=eps) for k in range(n_resampling)) all_alphas = sorted(list(set(itertools.chain(*[p[0] for p in paths])))) # Take approximately n_grid values stride = int(max(1, int(len(all_alphas) / float(n_grid)))) all_alphas = all_alphas[::stride] if not all_alphas[-1] == 1: all_alphas.append(1.) all_alphas = np.array(all_alphas) scores_path = np.zeros((n_features, len(all_alphas))) for alphas, coefs in paths: if alphas[0] != 0: alphas = np.r_[0, alphas] coefs = np.c_[np.ones((n_features, 1)), coefs] if alphas[-1] != all_alphas[-1]: alphas = np.r_[alphas, all_alphas[-1]] coefs = np.c_[coefs, np.zeros((n_features, 1))] scores_path += (interp1d(alphas, coefs, kind='nearest', bounds_error=False, fill_value=0, axis=-1)(all_alphas) != 0) scores_path /= n_resampling return all_alphas, scores_path
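# -----------------------------------------------------------------------------
# Usage sketch (added for illustration; not part of the original module).
# A minimal stability-selection run on synthetic data: only a handful of the
# generated features are informative, and RandomizedLasso should give those
# features scores close to 1. `make_regression` is assumed to be importable
# from sklearn.datasets; all names and values below are illustrative only.
if __name__ == '__main__':
    from sklearn.datasets import make_regression

    # 50 features, of which only 5 carry signal.
    X_demo, y_demo = make_regression(n_samples=100, n_features=50,
                                     n_informative=5, random_state=0)
    rlasso = RandomizedLasso(alpha='aic', n_resampling=100, random_state=0)
    rlasso.fit(X_demo, y_demo)
    # Features whose selection frequency exceeds selection_threshold
    # (0.25 by default) are reported by get_support().
    print("selected features:", np.where(rlasso.get_support())[0])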
bsd-3-clause
wdurhamh/statsmodels
statsmodels/tsa/mlemodel.py
36
2101
"""Base Classes for Likelihood Models in time series analysis Warning: imports numdifftools Created on Sun Oct 10 15:00:47 2010 Author: josef-pktd License: BSD """ import numpy as np try: import numdifftools as ndt except ImportError: pass from statsmodels.base.model import LikelihoodModel #copied from sandbox/regression/mle.py #TODO: I take it this is only a stub and should be included in another # model class? class TSMLEModel(LikelihoodModel): """ univariate time series model for estimation with maximum likelihood Note: This is not working yet """ def __init__(self, endog, exog=None): #need to override p,q (nar,nma) correctly super(TSMLEModel, self).__init__(endog, exog) #set default arma(1,1) self.nar = 1 self.nma = 1 #self.initialize() def geterrors(self, params): raise NotImplementedError def loglike(self, params): """ Loglikelihood for timeseries model Notes ----- needs to be overwritten by subclass """ raise NotImplementedError def score(self, params): """ Score vector for Arma model """ #return None #print params jac = ndt.Jacobian(self.loglike, stepMax=1e-4) return jac(params)[-1] def hessian(self, params): """ Hessian of arma model. Currently uses numdifftools """ #return None Hfun = ndt.Jacobian(self.score, stepMax=1e-4) return Hfun(params)[-1] def fit(self, start_params=None, maxiter=5000, method='fmin', tol=1e-08): '''estimate model by minimizing negative loglikelihood does this need to be overwritten ? ''' if start_params is None and hasattr(self, '_start_params'): start_params = self._start_params #start_params = np.concatenate((0.05*np.ones(self.nar + self.nma), [1])) mlefit = super(TSMLEModel, self).fit(start_params=start_params, maxiter=maxiter, method=method, tol=tol) return mlefit
bsd-3-clause
levilucio/SyVOLT
UMLRT2Kiltera_MM/Properties/from_thesis/HMM6_then_CompleteLHS.py
1
8761
from core.himesis import Himesis, HimesisPreConditionPatternLHS import uuid class HMM6_then_CompleteLHS(HimesisPreConditionPatternLHS): def __init__(self): """ Creates the himesis graph representing the AToM3 model HMM6_then_CompleteLHS. """ # Flag this instance as compiled now self.is_compiled = True super(HMM6_then_CompleteLHS, self).__init__(name='HMM6_then_CompleteLHS', num_nodes=0, edges=[]) # Set the graph attributes self["mm__"] = [] self["MT_constraint__"] = """#=============================================================================== # This code is executed after the nodes in the LHS have been matched. # You can access a matched node labelled n by: PreNode('n'). # To access attribute x of node n, use: PreNode('n')['x']. # The given constraint must evaluate to a boolean expression: # returning True enables the rule to be applied, # returning False forbids the rule from being applied. #=============================================================================== return True """ self["name"] = """""" self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'MM6_then') # Nodes that represent match classes #Nodes that represent apply classes # match class New() node self.add_node() self.vs[0]["MT_subtypeMatching__"] = False self.vs[0]["MT_pre__attr1"] = """ #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True """ self.vs[0]["MT_label__"] = """1""" self.vs[0]["MT_subtypes__"] = [] self.vs[0]["MT_dirty__"] = False self.vs[0]["mm__"] = """MT_pre__New""" self.vs[0]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'') # match class Name() node self.add_node() self.vs[1]["MT_subtypeMatching__"] = False self.vs[1]["MT_pre__attr1"] = """ #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True """ self.vs[1]["MT_label__"] = """2""" self.vs[1]["MT_subtypes__"] = [] self.vs[1]["MT_dirty__"] = False self.vs[1]["mm__"] = """MT_pre__Name""" self.vs[1]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'') # Nodes that represent the match associations of the property. # Nodes that represent the apply associations of the property. # apply association New--channelNames-->Name node self.add_node() self.vs[2]["MT_subtypeMatching__"] = False self.vs[2]["MT_pre__attr1"] = """ #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. 
# If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return attr_value == "channelNames" """ self.vs[2]["MT_label__"] = """3""" self.vs[2]["MT_subtypes__"] = [] self.vs[2]["MT_dirty__"] = False self.vs[2]["mm__"] = """MT_pre__directLink_T""" self.vs[2]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'assoc2') # Nodes that represent trace relations # Add the edges self.add_edges([ (0,2), # apply_class New() -> association channelNames (2,1), # association channelNames -> apply_class Name() ]) # Add the attribute equations self["equations"] = [((0,'pivot'),('constant','NEW')), ] def eval_attr11(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_attr12(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_attr13(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return attr_value == "channelNames" def constraint(self, PreNode, graph): """ Executable constraint code. @param PreNode: Function taking an integer as parameter and returns the node corresponding to that label. """ #=============================================================================== # This code is executed after the nodes in the LHS have been matched. # You can access a matched node labelled n by: PreNode('n'). # To access attribute x of node n, use: PreNode('n')['x']. # The given constraint must evaluate to a boolean expression: # returning True enables the rule to be applied, # returning False forbids the rule from being applied. #=============================================================================== return True
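# -----------------------------------------------------------------------------
# Inspection sketch (added for illustration; not generated by AToM3). Himesis
# graphs are igraph-based, so the compiled pattern can be examined directly.
# This assumes the himesis runtime (core.himesis) is importable and that
# vertices support igraph-style attribute access; the attribute names are the
# ones set in __init__ above.
if __name__ == '__main__':
    lhs = HMM6_then_CompleteLHS()
    for v in lhs.vs:
        print('%s -> %s' % (v["MT_label__"], v["mm__"]))
    # The association node (label 3) only matches links whose attribute value
    # is "channelNames", as encoded in eval_attr13 above.
    print(lhs.eval_attr13("channelNames", None))  # expected: True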
mit
Acidburn0zzz/webm.webm-tools
.ycm_extra_conf.py
18
5012
import os import ycm_core # These are the compilation flags that will be used in case there's no # compilation database set (by default, one is not set). # CHANGE THIS LIST OF FLAGS. YES, THIS IS THE DROID YOU HAVE BEEN LOOKING FOR. flags = [ '-Wall', '-Wextra', '-Werror', '-Wc++98-compat', '-Wno-long-long', '-Wno-variadic-macros', '-fexceptions', '-DNDEBUG', # THIS IS IMPORTANT! Without a "-std=<something>" flag, clang won't know which # language to use when compiling headers. So it will guess. Badly. So C++ # headers will be compiled as C headers. You don't want that so ALWAYS specify # a "-std=<something>". # For a C project, you would set this to something like 'c99' instead of # 'c++11'. '-std=c++11', # ...and the same thing goes for the magic -x option which specifies the # language that the files to be compiled are written in. This is mostly # relevant for c++ headers. # For a C project, you would set this to 'c' instead of 'c++'. '-x', 'c++', '-isystem', '../BoostParts', '-isystem', # This path will only work on OS X, but extra paths that don't exist are not # harmful '/System/Library/Frameworks/Python.framework/Headers', '-isystem', '../llvm/include', '-isystem', '../llvm/tools/clang/include', '-I', '.', '-I', './ClangCompleter', '-isystem', './tests/gmock/gtest', '-isystem', './tests/gmock/gtest/include', '-isystem', './tests/gmock', '-isystem', './tests/gmock/include', '-isystem', '/usr/include', '-isystem', '/usr/local/include', '-isystem', '/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/../lib/c++/v1', '-isystem', '/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include', '-I', '../libwebm' ] # Set this to the absolute path to the folder (NOT the file!) containing the # compile_commands.json file to use that instead of 'flags'. See here for # more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html # # Most projects will NOT need to set this to anything; you can just change the # 'flags' list of compilation flags. Notice that YCM itself uses that approach. compilation_database_folder = '' if os.path.exists( compilation_database_folder ): database = ycm_core.CompilationDatabase( compilation_database_folder ) else: database = None SOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ] def DirectoryOfThisScript(): return os.path.dirname( os.path.abspath( __file__ ) ) def MakeRelativePathsInFlagsAbsolute( flags, working_directory ): if not working_directory: return list( flags ) new_flags = [] make_next_absolute = False path_flags = [ '-isystem', '-I', '-iquote', '--sysroot=' ] for flag in flags: new_flag = flag if make_next_absolute: make_next_absolute = False if not flag.startswith( '/' ): new_flag = os.path.join( working_directory, flag ) for path_flag in path_flags: if flag == path_flag: make_next_absolute = True break if flag.startswith( path_flag ): path = flag[ len( path_flag ): ] new_flag = path_flag + os.path.join( working_directory, path ) break if new_flag: new_flags.append( new_flag ) return new_flags def IsHeaderFile( filename ): extension = os.path.splitext( filename )[ 1 ] return extension in [ '.h', '.hxx', '.hpp', '.hh' ] def GetCompilationInfoForFile( filename ): # The compilation_commands.json file generated by CMake does not have entries # for header files. So we do our best by asking the db for flags for a # corresponding source file, if any. If one exists, the flags for that file # should be good enough. 
if IsHeaderFile( filename ): basename = os.path.splitext( filename )[ 0 ] for extension in SOURCE_EXTENSIONS: replacement_file = basename + extension if os.path.exists( replacement_file ): compilation_info = database.GetCompilationInfoForFile( replacement_file ) if compilation_info.compiler_flags_: return compilation_info return None return database.GetCompilationInfoForFile( filename ) def FlagsForFile( filename, **kwargs ): if database: # Bear in mind that compilation_info.compiler_flags_ does NOT return a # python list, but a "list-like" StringVec object compilation_info = GetCompilationInfoForFile( filename ) if not compilation_info: return None final_flags = MakeRelativePathsInFlagsAbsolute( compilation_info.compiler_flags_, compilation_info.compiler_working_dir_ ) # NOTE: This is just for YouCompleteMe; it's highly likely that your project # does NOT need to remove the stdlib flag. DO NOT USE THIS IN YOUR # ycm_extra_conf IF YOU'RE NOT 100% SURE YOU NEED IT. try: final_flags.remove( '-stdlib=libc++' ) except ValueError: pass else: relative_to = DirectoryOfThisScript() final_flags = MakeRelativePathsInFlagsAbsolute( flags, relative_to ) return { 'flags': final_flags, 'do_cache': True }
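# -----------------------------------------------------------------------------
# Quick self-check (added for illustration; YouCompleteMe itself only ever
# calls FlagsForFile). With `compilation_database_folder` left empty above,
# the static `flags` list is returned with relative paths made absolute
# against this script's directory. The file name below is a placeholder, and
# the `ycm_core` import at the top of this file only resolves inside a YCM
# environment.
if __name__ == '__main__':
    result = FlagsForFile( '/tmp/example.cc' )
    assert result[ 'do_cache' ] is True
    # Relative include paths such as '../libwebm' should now be absolute.
    print( '\n'.join( result[ 'flags' ] ) )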
bsd-3-clause
BrunoCaimar/ArcREST
src/arcrest/_abstract/abstract.py
3
10254
from __future__ import absolute_import import zipfile import datetime import calendar import glob import mimetypes import os from ..packages.six.moves import http_client as httplib from ..web._base import BaseWebOperations ########################################################################### class BaseCMP(BaseWebOperations): """ base community mapping program class""" pass class BaseOpenData(BaseWebOperations): """ base opendata site""" pass ######################################################################## class BaseGeoEnrichment(BaseWebOperations): """ base geoenrichment class """ pass ######################################################################## class BaseBookmark(object): """ base Bookmark class """ pass ######################################################################## class BaseBaseMap(object): """base BaseMap object class""" pass ######################################################################## class BaseWebMap(object): """base webmap object class""" pass ######################################################################## class BaseOperationalLayerObject(object): """ base operational layer object class """ pass ######################################################################## class BaseGPObject(object): """ base geoprocessing object """ _value = None _paramName = None _dataType = None ######################################################################## class BaseDomain(object): """ all domain values inherit this class """ pass ######################################################################## class BaseDefinition(object): """ class that all definition objects inherit from """ pass ######################################################################## class BaseSymbol(object): """ class that all symbol object inherit from """ pass ######################################################################## class BaseRenderer(object): """ all renderers inherit this class """ pass ######################################################################## class BaseParameters(object): """ All parameter objects used for Portal/AGOL """ pass ######################################################################## class BaseSecurityHandler(BaseWebOperations): """ All Security Objects inherit from this class """ _token = None _valid = True _message = "" _is_portal = False #---------------------------------------------------------------------- @property def message(self): """ returns any messages """ return self._message #---------------------------------------------------------------------- @property def valid(self): """ returns boolean wether handler is valid """ return self._valid ######################################################################## class AbstractGeometry(object): """ Base Geometry Class """ pass ######################################################################## class BaseFilter(object): """ base filter class """ pass ######################################################################## class DynamicData(object): """base class for data source""" pass ######################################################################## class DataSource(object): """base class for data source""" pass ######################################################################## class BaseAGSServer(BaseWebOperations): """ base class from which all service inherit """ _url = None _proxy_url = None _proxy_port = None #---------------------------------------------------------------------- @property def proxy_port(self): """gets the 
proxy port""" return self._proxy_port #---------------------------------------------------------------------- @property def proxy_url(self): """ gets the proxy URL """ return self._proxy_url #---------------------------------------------------------------------- @proxy_url.setter def proxy_url(self, value): """ sets the proxy url """ self._proxy_url = value #---------------------------------------------------------------------- @proxy_port.setter def proxy_port(self, value): """ sets the proxy port """ if isinstance(value, int): self._proxy_port = value #---------------------------------------------------------------------- @property def url(self): return self._url #---------------------------------------------------------------------- @url.setter def url(self, value): self._url = value #---------------------------------------------------------------------- def _tostr(self,obj): """ converts a object to list, if object is a list, it creates a comma seperated string. """ if not obj: return '' if isinstance(obj, list): return ', '.join(map(self._tostr, obj)) return str(obj) #---------------------------------------------------------------------- def _unicode_convert(self, obj): """ converts unicode to anscii """ if isinstance(obj, dict): return {self._unicode_convert(key): self._unicode_convert(value) for key, value in obj.items()} elif isinstance(obj, list): return [self._unicode_convert(element) for element in obj] elif isinstance(obj, unicode): return obj.encode('utf-8') else: return obj # This function is a workaround to deal with what's typically described as a # problem with the web server closing a connection. This is problem # experienced with www.arcgis.com (first encountered 12/13/2012). The problem # and workaround is described here: # http://bobrochel.blogspot.com/2010/11/bad-servers-chunked-encoding-and.html def patch_http_response_read(func): def inner(*args): try: return func(*args) except httplib.IncompleteRead as e: return e return inner httplib.HTTPResponse.read = patch_http_response_read(httplib.HTTPResponse.read) ######################################################################## class BaseAGOLClass(BaseWebOperations): _token = None _org_url ="http://www.arcgis.com" _url = "http://www.arcgis.com/sharing/rest" _surl = "https://www.arcgis.com/sharing/rest" _referer_url = "http://www.arcgis.com" _useragent = "ArcREST" _token_url = 'https://www.arcgis.com/sharing/rest/generateToken' _proxy_url = None _proxy_port = None def initURL(self,org_url=None, token_url=None,referer_url=None): if org_url is not None and org_url != '': if not org_url.startswith('http://') and not org_url.startswith('https://'): org_url = 'http://' + org_url self._org_url = org_url if self._org_url.lower().find('/sharing/rest') > -1: self._url = self._org_url else: self._url = self._org_url + "/sharing/rest" if self._url.startswith('http://'): self._surl = self._url.replace('http://', 'https://') else: self._surl = self._url if token_url is None: self._token_url = self._surl + '/generateToken' else: self._token_url = token_url if referer_url is None: if not self._org_url.startswith('http://'): self._referer_url = self._org_url.replace('http://', 'https://') else: self._referer_url = self._org_url else: self._referer_url = referer_url #---------------------------------------------------------------------- def _unzip_file(self, zip_file, out_folder): """ unzips a file to a given folder """ try: zf = zipfile.ZipFile(zip_file, 'r') zf.extractall(path=out_folder) zf.close() del zf return True except: 
return False #---------------------------------------------------------------------- def _date_handler(self, obj): if isinstance(obj, datetime.datetime): return calendar.timegm(obj.utctimetuple()) * 1000 else: return obj #---------------------------------------------------------------------- def _list_files(self, path): """lists files in a given directory""" files = [] for f in glob.glob(pathname=path): files.append(f) files.sort() return files #---------------------------------------------------------------------- def _get_content_type(self, filename): """ gets the content type of a file """ mntype = mimetypes.guess_type(filename)[0] filename, fileExtension = os.path.splitext(filename) if mntype is None and\ fileExtension.lower() == ".csv": mntype = "text/csv" elif mntype is None and \ fileExtension.lower() == ".sd": mntype = "File/sd" elif mntype is None: #mntype = 'application/octet-stream' mntype= "File/%s" % fileExtension.replace('.', '') return mntype #---------------------------------------------------------------------- def _tostr(self,obj): """ converts a object to list, if object is a list, it creates a comma seperated string. """ if not obj: return '' if isinstance(obj, list): return ', '.join(map(self._tostr, obj)) return str(obj) #---------------------------------------------------------------------- def _unicode_convert(self, obj): """ converts unicode to anscii """ if isinstance(obj, dict): return {self._unicode_convert(key): self._unicode_convert(value) for key, value in obj.items()} elif isinstance(obj, list): return [self._unicode_convert(element) for element in obj] elif isinstance(obj, unicode): return obj.encode('utf-8') else: return obj
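# -----------------------------------------------------------------------------
# Illustration (not part of the original module): how initURL() normalizes an
# organization URL into the sharing-REST endpoints, using only the class
# defined above. The org URL is a placeholder; this assumes BaseWebOperations
# needs no constructor arguments, and because of the relative imports at the
# top of this file the snippet only runs inside the arcrest package.
if __name__ == '__main__':
    agol = BaseAGOLClass()
    agol.initURL(org_url='myorg.maps.arcgis.com')
    print(agol._org_url)    # http://myorg.maps.arcgis.com
    print(agol._url)        # http://myorg.maps.arcgis.com/sharing/rest
    print(agol._token_url)  # https://myorg.maps.arcgis.com/sharing/rest/generateToken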
apache-2.0
SebastianDeiss/django-auth-qr2auth
django_auth_qr2auth/core.py
1
10436
#
# Copyright (C) 2014-2015 Sebastian Deiss, all rights reserved.
#
# This file is a part of QR2Auth.
#
# QR2Auth is free software; you can redistribute it and/or modify it under the
# terms of the MIT license. For further information see LICENSE.txt in the
# parent folder.
#
# This file contains the QR2Auth core implementation.
#

from Crypto.Random.random import StrongRandom, Random
from Crypto.Hash import HMAC, SHA512
from Crypto.Cipher import AES
from django.conf import settings

import logging
import base64
import string
import qrcode.image.svg


logger = logging.getLogger(__name__)


class QR2AuthCore(object):
    '''
    QR2Auth core implementation. This class provides the core functionality
    for the challenge-response protocol. QR2Auth is a challenge-response
    protocol with symmetric keys.

    TODO: Review required!
    '''

    def __init__(self, shared_secret=None, enc_key=None):
        '''
        Initialize QR2Auth core

        :param str shared_secret: The user's shared secret.
        :param str enc_key: The passphrase to decrypt the shared secret.
        '''
        # TODO: make variables private!
        self.challenge = None
        self.start = None
        self.end = None
        if settings.Q2A_OTP_LENGTH in range(6, 11):
            self.otp_length = settings.Q2A_OTP_LENGTH
        else:
            self.otp_length = 8  # default: 8-digit OTP
        # encryption key for shared secret
        self.enc_key = enc_key
        self.shared_secret = shared_secret

    def set_shared_secret(self, shared_secret):
        '''
        Setter for the shared secret

        :param str shared_secret: The user's shared secret.
        :rtype: void
        '''
        self.shared_secret = shared_secret

    def set_challenge(self, challenge):
        '''
        Setter for the challenge

        :param str challenge: The QR2Auth challenge.
        :rtype: void
        '''
        self.challenge = challenge

    def get_challenge(self):
        '''
        Generate a QR2Auth challenge. A QR2Auth challenge consists of 128
        random bits generated by PyCrypto with StrongRandom. These random
        bits are hashed with SHA512. This hash value represents the
        challenge.

        :return: A tuple containing the QR2Auth challenge as well as the
                 range of the OTP in the response hash value.
        :rtype: tuple
        '''
        random_pool = StrongRandom()
        nonce = random_pool.getrandbits(128)
        nonce_hash = SHA512.new(str(nonce)).hexdigest()
        self.start = int(random_pool.randint(0, 128))
        '''
        Start and end of the range must be between 0 and the length of the
        hash. We use SHA512, so in this case start and end must be between
        0 and 128.
        '''
        self.end = self.start + self.otp_length
        if self.end > len(nonce_hash):
            self.end = self.end - len(nonce_hash)
        self.challenge = nonce_hash
        return self.challenge, self.start, self.end

    def keygen(self):
        '''
        Generate the secret key for QR2Auth, a.k.a. the shared secret.
        A QR2Auth secret key consists of 256 random bits generated by
        PyCrypto with StrongRandom. The random bits are hashed with SHA512.
        This hash value represents the secret key, a.k.a. the shared secret.

        :return: A QR2Auth shared secret.
        :rtype: str
        '''
        random_pool = StrongRandom()
        key_seed = random_pool.getrandbits(256)
        key = SHA512.new(str(key_seed))
        self.shared_secret = key.hexdigest()
        return self.shared_secret

    def xor_key(self):
        '''
        Bitwise XOR the shared secret with the QR password.

        :return: A tuple containing the QR password and the XORed shared
                 secret.
        :rtype: tuple
        '''
        # The QR password has 4 characters and we need 128 characters for
        # the XOR with the shared secret.
        qrpassword = self.__pwgen()
        padded_pwd = qrpassword * 32
        '''
        Convert the strings to a list of character-pair tuples, go through
        each tuple converting the characters to their ASCII codes (ord),
        perform an exclusive or on the codes, convert the result back to a
        character (chr) and merge the resulting characters into a string.
        '''
        xored = ''.join(chr(ord(a) ^ ord(b))
                        for a, b in zip(padded_pwd, self.shared_secret))
        return qrpassword, base64.encodestring(xored)

    def make_otp(self):
        '''
        Create a QR2Auth one-time password

        :raise NotImplementedError: The server will never create an OTP,
                                    so there is no point in implementing
                                    that.
        '''
        raise NotImplementedError

    def verify_response(self, received_otp, start, end):
        '''
        Verify the one-time password (OTP), a.k.a. the response. A QR2Auth
        response consists of ``otp_length`` (default 8) hex digits of the
        HMAC-SHA512 value of the challenge, with the shared secret used as
        the key for the HMAC.

        :param str received_otp: The submitted one-time password from the
                                 client.
        :param str start: Start of the OTP range
        :param str end: End of the OTP range
        :return: True if the OTP is valid, otherwise False
        :rtype: bool
        '''
        # decrypt shared secret
        aes = AESCipher(self.enc_key)
        _shared_secret = aes.decrypt(self.shared_secret.__str__())
        otp_hash = HMAC.new(_shared_secret, self.challenge.__str__(), SHA512)
        # convert the received OTP range from string to int
        start = int(start)
        end = int(end)
        # do some logging
        logger.debug('CORE Using shared secret: %s' % _shared_secret)
        logger.debug('CORE Using challenge: %s' % self.challenge)
        logger.debug('CORE HMAC is: %s' % otp_hash.hexdigest())
        logger.debug('CORE OTP range is: (%i, %i)' % (start, end))
        if end < start:
            otp = otp_hash.hexdigest()[start:]
            otp += otp_hash.hexdigest()[:end]
        else:
            otp = otp_hash.hexdigest()[start:end]
        # and log the OTPs
        logger.debug('CORE Received OTP: %s' % received_otp)
        logger.debug('CORE Computed OTP: %s' % otp)
        if otp == received_otp:
            return True
        return False

    def qrgen(self, is_key=False, key=''):
        '''
        Generate an SVG image containing the QR code

        :param bool is_key: True if a shared secret QR code is created,
                            otherwise False
        :param str key: The user's shared secret
        :return: An image object containing the SVG image
        :rtype: Object
        '''
        qrfactory = qrcode.image.svg.SvgImage
        # generate the QR code
        if is_key is True:
            '''
            Add a prefix so that another application can distinguish the
            key from the challenge.
            '''
            qr_content = '{key}'
            qr_content += key
            # test vectors
            key_hmac = HMAC.new(self.shared_secret, self.shared_secret,
                                SHA512)
            qr_content += ','
            qr_content += key_hmac.hexdigest()
            # make the QR code
            qrimg = qrcode.make(qr_content, image_factory=qrfactory)
        else:
            qrimg = qrcode.make('{' + str(self.start) + ',' + str(self.end)
                                + '}' + self.challenge,
                                image_factory=qrfactory)
        return qrimg

    #
    # Internals
    #
    def __pwgen(self, size=4, chars=string.digits + string.ascii_lowercase):
        '''
        Generate a password. This password is used as the QR password for
        the bitwise XOR of the shared secret.

        :param str size: The length of the password
        :param str chars: The characters the password should contain. In
                          this case we want digits and ASCII lowercase
                          letters.
        :return: A generated QR password
        :rtype: string
        '''
        return ''.join(Random.random.choice(chars) for _ in range(size))


class AESCipher(object):
    '''
    AES cipher for QR2Auth. AES is used to store the user's shared secret
    encrypted in the database.

    TODO: Review required!
''' def __init__(self, passphrase): ''' Initialize AESCipher :param str passphrase: The passphrase to generate an AES key. ''' self.passphrase = passphrase self.mode = AES.MODE_CBC self.iv = None self.block_size = 32 self.padding = '@' # use the Sha512 hash of the passphrase as AES key # TODO: Check if this key generation is strong enough self.key = SHA512.new(self.passphrase).digest()[:self.block_size] def encrypt(self, plaintext): ''' Encrypt a string with AES :param str plaintext: The text to encrypt :return: Returns the base64 encoded initialization vector + cipher text of the given plaintext. :rtype: str ''' # Initialization vector must be 16 bytes long self.iv = Random.new().read(self.block_size)[:16] aes = AES.new(self.key, self.mode, self.iv) return base64.b64encode(self.iv + aes.encrypt(self._pad(plaintext))) def decrypt(self, ciphertext): ''' Decrypt a string encrypted with AES :param str ciphertext: The base64 encoded initialization vector + cipher text. :return: Returns the plaintext of the given cipher text. :rtype: str ''' self.iv = base64.b64decode(ciphertext)[:16] ciphertext = base64.b64decode(ciphertext)[16:] aes = AES.new(self.key, AES.MODE_CBC, self.iv) return aes.decrypt(ciphertext).rstrip(self.padding) # # Internals # def _pad(self, s): ''' Pad the text before encryption, because the length of the text must be a multiple of the block size. :param str s: The string to add a padding. :return: Returns the given string + padding. :rtype: str ''' return s + (self.block_size - len(s) % self.block_size) * self.padding
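# -----------------------------------------------------------------------------
# Round-trip sketch (added for illustration; not part of the original module).
# Encrypting and decrypting a 128-character hex shared secret with the
# AESCipher above. AESCipher itself has no Django dependency, but the module's
# imports (PyCrypto, qrcode, Django) must resolve; the passphrase and secret
# below are placeholders.
if __name__ == '__main__':
    cipher = AESCipher('correct horse battery staple')
    secret = '0123456789abcdef' * 8  # same shape as a SHA512 hexdigest
    token = cipher.encrypt(secret)
    assert cipher.decrypt(token) == secret
    print('AES round trip OK')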
mit
amyvmiwei/kbengine
kbe/res/scripts/common/Lib/idlelib/PyShell.py
6
58202
#! /usr/bin/env python3 import getopt import os import os.path import re import socket import subprocess import sys import threading import time import tokenize import traceback import types import io import linecache from code import InteractiveInterpreter from platform import python_version, system try: from tkinter import * except ImportError: print("** IDLE can't import Tkinter. " \ "Your Python may not be configured for Tk. **", file=sys.__stderr__) sys.exit(1) import tkinter.messagebox as tkMessageBox from idlelib.EditorWindow import EditorWindow, fixwordbreaks from idlelib.FileList import FileList from idlelib.ColorDelegator import ColorDelegator from idlelib.UndoDelegator import UndoDelegator from idlelib.OutputWindow import OutputWindow from idlelib.configHandler import idleConf from idlelib import idlever from idlelib import rpc from idlelib import Debugger from idlelib import RemoteDebugger from idlelib import macosxSupport HOST = '127.0.0.1' # python execution server on localhost loopback PORT = 0 # someday pass in host, port for remote debug capability # Override warnings module to write to warning_stream. Initialize to send IDLE # internal warnings to the console. ScriptBinding.check_syntax() will # temporarily redirect the stream to the shell window to display warnings when # checking user's code. warning_stream = sys.__stderr__ # None, at least on Windows, if no console. import warnings def idle_formatwarning(message, category, filename, lineno, line=None): """Format warnings the IDLE way.""" s = "\nWarning (from warnings module):\n" s += ' File \"%s\", line %s\n' % (filename, lineno) if line is None: line = linecache.getline(filename, lineno) line = line.strip() if line: s += " %s\n" % line s += "%s: %s\n" % (category.__name__, message) return s def idle_showwarning( message, category, filename, lineno, file=None, line=None): """Show Idle-format warning (after replacing warnings.showwarning). The differences are the formatter called, the file=None replacement, which can be None, the capture of the consequence AttributeError, and the output of a hard-coded prompt. """ if file is None: file = warning_stream try: file.write(idle_formatwarning( message, category, filename, lineno, line=line)) file.write(">>> ") except (AttributeError, OSError): pass # if file (probably __stderr__) is invalid, skip warning. _warnings_showwarning = None def capture_warnings(capture): "Replace warning.showwarning with idle_showwarning, or reverse." global _warnings_showwarning if capture: if _warnings_showwarning is None: _warnings_showwarning = warnings.showwarning warnings.showwarning = idle_showwarning else: if _warnings_showwarning is not None: warnings.showwarning = _warnings_showwarning _warnings_showwarning = None capture_warnings(True) def extended_linecache_checkcache(filename=None, orig_checkcache=linecache.checkcache): """Extend linecache.checkcache to preserve the <pyshell#...> entries Rather than repeating the linecache code, patch it to save the <pyshell#...> entries, call the original linecache.checkcache() (skipping them), and then restore the saved entries. orig_checkcache is bound at definition time to the original method, allowing it to be patched. 
""" cache = linecache.cache save = {} for key in list(cache): if key[:1] + key[-1:] == '<>': save[key] = cache.pop(key) orig_checkcache(filename) cache.update(save) # Patch linecache.checkcache(): linecache.checkcache = extended_linecache_checkcache class PyShellEditorWindow(EditorWindow): "Regular text edit window in IDLE, supports breakpoints" def __init__(self, *args): self.breakpoints = [] EditorWindow.__init__(self, *args) self.text.bind("<<set-breakpoint-here>>", self.set_breakpoint_here) self.text.bind("<<clear-breakpoint-here>>", self.clear_breakpoint_here) self.text.bind("<<open-python-shell>>", self.flist.open_shell) self.breakpointPath = os.path.join(idleConf.GetUserCfgDir(), 'breakpoints.lst') # whenever a file is changed, restore breakpoints def filename_changed_hook(old_hook=self.io.filename_change_hook, self=self): self.restore_file_breaks() old_hook() self.io.set_filename_change_hook(filename_changed_hook) if self.io.filename: self.restore_file_breaks() rmenu_specs = [ ("Cut", "<<cut>>", "rmenu_check_cut"), ("Copy", "<<copy>>", "rmenu_check_copy"), ("Paste", "<<paste>>", "rmenu_check_paste"), (None, None, None), ("Set Breakpoint", "<<set-breakpoint-here>>", None), ("Clear Breakpoint", "<<clear-breakpoint-here>>", None) ] def set_breakpoint(self, lineno): text = self.text filename = self.io.filename text.tag_add("BREAK", "%d.0" % lineno, "%d.0" % (lineno+1)) try: i = self.breakpoints.index(lineno) except ValueError: # only add if missing, i.e. do once self.breakpoints.append(lineno) try: # update the subprocess debugger debug = self.flist.pyshell.interp.debugger debug.set_breakpoint_here(filename, lineno) except: # but debugger may not be active right now.... pass def set_breakpoint_here(self, event=None): text = self.text filename = self.io.filename if not filename: text.bell() return lineno = int(float(text.index("insert"))) self.set_breakpoint(lineno) def clear_breakpoint_here(self, event=None): text = self.text filename = self.io.filename if not filename: text.bell() return lineno = int(float(text.index("insert"))) try: self.breakpoints.remove(lineno) except: pass text.tag_remove("BREAK", "insert linestart",\ "insert lineend +1char") try: debug = self.flist.pyshell.interp.debugger debug.clear_breakpoint_here(filename, lineno) except: pass def clear_file_breaks(self): if self.breakpoints: text = self.text filename = self.io.filename if not filename: text.bell() return self.breakpoints = [] text.tag_remove("BREAK", "1.0", END) try: debug = self.flist.pyshell.interp.debugger debug.clear_file_breaks(filename) except: pass def store_file_breaks(self): "Save breakpoints when file is saved" # XXX 13 Dec 2002 KBK Currently the file must be saved before it can # be run. The breaks are saved at that time. If we introduce # a temporary file save feature the save breaks functionality # needs to be re-verified, since the breaks at the time the # temp file is created may differ from the breaks at the last # permanent save of the file. Currently, a break introduced # after a save will be effective, but not persistent. # This is necessary to keep the saved breaks synched with the # saved file. # # Breakpoints are set as tagged ranges in the text. Certain # kinds of edits cause these ranges to be deleted: Inserting # or deleting a line just before a breakpoint, and certain # deletions prior to a breakpoint. These issues need to be # investigated and understood. It's not clear if they are # Tk issues or IDLE issues, or whether they can actually # be fixed. 
Since a modified file has to be saved before it is # run, and since self.breakpoints (from which the subprocess # debugger is loaded) is updated during the save, the visible # breaks stay synched with the subprocess even if one of these # unexpected breakpoint deletions occurs. breaks = self.breakpoints filename = self.io.filename try: with open(self.breakpointPath, "r") as fp: lines = fp.readlines() except OSError: lines = [] try: with open(self.breakpointPath, "w") as new_file: for line in lines: if not line.startswith(filename + '='): new_file.write(line) self.update_breakpoints() breaks = self.breakpoints if breaks: new_file.write(filename + '=' + str(breaks) + '\n') except OSError as err: if not getattr(self.root, "breakpoint_error_displayed", False): self.root.breakpoint_error_displayed = True tkMessageBox.showerror(title='IDLE Error', message='Unable to update breakpoint list:\n%s' % str(err), parent=self.text) def restore_file_breaks(self): self.text.update() # this enables setting "BREAK" tags to be visible if self.io is None: # can happen if IDLE closes due to the .update() call return filename = self.io.filename if filename is None: return if os.path.isfile(self.breakpointPath): with open(self.breakpointPath, "r") as fp: lines = fp.readlines() for line in lines: if line.startswith(filename + '='): breakpoint_linenumbers = eval(line[len(filename)+1:]) for breakpoint_linenumber in breakpoint_linenumbers: self.set_breakpoint(breakpoint_linenumber) def update_breakpoints(self): "Retrieves all the breakpoints in the current window" text = self.text ranges = text.tag_ranges("BREAK") linenumber_list = self.ranges_to_linenumbers(ranges) self.breakpoints = linenumber_list def ranges_to_linenumbers(self, ranges): lines = [] for index in range(0, len(ranges), 2): lineno = int(float(ranges[index].string)) end = int(float(ranges[index+1].string)) while lineno < end: lines.append(lineno) lineno += 1 return lines # XXX 13 Dec 2002 KBK Not used currently # def saved_change_hook(self): # "Extend base method - clear breaks if module is modified" # if not self.get_saved(): # self.clear_file_breaks() # EditorWindow.saved_change_hook(self) def _close(self): "Extend base method - clear breaks when module is closed" self.clear_file_breaks() EditorWindow._close(self) class PyShellFileList(FileList): "Extend base class: IDLE supports a shell and breakpoints" # override FileList's class variable, instances return PyShellEditorWindow # instead of EditorWindow when new edit windows are created. 
EditorWindow = PyShellEditorWindow pyshell = None def open_shell(self, event=None): if self.pyshell: self.pyshell.top.wakeup() else: self.pyshell = PyShell(self) if self.pyshell: if not self.pyshell.begin(): return None return self.pyshell class ModifiedColorDelegator(ColorDelegator): "Extend base class: colorizer for the shell window itself" def __init__(self): ColorDelegator.__init__(self) self.LoadTagDefs() def recolorize_main(self): self.tag_remove("TODO", "1.0", "iomark") self.tag_add("SYNC", "1.0", "iomark") ColorDelegator.recolorize_main(self) def LoadTagDefs(self): ColorDelegator.LoadTagDefs(self) theme = idleConf.GetOption('main','Theme','name') self.tagdefs.update({ "stdin": {'background':None,'foreground':None}, "stdout": idleConf.GetHighlight(theme, "stdout"), "stderr": idleConf.GetHighlight(theme, "stderr"), "console": idleConf.GetHighlight(theme, "console"), }) def removecolors(self): # Don't remove shell color tags before "iomark" for tag in self.tagdefs: self.tag_remove(tag, "iomark", "end") class ModifiedUndoDelegator(UndoDelegator): "Extend base class: forbid insert/delete before the I/O mark" def insert(self, index, chars, tags=None): try: if self.delegate.compare(index, "<", "iomark"): self.delegate.bell() return except TclError: pass UndoDelegator.insert(self, index, chars, tags) def delete(self, index1, index2=None): try: if self.delegate.compare(index1, "<", "iomark"): self.delegate.bell() return except TclError: pass UndoDelegator.delete(self, index1, index2) class MyRPCClient(rpc.RPCClient): def handle_EOF(self): "Override the base class - just re-raise EOFError" raise EOFError class ModifiedInterpreter(InteractiveInterpreter): def __init__(self, tkconsole): self.tkconsole = tkconsole locals = sys.modules['__main__'].__dict__ InteractiveInterpreter.__init__(self, locals=locals) self.save_warnings_filters = None self.restarting = False self.subprocess_arglist = None self.port = PORT self.original_compiler_flags = self.compile.compiler.flags _afterid = None rpcclt = None rpcsubproc = None def spawn_subprocess(self): if self.subprocess_arglist is None: self.subprocess_arglist = self.build_subprocess_arglist() self.rpcsubproc = subprocess.Popen(self.subprocess_arglist) def build_subprocess_arglist(self): assert (self.port!=0), ( "Socket should have been assigned a port number.") w = ['-W' + s for s in sys.warnoptions] # Maybe IDLE is installed and is being accessed via sys.path, # or maybe it's not installed and the idle.py script is being # run from the IDLE source directory. del_exitf = idleConf.GetOption('main', 'General', 'delete-exitfunc', default=False, type='bool') if __name__ == 'idlelib.PyShell': command = "__import__('idlelib.run').run.main(%r)" % (del_exitf,) else: command = "__import__('run').main(%r)" % (del_exitf,) return [sys.executable] + w + ["-c", command, str(self.port)] def start_subprocess(self): addr = (HOST, self.port) # GUI makes several attempts to acquire socket, listens for connection for i in range(3): time.sleep(i) try: self.rpcclt = MyRPCClient(addr) break except OSError as err: pass else: self.display_port_binding_error() return None # if PORT was 0, system will assign an 'ephemeral' port. Find it out: self.port = self.rpcclt.listening_sock.getsockname()[1] # if PORT was not 0, probably working with a remote execution server if PORT != 0: # To allow reconnection within the 2MSL wait (cf. Stevens TCP # V1, 18.6), set SO_REUSEADDR. 
Note that this can be problematic # on Windows since the implementation allows two active sockets on # the same address! self.rpcclt.listening_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.spawn_subprocess() #time.sleep(20) # test to simulate GUI not accepting connection # Accept the connection from the Python execution server self.rpcclt.listening_sock.settimeout(10) try: self.rpcclt.accept() except socket.timeout as err: self.display_no_subprocess_error() return None self.rpcclt.register("console", self.tkconsole) self.rpcclt.register("stdin", self.tkconsole.stdin) self.rpcclt.register("stdout", self.tkconsole.stdout) self.rpcclt.register("stderr", self.tkconsole.stderr) self.rpcclt.register("flist", self.tkconsole.flist) self.rpcclt.register("linecache", linecache) self.rpcclt.register("interp", self) self.transfer_path(with_cwd=True) self.poll_subprocess() return self.rpcclt def restart_subprocess(self, with_cwd=False): if self.restarting: return self.rpcclt self.restarting = True # close only the subprocess debugger debug = self.getdebugger() if debug: try: # Only close subprocess debugger, don't unregister gui_adap! RemoteDebugger.close_subprocess_debugger(self.rpcclt) except: pass # Kill subprocess, spawn a new one, accept connection. self.rpcclt.close() self.terminate_subprocess() console = self.tkconsole was_executing = console.executing console.executing = False self.spawn_subprocess() try: self.rpcclt.accept() except socket.timeout as err: self.display_no_subprocess_error() return None self.transfer_path(with_cwd=with_cwd) console.stop_readline() # annotate restart in shell window and mark it console.text.delete("iomark", "end-1c") if was_executing: console.write('\n') console.showprompt() halfbar = ((int(console.width) - 16) // 2) * '=' console.write(halfbar + ' RESTART ' + halfbar) console.text.mark_set("restart", "end-1c") console.text.mark_gravity("restart", "left") console.showprompt() # restart subprocess debugger if debug: # Restarted debugger connects to current instance of debug GUI gui = RemoteDebugger.restart_subprocess_debugger(self.rpcclt) # reload remote debugger breakpoints for all PyShellEditWindows debug.load_breakpoints() self.compile.compiler.flags = self.original_compiler_flags self.restarting = False return self.rpcclt def __request_interrupt(self): self.rpcclt.remotecall("exec", "interrupt_the_server", (), {}) def interrupt_subprocess(self): threading.Thread(target=self.__request_interrupt).start() def kill_subprocess(self): if self._afterid is not None: self.tkconsole.text.after_cancel(self._afterid) try: self.rpcclt.listening_sock.close() except AttributeError: # no socket pass try: self.rpcclt.close() except AttributeError: # no socket pass self.terminate_subprocess() self.tkconsole.executing = False self.rpcclt = None def terminate_subprocess(self): "Make sure subprocess is terminated" try: self.rpcsubproc.kill() except OSError: # process already terminated return else: try: self.rpcsubproc.wait() except OSError: return def transfer_path(self, with_cwd=False): if with_cwd: # Issue 13506 path = [''] # include Current Working Directory path.extend(sys.path) else: path = sys.path self.runcommand("""if 1: import sys as _sys _sys.path = %r del _sys \n""" % (path,)) active_seq = None def poll_subprocess(self): clt = self.rpcclt if clt is None: return try: response = clt.pollresponse(self.active_seq, wait=0.05) except (EOFError, OSError, KeyboardInterrupt): # lost connection or subprocess terminated itself, restart # [the KBI is from 
rpc.SocketIO.handle_EOF()] if self.tkconsole.closing: return response = None self.restart_subprocess() if response: self.tkconsole.resetoutput() self.active_seq = None how, what = response console = self.tkconsole.console if how == "OK": if what is not None: print(repr(what), file=console) elif how == "EXCEPTION": if self.tkconsole.getvar("<<toggle-jit-stack-viewer>>"): self.remote_stack_viewer() elif how == "ERROR": errmsg = "PyShell.ModifiedInterpreter: Subprocess ERROR:\n" print(errmsg, what, file=sys.__stderr__) print(errmsg, what, file=console) # we received a response to the currently active seq number: try: self.tkconsole.endexecuting() except AttributeError: # shell may have closed pass # Reschedule myself if not self.tkconsole.closing: self._afterid = self.tkconsole.text.after( self.tkconsole.pollinterval, self.poll_subprocess) debugger = None def setdebugger(self, debugger): self.debugger = debugger def getdebugger(self): return self.debugger def open_remote_stack_viewer(self): """Initiate the remote stack viewer from a separate thread. This method is called from the subprocess, and by returning from this method we allow the subprocess to unblock. After a bit the shell requests the subprocess to open the remote stack viewer which returns a static object looking at the last exception. It is queried through the RPC mechanism. """ self.tkconsole.text.after(300, self.remote_stack_viewer) return def remote_stack_viewer(self): from idlelib import RemoteObjectBrowser oid = self.rpcclt.remotequeue("exec", "stackviewer", ("flist",), {}) if oid is None: self.tkconsole.root.bell() return item = RemoteObjectBrowser.StubObjectTreeItem(self.rpcclt, oid) from idlelib.TreeWidget import ScrolledCanvas, TreeNode top = Toplevel(self.tkconsole.root) theme = idleConf.GetOption('main','Theme','name') background = idleConf.GetHighlight(theme, 'normal')['background'] sc = ScrolledCanvas(top, bg=background, highlightthickness=0) sc.frame.pack(expand=1, fill="both") node = TreeNode(sc.canvas, None, item) node.expand() # XXX Should GC the remote tree when closing the window gid = 0 def execsource(self, source): "Like runsource() but assumes complete exec source" filename = self.stuffsource(source) self.execfile(filename, source) def execfile(self, filename, source=None): "Execute an existing file" if source is None: with tokenize.open(filename) as fp: source = fp.read() try: code = compile(source, filename, "exec") except (OverflowError, SyntaxError): self.tkconsole.resetoutput() tkerr = self.tkconsole.stderr print('*** Error in script or command!\n', file=tkerr) print('Traceback (most recent call last):', file=tkerr) InteractiveInterpreter.showsyntaxerror(self, filename) self.tkconsole.showprompt() else: self.runcode(code) def runsource(self, source): "Extend base class method: Stuff the source in the line cache first" filename = self.stuffsource(source) self.more = 0 self.save_warnings_filters = warnings.filters[:] warnings.filterwarnings(action="error", category=SyntaxWarning) # at the moment, InteractiveInterpreter expects str assert isinstance(source, str) #if isinstance(source, str): # from idlelib import IOBinding # try: # source = source.encode(IOBinding.encoding) # except UnicodeError: # self.tkconsole.resetoutput() # self.write("Unsupported characters in input\n") # return try: # InteractiveInterpreter.runsource() calls its runcode() method, # which is overridden (see below) return InteractiveInterpreter.runsource(self, source, filename) finally: if self.save_warnings_filters is not None: 
warnings.filters[:] = self.save_warnings_filters self.save_warnings_filters = None def stuffsource(self, source): "Stuff source in the filename cache" filename = "<pyshell#%d>" % self.gid self.gid = self.gid + 1 lines = source.split("\n") linecache.cache[filename] = len(source)+1, 0, lines, filename return filename def prepend_syspath(self, filename): "Prepend sys.path with file's directory if not already included" self.runcommand("""if 1: _filename = %r import sys as _sys from os.path import dirname as _dirname _dir = _dirname(_filename) if not _dir in _sys.path: _sys.path.insert(0, _dir) del _filename, _sys, _dirname, _dir \n""" % (filename,)) def showsyntaxerror(self, filename=None): """Override Interactive Interpreter method: Use Colorizing Color the offending position instead of printing it and pointing at it with a caret. """ tkconsole = self.tkconsole text = tkconsole.text text.tag_remove("ERROR", "1.0", "end") type, value, tb = sys.exc_info() msg = getattr(value, 'msg', '') or value or "<no detail available>" lineno = getattr(value, 'lineno', '') or 1 offset = getattr(value, 'offset', '') or 0 if offset == 0: lineno += 1 #mark end of offending line if lineno == 1: pos = "iomark + %d chars" % (offset-1) else: pos = "iomark linestart + %d lines + %d chars" % \ (lineno-1, offset-1) tkconsole.colorize_syntax_error(text, pos) tkconsole.resetoutput() self.write("SyntaxError: %s\n" % msg) tkconsole.showprompt() def showtraceback(self): "Extend base class method to reset output properly" self.tkconsole.resetoutput() self.checklinecache() InteractiveInterpreter.showtraceback(self) if self.tkconsole.getvar("<<toggle-jit-stack-viewer>>"): self.tkconsole.open_stack_viewer() def checklinecache(self): c = linecache.cache for key in list(c.keys()): if key[:1] + key[-1:] != "<>": del c[key] def runcommand(self, code): "Run the code without invoking the debugger" # The code better not raise an exception! if self.tkconsole.executing: self.display_executing_dialog() return 0 if self.rpcclt: self.rpcclt.remotequeue("exec", "runcode", (code,), {}) else: exec(code, self.locals) return 1 def runcode(self, code): "Override base class method" if self.tkconsole.executing: self.interp.restart_subprocess() self.checklinecache() if self.save_warnings_filters is not None: warnings.filters[:] = self.save_warnings_filters self.save_warnings_filters = None debugger = self.debugger try: self.tkconsole.beginexecuting() if not debugger and self.rpcclt is not None: self.active_seq = self.rpcclt.asyncqueue("exec", "runcode", (code,), {}) elif debugger: debugger.run(code, self.locals) else: exec(code, self.locals) except SystemExit: if not self.tkconsole.closing: if tkMessageBox.askyesno( "Exit?", "Do you want to exit altogether?", default="yes", master=self.tkconsole.text): raise else: self.showtraceback() else: raise except: if use_subprocess: print("IDLE internal error in runcode()", file=self.tkconsole.stderr) self.showtraceback() self.tkconsole.endexecuting() else: if self.tkconsole.canceled: self.tkconsole.canceled = False print("KeyboardInterrupt", file=self.tkconsole.stderr) else: self.showtraceback() finally: if not use_subprocess: try: self.tkconsole.endexecuting() except AttributeError: # shell may have closed pass def write(self, s): "Override base class method" return self.tkconsole.stderr.write(s) def display_port_binding_error(self): tkMessageBox.showerror( "Port Binding Error", "IDLE can't bind to a TCP/IP port, which is necessary to " "communicate with its Python execution server. 
This might be " "because no networking is installed on this computer. " "Run IDLE with the -n command line switch to start without a " "subprocess and refer to Help/IDLE Help 'Running without a " "subprocess' for further details.", master=self.tkconsole.text) def display_no_subprocess_error(self): tkMessageBox.showerror( "Subprocess Startup Error", "IDLE's subprocess didn't make connection. Either IDLE can't " "start a subprocess or personal firewall software is blocking " "the connection.", master=self.tkconsole.text) def display_executing_dialog(self): tkMessageBox.showerror( "Already executing", "The Python Shell window is already executing a command; " "please wait until it is finished.", master=self.tkconsole.text) class PyShell(OutputWindow): shell_title = "Python " + python_version() + " Shell" # Override classes ColorDelegator = ModifiedColorDelegator UndoDelegator = ModifiedUndoDelegator # Override menus menu_specs = [ ("file", "_File"), ("edit", "_Edit"), ("debug", "_Debug"), ("options", "_Options"), ("windows", "_Windows"), ("help", "_Help"), ] if sys.platform == "darwin": menu_specs[-2] = ("windows", "_Window") # New classes from idlelib.IdleHistory import History def __init__(self, flist=None): if use_subprocess: ms = self.menu_specs if ms[2][0] != "shell": ms.insert(2, ("shell", "She_ll")) self.interp = ModifiedInterpreter(self) if flist is None: root = Tk() fixwordbreaks(root) root.withdraw() flist = PyShellFileList(root) # OutputWindow.__init__(self, flist, None, None) # ## self.config(usetabs=1, indentwidth=8, context_use_ps1=1) self.usetabs = True # indentwidth must be 8 when using tabs. See note in EditorWindow: self.indentwidth = 8 self.context_use_ps1 = True # text = self.text text.configure(wrap="char") text.bind("<<newline-and-indent>>", self.enter_callback) text.bind("<<plain-newline-and-indent>>", self.linefeed_callback) text.bind("<<interrupt-execution>>", self.cancel_callback) text.bind("<<end-of-file>>", self.eof_callback) text.bind("<<open-stack-viewer>>", self.open_stack_viewer) text.bind("<<toggle-debugger>>", self.toggle_debugger) text.bind("<<toggle-jit-stack-viewer>>", self.toggle_jit_stack_viewer) if use_subprocess: text.bind("<<view-restart>>", self.view_restart_mark) text.bind("<<restart-shell>>", self.restart_shell) # self.save_stdout = sys.stdout self.save_stderr = sys.stderr self.save_stdin = sys.stdin from idlelib import IOBinding self.stdin = PseudoInputFile(self, "stdin", IOBinding.encoding) self.stdout = PseudoOutputFile(self, "stdout", IOBinding.encoding) self.stderr = PseudoOutputFile(self, "stderr", IOBinding.encoding) self.console = PseudoOutputFile(self, "console", IOBinding.encoding) if not use_subprocess: sys.stdout = self.stdout sys.stderr = self.stderr sys.stdin = self.stdin try: # page help() text to shell. import pydoc # import must be done here to capture i/o rebinding. 
# XXX KBK 27Dec07 use a textView someday, but must work w/o subproc pydoc.pager = pydoc.plainpager except: sys.stderr = sys.__stderr__ raise # self.history = self.History(self.text) # self.pollinterval = 50 # millisec def get_standard_extension_names(self): return idleConf.GetExtensions(shell_only=True) reading = False executing = False canceled = False endoffile = False closing = False _stop_readline_flag = False def set_warning_stream(self, stream): global warning_stream warning_stream = stream def get_warning_stream(self): return warning_stream def toggle_debugger(self, event=None): if self.executing: tkMessageBox.showerror("Don't debug now", "You can only toggle the debugger when idle", master=self.text) self.set_debugger_indicator() return "break" else: db = self.interp.getdebugger() if db: self.close_debugger() else: self.open_debugger() def set_debugger_indicator(self): db = self.interp.getdebugger() self.setvar("<<toggle-debugger>>", not not db) def toggle_jit_stack_viewer(self, event=None): pass # All we need is the variable def close_debugger(self): db = self.interp.getdebugger() if db: self.interp.setdebugger(None) db.close() if self.interp.rpcclt: RemoteDebugger.close_remote_debugger(self.interp.rpcclt) self.resetoutput() self.console.write("[DEBUG OFF]\n") sys.ps1 = ">>> " self.showprompt() self.set_debugger_indicator() def open_debugger(self): if self.interp.rpcclt: dbg_gui = RemoteDebugger.start_remote_debugger(self.interp.rpcclt, self) else: dbg_gui = Debugger.Debugger(self) self.interp.setdebugger(dbg_gui) dbg_gui.load_breakpoints() sys.ps1 = "[DEBUG ON]\n>>> " self.showprompt() self.set_debugger_indicator() def beginexecuting(self): "Helper for ModifiedInterpreter" self.resetoutput() self.executing = 1 def endexecuting(self): "Helper for ModifiedInterpreter" self.executing = 0 self.canceled = 0 self.showprompt() def close(self): "Extend EditorWindow.close()" if self.executing: response = tkMessageBox.askokcancel( "Kill?", "The program is still running!\n Do you want to kill it?", default="ok", parent=self.text) if response is False: return "cancel" self.stop_readline() self.canceled = True self.closing = True return EditorWindow.close(self) def _close(self): "Extend EditorWindow._close(), shut down debugger and execution server" self.close_debugger() if use_subprocess: self.interp.kill_subprocess() # Restore std streams sys.stdout = self.save_stdout sys.stderr = self.save_stderr sys.stdin = self.save_stdin # Break cycles self.interp = None self.console = None self.flist.pyshell = None self.history = None EditorWindow._close(self) def ispythonsource(self, filename): "Override EditorWindow method: never remove the colorizer" return True def short_title(self): return self.shell_title COPYRIGHT = \ 'Type "copyright", "credits" or "license()" for more information.' def begin(self): self.text.mark_set("iomark", "insert") self.resetoutput() if use_subprocess: nosub = '' client = self.interp.start_subprocess() if not client: self.close() return False else: nosub = ("==== No Subprocess ====\n\n" + "WARNING: Running IDLE without a Subprocess is deprecated\n" + "and will be removed in a later version. See Help/IDLE Help\n" + "for details.\n\n") sys.displayhook = rpc.displayhook self.write("Python %s on %s\n%s\n%s" % (sys.version, sys.platform, self.COPYRIGHT, nosub)) self.showprompt() import tkinter tkinter._default_root = None # 03Jan04 KBK What's this? return True def stop_readline(self): if not self.reading: # no nested mainloop to exit. 
return self._stop_readline_flag = True self.top.quit() def readline(self): save = self.reading try: self.reading = 1 self.top.mainloop() # nested mainloop() finally: self.reading = save if self._stop_readline_flag: self._stop_readline_flag = False return "" line = self.text.get("iomark", "end-1c") if len(line) == 0: # may be EOF if we quit our mainloop with Ctrl-C line = "\n" self.resetoutput() if self.canceled: self.canceled = 0 if not use_subprocess: raise KeyboardInterrupt if self.endoffile: self.endoffile = 0 line = "" return line def isatty(self): return True def cancel_callback(self, event=None): try: if self.text.compare("sel.first", "!=", "sel.last"): return # Active selection -- always use default binding except: pass if not (self.executing or self.reading): self.resetoutput() self.interp.write("KeyboardInterrupt\n") self.showprompt() return "break" self.endoffile = 0 self.canceled = 1 if (self.executing and self.interp.rpcclt): if self.interp.getdebugger(): self.interp.restart_subprocess() else: self.interp.interrupt_subprocess() if self.reading: self.top.quit() # exit the nested mainloop() in readline() return "break" def eof_callback(self, event): if self.executing and not self.reading: return # Let the default binding (delete next char) take over if not (self.text.compare("iomark", "==", "insert") and self.text.compare("insert", "==", "end-1c")): return # Let the default binding (delete next char) take over if not self.executing: self.resetoutput() self.close() else: self.canceled = 0 self.endoffile = 1 self.top.quit() return "break" def linefeed_callback(self, event): # Insert a linefeed without entering anything (still autoindented) if self.reading: self.text.insert("insert", "\n") self.text.see("insert") else: self.newline_and_indent_event(event) return "break" def enter_callback(self, event): if self.executing and not self.reading: return # Let the default binding (insert '\n') take over # If some text is selected, recall the selection # (but only if this before the I/O mark) try: sel = self.text.get("sel.first", "sel.last") if sel: if self.text.compare("sel.last", "<=", "iomark"): self.recall(sel, event) return "break" except: pass # If we're strictly before the line containing iomark, recall # the current line, less a leading prompt, less leading or # trailing whitespace if self.text.compare("insert", "<", "iomark linestart"): # Check if there's a relevant stdin range -- if so, use it prev = self.text.tag_prevrange("stdin", "insert") if prev and self.text.compare("insert", "<", prev[1]): self.recall(self.text.get(prev[0], prev[1]), event) return "break" next = self.text.tag_nextrange("stdin", "insert") if next and self.text.compare("insert lineend", ">=", next[0]): self.recall(self.text.get(next[0], next[1]), event) return "break" # No stdin mark -- just get the current line, less any prompt indices = self.text.tag_nextrange("console", "insert linestart") if indices and \ self.text.compare(indices[0], "<=", "insert linestart"): self.recall(self.text.get(indices[1], "insert lineend"), event) else: self.recall(self.text.get("insert linestart", "insert lineend"), event) return "break" # If we're between the beginning of the line and the iomark, i.e. 
# in the prompt area, move to the end of the prompt if self.text.compare("insert", "<", "iomark"): self.text.mark_set("insert", "iomark") # If we're in the current input and there's only whitespace # beyond the cursor, erase that whitespace first s = self.text.get("insert", "end-1c") if s and not s.strip(): self.text.delete("insert", "end-1c") # If we're in the current input before its last line, # insert a newline right at the insert point if self.text.compare("insert", "<", "end-1c linestart"): self.newline_and_indent_event(event) return "break" # We're in the last line; append a newline and submit it self.text.mark_set("insert", "end-1c") if self.reading: self.text.insert("insert", "\n") self.text.see("insert") else: self.newline_and_indent_event(event) self.text.tag_add("stdin", "iomark", "end-1c") self.text.update_idletasks() if self.reading: self.top.quit() # Break out of recursive mainloop() else: self.runit() return "break" def recall(self, s, event): # remove leading and trailing empty or whitespace lines s = re.sub(r'^\s*\n', '' , s) s = re.sub(r'\n\s*$', '', s) lines = s.split('\n') self.text.undo_block_start() try: self.text.tag_remove("sel", "1.0", "end") self.text.mark_set("insert", "end-1c") prefix = self.text.get("insert linestart", "insert") if prefix.rstrip().endswith(':'): self.newline_and_indent_event(event) prefix = self.text.get("insert linestart", "insert") self.text.insert("insert", lines[0].strip()) if len(lines) > 1: orig_base_indent = re.search(r'^([ \t]*)', lines[0]).group(0) new_base_indent = re.search(r'^([ \t]*)', prefix).group(0) for line in lines[1:]: if line.startswith(orig_base_indent): # replace orig base indentation with new indentation line = new_base_indent + line[len(orig_base_indent):] self.text.insert('insert', '\n'+line.rstrip()) finally: self.text.see("insert") self.text.undo_block_stop() def runit(self): line = self.text.get("iomark", "end-1c") # Strip off last newline and surrounding whitespace. # (To allow you to hit return twice to end a statement.) i = len(line) while i > 0 and line[i-1] in " \t": i = i-1 if i > 0 and line[i-1] == "\n": i = i-1 while i > 0 and line[i-1] in " \t": i = i-1 line = line[:i] more = self.interp.runsource(line) def open_stack_viewer(self, event=None): if self.interp.rpcclt: return self.interp.remote_stack_viewer() try: sys.last_traceback except: tkMessageBox.showerror("No stack trace", "There is no stack trace yet.\n" "(sys.last_traceback is not defined)", master=self.text) return from idlelib.StackViewer import StackBrowser sv = StackBrowser(self.root, self.flist) def view_restart_mark(self, event=None): self.text.see("iomark") self.text.see("restart") def restart_shell(self, event=None): "Callback for Run/Restart Shell Cntl-F6" self.interp.restart_subprocess(with_cwd=True) def showprompt(self): self.resetoutput() try: s = str(sys.ps1) except: s = "" self.console.write(s) self.text.mark_set("insert", "end-1c") self.set_line_and_column() self.io.reset_undo() def resetoutput(self): source = self.text.get("iomark", "end-1c") if self.history: self.history.store(source) if self.text.get("end-2c") != "\n": self.text.insert("end-1c", "\n") self.text.mark_set("iomark", "end-1c") self.set_line_and_column() def write(self, s, tags=()): if isinstance(s, str) and len(s) and max(s) > '\uffff': # Tk doesn't support outputting non-BMP characters # Let's assume what printed string is not very long, # find first non-BMP character and construct informative # UnicodeEncodeError exception. 
for start, char in enumerate(s): if char > '\uffff': break raise UnicodeEncodeError("UCS-2", char, start, start+1, 'Non-BMP character not supported in Tk') try: self.text.mark_gravity("iomark", "right") count = OutputWindow.write(self, s, tags, "iomark") self.text.mark_gravity("iomark", "left") except: raise ###pass # ### 11Aug07 KBK if we are expecting exceptions # let's find out what they are and be specific. if self.canceled: self.canceled = 0 if not use_subprocess: raise KeyboardInterrupt return count def rmenu_check_cut(self): try: if self.text.compare('sel.first', '<', 'iomark'): return 'disabled' except TclError: # no selection, so the index 'sel.first' doesn't exist return 'disabled' return super().rmenu_check_cut() def rmenu_check_paste(self): if self.text.compare('insert','<','iomark'): return 'disabled' return super().rmenu_check_paste() class PseudoFile(io.TextIOBase): def __init__(self, shell, tags, encoding=None): self.shell = shell self.tags = tags self._encoding = encoding @property def encoding(self): return self._encoding @property def name(self): return '<%s>' % self.tags def isatty(self): return True class PseudoOutputFile(PseudoFile): def writable(self): return True def write(self, s): if self.closed: raise ValueError("write to closed file") if type(s) is not str: if not isinstance(s, str): raise TypeError('must be str, not ' + type(s).__name__) # See issue #19481 s = str.__str__(s) return self.shell.write(s, self.tags) class PseudoInputFile(PseudoFile): def __init__(self, shell, tags, encoding=None): PseudoFile.__init__(self, shell, tags, encoding) self._line_buffer = '' def readable(self): return True def read(self, size=-1): if self.closed: raise ValueError("read from closed file") if size is None: size = -1 elif not isinstance(size, int): raise TypeError('must be int, not ' + type(size).__name__) result = self._line_buffer self._line_buffer = '' if size < 0: while True: line = self.shell.readline() if not line: break result += line else: while len(result) < size: line = self.shell.readline() if not line: break result += line self._line_buffer = result[size:] result = result[:size] return result def readline(self, size=-1): if self.closed: raise ValueError("read from closed file") if size is None: size = -1 elif not isinstance(size, int): raise TypeError('must be int, not ' + type(size).__name__) line = self._line_buffer or self.shell.readline() if size < 0: size = len(line) eol = line.find('\n', 0, size) if eol >= 0: size = eol + 1 self._line_buffer = line[size:] return line[:size] def close(self): self.shell.close() usage_msg = """\ USAGE: idle [-deins] [-t title] [file]* idle [-dns] [-t title] (-c cmd | -r file) [arg]* idle [-dns] [-t title] - [arg]* -h print this help message and exit -n run IDLE without a subprocess (DEPRECATED, see Help/IDLE Help for details) The following options will override the IDLE 'settings' configuration: -e open an edit window -i open a shell window The following options imply -i and will open a shell: -c cmd run the command in a shell, or -r file run script from file -d enable the debugger -s run $IDLESTARTUP or $PYTHONSTARTUP before anything else -t title set title of shell window A default edit window will be bypassed when -c, -r, or - are used. [arg]* are passed to the command (-c) or script (-r) in sys.argv[1:]. Examples: idle Open an edit window or shell depending on IDLE's configuration. idle foo.py foobar.py Edit the files, also open a shell if configured to start with shell. 
idle -est "Baz" foo.py Run $IDLESTARTUP or $PYTHONSTARTUP, edit foo.py, and open a shell window with the title "Baz". idle -c "import sys; print(sys.argv)" "foo" Open a shell window and run the command, passing "-c" in sys.argv[0] and "foo" in sys.argv[1]. idle -d -s -r foo.py "Hello World" Open a shell window, run a startup script, enable the debugger, and run foo.py, passing "foo.py" in sys.argv[0] and "Hello World" in sys.argv[1]. echo "import sys; print(sys.argv)" | idle - "foobar" Open a shell window, run the script piped in, passing '' in sys.argv[0] and "foobar" in sys.argv[1]. """ def main(): global flist, root, use_subprocess capture_warnings(True) use_subprocess = True enable_shell = False enable_edit = False debug = False cmd = None script = None startup = False try: opts, args = getopt.getopt(sys.argv[1:], "c:deihnr:st:") except getopt.error as msg: sys.stderr.write("Error: %s\n" % str(msg)) sys.stderr.write(usage_msg) sys.exit(2) for o, a in opts: if o == '-c': cmd = a enable_shell = True if o == '-d': debug = True enable_shell = True if o == '-e': enable_edit = True if o == '-h': sys.stdout.write(usage_msg) sys.exit() if o == '-i': enable_shell = True if o == '-n': print(" Warning: running IDLE without a subprocess is deprecated.", file=sys.stderr) use_subprocess = False if o == '-r': script = a if os.path.isfile(script): pass else: print("No script file: ", script) sys.exit() enable_shell = True if o == '-s': startup = True enable_shell = True if o == '-t': PyShell.shell_title = a enable_shell = True if args and args[0] == '-': cmd = sys.stdin.read() enable_shell = True # process sys.argv and sys.path: for i in range(len(sys.path)): sys.path[i] = os.path.abspath(sys.path[i]) if args and args[0] == '-': sys.argv = [''] + args[1:] elif cmd: sys.argv = ['-c'] + args elif script: sys.argv = [script] + args elif args: enable_edit = True pathx = [] for filename in args: pathx.append(os.path.dirname(filename)) for dir in pathx: dir = os.path.abspath(dir) if not dir in sys.path: sys.path.insert(0, dir) else: dir = os.getcwd() if dir not in sys.path: sys.path.insert(0, dir) # check the IDLE settings configuration (but command line overrides) edit_start = idleConf.GetOption('main', 'General', 'editor-on-startup', type='bool') enable_edit = enable_edit or edit_start enable_shell = enable_shell or not enable_edit # start editor and/or shell windows: root = Tk(className="Idle") # set application icon icondir = os.path.join(os.path.dirname(__file__), 'Icons') if system() == 'Windows': iconfile = os.path.join(icondir, 'idle.ico') root.wm_iconbitmap(default=iconfile) elif TkVersion >= 8.5: ext = '.png' if TkVersion >= 8.6 else '.gif' iconfiles = [os.path.join(icondir, 'idle_%d%s' % (size, ext)) for size in (16, 32, 48)] icons = [PhotoImage(file=iconfile) for iconfile in iconfiles] root.wm_iconphoto(True, *icons) fixwordbreaks(root) root.withdraw() flist = PyShellFileList(root) macosxSupport.setupApp(root, flist) if enable_edit: if not (cmd or script): for filename in args[:]: if flist.open(filename) is None: # filename is a directory actually, disconsider it args.remove(filename) if not args: flist.new() if enable_shell: shell = flist.open_shell() if not shell: return # couldn't open shell if macosxSupport.isAquaTk() and flist.dict: # On OSX: when the user has double-clicked on a file that causes # IDLE to be launched the shell window will open just in front of # the file she wants to see. Lower the interpreter window when # there are open files. 
shell.top.lower() else: shell = flist.pyshell # Handle remaining options. If any of these are set, enable_shell # was set also, so shell must be true to reach here. if debug: shell.open_debugger() if startup: filename = os.environ.get("IDLESTARTUP") or \ os.environ.get("PYTHONSTARTUP") if filename and os.path.isfile(filename): shell.interp.execfile(filename) if cmd or script: shell.interp.runcommand("""if 1: import sys as _sys _sys.argv = %r del _sys \n""" % (sys.argv,)) if cmd: shell.interp.execsource(cmd) elif script: shell.interp.prepend_syspath(script) shell.interp.execfile(script) elif shell: # If there is a shell window and no cmd or script in progress, # check for problematic OS X Tk versions and print a warning # message in the IDLE shell window; this is less intrusive # than always opening a separate window. tkversionwarning = macosxSupport.tkVersionWarning(root) if tkversionwarning: shell.interp.runcommand("print('%s')" % tkversionwarning) while flist.inversedict: # keep IDLE running while files are open. root.mainloop() root.destroy() capture_warnings(False) if __name__ == "__main__": sys.modules['PyShell'] = sys.modules['__main__'] main() capture_warnings(False) # Make sure turned off; see issue 18081
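# ---------------------------------------------------------------------------
# Illustrative sketches (not part of PyShell.py above; hedged, standalone).
#
# 1) The linecache trick used by ModifiedInterpreter.stuffsource(): storing a
#    pseudo-filename in linecache.cache lets tracebacks display shell input
#    even though no file named "<pyshell#0>" exists on disk.  The tuple
#    layout (size, mtime, lines, fullname) matches what stuffsource() writes.
import linecache

source = "x = 1 / 0"
filename = "<pyshell#0>"
linecache.cache[filename] = len(source) + 1, 0, source.split("\n"), filename
print(linecache.getline(filename, 1))  # prints: x = 1 / 0

# 2) The ephemeral-port pattern used by ModifiedInterpreter.start_subprocess():
#    binding to port 0 asks the OS for a free port, which getsockname() then
#    reveals, the same way self.port is recovered once the socket listens.
import socket

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(("127.0.0.1", 0))
print("assigned ephemeral port:", sock.getsockname()[1])
sock.close()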
lgpl-3.0
ctxis/canape
CANAPE.Scripting/Lib/encodings/utf_32.py
375
5127
""" Python 'utf-32' Codec """ import codecs, sys ### Codec APIs encode = codecs.utf_32_encode def decode(input, errors='strict'): return codecs.utf_32_decode(input, errors, True) class IncrementalEncoder(codecs.IncrementalEncoder): def __init__(self, errors='strict'): codecs.IncrementalEncoder.__init__(self, errors) self.encoder = None def encode(self, input, final=False): if self.encoder is None: result = codecs.utf_32_encode(input, self.errors)[0] if sys.byteorder == 'little': self.encoder = codecs.utf_32_le_encode else: self.encoder = codecs.utf_32_be_encode return result return self.encoder(input, self.errors)[0] def reset(self): codecs.IncrementalEncoder.reset(self) self.encoder = None def getstate(self): # state info we return to the caller: # 0: stream is in natural order for this platform # 2: endianness hasn't been determined yet # (we're never writing in unnatural order) return (2 if self.encoder is None else 0) def setstate(self, state): if state: self.encoder = None else: if sys.byteorder == 'little': self.encoder = codecs.utf_32_le_encode else: self.encoder = codecs.utf_32_be_encode class IncrementalDecoder(codecs.BufferedIncrementalDecoder): def __init__(self, errors='strict'): codecs.BufferedIncrementalDecoder.__init__(self, errors) self.decoder = None def _buffer_decode(self, input, errors, final): if self.decoder is None: (output, consumed, byteorder) = \ codecs.utf_32_ex_decode(input, errors, 0, final) if byteorder == -1: self.decoder = codecs.utf_32_le_decode elif byteorder == 1: self.decoder = codecs.utf_32_be_decode elif consumed >= 4: raise UnicodeError("UTF-32 stream does not start with BOM") return (output, consumed) return self.decoder(input, self.errors, final) def reset(self): codecs.BufferedIncrementalDecoder.reset(self) self.decoder = None def getstate(self): # additonal state info from the base class must be None here, # as it isn't passed along to the caller state = codecs.BufferedIncrementalDecoder.getstate(self)[0] # additional state info we pass to the caller: # 0: stream is in natural order for this platform # 1: stream is in unnatural order # 2: endianness hasn't been determined yet if self.decoder is None: return (state, 2) addstate = int((sys.byteorder == "big") != (self.decoder is codecs.utf_32_be_decode)) return (state, addstate) def setstate(self, state): # state[1] will be ignored by BufferedIncrementalDecoder.setstate() codecs.BufferedIncrementalDecoder.setstate(self, state) state = state[1] if state == 0: self.decoder = (codecs.utf_32_be_decode if sys.byteorder == "big" else codecs.utf_32_le_decode) elif state == 1: self.decoder = (codecs.utf_32_le_decode if sys.byteorder == "big" else codecs.utf_32_be_decode) else: self.decoder = None class StreamWriter(codecs.StreamWriter): def __init__(self, stream, errors='strict'): self.encoder = None codecs.StreamWriter.__init__(self, stream, errors) def reset(self): codecs.StreamWriter.reset(self) self.encoder = None def encode(self, input, errors='strict'): if self.encoder is None: result = codecs.utf_32_encode(input, errors) if sys.byteorder == 'little': self.encoder = codecs.utf_32_le_encode else: self.encoder = codecs.utf_32_be_encode return result else: return self.encoder(input, errors) class StreamReader(codecs.StreamReader): def reset(self): codecs.StreamReader.reset(self) try: del self.decode except AttributeError: pass def decode(self, input, errors='strict'): (object, consumed, byteorder) = \ codecs.utf_32_ex_decode(input, errors, 0, False) if byteorder == -1: self.decode = 
codecs.utf_32_le_decode elif byteorder == 1: self.decode = codecs.utf_32_be_decode elif consumed>=4: raise UnicodeError,"UTF-32 stream does not start with BOM" return (object, consumed) ### encodings module API def getregentry(): return codecs.CodecInfo( name='utf-32', encode=encode, decode=decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, )
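# ---------------------------------------------------------------------------
# Illustrative usage (not part of the module above; a hedged sketch).  The
# 'utf-32' codec that getregentry() registers writes a BOM first, and the
# decoder reads that BOM to pick little- or big-endian, which is exactly the
# byteorder == -1 / byteorder == 1 branches in _buffer_decode().
import codecs

data = u'hi'.encode('utf-32')   # 4-byte BOM + native-endian code units
print(repr(data[:4]))           # the BOM, e.g. b'\xff\xfe\x00\x00' on LE
print(data.decode('utf-32'))    # BOM consumed; prints 'hi'

# The incremental decoder defers the endianness decision until it has seen
# the BOM, mirroring getstate()'s "2: endianness hasn't been determined yet".
dec = codecs.getincrementaldecoder('utf-32')()
print(dec.getstate()[1])        # 2: byte order still unknown
dec.decode(data[:4])            # feed just the BOM
print(dec.getstate()[1])        # 0: natural order determined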
gpl-3.0
openiitbombayx/edx-platform
common/djangoapps/third_party_auth/settings.py
76
3717
"""Settings for the third-party auth module. The flow for settings registration is: The base settings file contains a boolean, ENABLE_THIRD_PARTY_AUTH, indicating whether this module is enabled. startup.py probes the ENABLE_THIRD_PARTY_AUTH. If true, it: a) loads this module. b) calls apply_settings(), passing in the Django settings """ _FIELDS_STORED_IN_SESSION = ['auth_entry', 'next'] _MIDDLEWARE_CLASSES = ( 'third_party_auth.middleware.ExceptionMiddleware', ) _SOCIAL_AUTH_LOGIN_REDIRECT_URL = '/dashboard' def apply_settings(django_settings): """Set provider-independent settings.""" # Whitelisted URL query parameters retrained in the pipeline session. # Params not in this whitelist will be silently dropped. django_settings.FIELDS_STORED_IN_SESSION = _FIELDS_STORED_IN_SESSION # Register and configure python-social-auth with Django. django_settings.INSTALLED_APPS += ( 'social.apps.django_app.default', 'third_party_auth', ) # Inject exception middleware to make redirects fire. django_settings.MIDDLEWARE_CLASSES += _MIDDLEWARE_CLASSES # Where to send the user if there's an error during social authentication # and we cannot send them to a more specific URL # (see middleware.ExceptionMiddleware). django_settings.SOCIAL_AUTH_LOGIN_ERROR_URL = '/' # Where to send the user once social authentication is successful. django_settings.SOCIAL_AUTH_LOGIN_REDIRECT_URL = _SOCIAL_AUTH_LOGIN_REDIRECT_URL # Inject our customized auth pipeline. All auth backends must work with # this pipeline. django_settings.SOCIAL_AUTH_PIPELINE = ( 'third_party_auth.pipeline.parse_query_params', 'social.pipeline.social_auth.social_details', 'social.pipeline.social_auth.social_uid', 'social.pipeline.social_auth.auth_allowed', 'social.pipeline.social_auth.social_user', 'third_party_auth.pipeline.associate_by_email_if_login_api', 'social.pipeline.user.get_username', 'third_party_auth.pipeline.set_pipeline_timeout', 'third_party_auth.pipeline.ensure_user_information', 'social.pipeline.user.create_user', 'social.pipeline.social_auth.associate_user', 'social.pipeline.social_auth.load_extra_data', 'social.pipeline.user.user_details', 'third_party_auth.pipeline.set_logged_in_cookies', 'third_party_auth.pipeline.login_analytics', ) # Required so that we can use unmodified PSA OAuth2 backends: django_settings.SOCIAL_AUTH_STRATEGY = 'third_party_auth.strategy.ConfigurationModelStrategy' # We let the user specify their email address during signup. django_settings.SOCIAL_AUTH_PROTECTED_USER_FIELDS = ['email'] # Disable exceptions by default for prod so you get redirect behavior # instead of a Django error page. During development you may want to # enable this when you want to get stack traces rather than redirections. django_settings.SOCIAL_AUTH_RAISE_EXCEPTIONS = False # Allow users to login using social auth even if their account is not verified yet # The 'ensure_user_information' step controls this and only allows brand new users # to login without verification. Repeat logins are not permitted until the account # gets verified. django_settings.INACTIVE_USER_LOGIN = True django_settings.INACTIVE_USER_URL = '/auth/inactive' # Context processors required under Django. django_settings.SOCIAL_AUTH_UUID_LENGTH = 4 django_settings.TEMPLATE_CONTEXT_PROCESSORS += ( 'social.apps.django_app.context_processors.backends', 'social.apps.django_app.context_processors.login_redirect', )
agpl-3.0
j-carpentier/nova
nova/tests/unit/objects/test_instance_pci_requests.py
32
7584
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from oslo_serialization import jsonutils from nova import objects from nova.tests.unit.objects import test_objects FAKE_UUID = '79a53d6b-0893-4838-a971-15f4f382e7c2' FAKE_REQUEST_UUID = '69b53d6b-0793-4839-c981-f5c4f382e7d2' # NOTE(danms): Yes, these are the same right now, but going forward, # we have changes to make which will be reflected in the format # in instance_extra, but not in system_metadata. fake_pci_requests = [ {'count': 2, 'spec': [{'vendor_id': '8086', 'device_id': '1502'}], 'alias_name': 'alias_1', 'is_new': False, 'request_id': FAKE_REQUEST_UUID}, {'count': 2, 'spec': [{'vendor_id': '6502', 'device_id': '07B5'}], 'alias_name': 'alias_2', 'is_new': True, 'request_id': FAKE_REQUEST_UUID}, ] fake_legacy_pci_requests = [ {'count': 2, 'spec': [{'vendor_id': '8086', 'device_id': '1502'}], 'alias_name': 'alias_1'}, {'count': 1, 'spec': [{'vendor_id': '6502', 'device_id': '07B5'}], 'alias_name': 'alias_2'}, ] class _TestInstancePCIRequests(object): @mock.patch('nova.db.instance_extra_get_by_instance_uuid') def test_get_by_instance_uuid(self, mock_get): mock_get.return_value = { 'instance_uuid': FAKE_UUID, 'pci_requests': jsonutils.dumps(fake_pci_requests), } requests = objects.InstancePCIRequests.get_by_instance_uuid( self.context, FAKE_UUID) self.assertEqual(2, len(requests.requests)) for index, request in enumerate(requests.requests): self.assertEqual(fake_pci_requests[index]['alias_name'], request.alias_name) self.assertEqual(fake_pci_requests[index]['count'], request.count) self.assertEqual(fake_pci_requests[index]['spec'], [dict(x.items()) for x in request.spec]) @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid') def test_get_by_instance_uuid_and_newness(self, mock_get): pcir = objects.InstancePCIRequests mock_get.return_value = objects.InstancePCIRequests( instance_uuid='fake-uuid', requests=[objects.InstancePCIRequest(count=1, is_new=False), objects.InstancePCIRequest(count=2, is_new=True)]) old_req = pcir.get_by_instance_uuid_and_newness(self.context, 'fake-uuid', False) mock_get.return_value = objects.InstancePCIRequests( instance_uuid='fake-uuid', requests=[objects.InstancePCIRequest(count=1, is_new=False), objects.InstancePCIRequest(count=2, is_new=True)]) new_req = pcir.get_by_instance_uuid_and_newness(self.context, 'fake-uuid', True) self.assertEqual(1, old_req.requests[0].count) self.assertEqual(2, new_req.requests[0].count) @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid') def test_get_by_instance_current(self, mock_get): instance = objects.Instance(uuid='fake-uuid', system_metadata={}) objects.InstancePCIRequests.get_by_instance(self.context, instance) mock_get.assert_called_once_with(self.context, 'fake-uuid') def test_get_by_instance_legacy(self): fakesysmeta = { 'pci_requests': jsonutils.dumps([fake_legacy_pci_requests[0]]), 'new_pci_requests': jsonutils.dumps([fake_legacy_pci_requests[1]]), } instance = objects.Instance(uuid='fake-uuid', system_metadata=fakesysmeta) requests = 
objects.InstancePCIRequests.get_by_instance(self.context, instance) self.assertEqual(2, len(requests.requests)) self.assertEqual('alias_1', requests.requests[0].alias_name) self.assertFalse(requests.requests[0].is_new) self.assertEqual('alias_2', requests.requests[1].alias_name) self.assertTrue(requests.requests[1].is_new) def test_new_compatibility(self): request = objects.InstancePCIRequest(is_new=False) self.assertFalse(request.new) def test_backport_1_0(self): requests = objects.InstancePCIRequests( requests=[objects.InstancePCIRequest(count=1, request_id=FAKE_UUID), objects.InstancePCIRequest(count=2, request_id=FAKE_UUID)]) primitive = requests.obj_to_primitive(target_version='1.0') backported = objects.InstancePCIRequests.obj_from_primitive( primitive) self.assertEqual('1.0', backported.VERSION) self.assertEqual(2, len(backported.requests)) self.assertFalse(backported.requests[0].obj_attr_is_set('request_id')) self.assertFalse(backported.requests[1].obj_attr_is_set('request_id')) def test_obj_from_db(self): req = objects.InstancePCIRequests.obj_from_db(None, FAKE_UUID, None) self.assertEqual(FAKE_UUID, req.instance_uuid) self.assertEqual(0, len(req.requests)) db_req = jsonutils.dumps(fake_pci_requests) req = objects.InstancePCIRequests.obj_from_db(None, FAKE_UUID, db_req) self.assertEqual(FAKE_UUID, req.instance_uuid) self.assertEqual(2, len(req.requests)) self.assertEqual('alias_1', req.requests[0].alias_name) def test_from_request_spec_instance_props(self): requests = objects.InstancePCIRequests( requests=[objects.InstancePCIRequest(count=1, request_id=FAKE_UUID, spec=[{'vendor_id': '8086', 'device_id': '1502'}]) ], instance_uuid=FAKE_UUID) result = jsonutils.to_primitive(requests) result = objects.InstancePCIRequests.from_request_spec_instance_props( result) self.assertEqual(1, len(result.requests)) self.assertEqual(1, result.requests[0].count) self.assertEqual(FAKE_UUID, result.requests[0].request_id) self.assertEqual([{'vendor_id': '8086', 'device_id': '1502'}], result.requests[0].spec) class TestInstancePCIRequests(test_objects._LocalTest, _TestInstancePCIRequests): pass class TestRemoteInstancePCIRequests(test_objects._RemoteTest, _TestInstancePCIRequests): pass
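# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the test module): what the fixtures above
# model.  The instance_extra row stores pci_requests as a JSON blob, so a
# round-trip through oslo's jsonutils reproduces the dicts that obj_from_db
# parses.  fake_pci_requests refers to the fixture defined at module scope.
from oslo_serialization import jsonutils

db_blob = jsonutils.dumps(fake_pci_requests)   # what the DB column holds
restored = jsonutils.loads(db_blob)
assert restored[0]['alias_name'] == 'alias_1'
assert restored[1]['is_new'] is True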
apache-2.0
tjyang/vitess
test/python_client_test.py
1
13676
#!/usr/bin/env python # # Copyright 2015 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can # be found in the LICENSE file. """This test uses vtgateclienttest to test the vtdb python vtgate client. """ import logging import struct import unittest import environment from protocols_flavor import protocols_flavor import utils from vtdb import dbexceptions from vtdb import keyrange from vtdb import keyrange_constants from vtdb import vtgate_client from vtdb import vtgate_cursor vtgateclienttest_process = None vtgateclienttest_port = None vtgateclienttest_grpc_port = None def setUpModule(): global vtgateclienttest_process global vtgateclienttest_port global vtgateclienttest_grpc_port try: environment.topo_server().setup() vtgateclienttest_port = environment.reserve_ports(1) args = environment.binary_args('vtgateclienttest') + [ '-log_dir', environment.vtlogroot, '-port', str(vtgateclienttest_port), ] if protocols_flavor().vtgate_python_protocol() == 'grpc': vtgateclienttest_grpc_port = environment.reserve_ports(1) args.extend(['-grpc_port', str(vtgateclienttest_grpc_port)]) if protocols_flavor().service_map(): args.extend(['-service_map', ','.join(protocols_flavor().service_map())]) vtgateclienttest_process = utils.run_bg(args) utils.wait_for_vars('vtgateclienttest', vtgateclienttest_port) except: tearDownModule() raise def tearDownModule(): utils.kill_sub_process(vtgateclienttest_process, soft=True) if vtgateclienttest_process: vtgateclienttest_process.wait() environment.topo_server().teardown() class TestPythonClientBase(unittest.TestCase): """Base class for Python client tests.""" CONNECT_TIMEOUT = 10.0 # A packed keyspace_id from the middle of the full keyrange. KEYSPACE_ID_0X80 = struct.Struct('!Q').pack(0x80 << 56) def setUp(self): super(TestPythonClientBase, self).setUp() addr = 'localhost:%d' % vtgateclienttest_port protocol = protocols_flavor().vtgate_python_protocol() self.conn = vtgate_client.connect(protocol, addr, 30.0) logging.info( 'Start: %s, protocol %s.', '.'.join(self.id().split('.')[-2:]), protocol) def tearDown(self): self.conn.close() def _open_keyspace_ids_cursor(self): return self.conn.cursor( 'keyspace', 'master', keyspace_ids=[self.KEYSPACE_ID_0X80]) def _open_keyranges_cursor(self): kr = keyrange.KeyRange(keyrange_constants.NON_PARTIAL_KEYRANGE) return self.conn.cursor('keyspace', 'master', keyranges=[kr]) def _open_batch_cursor(self): return self.conn.cursor(keyspace=None, tablet_type='master') def _open_stream_keyranges_cursor(self): kr = keyrange.KeyRange(keyrange_constants.NON_PARTIAL_KEYRANGE) return self.conn.cursor( 'keyspace', 'master', keyranges=[kr], cursorclass=vtgate_cursor.StreamVTGateCursor) def _open_stream_keyspace_ids_cursor(self): return self.conn.cursor( 'keyspace', 'master', keyspace_ids=[self.KEYSPACE_ID_0X80], cursorclass=vtgate_cursor.StreamVTGateCursor) class TestPythonClientErrors(TestPythonClientBase): """Test cases to verify that the Python client can handle errors correctly.""" def test_execute_integrity_errors(self): """Test we raise dbexceptions.IntegrityError for Execute calls.""" # Special query that makes vtgateclienttest return an IntegrityError. self._verify_exception_for_execute( 'error://integrity error', dbexceptions.IntegrityError) def test_partial_integrity_errors(self): """Raise an IntegrityError when Execute returns a partial error.""" # Special query that makes vtgateclienttest return a partial error. 
self._verify_exception_for_execute( 'partialerror://integrity error', dbexceptions.IntegrityError) def _verify_exception_for_execute(self, query, exception): """Verify that we raise a specific exception for all Execute calls. Args: query: query string to use for execute calls. exception: exception class that we expect the execute call to raise. """ # FIXME(alainjobart) add test for Execute once factory supports it # FIXME(alainjobart) add test for ExecuteShards once factory supports it # ExecuteKeyspaceIds test cursor = self._open_keyspace_ids_cursor() with self.assertRaises(exception): cursor.execute(query, {}) cursor.close() # ExecuteKeyRanges test cursor = self._open_keyranges_cursor() with self.assertRaises(exception): cursor.execute(query, {}) cursor.close() # ExecuteEntityIds test cursor = self.conn.cursor('keyspace', 'master') with self.assertRaises(exception): cursor.execute( query, {}, entity_keyspace_id_map={1: self.KEYSPACE_ID_0X80}, entity_column_name='user_id') cursor.close() # ExecuteBatchKeyspaceIds test cursor = self._open_batch_cursor() with self.assertRaises(exception): cursor.executemany( sql=None, params_list=[ dict( sql=query, bind_variables={}, keyspace='keyspace', keyspace_ids=[self.KEYSPACE_ID_0X80])]) cursor.close() # ExecuteBatchShard test cursor = self._open_batch_cursor() with self.assertRaises(exception): cursor.executemany( sql=None, params_list=[ dict( sql=query, bind_variables={}, keyspace='keyspace', shards=[keyrange_constants.SHARD_ZERO])]) cursor.close() def _verify_exception_for_stream_execute(self, query, exception): """Verify that we raise a specific exception for all StreamExecute calls. Args: query: query string to use for StreamExecute calls. exception: exception class that we expect StreamExecute to raise. """ # StreamExecuteKeyspaceIds test cursor = self._open_stream_keyspace_ids_cursor() with self.assertRaises(exception): cursor.execute(query, {}) cursor.close() # StreamExecuteKeyRanges test cursor = self._open_stream_keyranges_cursor() with self.assertRaises(exception): cursor.execute(query, {}) cursor.close() def test_streaming_integrity_error(self): """Test we raise dbexceptions.IntegrityError for StreamExecute calls.""" # TODO(aaijazi): this test doesn't work for all clients yet. if protocols_flavor().vtgate_python_protocol() != 'gorpc': return self._verify_exception_for_stream_execute( 'error://integrity error', dbexceptions.IntegrityError) def test_transient_error(self): """Test we raise dbexceptions.TransientError for Execute calls.""" # TODO(aaijazi): this test doesn't work for all clients yet. if protocols_flavor().vtgate_python_protocol() != 'gorpc': return # Special query that makes vtgateclienttest return a TransientError. self._verify_exception_for_execute( 'error://transient error', dbexceptions.TransientError) def test_streaming_transient_error(self): """Test we raise dbexceptions.IntegrityError for StreamExecute calls.""" # TODO(aaijazi): this test doesn't work for all clients yet. 
if protocols_flavor().vtgate_python_protocol() != 'gorpc': return self._verify_exception_for_stream_execute( 'error://transient error', dbexceptions.TransientError) def test_error(self): """Test a regular server error raises the right exception.""" error_request = 'error://unknown error' error_caller_id = vtgate_client.CallerID(principal=error_request) # Begin test with self.assertRaisesRegexp(dbexceptions.DatabaseError, 'forced error'): self.conn.begin(error_caller_id) # Commit test with self.assertRaisesRegexp(dbexceptions.DatabaseError, 'forced error'): self.conn.begin(error_caller_id) # Rollback test with self.assertRaisesRegexp(dbexceptions.DatabaseError, 'forced error'): self.conn.begin(error_caller_id) # GetSrvKeyspace test with self.assertRaisesRegexp(dbexceptions.DatabaseError, 'forced error'): self.conn.get_srv_keyspace(error_request) class TestPythonClient(TestPythonClientBase): """Non-error test cases for the Python client.""" def test_success_get_srv_keyspace(self): """Test we get the right results from get_srv_keyspace. We only test the successful cases. """ # big has one big shard big = self.conn.get_srv_keyspace('big') self.assertEquals(big.name, 'big') self.assertEquals(big.sharding_col_name, 'sharding_column_name') self.assertEquals(big.sharding_col_type, keyrange_constants.KIT_UINT64) self.assertEquals(big.served_from, {'master': 'other_keyspace'}) self.assertEquals(big.get_shards('replica'), [{'Name': 'shard0', 'KeyRange': { 'Start': '\x40\x00\x00\x00\x00\x00\x00\x00', 'End': '\x80\x00\x00\x00\x00\x00\x00\x00', }}]) self.assertEquals(big.get_shard_count('replica'), 1) self.assertEquals(big.get_shard_count('rdonly'), 0) self.assertEquals(big.get_shard_names('replica'), ['shard0']) self.assertEquals(big.keyspace_id_to_shard_name_for_db_type( 0x6000000000000000, 'replica'), 'shard0') with self.assertRaises(ValueError): big.keyspace_id_to_shard_name_for_db_type(0x2000000000000000, 'replica') # small has no shards small = self.conn.get_srv_keyspace('small') self.assertEquals(small.name, 'small') self.assertEquals(small.sharding_col_name, '') self.assertEquals(small.sharding_col_type, keyrange_constants.KIT_UNSET) self.assertEquals(small.served_from, {}) self.assertEquals(small.get_shards('replica'), []) self.assertEquals(small.get_shard_count('replica'), 0) with self.assertRaises(ValueError): small.keyspace_id_to_shard_name_for_db_type(0x6000000000000000, 'replica') def test_effective_caller_id(self): """Test that the passed in effective_caller_id is parsed correctly. Pass a special sql query that sends the expected effective_caller_id through different vtgate interfaces. Make sure the good_effective_caller_id works, and the bad_effective_caller_id raises a DatabaseError. """ # Special query that makes vtgateclienttest match effective_caller_id. 
effective_caller_id_test_query = ( 'callerid://{"principal":"pr", "component":"co", "subcomponent":"su"}') good_effective_caller_id = vtgate_client.CallerID( principal='pr', component='co', subcomponent='su') bad_effective_caller_id = vtgate_client.CallerID( principal='pr_wrong', component='co_wrong', subcomponent='su_wrong') def check_good_and_bad_effective_caller_ids(cursor, cursor_execute_method): cursor.set_effective_caller_id(good_effective_caller_id) with self.assertRaises(dbexceptions.DatabaseError) as cm: cursor_execute_method(cursor) self.assertIn('SUCCESS:', str(cm.exception)) cursor.set_effective_caller_id(bad_effective_caller_id) with self.assertRaises(dbexceptions.DatabaseError) as cm: cursor_execute_method(cursor) self.assertNotIn('SUCCESS:', str(cm.exception)) def cursor_execute_keyspace_ids_method(cursor): cursor.execute(effective_caller_id_test_query, {}) check_good_and_bad_effective_caller_ids( self._open_keyspace_ids_cursor(), cursor_execute_keyspace_ids_method) def cursor_execute_key_ranges_method(cursor): cursor.execute(effective_caller_id_test_query, {}) check_good_and_bad_effective_caller_ids( self._open_keyranges_cursor(), cursor_execute_key_ranges_method) def cursor_execute_entity_ids_method(cursor): cursor.execute( effective_caller_id_test_query, {}, entity_keyspace_id_map={1: self.KEYSPACE_ID_0X80}, entity_column_name='user_id') check_good_and_bad_effective_caller_ids( self.conn.cursor('keyspace', 'master'), cursor_execute_entity_ids_method) def cursor_execute_batch_keyspace_ids_method(cursor): cursor.executemany( sql=None, params_list=[dict( sql=effective_caller_id_test_query, bind_variables={}, keyspace='keyspace', keyspace_ids=[self.KEYSPACE_ID_0X80])]) check_good_and_bad_effective_caller_ids( self._open_batch_cursor(), cursor_execute_batch_keyspace_ids_method) def cursor_execute_batch_shard_method(cursor): cursor.executemany( sql=None, params_list=[dict( sql=effective_caller_id_test_query, bind_variables={}, keyspace='keyspace', shards=[keyrange_constants.SHARD_ZERO])]) check_good_and_bad_effective_caller_ids( self._open_batch_cursor(), cursor_execute_batch_shard_method) def cursor_stream_execute_keyspace_ids_method(cursor): cursor.execute(sql=effective_caller_id_test_query, bind_variables={}) check_good_and_bad_effective_caller_ids( self._open_stream_keyspace_ids_cursor(), cursor_stream_execute_keyspace_ids_method) def cursor_stream_execute_keyranges_method(cursor): cursor.execute(sql=effective_caller_id_test_query, bind_variables={}) check_good_and_bad_effective_caller_ids( self._open_stream_keyranges_cursor(), cursor_stream_execute_keyranges_method) if __name__ == '__main__': utils.main()
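# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the test module): the minimal client calls
# these tests exercise, collected in one place.  The protocol name and the
# address are placeholders; a vtgateclienttest server must be listening there
# for this to run.
from vtdb import keyrange
from vtdb import keyrange_constants
from vtdb import vtgate_client

conn = vtgate_client.connect('grpc', 'localhost:15991', 30.0)
kr = keyrange.KeyRange(keyrange_constants.NON_PARTIAL_KEYRANGE)
cursor = conn.cursor('keyspace', 'master', keyranges=[kr])
cursor.execute('select 1 from dual', {})
cursor.close()
conn.close()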
bsd-3-clause
GaetanCambier/CouchPotatoServer
libs/suds/sudsobject.py
201
11165
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )

"""
The I{sudsobject} module provides a collection of suds objects
that are primarily used for the highly dynamic interactions with
wsdl/xsd defined types.
"""

from logging import getLogger
from suds import *
from new import classobj

log = getLogger(__name__)


def items(sobject):
    """
    Extract the I{items} from a suds object, much like the items() method
    works on a I{dict}.
    @param sobject: A suds object.
    @type sobject: L{Object}
    @return: A list of items contained in I{sobject}.
    @rtype: [(key, value),...]
    """
    for item in sobject:
        yield item


def asdict(sobject):
    """
    Convert a suds object into a dictionary.
    @param sobject: A suds object.
    @type sobject: L{Object}
    @return: A python dictionary containing the items contained in
        I{sobject}.
    @rtype: dict
    """
    return dict(items(sobject))


def merge(a, b):
    """
    Merge all attributes and metadata from I{a} to I{b}.
    @param a: A I{source} object.
    @type a: L{Object}
    @param b: A I{destination} object.
    @type b: L{Object}
    """
    for item in a:
        setattr(b, item[0], item[1])
    # carry the metadata over from the source object
    b.__metadata__ = a.__metadata__
    return b


def footprint(sobject):
    """
    Get the I{virtual footprint} of the object.  This is really a count of
    the attributes in the branch with a significant value.
    @param sobject: A suds object.
    @type sobject: L{Object}
    @return: The branch footprint.
    @rtype: int
    """
    n = 0
    for a in sobject.__keylist__:
        v = getattr(sobject, a)
        if v is None:
            continue
        if isinstance(v, Object):
            n += footprint(v)
            continue
        if hasattr(v, '__len__'):
            if len(v):
                n += 1
            continue
        n += 1
    return n


class Factory:

    cache = {}

    @classmethod
    def subclass(cls, name, bases, dict={}):
        if not isinstance(bases, tuple):
            bases = (bases,)
        name = name.encode('utf-8')
        key = '.'.join((name, str(bases)))
        subclass = cls.cache.get(key)
        if subclass is None:
            subclass = classobj(name, bases, dict)
            cls.cache[key] = subclass
        return subclass

    @classmethod
    def object(cls, classname=None, dict={}):
        if classname is not None:
            subclass = cls.subclass(classname, Object)
            inst = subclass()
        else:
            inst = Object()
        for a in dict.items():
            setattr(inst, a[0], a[1])
        return inst

    @classmethod
    def metadata(cls):
        return Metadata()

    @classmethod
    def property(cls, name, value=None):
        subclass = cls.subclass(name, Property)
        return subclass(value)


class Object:

    def __init__(self):
        self.__keylist__ = []
        self.__printer__ = Printer()
        self.__metadata__ = Metadata()

    def __setattr__(self, name, value):
        builtin = name.startswith('__') and name.endswith('__')
        if not builtin and name not in self.__keylist__:
            self.__keylist__.append(name)
        self.__dict__[name] = value

    def __delattr__(self, name):
        try:
            del self.__dict__[name]
            builtin = name.startswith('__') and name.endswith('__')
            if not builtin:
                self.__keylist__.remove(name)
        except Exception:
            cls = self.__class__.__name__
            raise AttributeError("%s has no attribute '%s'" % (cls, name))

    def __getitem__(self, name):
        if isinstance(name, int):
            name = self.__keylist__[int(name)]
        return getattr(self, name)

    def __setitem__(self, name, value):
        setattr(self, name, value)

    def __iter__(self):
        return Iter(self)

    def __len__(self):
        return len(self.__keylist__)

    def __contains__(self, name):
        return name in self.__keylist__

    def __repr__(self):
        return str(self)

    def __str__(self):
        return unicode(self).encode('utf-8')

    def __unicode__(self):
        return self.__printer__.tostr(self)


class Iter:

    def __init__(self, sobject):
        self.sobject = sobject
        self.keylist = self.__keylist(sobject)
        self.index = 0

    def next(self):
        keylist = self.keylist
        nkeys = len(self.keylist)
        while self.index < nkeys:
            k = keylist[self.index]
            self.index += 1
            if hasattr(self.sobject, k):
                v = getattr(self.sobject, k)
                return (k, v)
        raise StopIteration()

    def __keylist(self, sobject):
        keylist = sobject.__keylist__
        try:
            keyset = set(keylist)
            ordering = sobject.__metadata__.ordering
            ordered = set(ordering)
            if not ordered.issuperset(keyset):
                log.debug(
                    '%s must be superset of %s, ordering ignored',
                    ordering,
                    keylist)
                raise KeyError()
            return ordering
        except Exception:
            return keylist

    def __iter__(self):
        return self


class Metadata(Object):

    def __init__(self):
        # Note: deliberately does not call Object.__init__; a Metadata
        # instance must not itself carry a __metadata__ attribute, or
        # construction would recurse indefinitely.
        self.__keylist__ = []
        self.__printer__ = Printer()


class Facade(Object):

    def __init__(self, name):
        Object.__init__(self)
        md = self.__metadata__
        md.facade = name


class Property(Object):

    def __init__(self, value):
        Object.__init__(self)
        self.value = value

    def items(self):
        for item in self:
            if item[0] != 'value':
                yield item

    def get(self):
        return self.value

    def set(self, value):
        self.value = value
        return self


class Printer:
    """
    Pretty printing of an Object.
    """

    @classmethod
    def indent(cls, n):
        return '%*s' % (n * 3, ' ')

    def tostr(self, object, indent=-2):
        """ get a string representation of object """
        history = []
        return self.process(object, history, indent)

    def process(self, object, h, n=0, nl=False):
        """ print object using the specified indent (n) and newline (nl).
        """
        if object is None:
            return 'None'
        if isinstance(object, Object):
            if len(object) == 0:
                return '<empty>'
            else:
                return self.print_object(object, h, n + 2, nl)
        if isinstance(object, dict):
            if len(object) == 0:
                return '<empty>'
            else:
                return self.print_dictionary(object, h, n + 2, nl)
        if isinstance(object, (list, tuple)):
            if len(object) == 0:
                return '<empty>'
            else:
                return self.print_collection(object, h, n + 2)
        if isinstance(object, basestring):
            return '"%s"' % tostr(object)
        return '%s' % tostr(object)

    def print_object(self, d, h, n, nl=False):
        """ print an object using the specified indent (n) and newline (nl). """
        s = []
        cls = d.__class__
        md = d.__metadata__
        if d in h:
            s.append('(')
            s.append(cls.__name__)
            s.append(')')
            s.append('...')
            return ''.join(s)
        h.append(d)
        if nl:
            s.append('\n')
            s.append(self.indent(n))
        if cls != Object:
            s.append('(')
            if isinstance(d, Facade):
                s.append(md.facade)
            else:
                s.append(cls.__name__)
            s.append(')')
        s.append('{')
        for item in d:
            if self.exclude(d, item):
                continue
            item = self.unwrap(d, item)
            s.append('\n')
            s.append(self.indent(n + 1))
            if isinstance(item[1], (list, tuple)):
                s.append(item[0])
                s.append('[]')
            else:
                s.append(item[0])
            s.append(' = ')
            s.append(self.process(item[1], h, n, True))
        s.append('\n')
        s.append(self.indent(n))
        s.append('}')
        h.pop()
        return ''.join(s)

    def print_dictionary(self, d, h, n, nl=False):
        """ print a dictionary using the specified indent (n) and newline (nl). """
        if d in h:
            return '{}...'
        h.append(d)
        s = []
        if nl:
            s.append('\n')
            s.append(self.indent(n))
        s.append('{')
        for item in d.items():
            s.append('\n')
            s.append(self.indent(n + 1))
            if isinstance(item[1], (list, tuple)):
                s.append(tostr(item[0]))
                s.append('[]')
            else:
                s.append(tostr(item[0]))
            s.append(' = ')
            s.append(self.process(item[1], h, n, True))
        s.append('\n')
        s.append(self.indent(n))
        s.append('}')
        h.pop()
        return ''.join(s)

    def print_collection(self, c, h, n):
        """ print a collection using the specified indent (n). """
        if c in h:
            return '[]...'
        h.append(c)
        s = []
        for item in c:
            s.append('\n')
            s.append(self.indent(n))
            s.append(self.process(item, h, n - 2))
            s.append(',')
        h.pop()
        return ''.join(s)

    def unwrap(self, d, item):
        """ translate (unwrap) using an optional wrapper function """
        nopt = (lambda x: x)
        try:
            md = d.__metadata__
            pmd = getattr(md, '__print__', None)
            if pmd is None:
                return item
            wrappers = getattr(pmd, 'wrappers', {})
            fn = wrappers.get(item[0], nopt)
            return (item[0], fn(item[1]))
        except Exception:
            return item

    def exclude(self, d, item):
        """ check metadata for excluded items """
        try:
            md = d.__metadata__
            pmd = getattr(md, '__print__', None)
            if pmd is None:
                return False
            excludes = getattr(pmd, 'excludes', [])
            return (item[0] in excludes)
        except Exception:
            return False
gpl-3.0
ibc/MediaSoup
worker/deps/gyp/test/small/gyptest-small.py
12
1496
#!/usr/bin/env python

# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Runs small tests.
"""

import imp
import os
import sys
import unittest

import TestGyp


test = TestGyp.TestGyp()

# Add pylib to the import path (so tests can import their dependencies).
# This is consistent with the path.append done in the top file "gyp".
sys.path.insert(0, os.path.join(test._cwd, 'pylib'))

# Add new test suites here.
files_to_test = [
  'pylib/gyp/MSVSSettings_test.py',
  'pylib/gyp/easy_xml_test.py',
  'pylib/gyp/generator/msvs_test.py',
  'pylib/gyp/generator/ninja_test.py',
  'pylib/gyp/generator/xcode_test.py',
  'pylib/gyp/common_test.py',
  'pylib/gyp/input_test.py',
]

# Collect all the suites from the above files.
suites = []
for filename in files_to_test:
  # Carve the module name out of the path.
  name = os.path.splitext(os.path.split(filename)[1])[0]
  # Find the complete module path.
  full_filename = os.path.join(test._cwd, filename)
  # Load the module.
  module = imp.load_source(name, full_filename)
  # Add it to the list of test suites.
  suites.append(unittest.defaultTestLoader.loadTestsFromModule(module))

# Create combined suite.
all_tests = unittest.TestSuite(suites)

# Run all the tests.
result = unittest.TextTestRunner(verbosity=2).run(all_tests)
if result.failures or result.errors:
  test.fail_test()

test.pass_test()
isc
SravanthiSinha/edx-platform
openedx/core/djangoapps/credit/urls.py
54
1261
""" URLs for the credit app. """ from django.conf.urls import patterns, url, include from openedx.core.djangoapps.credit import views, routers from openedx.core.djangoapps.credit.api.provider import get_credit_provider_info PROVIDER_ID_PATTERN = r'(?P<provider_id>[^/]+)' V1_URLS = patterns( '', url( r'^providers/$', views.get_providers_detail, name='providers_detail' ), url( r'^providers/{provider_id}/$'.format(provider_id=PROVIDER_ID_PATTERN), get_credit_provider_info, name='get_provider_info' ), url( r'^providers/{provider_id}/request/$'.format(provider_id=PROVIDER_ID_PATTERN), views.create_credit_request, name='create_request' ), url( r'^providers/{provider_id}/callback/?$'.format(provider_id=PROVIDER_ID_PATTERN), views.credit_provider_callback, name='provider_callback' ), url( r'^eligibility/$', views.get_eligibility_for_user, name='eligibility_details' ), ) router = routers.SimpleRouter() # pylint: disable=invalid-name router.register(r'courses', views.CreditCourseViewSet) V1_URLS += router.urls urlpatterns = patterns( '', url(r'^v1/', include(V1_URLS)), )
agpl-3.0
tdhopper/scikit-learn
sklearn/linear_model/passive_aggressive.py
97
10879
# Authors: Rob Zinkov, Mathieu Blondel
# License: BSD 3 clause

from .stochastic_gradient import BaseSGDClassifier
from .stochastic_gradient import BaseSGDRegressor
from .stochastic_gradient import DEFAULT_EPSILON


class PassiveAggressiveClassifier(BaseSGDClassifier):
    """Passive Aggressive Classifier

    Read more in the :ref:`User Guide <passive_aggressive>`.

    Parameters
    ----------
    C : float
        Maximum step size (regularization). Defaults to 1.0.

    fit_intercept : bool, default=True
        Whether the intercept should be estimated or not. If False, the
        data is assumed to be already centered.

    n_iter : int, optional
        The number of passes over the training data (aka epochs).
        Defaults to 5.

    shuffle : bool, default=True
        Whether or not the training data should be shuffled after each epoch.

    random_state : int seed, RandomState instance, or None (default)
        The seed of the pseudo random number generator to use when
        shuffling the data.

    verbose : integer, optional
        The verbosity level.

    n_jobs : integer, optional
        The number of CPUs to use to do the OVA (One Versus All, for
        multi-class problems) computation. -1 means 'all CPUs'.
        Defaults to 1.

    loss : string, optional
        The loss function to be used:
        hinge: equivalent to PA-I in the reference paper.
        squared_hinge: equivalent to PA-II in the reference paper.

    warm_start : bool, optional
        When set to True, reuse the solution of the previous call to fit as
        initialization, otherwise, just erase the previous solution.

    class_weight : dict, {class_label: weight} or "balanced" or None, optional
        Preset for the class_weight fit parameter.

        Weights associated with classes. If not given, all classes
        are supposed to have weight one.

        The "balanced" mode uses the values of y to automatically adjust
        weights inversely proportional to class frequencies in the input
        data as ``n_samples / (n_classes * np.bincount(y))``.

    Attributes
    ----------
    coef_ : array, shape = [1, n_features] if n_classes == 2 else \
            [n_classes, n_features]
        Weights assigned to the features.

    intercept_ : array, shape = [1] if n_classes == 2 else [n_classes]
        Constants in decision function.

    See also
    --------
    SGDClassifier
    Perceptron

    References
    ----------
    Online Passive-Aggressive Algorithms
    <http://jmlr.csail.mit.edu/papers/volume7/crammer06a/crammer06a.pdf>
    K. Crammer, O. Dekel, J. Keshet, S. Shalev-Shwartz, Y. Singer - JMLR (2006)

    """
    def __init__(self, C=1.0, fit_intercept=True, n_iter=5, shuffle=True,
                 verbose=0, loss="hinge", n_jobs=1, random_state=None,
                 warm_start=False, class_weight=None):
        BaseSGDClassifier.__init__(self,
                                   penalty=None,
                                   fit_intercept=fit_intercept,
                                   n_iter=n_iter,
                                   shuffle=shuffle,
                                   verbose=verbose,
                                   random_state=random_state,
                                   eta0=1.0,
                                   warm_start=warm_start,
                                   class_weight=class_weight,
                                   n_jobs=n_jobs)
        self.C = C
        self.loss = loss

    def partial_fit(self, X, y, classes=None):
        """Fit linear model with Passive Aggressive algorithm.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Subset of the training data

        y : numpy array of shape [n_samples]
            Subset of the target values

        classes : array, shape = [n_classes]
            Classes across all calls to partial_fit.
            Can be obtained via `np.unique(y_all)`, where y_all is the
            target vector of the entire dataset.
            This argument is required for the first call to partial_fit
            and can be omitted in the subsequent calls.
            Note that y doesn't need to contain all labels in `classes`.

        Returns
        -------
        self : returns an instance of self.
        """
        if self.class_weight == 'balanced':
            raise ValueError("class_weight 'balanced' is not supported for "
                             "partial_fit. For 'balanced' weights, use "
                             "`sklearn.utils.compute_class_weight` with "
                             "`class_weight='balanced'`. In place of y you "
                             "can use a large enough subset of the full "
                             "training set target to properly estimate the "
                             "class frequency distributions. Pass the "
                             "resulting weights as the class_weight "
                             "parameter.")
        lr = "pa1" if self.loss == "hinge" else "pa2"
        return self._partial_fit(X, y, alpha=1.0, C=self.C,
                                 loss="hinge", learning_rate=lr, n_iter=1,
                                 classes=classes, sample_weight=None,
                                 coef_init=None, intercept_init=None)

    def fit(self, X, y, coef_init=None, intercept_init=None):
        """Fit linear model with Passive Aggressive algorithm.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Training data

        y : numpy array of shape [n_samples]
            Target values

        coef_init : array, shape = [n_classes, n_features]
            The initial coefficients to warm-start the optimization.

        intercept_init : array, shape = [n_classes]
            The initial intercept to warm-start the optimization.

        Returns
        -------
        self : returns an instance of self.
        """
        lr = "pa1" if self.loss == "hinge" else "pa2"
        return self._fit(X, y, alpha=1.0, C=self.C,
                         loss="hinge", learning_rate=lr,
                         coef_init=coef_init, intercept_init=intercept_init)


class PassiveAggressiveRegressor(BaseSGDRegressor):
    """Passive Aggressive Regressor

    Read more in the :ref:`User Guide <passive_aggressive>`.

    Parameters
    ----------
    C : float
        Maximum step size (regularization). Defaults to 1.0.

    epsilon : float
        If the difference between the current prediction and the correct
        label is below this threshold, the model is not updated.

    fit_intercept : bool
        Whether the intercept should be estimated or not. If False, the
        data is assumed to be already centered. Defaults to True.

    n_iter : int, optional
        The number of passes over the training data (aka epochs).
        Defaults to 5.

    shuffle : bool, default=True
        Whether or not the training data should be shuffled after each epoch.

    random_state : int seed, RandomState instance, or None (default)
        The seed of the pseudo random number generator to use when
        shuffling the data.

    verbose : integer, optional
        The verbosity level.

    loss : string, optional
        The loss function to be used:
        epsilon_insensitive: equivalent to PA-I in the reference paper.
        squared_epsilon_insensitive: equivalent to PA-II in the reference
        paper.

    warm_start : bool, optional
        When set to True, reuse the solution of the previous call to fit as
        initialization, otherwise, just erase the previous solution.

    Attributes
    ----------
    coef_ : array, shape = [1, n_features]
        Weights assigned to the features.

    intercept_ : array, shape = [1]
        Constants in decision function.

    See also
    --------
    SGDRegressor

    References
    ----------
    Online Passive-Aggressive Algorithms
    <http://jmlr.csail.mit.edu/papers/volume7/crammer06a/crammer06a.pdf>
    K. Crammer, O. Dekel, J. Keshet, S. Shalev-Shwartz, Y. Singer - JMLR (2006)

    """
    def __init__(self, C=1.0, fit_intercept=True, n_iter=5, shuffle=True,
                 verbose=0, loss="epsilon_insensitive",
                 epsilon=DEFAULT_EPSILON, random_state=None,
                 warm_start=False):
        BaseSGDRegressor.__init__(self,
                                  penalty=None,
                                  l1_ratio=0,
                                  epsilon=epsilon,
                                  eta0=1.0,
                                  fit_intercept=fit_intercept,
                                  n_iter=n_iter,
                                  shuffle=shuffle,
                                  verbose=verbose,
                                  random_state=random_state,
                                  warm_start=warm_start)
        self.C = C
        self.loss = loss

    def partial_fit(self, X, y):
        """Fit linear model with Passive Aggressive algorithm.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Subset of training data

        y : numpy array of shape [n_samples]
            Subset of target values

        Returns
        -------
        self : returns an instance of self.
        """
        lr = "pa1" if self.loss == "epsilon_insensitive" else "pa2"
        return self._partial_fit(X, y, alpha=1.0, C=self.C,
                                 loss="epsilon_insensitive",
                                 learning_rate=lr, n_iter=1,
                                 sample_weight=None,
                                 coef_init=None, intercept_init=None)

    def fit(self, X, y, coef_init=None, intercept_init=None):
        """Fit linear model with Passive Aggressive algorithm.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Training data

        y : numpy array of shape [n_samples]
            Target values

        coef_init : array, shape = [n_features]
            The initial coefficients to warm-start the optimization.

        intercept_init : array, shape = [1]
            The initial intercept to warm-start the optimization.

        Returns
        -------
        self : returns an instance of self.
        """
        lr = "pa1" if self.loss == "epsilon_insensitive" else "pa2"
        return self._fit(X, y, alpha=1.0, C=self.C,
                         loss="epsilon_insensitive",
                         learning_rate=lr,
                         coef_init=coef_init,
                         intercept_init=intercept_init)
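
# Illustrative usage sketch (editor's addition, not part of scikit-learn):
# a tiny linearly separable problem; the data and the expected prediction
# are invented for the example.  loss="hinge" performs the PA-I update,
# loss="squared_hinge" the PA-II update.
if __name__ == '__main__':
    import numpy as np

    X = np.array([[0., 0.], [1., 1.], [2., 2.], [3., 3.]])
    y = np.array([0, 0, 1, 1])
    clf = PassiveAggressiveClassifier(C=1.0, loss="hinge", n_iter=5,
                                      random_state=0)
    clf.fit(X, y)
    print(clf.predict([[2.5, 2.5]]))   # expected: [1]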
bsd-3-clause
jofusa/datadogpy
tests/unit/api/test_api.py
3
9607
# stdlib
from copy import deepcopy
from functools import wraps
import os
import tempfile
from time import time

# 3p
import mock
from nose.tools import assert_raises

# datadog
from datadog import initialize, api
from datadog.api import Metric
from datadog.api.exceptions import ApiNotInitialized
from datadog.util.compat import is_p3k
from tests.unit.api.helper import (
    DatadogAPIWithInitialization,
    DatadogAPINoInitialization,
    MyCreatable,
    MyUpdatable,
    MyDeletable,
    MyGetable,
    MyListable,
    MyActionable,
    API_KEY,
    APP_KEY,
    API_HOST,
    HOST_NAME,
    FAKE_PROXY)


def preserve_environ_datadog(func):
    """
    Decorator to preserve the original environment value.
    """
    @wraps(func)
    def wrapper(env_name, *args, **kwds):
        environ_api_param = os.environ.get(env_name)
        try:
            return func(env_name, *args, **kwds)
        finally:
            # restore the original environ value
            if environ_api_param:
                os.environ[env_name] = environ_api_param
            elif os.environ.get(env_name):
                del os.environ[env_name]

    return wrapper


class TestInitialization(DatadogAPINoInitialization):

    def test_no_initialization_fails(self):
        assert_raises(ApiNotInitialized, MyCreatable.create)

        # No API key => only stats in statsd mode should work
        initialize()
        api._api_key = None
        assert_raises(ApiNotInitialized, MyCreatable.create)

        # Finally, initialize with an API key
        initialize(api_key=API_KEY, api_host=API_HOST)
        MyCreatable.create()
        assert self.request_mock.request.call_count == 1

    @mock.patch('datadog.util.config.get_config_path')
    def test_get_hostname(self, mock_config_path):
        # Generate a fake agent config
        tmpfilepath = os.path.join(tempfile.gettempdir(), "tmp-agentconfig")
        with open(tmpfilepath, "wb") as f:
            if is_p3k():
                f.write(bytes("[Main]\n", 'UTF-8'))
                f.write(bytes("hostname: {0}\n".format(HOST_NAME), 'UTF-8'))
            else:
                f.write("[Main]\n")
                f.write("hostname: {0}\n".format(HOST_NAME))
        # Mock get_config_path to return this fake agent config
        mock_config_path.return_value = tmpfilepath

        initialize()
        assert api._host_name == HOST_NAME, api._host_name

    def test_request_parameters(self):
        # Test API, application keys, API host and proxies
        initialize(api_key=API_KEY, app_key=APP_KEY, api_host=API_HOST,
                   proxies=FAKE_PROXY)

        # Make a simple API call
        MyCreatable.create()

        _, options = self.request_mock.request.call_args

        assert 'params' in options
        assert 'api_key' in options['params']
        assert options['params']['api_key'] == API_KEY
        assert 'application_key' in options['params']
        assert options['params']['application_key'] == APP_KEY

        assert 'proxies' in options
        assert options['proxies'] == FAKE_PROXY

        assert 'headers' in options
        assert options['headers'] == {'Content-Type': 'application/json'}

    def test_initialization_from_env(self):

        @preserve_environ_datadog
        def test_api_params_from_env(env_name, attr_name, env_value):
            """
            Set env_name environment variable to env_value
            Assert api.attr_name == env_value
            """
            os.environ[env_name] = env_value
            initialize()
            self.assertEqual(getattr(api, attr_name), env_value)

        @preserve_environ_datadog
        def test_api_params_default(env_name, attr_name, expected_value):
            """
            Unset env_name environment variable
            Assert api.attr_name == expected_value
            """
            if os.environ.get(env_name):
                del os.environ[env_name]
            initialize()
            self.assertEqual(getattr(api, attr_name), expected_value)

        @preserve_environ_datadog
        def test_api_params_from_params(env_name, parameter, attr_name, value):
            """
            Unset env_name environment variable
            Initialize API with parameter=value
            Assert api.attr_name == value
            """
            if os.environ.get(env_name):
                del os.environ[env_name]
            # Initialize with the keyword parameter under test
            initialize(**{parameter: value})
            self.assertEqual(getattr(api, attr_name), value)

        # Default values
        test_api_params_default("DATADOG_API_KEY", "_api_key", None)
        test_api_params_default("DATADOG_APP_KEY", "_application_key", None)
        test_api_params_default("DATADOG_HOST", "_api_host",
                                "https://app.datadoghq.com")

        # From environment
        test_api_params_from_env("DATADOG_API_KEY", "_api_key",
                                 env_value="apikey")
        test_api_params_from_env("DATADOG_APP_KEY", "_application_key",
                                 env_value="appkey")
        test_api_params_from_env("DATADOG_HOST", "_api_host",
                                 env_value="http://localhost")

        # From parameters
        test_api_params_from_params("DATADOG_API_KEY", "api_key",
                                    "_api_key", "apikey2")
        test_api_params_from_params("DATADOG_APP_KEY", "app_key",
                                    "_application_key", "appkey2")
        test_api_params_from_params("DATADOG_HOST", "api_host",
                                    "_api_host", "http://127.0.0.1")


class TestResources(DatadogAPIWithInitialization):

    def test_creatable(self):
        MyCreatable.create(mydata="val")
        self.request_called_with('POST', "host/api/v1/creatables",
                                 data={'mydata': "val"})

        MyCreatable.create(mydata="val", attach_host_name=True)
        self.request_called_with('POST', "host/api/v1/creatables",
                                 data={'mydata': "val", 'host': api._host_name})

    def test_getable(self):
        getable_object_id = 123
        MyGetable.get(getable_object_id, otherparam="val")
        self.request_called_with('GET',
                                 "host/api/v1/getables/" + str(getable_object_id),
                                 params={'otherparam': "val"})

    def test_listable(self):
        MyListable.get_all(otherparam="val")
        self.request_called_with('GET', "host/api/v1/listables",
                                 params={'otherparam': "val"})

    def test_updatable(self):
        updatable_object_id = 123
        MyUpdatable.update(updatable_object_id, params={'myparam': "val1"},
                           mydata="val2")
        self.request_called_with('PUT',
                                 "host/api/v1/updatables/" + str(updatable_object_id),
                                 params={'myparam': "val1"},
                                 data={'mydata': "val2"})

    def test_deletable(self):
        deletable_object_id = 123
        MyDeletable.delete(deletable_object_id, otherparam="val")
        self.request_called_with('DELETE',
                                 "host/api/v1/deletables/" + str(deletable_object_id),
                                 params={'otherparam': "val"})

    def test_actionable(self):
        actionable_object_id = 123
        MyActionable.trigger_class_action('POST', "actionname",
                                          id=actionable_object_id,
                                          mydata="val")
        self.request_called_with('POST',
                                 "host/api/v1/actionables/" +
                                 str(actionable_object_id) + "/actionname",
                                 data={'mydata': "val"})

        MyActionable.trigger_action('POST', "actionname",
                                    id=actionable_object_id, mydata="val")
        self.request_called_with('POST',
                                 "host/api/v1/actionname/" + str(actionable_object_id),
                                 data={'mydata': "val"})


class TestMetricResource(DatadogAPIWithInitialization):

    def submit_and_assess_metric_payload(self, serie):
        """
        Helper to assess the metric payload format.
        """
        now = time()

        if isinstance(serie, dict):
            Metric.send(**deepcopy(serie))
            serie = [serie]
        else:
            Metric.send(deepcopy(serie))

        payload = self.get_request_data()

        for i, metric in enumerate(payload['series']):
            assert set(metric.keys()) == set(['metric', 'points', 'host'])

            assert metric['metric'] == serie[i]['metric']
            assert metric['host'] == api._host_name

            # points is a list of 1 point
            assert isinstance(metric['points'], list) and len(metric['points']) == 1
            # it consists of a [time, value] pair
            assert len(metric['points'][0]) == 2
            # its value == the value we sent
            assert metric['points'][0][1] == serie[i]['points']
            # its time is not far from the current time
            assert now - 1 < metric['points'][0][0] < now + 1

    def test_metric_submit_query_switch(self):
        """
        Endpoints are different for submission and queries.
        """
        Metric.send(points="val")
        self.request_called_with('POST', "host/api/v1/series",
                                 data={'series': [{'points': "val",
                                                   'host': api._host_name}]})

        Metric.query(start="val1", end="val2")
        self.request_called_with('GET', "host/api/v1/query",
                                 params={'from': "val1", 'to': "val2"})

    def test_points_submission(self):
        """
        Assess the data payload format when submitting a single point or
        multiple points.
        """
        # Single point
        serie = dict(metric='metric.1', points=13)
        self.submit_and_assess_metric_payload(serie)

        # Multiple points
        serie = [dict(metric='metric.1', points=13),
                 dict(metric='metric.2', points=19)]
        self.submit_and_assess_metric_payload(serie)
bsd-3-clause