| code (string) | repo_name (string) | path (string) | language (string) | license (string) | size (int64) |
|---|---|---|---|---|---|
# Copyright 2012-2017, Damian Johnson and The Tor Project
# See LICENSE for licensing information
"""
:class:`~test.task.Task` that can be run with :func:`~test.task.run` to initialize our tests. Tasks are...
::
Initialization Tasks
|- STEM_VERSION - checks our version of stem
|- TOR_VERSION - checks our version of tor
|- PYTHON_VERSION - checks our version of python
|- CRYPTO_VERSION - checks our version of cryptography
|- PYNACL_VERSION - checks our version of pynacl
|- MOCK_VERSION - checks our version of mock
|- PYFLAKES_VERSION - checks our version of pyflakes
|- PYCODESTYLE_VERSION - checks our version of pycodestyle
|- CLEAN_PYC - removes any *.pyc without a corresponding *.py
|- IMPORT_TESTS - ensure all test modules have been imported
|- UNUSED_TESTS - checks to see if any tests are missing from our settings
|- PYFLAKES_TASK - static checks
+- PYCODESTYLE_TASK - style checks
"""
import importlib
import multiprocessing
import os
import re
import sys
import time
import stem
import stem.prereq
import stem.util.conf
import stem.util.system
import stem.util.test_tools
import stem.version
import test
import test.output
from test.output import STATUS, ERROR, NO_NL, println
CONFIG = stem.util.conf.config_dict('test', {
'integ.test_directory': './test/data',
'test.unit_tests': '',
'test.integ_tests': '',
})
SRC_PATHS = [os.path.join(test.STEM_BASE, path) for path in (
'stem',
'test',
'run_tests.py',
'cache_manual.py',
'cache_fallback_directories.py',
'setup.py',
'tor-prompt',
os.path.join('docs', 'republish.py'),
os.path.join('docs', 'roles.py'),
)]
PYFLAKES_UNAVAILABLE = 'Static error checking requires pyflakes version 0.7.3 or later. Please install it from ...\n http://pypi.python.org/pypi/pyflakes\n'
PYCODESTYLE_UNAVAILABLE = 'Style checks require pycodestyle version 1.4.2 or later. Please install it from...\n http://pypi.python.org/pypi/pycodestyle\n'
def _check_tor_version(tor_path):
return str(test.tor_version(tor_path)).split()[0]
def _clean_orphaned_pyc(paths):
"""
  Deletes any file with a *.pyc extension without a corresponding *.py.
:param list paths: paths to search for orphaned pyc files
"""
return ['removed %s' % path for path in stem.util.test_tools.clean_orphaned_pyc(paths)]
def _import_tests():
"""
Ensure all tests have been imported. This is important so tests can
register if they're asynchronous.
"""
for module in (CONFIG['test.unit_tests'].splitlines() + CONFIG['test.integ_tests'].splitlines()):
importlib.import_module(module.rsplit('.', 1)[0])
def _check_for_unused_tests(paths):
"""
  The 'test.unit_tests' and 'test.integ_tests' in our settings.cfg define the
  tests that we run. We do it this way so that we can control the order in
  which our tests are run, but there's a disadvantage: when we add new test
  modules we can easily forget to add them there.
  This checks for any unittest.TestCase subclasses not covered by our
  settings.
:param list paths: paths to search for unused tests
"""
unused_tests = []
for path in paths:
for py_path in stem.util.system.files_with_suffix(path, '.py'):
if os.path.normpath(CONFIG['integ.test_directory']) in py_path:
continue
with open(py_path) as f:
file_contents = f.read()
      test_match = re.search(r'^class (\S*)\(unittest\.TestCase\):$', file_contents, re.MULTILINE)
if test_match:
class_name = test_match.groups()[0]
module_name = py_path.replace(os.path.sep, '.')[len(test.STEM_BASE) + 1:-3] + '.' + class_name
if not (module_name in CONFIG['test.unit_tests'] or module_name in CONFIG['test.integ_tests']):
unused_tests.append(module_name)
if unused_tests:
raise ValueError('Test modules are missing from our test/settings.cfg:\n%s' % '\n'.join(unused_tests))
def run(category, *tasks):
"""
  Runs a series of :class:`~test.task.Task` instances. This simply prints 'done'
or 'failed' for each unless we fail one that is marked as being required. If
that happens then we print its error message and call sys.exit().
:param str category: label for the series of tasks
  :param list tasks: **Task** instances to be run
"""
test.output.print_divider(category, True)
for task in tasks:
if task is None:
continue
task.run()
if task.is_required and task.error:
println('\n%s\n' % task.error, ERROR)
sys.exit(1)
println()
class Task(object):
"""
  Task we can process while running our tests. The runner can return either a
  message or a list of strings for its results.
"""
def __init__(self, label, runner, args = None, is_required = True, print_result = True, print_runtime = False, background = False):
super(Task, self).__init__()
self.label = label
self.runner = runner
self.args = args
self.is_required = is_required
self.print_result = print_result
self.print_runtime = print_runtime
self.error = None
self.is_successful = False
self.result = None
self._is_background_task = background
self._background_process = None
self._background_pipe = None
def run(self):
start_time = time.time()
println(' %s...' % self.label, STATUS, NO_NL)
padding = 50 - len(self.label)
println(' ' * padding, NO_NL)
try:
if self._is_background_task:
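        # Run the task in a child process so long-running checks proceed
        # while tests execute: os.nice(15) lowers the child's priority, and
        # the result travels back over the pipe, collected later by join().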
def _run_wrapper(conn, runner, args):
os.nice(15)
conn.send(runner(*args) if args else runner())
conn.close()
self._background_pipe, child_pipe = multiprocessing.Pipe()
self._background_process = multiprocessing.Process(target = _run_wrapper, args = (child_pipe, self.runner, self.args))
self._background_process.start()
else:
self.result = self.runner(*self.args) if self.args else self.runner()
self.is_successful = True
output_msg = 'running' if self._is_background_task else 'done'
if self.result and self.print_result and isinstance(self.result, str):
output_msg = self.result
elif self.print_runtime:
output_msg += ' (%0.1fs)' % (time.time() - start_time)
println(output_msg, STATUS)
if self.print_result and isinstance(self.result, (list, tuple)):
for line in self.result:
println(' %s' % line, STATUS)
except Exception as exc:
output_msg = str(exc)
if not output_msg or self.is_required:
output_msg = 'failed'
println(output_msg, ERROR)
self.error = exc
def join(self):
if self._background_process:
try:
self.result = self._background_pipe.recv()
self._background_process.join()
except IOError:
pass
class ModuleVersion(Task):
def __init__(self, label, modules, prereq_check = None):
if isinstance(modules, str):
modules = [modules] # normalize to a list
def version_check():
if prereq_check is None or prereq_check():
for module in modules:
if stem.util.test_tools._module_exists(module):
return importlib.import_module(module).__version__
return 'missing'
super(ModuleVersion, self).__init__(label, version_check)
class StaticCheckTask(Task):
def __init__(self, label, runner, args = None, is_available = None, unavailable_msg = None, background = True):
super(StaticCheckTask, self).__init__(label, runner, args, is_required = False, print_result = False, print_runtime = not background, background = background)
self.is_available = is_available
self.unavailable_msg = unavailable_msg
def run(self):
if self.is_available:
return super(StaticCheckTask, self).run()
else:
println(' %s...' % self.label, STATUS, NO_NL)
println(' ' * (50 - len(self.label)), NO_NL)
println('unavailable', STATUS)
STEM_VERSION = Task('checking stem version', lambda: stem.__version__)
TOR_VERSION = Task('checking tor version', _check_tor_version)
PYTHON_VERSION = Task('checking python version', lambda: '.'.join(map(str, sys.version_info[:3])))
CRYPTO_VERSION = ModuleVersion('checking cryptography version', 'cryptography', stem.prereq.is_crypto_available)
PYNACL_VERSION = ModuleVersion('checking pynacl version', 'nacl', stem.prereq._is_pynacl_available)
MOCK_VERSION = ModuleVersion('checking mock version', ['unittest.mock', 'mock'], stem.prereq.is_mock_available)
PYFLAKES_VERSION = ModuleVersion('checking pyflakes version', 'pyflakes')
PYCODESTYLE_VERSION = ModuleVersion('checking pycodestyle version', ['pycodestyle', 'pep8'])
CLEAN_PYC = Task('checking for orphaned .pyc files', _clean_orphaned_pyc, (SRC_PATHS,), print_runtime = True)
IMPORT_TESTS = Task('importing test modules', _import_tests, print_runtime = True)
UNUSED_TESTS = Task('checking for unused tests', _check_for_unused_tests, [(
os.path.join(test.STEM_BASE, 'test', 'unit'),
os.path.join(test.STEM_BASE, 'test', 'integ'),
)], print_runtime = True)
PYFLAKES_TASK = StaticCheckTask(
'running pyflakes',
stem.util.test_tools.pyflakes_issues,
args = (SRC_PATHS,),
is_available = stem.util.test_tools.is_pyflakes_available(),
unavailable_msg = PYFLAKES_UNAVAILABLE,
)
PYCODESTYLE_TASK = StaticCheckTask(
'running pycodestyle',
stem.util.test_tools.stylistic_issues,
args = (SRC_PATHS, True, True, True),
is_available = stem.util.test_tools.is_pycodestyle_available(),
unavailable_msg = PYCODESTYLE_UNAVAILABLE,
)
| patrickod/stem | test/task.py | Python | lgpl-3.0 | 9,464 |
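A minimal sketch of driving the tasks defined in the file above; `run()` prints 'done' or 'failed' per task and exits when a required task fails. This assumes stem's test harness is importable:

```python
import test.task

# Any of the predefined Task instances can be passed to run().
test.task.run(
  'INITIALISING',
  test.task.STEM_VERSION,
  test.task.PYTHON_VERSION,
  test.task.CLEAN_PYC,
)
```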
# Copyright 2016 Casey Jaymes
# This file is part of PySCAP.
#
# PySCAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PySCAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PySCAP. If not, see <http://www.gnu.org/licenses/>.
import logging
from scap.model.xhtml import *
from scap.model.xs.AnyUriType import AnyUriType
logger = logging.getLogger(__name__)
class UriType(AnyUriType):
MODEL_MAP = {
}
| cjaymes/pyscap | src/scap/model/xhtml/UriType.py | Python | gpl-3.0 | 865 |
import os
import LCEngine4 as LCEngine
from PyQt4 import QtGui, QtCore
from Code import Books
from Code import ControlPosicion
from Code import Partida
from Code import DBgames
from Code.QT import Colocacion
from Code.QT import Controles
from Code.QT import Iconos
from Code.QT import QTVarios
from Code.QT import Columnas
from Code.QT import Grid
from Code.QT import QTUtil2
from Code.QT import Delegados
from Code.QT import WBG_Summary
class TabEngine(QtGui.QWidget):
def __init__(self, tabsAnalisis, procesador, configuracion):
QtGui.QWidget.__init__(self)
self.analyzing = False
self.posicion = None
self.li_analysis = []
self.gestor_motor = None
self.current_mrm = None
self.dbop = tabsAnalisis.dbop
self.procesador = procesador
self.configuracion = configuracion
self.siFigurines = configuracion.figurinesPGN
self.tabsAnalisis = tabsAnalisis
self.bt_start = Controles.PB(self, "", self.start).ponIcono(Iconos.Pelicula_Seguir(), 32)
self.bt_stop = Controles.PB(self, "", self.stop).ponIcono(Iconos.Pelicula_Pausa(), 32)
self.bt_stop.hide()
self.lb_engine = Controles.LB(self, _("Engine") + ":")
liMotores = configuracion.comboMotoresCompleto() # (nombre, clave)
default = configuracion.tutor.clave
engine = self.dbop.getconfig("ENGINE", default)
if len([clave for nombre,clave in liMotores if clave==engine]) == 0:
engine = default
self.cb_engine = Controles.CB(self, liMotores, engine).capturaCambiado(self.reset_motor)
multipv = self.dbop.getconfig("ENGINE_MULTIPV", 10)
lb_multipv = Controles.LB(self, _("Multi PV")+": ")
self.sb_multipv = Controles.SB(self, multipv, 1, 500).tamMaximo(50)
self.lb_analisis = Controles.LB(self, "").ponFondoN("#C9D2D7").ponTipoLetra(puntos=configuracion.puntosPGN)
oColumnas = Columnas.ListaColumnas()
oColumnas.nueva("PDT", _("Evaluation"), 120, siCentrado=True)
delegado = Delegados.EtiquetaPOS(True, siLineas=False) if self.siFigurines else None
oColumnas.nueva("SOL", "", 100, siCentrado=True, edicion=delegado)
oColumnas.nueva("PGN", _("Solution"), 860)
self.grid_analysis = Grid.Grid(self, oColumnas, siSelecFilas=True, siCabeceraVisible=False)
self.grid_analysis.tipoLetra(puntos=configuracion.puntosPGN)
self.grid_analysis.ponAltoFila(configuracion.altoFilaPGN)
# self.registrarGrid(self.grid_analysis)
ly_lin1 = Colocacion.H().control(self.bt_start).control(self.bt_stop).control(self.lb_engine)
ly_lin1.control(self.cb_engine)
ly_lin1.espacio(50).control(lb_multipv).control(self.sb_multipv).relleno()
ly = Colocacion.V().otro(ly_lin1).control(self.lb_analisis).control(self.grid_analysis).margen(3)
self.setLayout(ly)
self.reset_motor()
def saveCurrent(self):
if self.current_mrm:
fenM2 = self.current_posicion.fenM2()
dic = self.dbop.getfenvalue(fenM2)
if "ANALISIS" in dic:
mrm_ant = dic["ANALISIS"]
if mrm_ant.getdepth0() > self.current_mrm.getdepth0():
return
dic["ANALISIS"] = self.current_mrm
self.dbop.setfenvalue(fenM2, dic)
def setData(self, label, posicion):
self.saveCurrent()
self.posicion = posicion
self.lb_analisis.ponTexto(label)
if self.analyzing:
self.analyzing = False
self.gestor_motor.ac_final(0)
partida = Partida.Partida(self.posicion)
self.gestor_motor.ac_inicio(partida)
self.analyzing = True
QtCore.QTimer.singleShot(1000, self.lee_analisis)
else:
fenM2 = posicion.fenM2()
dic = self.dbop.getfenvalue(fenM2)
if "ANALISIS" in dic:
self.show_analisis(dic["ANALISIS"])
else:
self.li_analysis = []
self.grid_analysis.refresh()
def start(self):
self.current_mrm = None
self.current_posicion = None
self.sb_multipv.setDisabled(True)
self.cb_engine.setDisabled(True)
self.analyzing = True
self.sb_multipv.setDisabled(True)
self.show_stop()
multipv = self.sb_multipv.valor()
self.gestor_motor.actMultiPV(multipv)
partida = Partida.Partida(self.posicion)
self.gestor_motor.ac_inicio(partida)
QtCore.QTimer.singleShot(1000, self.lee_analisis)
def show_start(self):
self.bt_stop.hide()
self.bt_start.show()
def show_stop(self):
self.bt_start.hide()
self.bt_stop.show()
def show_analisis(self, mrm):
self.current_mrm = mrm
self.current_posicion = self.posicion
li = []
for rm in mrm.liMultiPV:
partida = Partida.Partida(self.posicion)
partida.leerPV(rm.pv)
pgn = partida.pgnBaseRAW()
lit = pgn.split(" ")
siBlancas = self.posicion.siBlancas
if siBlancas:
pgn0 = lit[0].split(".")[-1]
pgn1 = " ".join(lit[1:])
else:
pgn0 = lit[1]
pgn1 = " ".join(lit[2:])
if self.siFigurines:
partida.ms_sol = pgn0, siBlancas, None, None, None, None, False, False
else:
partida.ms_sol = pgn0
partida.ms_pgn = pgn1
partida.ms_pdt = rm.abrTextoPDT()
li.append(partida)
self.li_analysis = li
self.grid_analysis.refresh()
def lee_analisis(self):
if self.analyzing:
mrm = self.gestor_motor.ac_estado()
self.show_analisis(mrm)
QtCore.QTimer.singleShot(2000, self.lee_analisis)
def stop(self):
self.saveCurrent()
self.sb_multipv.setDisabled(False)
self.cb_engine.setDisabled(False)
self.analyzing = False
self.show_start()
if self.gestor_motor:
self.gestor_motor.ac_final(0)
def reset_motor(self):
self.saveCurrent()
clave = self.cb_engine.valor()
if not clave:
return
self.analyzing = False
if self.gestor_motor:
self.gestor_motor.terminar()
self.stop()
conf_motor = self.configuracion.buscaRivalExt(clave)
multipv = self.sb_multipv.valor()
self.gestor_motor = self.procesador.creaGestorMotor(conf_motor, 0, 0, siMultiPV=multipv > 1)
def gridNumDatos(self, grid):
return len(self.li_analysis)
def gridDato(self, grid, fila, oColumna):
if oColumna.clave == "PDT":
return self.li_analysis[fila].ms_pdt
elif oColumna.clave == "SOL":
return self.li_analysis[fila].ms_sol
else:
return self.li_analysis[fila].ms_pgn
def saveConfig(self):
self.dbop.setconfig("ENGINE", self.cb_engine.valor())
self.dbop.setconfig("ENGINE_MULTIPV", self.sb_multipv.valor())
class TabBook(QtGui.QWidget):
def __init__(self, tabsAnalisis, book, configuracion):
QtGui.QWidget.__init__(self)
self.tabsAnalisis = tabsAnalisis
self.posicion = None
self.leido = False
self.book = book
book.polyglot()
self.li_moves = []
self.siFigurines = configuracion.figurinesPGN
oColumnas = Columnas.ListaColumnas()
delegado = Delegados.EtiquetaPOS(True, siLineas=False) if self.siFigurines else None
for x in range(20):
oColumnas.nueva(x, "", 80, siCentrado=True, edicion = delegado)
self.grid_moves = Grid.Grid(self, oColumnas, siSelecFilas=True, siCabeceraMovible=False, siCabeceraVisible=False)
self.grid_moves.tipoLetra(puntos=configuracion.puntosPGN)
self.grid_moves.ponAltoFila(configuracion.altoFilaPGN)
ly = Colocacion.V().control(self.grid_moves).margen(3)
self.setLayout(ly)
def gridNumDatos(self, grid):
return len(self.li_moves)
def gridDato(self, grid, fila, oColumna):
mv = self.li_moves[fila]
li = mv.dato
key = int(oColumna.clave)
pgn = li[key]
if self.siFigurines:
siBlancas = " w " in mv.fen
return pgn, siBlancas, None, None, None, None, False, True
else:
return pgn
def gridDobleClick(self, grid, fila, oColumna):
self.lee_subnivel(fila)
self.grid_moves.refresh()
def gridBotonDerecho(self, grid, fila, columna, modificadores):
self.borra_subnivel(fila)
self.grid_moves.refresh()
def setData(self, posicion):
self.posicion = posicion
self.start()
def borra_subnivel(self, fila):
alm = self.li_moves[fila]
nv = alm.nivel
if nv == 0:
return
li = []
for x in range(fila, 0, -1):
alm1 = self.li_moves[x]
if alm1.nivel < nv:
break
li.append(x)
for x in range(fila+1, len(self.li_moves)):
alm1 = self.li_moves[x]
if alm1.nivel < nv:
break
li.append(x)
li.sort(reverse=True)
for x in li:
del self.li_moves[x]
def lee_subnivel(self, fila):
alm_base = self.li_moves[fila]
if alm_base.nivel >= 17:
return
LCEngine.setFen(alm_base.fen)
if LCEngine.movePV(alm_base.desde, alm_base.hasta, alm_base.coronacion):
fen = LCEngine.getFen()
for alm in self.book.almListaJugadas(fen):
nv = alm.nivel = alm_base.nivel + 1
alm.dato = [""] * 20
alm.dato[nv] = alm.pgn
alm.dato[nv+1] = alm.porc
alm.dato[nv+2] = "%d" % alm.weight
fila += 1
self.li_moves.insert(fila, alm)
def lee(self):
if not self.leido and self.posicion:
fen = self.posicion.fen()
self.li_moves = self.book.almListaJugadas(fen)
for alm in self.li_moves:
alm.nivel = 0
alm.dato = [""]*20
alm.dato[0] = alm.pgn
alm.dato[1] = alm.porc
alm.dato[2] = "%d" % alm.weight
self.leido = True
def start(self):
self.leido = False
self.lee()
self.grid_moves.refresh()
def stop(self):
pass
class TabDatabase(QtGui.QWidget):
def __init__(self, tabsAnalisis, procesador, dbstat):
QtGui.QWidget.__init__(self)
self.tabsAnalisis = tabsAnalisis
self.pv = None
self.bookGuide = self
self.dbstat = dbstat
self.wsummary = WBG_Summary.WSummaryBase(procesador, dbstat)
layout = Colocacion.H().control(self.wsummary)
self.setLayout(layout)
def setData(self, pv):
self.pv = pv
self.wsummary.actualizaPV(self.pv)
def start(self):
self.wsummary.actualizaPV(self.pv)
def stop(self):
self.dbstat.close()
class TreeMoves(QtGui.QTreeWidget):
def __init__(self, owner):
QtGui.QTreeWidget.__init__(self, owner)
self.owner = owner
def mousePressEvent(self, event):
QtGui.QTreeWidget.mousePressEvent(self, event)
self.resizeColumnToContents(0)
self.owner.seleccionado()
class TabTree(QtGui.QWidget):
def __init__(self, tabsAnalisis, configuracion):
QtGui.QWidget.__init__(self)
self.tabsAnalisis = tabsAnalisis
self.tree = TreeMoves(self)
self.tree.setAlternatingRowColors(True)
self.tree.setIndentation(24)
self.tree.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.tree.customContextMenuRequested.connect(self.menuContexto)
self.tree.setStyleSheet("selection-background-color: #F1D369; selection-color: #000000;")
self.tree.setFont(Controles.TipoLetra(puntos=configuracion.puntosPGN))
self.tree.setHeaderLabels((_("Moves"), _("Opening")))
bt_act = Controles.PB(self, _("Update"), self.bt_update, plano=False).ponIcono(Iconos.Pelicula_Seguir(), 16)
self.lb_analisis = Controles.LB(self, "").ponFondoN("#C9D2D7").ponTipoLetra(puntos=configuracion.puntosPGN)
ly_act = Colocacion.H().control(bt_act).control(self.lb_analisis).relleno(1)
layout = Colocacion.V().otro(ly_act).control(self.tree)
self.setLayout(layout)
self.dicItems = {}
def seleccionado(self):
item = self.tree.currentItem()
if item:
data_item = self.dicItems[str(item)]
self.lb_analisis.ponTexto(data_item.game())
lipv = data_item.listaPV()
self.tabsAnalisis.panelOpening.goto_next_lipv(lipv)
self.tree.resizeColumnToContents(0)
def bt_update(self):
self.tree.clear()
dbop = self.tabsAnalisis.dbop
levelbase = len(dbop.basePV.split(" "))
def haz(trdata, iparent, nivel):
for move, hijo in trdata.dicHijos.iteritems():
item = QtGui.QTreeWidgetItem(iparent)
item.setText(0, hijo.pgn)
item.setText(1, hijo.opening)
hijo.item = item
if nivel < (levelbase + 1):
item.setExpanded(True)
self.dicItems[str(item)] = hijo
haz(hijo, item, nivel+1)
self.tree_data = self.tabsAnalisis.dbop.totree()
haz(self.tree_data, self.tree, 1)
self.tree.resizeColumnToContents(0)
self.lb_analisis.ponTexto("")
def start(self):
if len(self.dicItems) == 0:
self.bt_update()
def stop(self):
pass
def setData(self, data):
pass
def menuContexto(self, position):
item = self.tree.currentItem()
if not item:
return
menu = QTVarios.LCMenu(self)
menu1 = menu.submenu(_("Expand"), Iconos.Mas22())
menu1.opcion("expandall", _("All"), Iconos.PuntoVerde())
menu1.separador()
menu1.opcion("expandthis", _("This branch"), Iconos.PuntoAmarillo())
menu.separador()
menu1 = menu.submenu(_("Collapse"), Iconos.Menos22())
menu1.opcion("collapseall", _("All"), Iconos.PuntoVerde())
menu1.separador()
menu1.opcion("collapsethis", _("This branch"), Iconos.PuntoAmarillo())
resp = menu.lanza()
if resp:
if resp == "expandthis":
quien, siExpand = item, True
elif resp == "expandall":
quien, siExpand = None, True
elif resp == "collapsethis":
quien, siExpand = item, False
elif resp == "collapseall":
quien, siExpand = None, False
def work(data):
item = data.item
if item:
item.setExpanded(siExpand)
for uno, datauno in data.dicHijos.iteritems():
work(datauno)
data = self.dicItems[str(quien)] if quien else self.tree_data
work(data)
self.tree.resizeColumnToContents(0)
class TabsAnalisis(QtGui.QWidget):
def __init__(self, panelOpening, procesador, configuracion):
QtGui.QWidget.__init__(self)
self.panelOpening = panelOpening
self.dbop = panelOpening.dbop
self.procesador = procesador
self.configuracion = configuracion
self.partida = None
self.njg = None
self.tabtree = TabTree(self, configuracion)
self.tabengine = TabEngine(self, procesador, configuracion)
self.liTabs = [("engine", self.tabengine), ("tree", self.tabtree),]
self.tabActive = 0
self.tabs = Controles.Tab(panelOpening)
self.tabs.ponTipoLetra(puntos=self.configuracion.puntosPGN)
self.tabs.setTabIcon(0, Iconos.Motor())
self.tabs.nuevaTab(self.tabengine, _("Engine"))
self.tabs.nuevaTab(self.tabtree, _("Tree"))
self.tabs.setTabIcon(1, Iconos.Arbol())
self.tabs.dispatchChange(self.tabChanged)
tabButton = QtGui.QToolButton(self)
tabButton.setIcon(Iconos.Nuevo())
tabButton.clicked.connect(self.creaTab)
li = [(_("Analysis of next move"), True), (_("Analysis of current move"), False)]
self.cb_nextmove = Controles.CB(self, li, True).capturaCambiado(self.changedNextMove)
corner_widget = QtGui.QWidget(self)
lyCorner = Colocacion.H().control(self.cb_nextmove).control(tabButton).margen(0)
corner_widget.setLayout(lyCorner)
self.tabs.setCornerWidget(corner_widget)
self.tabs.setTabsClosable(True)
self.tabs.tabCloseRequested.connect(self.tabCloseRequested)
layout = Colocacion.V()
layout.control(self.tabs).margen(0)
self.setLayout(layout)
def changedNextMove(self):
if self.partida:
self.setPosicion(self.partida, self.njg)
def tabChanged(self, ntab):
self.tabActive = ntab
if ntab > 0:
tipo, wtab = self.liTabs[ntab]
wtab.start()
def tabCloseRequested(self, ntab):
tipo, wtab = self.liTabs[ntab]
wtab.stop()
if ntab > 1:
del self.liTabs[ntab]
self.tabs.removeTab(ntab)
del wtab
def creaTab(self):
menu = QTVarios.LCMenu(self)
menu.opcion("book", _("Polyglot book"), Iconos.Libros())
menu.separador()
menu.opcion("dbase", _("Database"), Iconos.Database())
# menu.separador()
# menu.opcion("tree", _("Tree"), Iconos.Arbol())
resp = menu.lanza()
pos = 0
if resp == "book":
book = self.seleccionaLibro()
if book:
tabbook = TabBook(self, book, self.configuracion)
self.liTabs.append((resp, tabbook))
pos = len(self.liTabs)-1
self.tabs.nuevaTab(tabbook, book.nombre, pos)
self.tabs.setTabIcon(pos, Iconos.Libros())
self.setPosicion(self.partida, self.njg, pos)
# elif resp == "tree":
# tabtree = TabTree(self, self.configuracion)
# self.liTabs.append(("tree", tabtree))
# pos = len(self.liTabs)-1
# self.tabs.nuevaTab(tabtree, _("Tree"), pos)
# self.tabs.setTabIcon(pos, Iconos.Arbol())
# tabtree.bt_update()
elif resp == "dbase":
nomfichgames = QTVarios.selectDB(self, self.configuracion, False, True)
if nomfichgames:
dbSTAT = DBgames.TreeSTAT(nomfichgames + "_s1")
tabdb = TabDatabase(self, self.procesador, dbSTAT)
self.liTabs.append((resp, tabdb))
pos = len(self.liTabs) - 1
self.setPosicion(self.partida, self.njg, pos)
nombre = os.path.basename(nomfichgames)[:-4]
self.tabs.nuevaTab(tabdb, nombre, pos)
self.tabs.setTabIcon(pos, Iconos.Database())
self.tabs.activa(pos)
def setPosicion(self, partida, njg, numTab=None):
if partida is None:
return
jg = partida.jugada(njg)
self.partida = partida
self.njg = njg
next = self.cb_nextmove.valor()
if jg:
if njg == 0:
pv = partida.pv_hasta(njg) if next else ""
else:
pv = partida.pv_hasta(njg if next else njg - 1)
posicion = jg.posicion if next else jg.posicionBase
else:
posicion = ControlPosicion.ControlPosicion().posInicial()
pv = ""
for ntab, (tipo, tab) in enumerate(self.liTabs):
if ntab == 0:
p = Partida.Partida()
p.leerPV(pv)
tab.setData(p.pgnHTML(siFigurines=self.configuracion.figurinesPGN), posicion)
else:
data = pv if tipo == "dbase" else posicion
if numTab is not None:
if ntab != numTab:
continue
if ntab > 1:
tab.setData(data)
tab.start()
def seleccionaLibro(self):
listaLibros = Books.ListaLibros()
listaLibros.recuperaVar(self.configuracion.ficheroBooks)
listaLibros.comprueba()
menu = QTVarios.LCMenu(self)
rondo = QTVarios.rondoPuntos()
for book in listaLibros.lista:
menu.opcion(("x", book), book.nombre, rondo.otro())
menu.separador()
menu.opcion(("n", None), _("Install new book"), Iconos.Nuevo())
resp = menu.lanza()
if resp:
orden, book = resp
if orden == "x":
pass
elif orden == "n":
fbin = QTUtil2.leeFichero(self, listaLibros.path, "bin", titulo=_("Polyglot book"))
if fbin:
listaLibros.path = os.path.dirname(fbin)
nombre = os.path.basename(fbin)[:-4]
book = Books.Libro("P", nombre, fbin, True)
listaLibros.nuevo(book)
listaLibros.guardaVar(self.configuracion.ficheroBooks)
else:
book = None
return book
def saveConfig(self):
for tipo, wtab in self.liTabs:
if tipo == "engine":
wtab.saveConfig()
| lukasmonk/lucaschess | Code/QT/POLAnalisis.py | Python | gpl-2.0 | 21,503 |
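The widgets in the file above follow lucaschess's grid convention: a `Grid` polls its owner through `gridNumDatos(grid)` for the row count and `gridDato(grid, fila, oColumna)` for each cell. A minimal stand-in (`FakeGrid` and `DemoOwner` are hypothetical, not part of lucaschess) illustrating that contract outside Qt:

```python
class Col(object):
    def __init__(self, clave):
        self.clave = clave  # mimics the column objects handed to the owner

class FakeGrid(object):
    """Hypothetical stand-in for Code.QT.Grid showing the callback contract."""
    def __init__(self, owner, claves):
        self.owner = owner
        self.cols = [Col(c) for c in claves]

    def refresh(self):
        # The grid never stores data; it asks its owner row by row.
        for fila in range(self.owner.gridNumDatos(self)):
            print([self.owner.gridDato(self, fila, c) for c in self.cols])

class DemoOwner(object):
    rows = [("+0.35", "Nf3", "1. Nf3 d5 2. g3")]
    def gridNumDatos(self, grid):
        return len(self.rows)
    def gridDato(self, grid, fila, oColumna):
        keys = {"PDT": 0, "SOL": 1, "PGN": 2}
        return self.rows[fila][keys[oColumna.clave]]

FakeGrid(DemoOwner(), ["PDT", "SOL", "PGN"]).refresh()
```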
"""Django module for the OS2datascanner project."""
| os2webscanner/os2webscanner | django-os2webscanner/os2webscanner/__init__.py | Python | mpl-2.0 | 53 |
from nose.tools import ok_, eq_
from crashstats.symbols import utils
from crashstats.base.tests.testbase import TestCase
from .base import (
ZIP_FILE,
TAR_FILE,
TGZ_FILE,
TARGZ_FILE
)
class TestUtils(TestCase):
def test_preview_zip(self):
with open(ZIP_FILE) as f:
result = utils.preview_archive_content(f, 'application/zip')
# the sample.zip file contains...
ok_('south-africa-flag.jpeg' in result)
# and it's 69183 bytes
ok_('69183' in result)
def test_preview_tar(self):
with open(TAR_FILE) as f:
result = utils.preview_archive_content(f, 'application/x-tar')
# the sample.tar file contains...
ok_('south-africa-flag.jpeg' in result)
# and it's 69183 bytes
ok_('69183' in result)
def test_preview_tgz(self):
with open(TGZ_FILE) as f:
result = utils.preview_archive_content(f, 'application/x-gzip')
# the sample.tgz file contains...
ok_('south-africa-flag.jpeg' in result)
# and it's 69183 bytes
ok_('69183' in result)
def test_preview_targz(self):
with open(TARGZ_FILE) as f:
result = utils.preview_archive_content(f, 'application/x-gzip')
# the sample.tar.gz file contains...
ok_('south-africa-flag.jpeg' in result)
# and it's 69183 bytes
ok_('69183' in result)
def test_filename_to_mimetype(self):
function = utils.filename_to_mimetype
eq_(function(ZIP_FILE), 'application/zip')
eq_(function(TGZ_FILE), 'application/x-gzip')
eq_(function(TARGZ_FILE), 'application/x-gzip')
eq_(function(TAR_FILE), 'application/x-tar')
| bsmedberg/socorro | webapp-django/crashstats/symbols/tests/test_utils.py | Python | mpl-2.0 | 1,780 |
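The `crashstats.symbols.utils` module under test is not shown here. A plausible minimal `filename_to_mimetype` consistent with the four assertions above (an assumption, not socorro's actual implementation; the fallback value is invented):

```python
def filename_to_mimetype(filename):
    filename = filename.lower()
    # Check the compound suffixes before the bare '.tar'.
    if filename.endswith(('.tgz', '.tar.gz')):
        return 'application/x-gzip'
    if filename.endswith('.tar'):
        return 'application/x-tar'
    if filename.endswith('.zip'):
        return 'application/zip'
    return 'application/octet-stream'  # assumed default
```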
#!/bin/env python
# Copyright 2015 Brno University of Technology (author: Karel Vesely)
# Apache 2.0
import sys,operator
# Append Levenshtein alignment of 'hypothesis' and 'reference' into 'CTM':
# (i.e. the output of 'align-text' post-processed by 'wer_per_utt_details.pl')
# The tags in the appended column are:
# 'C' = correct
# 'S' = substitution
# 'I' = insertion
# 'U' = unknown (not part of scored segment)
if len(sys.argv) != 4:
print 'Usage: %s eval-in ctm-in ctm-eval-out' % __file__
sys.exit(1)
dummy, eval_in, ctm_in, ctm_eval_out = sys.argv
if ctm_eval_out == '-': ctm_eval_out = '/dev/stdout'
# Read the evaluation,
eval_vec = dict()
with open(eval_in, 'r') as f:
while True:
# Reading 4 lines encoding one utterance,
ref = f.readline()
hyp = f.readline()
op = f.readline()
csid = f.readline()
if not ref: break
# Parse the input,
utt,tag,hyp_vec = hyp.split(' ',2)
assert(tag == 'hyp')
utt,tag,op_vec = op.split(' ',2)
assert(tag == 'op')
hyp_vec = hyp_vec.split()
op_vec = op_vec.split()
        # Create the eval vector with symbols 'C', 'S', 'I',
assert(utt not in eval_vec)
eval_vec[utt] = []
for op,hyp in zip(op_vec, hyp_vec):
if hyp != '<eps>': eval_vec[utt].append(op)
# Load the 'ctm' into dictionary,
ctm = dict()
with open(ctm_in) as f:
for l in f:
utt, ch, beg, dur, wrd, conf = l.split()
if not utt in ctm: ctm[utt] = []
ctm[utt].append((utt, ch, float(beg), float(dur), wrd, float(conf)))
# Build the 'ctm' with 'eval' column added,
ctm_eval = []
for utt,ctm_part in ctm.iteritems():
ctm_part.sort(key = operator.itemgetter(2)) # Sort by 'beg' time,
# extending the 'tuple' by '+':
merged = [ tup + (evl,) for tup,evl in zip(ctm_part,eval_vec[utt]) ]
ctm_eval.extend(merged)
# Sort again,
ctm_eval.sort(key = operator.itemgetter(0,1,2))
# Store,
with open(ctm_eval_out,'w') as f:
for tup in ctm_eval:
f.write('%s %s %f %f %s %f %s\n' % tup)
| StevenLOL/aicyber_semeval_2016_ivector | System_2/steps/conf/append_eval_to_ctm.py | Python | gpl-3.0 | 1,989 |
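For reference, the merge near the end of the script extends each CTM tuple by one eval tag via tuple concatenation; a made-up example of the resulting output line:

```python
# A ctm row (utt, ch, beg, dur, wrd, conf) gains one of the tags
# 'C', 'S', 'I' (or 'U') documented in the header comments.
tup = ('utt_001', '1', 0.25, 0.48, 'hello', 0.97)
merged = tup + ('C',)
print('%s %s %f %f %s %f %s' % merged)
# -> utt_001 1 0.250000 0.480000 hello 0.970000 C
```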
#######################################################################
# Copyright (C) 2017 Shangtong Zhang(zhangshangtong.cpp@gmail.com) #
# Permission given to modify the code as long as you keep this #
# declaration at the top #
#######################################################################
import torch
import numpy as np
from ..utils import *
import torch.multiprocessing as mp
import pickle  # used by save()/load() below
from collections import deque
from skimage.io import imsave
class BaseAgent:
def __init__(self, config):
self.config = config
self.logger = get_logger(tag=config.tag, log_level=config.log_level)
self.task_ind = 0
def close(self):
close_obj(self.task)
def save(self, filename):
torch.save(self.network.state_dict(), '%s.model' % (filename))
with open('%s.stats' % (filename), 'wb') as f:
pickle.dump(self.config.state_normalizer.state_dict(), f)
def load(self, filename):
state_dict = torch.load('%s.model' % filename, map_location=lambda storage, loc: storage)
self.network.load_state_dict(state_dict)
with open('%s.stats' % (filename), 'rb') as f:
self.config.state_normalizer.load_state_dict(pickle.load(f))
def eval_step(self, state):
raise NotImplementedError
def eval_episode(self):
env = self.config.eval_env
state = env.reset()
while True:
action = self.eval_step(state)
state, reward, done, info = env.step(action)
ret = info[0]['episodic_return']
if ret is not None:
break
return ret
def eval_episodes(self):
episodic_returns = []
for ep in range(self.config.eval_episodes):
total_rewards = self.eval_episode()
episodic_returns.append(np.sum(total_rewards))
self.logger.info('steps %d, episodic_return_test %.2f(%.2f)' % (
self.total_steps, np.mean(episodic_returns), np.std(episodic_returns) / np.sqrt(len(episodic_returns))
))
self.logger.add_scalar('episodic_return_test', np.mean(episodic_returns), self.total_steps)
return {
'episodic_return_test': np.mean(episodic_returns),
}
def record_online_return(self, info, offset=0):
if isinstance(info, dict):
ret = info['episodic_return']
if ret is not None:
self.logger.add_scalar('episodic_return_train', ret, self.total_steps + offset)
self.logger.info('steps %d, episodic_return_train %s' % (self.total_steps + offset, ret))
elif isinstance(info, tuple):
for i, info_ in enumerate(info):
self.record_online_return(info_, i)
else:
raise NotImplementedError
def switch_task(self):
config = self.config
if not config.tasks:
return
segs = np.linspace(0, config.max_steps, len(config.tasks) + 1)
if self.total_steps > segs[self.task_ind + 1]:
self.task_ind += 1
self.task = config.tasks[self.task_ind]
self.states = self.task.reset()
self.states = config.state_normalizer(self.states)
def record_episode(self, dir, env):
mkdir(dir)
steps = 0
state = env.reset()
while True:
self.record_obs(env, dir, steps)
action = self.record_step(state)
state, reward, done, info = env.step(action)
ret = info[0]['episodic_return']
steps += 1
if ret is not None:
break
def record_step(self, state):
raise NotImplementedError
# For DMControl
def record_obs(self, env, dir, steps):
env = env.env.envs[0]
obs = env.render(mode='rgb_array')
imsave('%s/%04d.png' % (dir, steps), obs)
class BaseActor(mp.Process):
STEP = 0
RESET = 1
EXIT = 2
SPECS = 3
NETWORK = 4
CACHE = 5
def __init__(self, config):
mp.Process.__init__(self)
self.config = config
self.__pipe, self.__worker_pipe = mp.Pipe()
self._state = None
self._task = None
self._network = None
self._total_steps = 0
self.__cache_len = 2
if not config.async_actor:
self.start = lambda: None
self.step = self._sample
self.close = lambda: None
self._set_up()
self._task = config.task_fn()
def _sample(self):
transitions = []
for _ in range(self.config.sgd_update_frequency):
transition = self._transition()
if transition is not None:
transitions.append(transition)
return transitions
def run(self):
self._set_up()
config = self.config
self._task = config.task_fn()
cache = deque([], maxlen=2)
while True:
op, data = self.__worker_pipe.recv()
if op == self.STEP:
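                # Double buffering: keep up to two pre-sampled batches so
                # one is always ready when the learner asks; after handing
                # over the oldest batch, immediately sample a replacement.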
if not len(cache):
cache.append(self._sample())
cache.append(self._sample())
self.__worker_pipe.send(cache.popleft())
cache.append(self._sample())
elif op == self.EXIT:
self.__worker_pipe.close()
return
elif op == self.NETWORK:
self._network = data
else:
raise NotImplementedError
def _transition(self):
raise NotImplementedError
def _set_up(self):
pass
def step(self):
self.__pipe.send([self.STEP, None])
return self.__pipe.recv()
def close(self):
self.__pipe.send([self.EXIT, None])
self.__pipe.close()
def set_network(self, net):
if not self.config.async_actor:
self._network = net
else:
self.__pipe.send([self.NETWORK, net])
| ShangtongZhang/DeepRL | deep_rl/agent/BaseAgent.py | Python | mit | 5,970 |
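BaseActor's parent/worker split above is a multiprocessing.Pipe request-reply loop: the parent sends an opcode, the worker replies or exits. A minimal sketch of the same pattern, independent of the RL code:

```python
import multiprocessing as mp

STEP, EXIT = 0, 1

def worker(pipe):
    n = 0
    while True:
        op, _ = pipe.recv()
        if op == STEP:
            n += 1
            pipe.send(n)  # reply to the parent's request
        elif op == EXIT:
            pipe.close()
            return

if __name__ == '__main__':
    parent, child = mp.Pipe()
    proc = mp.Process(target=worker, args=(child,))
    proc.start()
    parent.send([STEP, None])
    print(parent.recv())  # -> 1
    parent.send([EXIT, None])
    proc.join()
```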
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016-2018 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import contextlib
import glob
import logging
import os
import re
import shutil
import subprocess
import tempfile
from typing import Dict, FrozenSet, List, Optional, Set, Sequence, Tuple, Union
import elftools.elf.elffile
from elftools.construct import ConstructError
import elftools.common.exceptions
from pkg_resources import parse_version
from snapcraft import file_utils
from snapcraft.internal import common, errors, repo
logger = logging.getLogger(__name__)
def _ldd_resolve(soname: str, soname_path: str) -> Tuple[str, str]:
logger.debug(f"_ldd_resolve: {soname!r} {soname_path!r}")
# If found, resolve the path components. We can safely determine that
# ldd found the match if it returns an absolute path. For additional
# safety, check that it exists. See example ldd output in ldd() below.
# If not found, ldd should use a string like "not found", but we do not
# really care what that string is with this approach as it has to start
# with "/" and point to a valid file.
if soname_path.startswith("/") and os.path.exists(soname_path):
abs_path = os.path.abspath(soname_path)
return soname, abs_path
# Not found, use the soname.
return soname, soname
def ldd(path: str, ld_library_paths: List[str]) -> Dict[str, str]:
"""Return a set of resolved library mappings using specified library paths.
Returns a dictionary of mappings of soname -> soname_path.
If library is not resolved, the soname itself is the soname_path.
"""
libraries: Dict[str, str] = dict()
env = os.environ.copy()
env["LD_LIBRARY_PATH"] = ":".join(ld_library_paths)
logger.debug(f"invoking ldd with ld library paths: {ld_library_paths!r}")
try:
# ldd output sample:
# linux-vdso.so.1 => (0x00007ffdc13ec000) <== ubuntu 16.04 ldd
# linux-vdso.so.1 (0x00007ffdc13ec000) <== newer ldd
# /lib64/ld-linux-x86-64.so.2 (0x00007fb3c5298000)
# libm.so.6 => /lib/x86_64-linux-gnu/libm.so.6 (0x00007fb3bef03000)
# libmissing.so.2 => not found
ldd_lines = (
subprocess.check_output(["ldd", path], env=env).decode().splitlines()
)
logger.debug(f"ldd output:\n{ldd_lines}")
except subprocess.CalledProcessError:
logger.warning("Unable to determine library dependencies for {!r}".format(path))
return libraries
for line in ldd_lines:
# First match against libraries that are found.
match = re.match(r"\t(.*) => (.*) \(0x", line)
if not match:
# Now find those not found, or not providing the address...
match = re.match(r"\t(.*) => (.*)", line)
# Ignore ld-linux, linux-vdso, etc. that don't match these regex.
# As Ubuntu 16.04's ldd provides an empty string for the found
# path (in group 2) on linux-vdso, check for this and ignore it.
# See example output above for reference.
if not match or match.group(2) == "":
continue
soname, soname_path = _ldd_resolve(match.group(1), match.group(2))
libraries[soname] = soname_path
logger.debug(f"ldd results: {libraries!r}")
return libraries
class NeededLibrary:
"""Represents an ELF library version."""
def __init__(self, *, name: str) -> None:
self.name = name
self.versions = set() # type: Set[str]
def add_version(self, version: str) -> None:
self.versions.add(version)
ElfArchitectureTuple = Tuple[str, str, str]
SonameCacheDict = Dict[Tuple[ElfArchitectureTuple, str], str]
# Old pyelftools uses byte strings for section names. Some data is
# also returned as bytes, which is handled below.
if parse_version(elftools.__version__) >= parse_version("0.24"):
_DEBUG_INFO: Union[str, bytes] = ".debug_info"
_DYNAMIC: Union[str, bytes] = ".dynamic"
_GNU_VERSION_R: Union[str, bytes] = ".gnu.version_r"
_INTERP: Union[str, bytes] = ".interp"
else:
    _DEBUG_INFO = b".debug_info"
    _DYNAMIC = b".dynamic"
    _GNU_VERSION_R = b".gnu.version_r"
    _INTERP = b".interp"
class SonameCache:
"""A cache for sonames."""
def __getitem__(self, key):
return self._soname_paths[key]
def __setitem__(self, key, item):
# Initial API error checks
if not isinstance(key, tuple):
raise EnvironmentError(
"The key for SonameCache has to be a (arch, soname) tuple."
)
if not isinstance(key[0], tuple) or len(key[0]) != 3:
raise EnvironmentError(
"The first element of the key needs to of type ElfArchitectureTuple."
)
if not isinstance(key[1], str):
raise EnvironmentError(
"The second element of the key needs to be "
"of type str representing the soname."
)
self._soname_paths[key] = item
def __contains__(self, key):
return key in self._soname_paths
def __init__(self):
"""Initialize a cache for sonames"""
self._soname_paths = dict() # type: SonameCacheDict
def reset_except_root(self, root):
"""Reset the cache values that aren't contained within root."""
new_soname_paths = dict() # type: SonameCacheDict
for key, value in self._soname_paths.items():
if value is not None and value.startswith(root):
new_soname_paths[key] = value
self._soname_paths = new_soname_paths
class Library:
"""Represents the SONAME and path to the library."""
def __init__(
self,
*,
soname: str,
soname_path: str,
search_paths: List[str],
core_base_path: Optional[str],
arch: ElfArchitectureTuple,
soname_cache: SonameCache,
) -> None:
self.soname = soname
self.soname_path = soname_path
self.search_paths = search_paths
self.core_base_path = core_base_path
self.arch = arch
self.soname_cache = soname_cache
# Resolve path, if possible.
self.path = self._crawl_for_path()
if core_base_path is not None and self.path.startswith(core_base_path):
self.in_base_snap = True
else:
self.in_base_snap = False
logger.debug(
"{soname} with original path {original_path} found on {path} in base: {in_base}".format(
soname=soname,
original_path=soname_path,
path=self.path,
in_base=self.in_base_snap,
)
)
def _update_soname_cache(self, resolved_path: str) -> None:
self.soname_cache[self.arch, self.soname] = resolved_path
def _is_valid_elf(self, resolved_path: str) -> bool:
if not os.path.exists(resolved_path) or not ElfFile.is_elf(resolved_path):
return False
try:
elf_file = ElfFile(path=resolved_path)
except errors.CorruptedElfFileError as error:
# Log if the ELF file seems corrupted.
logger.warning(error.get_brief())
return False
return elf_file.arch == self.arch
def _crawl_for_path(self) -> str:
# Speed things up and return what was already found once.
if (self.arch, self.soname) in self.soname_cache:
return self.soname_cache[self.arch, self.soname]
logger.debug("Crawling to find soname {!r}".format(self.soname))
valid_search_paths = [p for p in self.search_paths if os.path.exists(p)]
in_search_paths = any(
self.soname_path.startswith(p) for p in valid_search_paths
)
# Expedite path crawling if we have a valid elf file that lives
# inside the search paths.
if in_search_paths and self._is_valid_elf(self.soname_path):
self._update_soname_cache(self.soname_path)
return self.soname_path
for path in valid_search_paths:
for root, directories, files in os.walk(path):
if self.soname not in files:
continue
file_path = os.path.join(root, self.soname.strip("/"))
if self._is_valid_elf(file_path):
self._update_soname_cache(file_path)
return file_path
# Required for libraries on the host and the fetching mechanism.
self._update_soname_cache(self.soname_path)
return self.soname_path
# Old versions of pyelftools return bytes rather than strings for
# certain APIs. So we pass those values through this function to get
# a consistent result.
def _ensure_str(s):
if isinstance(s, bytes):
return s.decode("ascii")
assert isinstance(s, str)
return s
class ElfFile:
"""ElfFile represents and elf file on a path and its attributes."""
@classmethod
def is_elf(cls, path: str) -> bool:
if not os.path.isfile(path):
# ELF binaries are regular files
return False
with open(path, "rb") as bin_file:
return bin_file.read(4) == b"\x7fELF"
def __init__(self, *, path: str) -> None:
"""Initialize an ElfFile instance.
:param str path: path to an elf_file within a snapcraft project.
"""
self.path = path
self.dependencies = set() # type: Set[Library]
self.arch: Optional[ElfArchitectureTuple] = None
self.interp: str = ""
self.soname: str = ""
self.needed: Dict[str, NeededLibrary] = dict()
self.execstack_set: bool = False
self.is_dynamic: bool = True
self.build_id: str = ""
self.has_debug_info: bool = False
# String of elf enum type, e.g. "ET_DYN", "ET_EXEC", etc.
self.elf_type: str = "ET_NONE"
try:
self._extract_attributes()
except (UnicodeDecodeError, AttributeError, ConstructError) as exception:
raise errors.CorruptedElfFileError(path, exception)
def _extract_attributes(self) -> None: # noqa: C901
with open(self.path, "rb") as fp:
elf = elftools.elf.elffile.ELFFile(fp)
# A set of fields to identify the architecture of the ELF file:
# EI_CLASS: 32/64 bit (e.g. amd64 vs. x32)
            # EI_DATA: byte order (e.g. ppc64 vs. ppc64le)
# e_machine: instruction set (e.g. x86-64 vs. arm64)
#
# For amd64 binaries, this will evaluate to:
# ('ELFCLASS64', 'ELFDATA2LSB', 'EM_X86_64')
self.arch = (
elf.header.e_ident.EI_CLASS,
elf.header.e_ident.EI_DATA,
elf.header.e_machine,
)
for segment in elf.iter_segments():
if isinstance(segment, elftools.elf.dynamic.DynamicSegment):
self.is_dynamic = True
for tag in segment.iter_tags("DT_NEEDED"):
needed = _ensure_str(tag.needed)
self.needed[needed] = NeededLibrary(name=needed)
for tag in segment.iter_tags("DT_SONAME"):
self.soname = _ensure_str(tag.soname)
elif segment["p_type"] == "PT_GNU_STACK":
# p_flags holds the bit mask for this segment.
# See `man 5 elf`.
mode = segment["p_flags"]
if mode & elftools.elf.constants.P_FLAGS.PF_X:
self.execstack_set = True
elif isinstance(segment, elftools.elf.segments.InterpSegment):
self.interp = segment.get_interp_name()
elif isinstance(segment, elftools.elf.segments.NoteSegment):
for note in segment.iter_notes():
if note.n_name == "GNU" and note.n_type == "NT_GNU_BUILD_ID":
self.build_id = _ensure_str(note.n_desc)
# If we are processing a detached debug info file, these
# sections will be present but empty.
verneed_section = elf.get_section_by_name(_GNU_VERSION_R)
if isinstance(verneed_section, elftools.elf.gnuversions.GNUVerNeedSection):
for library, versions in verneed_section.iter_versions():
library_name = _ensure_str(library.name)
# If the ELF file only references weak symbols
# from a library, it may be absent from DT_NEEDED
# but still have an entry in .gnu.version_r for
# symbol versions.
if library_name not in self.needed:
continue
lib = self.needed[library_name]
for version in versions:
lib.add_version(_ensure_str(version.name))
debug_info_section = elf.get_section_by_name(_DEBUG_INFO)
self.has_debug_info = (
debug_info_section is not None
and debug_info_section.header.sh_type != "SHT_NOBITS"
)
self.elf_type = elf.header["e_type"]
def is_linker_compatible(self, *, linker_version: str) -> bool:
"""Determines if linker will work given the required glibc version."""
version_required = self.get_required_glibc()
r = parse_version(version_required) <= parse_version(linker_version)
logger.debug(
"Checking if linker {!r} will work with "
"GLIBC_{} required by {!r}: {!r}".format(
linker_version, version_required, self.path, r
)
)
return r
def get_required_glibc(self) -> str:
"""Returns the required glibc version for this ELF file."""
with contextlib.suppress(AttributeError):
return self._required_glibc # type: ignore
version_required = ""
for lib in self.needed.values():
for version in lib.versions:
if not version.startswith("GLIBC_"):
continue
version = version[6:]
if parse_version(version) > parse_version(version_required):
version_required = version
self._required_glibc = version_required
return version_required
def load_dependencies(
self,
root_path: str,
core_base_path: Optional[str],
content_dirs: Set[str],
arch_triplet: str,
        soname_cache: Optional[SonameCache] = None,
) -> Set[str]:
"""Load the set of libraries that are needed to satisfy elf's runtime.
This may include libraries contained within the project.
The object's .dependencies attribute is set after loading.
:param str root_path: the root path to search for missing dependencies.
:param str core_base_path: the core base path to search for missing
dependencies.
        :param SonameCache soname_cache: a cache of previously searched
            dependencies.
:returns: a set of string with paths to the library dependencies of
elf.
"""
if soname_cache is None:
soname_cache = SonameCache()
logger.debug("Getting dependencies for {!r}".format(self.path))
search_paths = [root_path, *content_dirs]
if core_base_path is not None:
search_paths.append(core_base_path)
ld_library_paths: List[str] = list()
for path in search_paths:
ld_library_paths.extend(common.get_library_paths(path, arch_triplet))
libraries = ldd(self.path, ld_library_paths)
for soname, soname_path in libraries.items():
if self.arch is None:
raise RuntimeError("failed to parse architecture")
self.dependencies.add(
Library(
soname=soname,
soname_path=soname_path,
search_paths=search_paths,
core_base_path=core_base_path,
arch=self.arch,
soname_cache=soname_cache,
)
)
# Return the set of dependency paths, minus those found in the base.
dependencies: Set[str] = set()
for library in self.dependencies:
if not library.in_base_snap:
dependencies.add(library.path)
return dependencies
class Patcher:
"""Patcher holds the necessary logic to patch elf files."""
def __init__(
self, *, dynamic_linker: str, root_path: str, preferred_patchelf_path=None
) -> None:
"""Create a Patcher instance.
:param str dynamic_linker: the path to the dynamic linker to set the
elf file to.
:param str root_path: the base path for the snap to determine
if use of $ORIGIN is possible.
:param str preferred_patchelf_path: patch the necessary elf_files with
this patchelf.
"""
self._dynamic_linker = dynamic_linker
self._root_path = root_path
if preferred_patchelf_path:
self._patchelf_cmd = preferred_patchelf_path
else:
self._patchelf_cmd = file_utils.get_tool_path("patchelf")
self._strip_cmd = file_utils.get_tool_path("strip")
def patch(self, *, elf_file: ElfFile) -> None:
"""Patch elf_file with the Patcher instance configuration.
If the ELF is executable, patch it to use the configured linker.
If the ELF has dependencies (DT_NEEDED), set an rpath to them.
:param ElfFile elf: a data object representing an elf file and its
relevant attributes.
:raises snapcraft.internal.errors.PatcherError:
raised when the elf_file cannot be patched.
"""
patchelf_args = []
if elf_file.interp:
patchelf_args.extend(["--set-interpreter", self._dynamic_linker])
if elf_file.dependencies:
rpath = self._get_rpath(elf_file)
# Due to https://github.com/NixOS/patchelf/issues/94 we need
# to first clear the current rpath
self._run_patchelf(
patchelf_args=["--remove-rpath"], elf_file_path=elf_file.path
)
# Parameters:
# --force-rpath: use RPATH instead of RUNPATH.
# --shrink-rpath: will remove unneeded entries, with the
# side effect of preferring host libraries
# so we simply do not use it.
# --set-rpath: set the RPATH to the colon separated argument.
patchelf_args.extend(["--force-rpath", "--set-rpath", rpath])
# no patchelf_args means there is nothing to do.
if not patchelf_args:
return
self._run_patchelf(patchelf_args=patchelf_args, elf_file_path=elf_file.path)
def _run_patchelf(self, *, patchelf_args: List[str], elf_file_path: str) -> None:
# Run patchelf on a copy of the primed file and replace it
# after it is successful. This allows us to break the potential
# hard link created when migrating the file across the steps of
# the part.
with tempfile.NamedTemporaryFile() as temp_file:
shutil.copy2(elf_file_path, temp_file.name)
cmd = [self._patchelf_cmd] + patchelf_args + [temp_file.name]
try:
subprocess.check_call(cmd)
# There is no need to catch FileNotFoundError as patchelf should be
# bundled with snapcraft which means its lack of existence is a
# "packager" error.
except subprocess.CalledProcessError as call_error:
raise errors.PatcherGenericError(
elf_file=elf_file_path, process_exception=call_error
)
# We unlink to break the potential hard link
os.unlink(elf_file_path)
shutil.copy2(temp_file.name, elf_file_path)
def _get_existing_rpath(self, elf_file_path):
output = subprocess.check_output(
[self._patchelf_cmd, "--print-rpath", elf_file_path]
)
return output.decode().strip().split(":")
def _get_rpath(self, elf_file) -> str:
origin_rpaths = list() # type: List[str]
base_rpaths = set() # type: Set[str]
existing_rpaths = self._get_existing_rpath(elf_file.path)
for dependency in elf_file.dependencies:
if dependency.path:
if dependency.in_base_snap:
base_rpaths.add(os.path.dirname(dependency.path))
elif dependency.path.startswith(self._root_path):
rel_library_path = os.path.relpath(dependency.path, elf_file.path)
rel_library_path_dir = os.path.dirname(rel_library_path)
                    # Return the dirname, with the first ".." replaced
                    # with $ORIGIN.
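                    # Illustrative example: an ELF at <root>/bin/app needing
                    # <root>/usr/lib/libfoo.so yields the relative dir
                    # "../../usr/lib" (relpath is computed against the file
                    # path, hence one extra ".."), which becomes
                    # "$ORIGIN/../usr/lib", i.e. <root>/usr/lib at runtime.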
origin_rpath = rel_library_path_dir.replace("..", "$ORIGIN", 1)
if origin_rpath not in origin_rpaths:
origin_rpaths.append(origin_rpath)
if existing_rpaths:
# Only keep those that mention origin and are not already in our
# bundle.
existing_rpaths = [
r for r in existing_rpaths if "$ORIGIN" in r and r not in origin_rpaths
]
origin_rpaths = existing_rpaths + origin_rpaths
origin_paths = ":".join((r for r in origin_rpaths if r))
core_base_rpaths = ":".join(base_rpaths)
if origin_paths and core_base_rpaths:
return "{}:{}".format(origin_paths, core_base_rpaths)
elif origin_paths and not core_base_rpaths:
return origin_paths
else:
return core_base_rpaths
def determine_ld_library_path(root: str) -> List[str]:
"""Determine additional library paths needed for the linker loader.
This is a workaround until full library searching is implemented which
works by searching for ld.so.conf in specific hard coded locations
within root.
:param root str: the root directory to search for specific ld.so.conf
entries.
:returns: a list of strings of library paths where relevant libraries
can be found within root.
"""
# If more ld.so.conf files need to be supported, add them here.
ld_config_globs = {"{}/usr/lib/*/mesa*/ld.so.conf".format(root)}
ld_library_paths = []
for this_glob in ld_config_globs:
for ld_conf_file in glob.glob(this_glob):
ld_library_paths.extend(_extract_ld_library_paths(ld_conf_file))
return [root + path for path in ld_library_paths]
def _extract_ld_library_paths(ld_conf_file: str) -> List[str]:
# From the ldconfig manpage, paths can be colon-, space-, tab-, newline-,
# or comma-separated.
path_delimiters = re.compile(r"[:\s,]")
comments = re.compile(r"#.*$")
paths = []
with open(ld_conf_file, "r") as f:
for line in f:
# Remove comments from line
line = comments.sub("", line).strip()
if line:
paths.extend(path_delimiters.split(line))
return paths
_libraries = None
def get_elf_files(root: str, file_list: Sequence[str]) -> FrozenSet[ElfFile]:
"""Return a frozenset of elf files from file_list prepended with root.
:param str root: the root directory from where the file_list is generated.
    :param file_list: a list of files in root.
    :returns: a frozenset of ElfFile objects.
"""
elf_files = set() # type: Set[ElfFile]
for part_file in file_list:
# Filter out object (*.o) files-- we only care about binaries.
if part_file.endswith(".o"):
continue
# No need to crawl links-- the original should be here, too.
path = os.path.join(root, part_file) # type: str
if os.path.islink(path):
logger.debug("Skipped link {!r} while finding dependencies".format(path))
continue
# Ignore if file does not have ELF header.
if not ElfFile.is_elf(path):
continue
try:
elf_file = ElfFile(path=path)
except elftools.common.exceptions.ELFError:
# Ignore invalid ELF files.
continue
except errors.CorruptedElfFileError as exception:
# Log if the ELF file seems corrupted
logger.warning(exception.get_brief())
continue
# If ELF has dynamic symbols, add it.
if elf_file.needed:
elf_files.add(elf_file)
return frozenset(elf_files)
def _get_dynamic_linker(library_list: List[str]) -> str:
"""Return the dynamic linker from library_list."""
regex = re.compile(r"(?P<dynamic_linker>ld-[\d.]+.so)$")
for library in library_list:
m = regex.search(os.path.basename(library))
if m:
return library
raise RuntimeError(
"The format for the linker should be of the form "
"<root>/ld-<X>.<Y>.so. There are no matches for the "
"current libc6 package"
)
def find_linker(*, root_path: str, snap_base_path: str) -> str:
"""Find and return the dynamic linker that would be seen at runtime.
:param str root_path: the root path of a snap tree.
:param str snap_base_path: absolute path to the snap once installed to
setup proper rpaths.
:returns: the path to the dynamic linker to use
"""
# We assume the current system will satisfy the GLIBC requirement,
# get the current libc6 libraries (which includes the linker)
libc6_libraries_list = repo.Repo.get_package_libraries("libc6")
# For security reasons, we do not want to automatically pull in
# libraries but expect them to be consciously brought in by stage-packages
# instead.
libc6_libraries_paths = [
os.path.join(root_path, l[1:]) for l in libc6_libraries_list
]
dynamic_linker = _get_dynamic_linker(libc6_libraries_paths)
# Get the path to the "would be" dynamic linker when this snap is
# installed. Strip the root_path from the retrieved dynamic_linker
# variables + the leading `/` so that os.path.join can perform the
# proper join with snap_base_path.
dynamic_linker_path = os.path.join(
snap_base_path, dynamic_linker[len(root_path) + 1 :]
)
return dynamic_linker_path
| ubuntu-core/snapcraft | snapcraft/internal/elf.py | Python | gpl-3.0 | 27,232 |
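`ElfFile.is_elf` above keys off the four-byte ELF magic; a tiny self-contained check in the same spirit:

```python
import os

ELF_MAGIC = b"\x7fELF"

def looks_like_elf(path: str) -> bool:
    # A regular file whose first four bytes are the ELF magic number.
    if not os.path.isfile(path):
        return False
    with open(path, "rb") as f:
        return f.read(4) == ELF_MAGIC

print(looks_like_elf("/bin/ls"))  # True on most Linux systems
```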
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def VmFailedToRebootGuestEvent(vim, *args, **kwargs):
'''This event records a failure to reboot the guest on a virtual machine.'''
obj = vim.client.factory.create('ns0:VmFailedToRebootGuestEvent')
# do some validation checking...
    if (len(args) + len(kwargs)) < 6:
        raise IndexError('Expected at least 6 arguments, got: %d' % (len(args) + len(kwargs)))
required = [ 'reason', 'template', 'chainId', 'createdTime', 'key', 'userName' ]
optional = [ 'changeTag', 'computeResource', 'datacenter', 'ds', 'dvs',
'fullFormattedMessage', 'host', 'net', 'vm', 'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
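# Editor-added usage sketch (hypothetical service handle and values): the
# factory is normally driven with keyword arguments for the six required
# properties, e.g.
#   event = VmFailedToRebootGuestEvent(vim, reason=fault, template=False,
#       chainId=1, createdTime=now, key=1, userName='root')
# Unknown keywords raise InvalidArgumentError via the check above.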
|
xuru/pyvisdk
|
pyvisdk/do/vm_failed_to_reboot_guest_event.py
|
Python
|
mit
| 1,202
|
"""Pylons environment configuration"""
import os
from mako.lookup import TemplateLookup
from pylons.configuration import PylonsConfig
from pylons.error import handle_mako_error
from sqlalchemy import engine_from_config
import tictactoe.lib.app_globals as app_globals
import tictactoe.lib.helpers
from tictactoe.config.routing import make_map
from tictactoe.model import init_model
def load_environment(global_conf, app_conf):
"""Configure the Pylons environment via the ``pylons.config``
object
"""
config = PylonsConfig()
# Pylons paths
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
paths = dict(root=root,
controllers=os.path.join(root, 'controllers'),
static_files=os.path.join(root, 'public'),
templates=[os.path.join(root, 'templates')])
# Initialize config with the basic options
config.init_app(global_conf, app_conf, package='tictactoe', paths=paths)
config['routes.map'] = make_map(config)
config['pylons.app_globals'] = app_globals.Globals(config)
config['pylons.h'] = tictactoe.lib.helpers
# Setup cache object as early as possible
import pylons
pylons.cache._push_object(config['pylons.app_globals'].cache)
# Create the Mako TemplateLookup, with the default auto-escaping
config['pylons.app_globals'].mako_lookup = TemplateLookup(
directories=paths['templates'],
error_handler=handle_mako_error,
module_directory=os.path.join(app_conf['cache_dir'], 'templates'),
input_encoding='utf-8', default_filters=['escape'],
imports=['from webhelpers.html import escape'])
# Setup the SQLAlchemy database engine
engine = engine_from_config(config, 'sqlalchemy.')
init_model(engine)
# CONFIGURATION OPTIONS HERE (note: all config options will override
# any Pylons config options)
return config
|
Pewpewarrows/reddit-tic-tac-toe
|
tictactoe/tictactoe/config/environment.py
|
Python
|
mit
| 1,928
|
'''
This little script can be used to generate an
SQLite3 database from Rachel's GF output.
The script will make a table out of each ASCII
output file. The halo_id and gal_id columns of
each table are indexed for faster table joining.
Each index is named table_id, albeit there
should be no need to know the name of the index.
The output file, by default, is named sams.db;
however, this is easy to change as the name
is stored in the output variable.
@depends: sextutils
@author: Sami-Matias Niemi, niemi@stsci.edu
@version: 0.1
'''
import sqlite3
import glob as g
import sextutils as su
__author__ = 'Sami-Matias Niemi'
__version__ = 0.1
def parse_column_names(filename, column = 2):
    '''
    Reads lines from a file as long as they
    start with a comment character (#).
    Parses column names; by default it is
    assumed that the name of the column is
    the third item in the split. For example, if
    the parsed line is:
    #0 z redshift
    then the name of the column is "redshift".
    '''
    cols = []
    more = True
    fh = open(filename)
    while more:
        line = fh.next()
        if not line.startswith('#'):
            more = False
        else:
            tmp = line.split()
            cols.append(tmp[column])
    fh.close()
    return cols
def make_sql_string(columns, format, start):
'''
This function helps to make an sql string.
'''
start += '('
for a, b in zip(columns, format):
start += '%s %s, ' % (a, b)
start = start[:-2] + ')'
return start
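# Editor-added example (hypothetical values): with
#   columns=['halo_id', 'mass'], format=['INTEGER', 'REAL'],
#   start='create table halos '
# make_sql_string returns 'create table halos (halo_id INTEGER, mass REAL)'.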
if __name__ == '__main__':
output = 'sams.db'
#find all files
files = g.glob('*.dat')
#create a Connection object that represents the database
#to a file
conn = sqlite3.connect(output)
#to memory
#conn = sqlite3.connect(':memory:')
for file in files:
print 'Processing file %s' % file
columns = parse_column_names(file)
formats = []
for col in columns:
if 'halo_id' in col:
formats.append('INTEGER')
elif 'gal_id' in col:
formats.append('INTEGER')
elif 'weight' in col or 'ngal' in col:
formats.append('INTEGER')
else:
formats.append('REAL')
        # Derive the table name from the file name and skip unknown files
        # instead of silently reusing a stale create/insert statement.
        table = file[:-4]
        if table not in ('galprop', 'galphot', 'galphotdust',
                         'halos', 'totals', 'FIR'):
            print 'Skipping unknown file %s' % file
            continue
        start = 'create table %s ' % table
        ins = 'insert into %s values (%s)' % (table, ','.join('?' * len(formats)))
sql_create_string = make_sql_string(columns, formats, start)
c = conn.cursor()
#Create table
c.execute(sql_create_string)
#read data using sextutils
data = su.se_catalog(file)
#insert data
#for t in data._colentries:
# c.execute(ins, t)
#this can be done faster
c.executemany(ins, data._colentries)
#create index to make searching faster
if 'halos.dat' in file:
c.execute('''CREATE UNIQUE INDEX %s_ids on %s (halo_id)''' % (file[:-4], file[:-4]))
else:
c.execute('''CREATE UNIQUE INDEX %s_ids on %s (halo_id, gal_id)''' % (file[:-4], file[:-4]))
# Save (commit) the changes
conn.commit()
# We can also close the cursor if we are done with it
c.close()
#to save memory
del data
print 'All done, DB file is %s' % output
|
sniemi/SamPy
|
sandbox/bolshoi/sqlite_sams.py
|
Python
|
bsd-2-clause
| 4,582
|
# Portions Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# urllibcompat.py - adapters to ease using urllib2 on Py2 and urllib on Py3
#
# Copyright 2017 Google, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
import sys
from . import pycompat
class _pycompatstub(object):
def __init__(self):
self._aliases = {}
def _registeraliases(self, origin, items):
"""Add items that will be populated at the first access"""
self._aliases.update(
(item.replace("_", "").lower(), (origin, item)) for item in items
)
def _registeralias(self, origin, attr, name):
"""Alias ``origin``.``attr`` as ``name``"""
self._aliases[name] = (origin, attr)
def __getattr__(self, name):
try:
origin, item = self._aliases[name]
except KeyError:
raise AttributeError(name)
self.__dict__[name] = obj = getattr(origin, item)
return obj
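# Editor-added note: attribute access on a stub is resolved lazily. For
# example, after _registeraliases(urllib.request, ('urlopen',)), the first
# read of urlreq.urlopen looks up the (module, 'urlopen') pair, caches the
# real function in __dict__, and later reads bypass __getattr__ entirely.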
httpserver = _pycompatstub()
urlreq = _pycompatstub()
urlerr = _pycompatstub()
if sys.version_info[0] >= 3:
import urllib.parse
urlreq._registeraliases(
urllib.parse,
(
"splitattr",
"splitpasswd",
"splitport",
"splituser",
"urlparse",
"urlunparse",
),
)
urlreq._registeralias(urllib.parse, "unquote", "unquote")
import urllib.request
urlreq._registeraliases(
urllib.request,
(
"AbstractHTTPHandler",
"BaseHandler",
"build_opener",
"FileHandler",
"FTPHandler",
"ftpwrapper",
"HTTPHandler",
"HTTPSHandler",
"install_opener",
"pathname2url",
"HTTPBasicAuthHandler",
"HTTPDigestAuthHandler",
"HTTPPasswordMgrWithDefaultRealm",
"ProxyHandler",
"Request",
"url2pathname",
"urlopen",
),
)
import urllib.response
urlreq._registeraliases(urllib.response, ("addclosehook", "addinfourl"))
import urllib.error
urlerr._registeraliases(urllib.error, ("HTTPError", "URLError"))
import http.server
httpserver._registeraliases(
http.server,
(
"HTTPServer",
"BaseHTTPRequestHandler",
"SimpleHTTPRequestHandler",
"CGIHTTPRequestHandler",
),
)
# quote() and unquote() both operate on and return strings (not bytes)
quote = urllib.parse.quote
unquote = urllib.parse.unquote
# urllib.parse.urlencode() returns str. We use this function to make
# sure we return bytes.
def urlencode(query, doseq=False):
s = pycompat.encodeutf8(urllib.parse.urlencode(query, doseq=doseq))
return s
# pyre-fixme[16]: `_pycompatstub` has no attribute `quote`.
urlreq.quote = quote
# pyre-fixme[16]: `_pycompatstub` has no attribute `urlencode`.
urlreq.urlencode = urlencode
def getfullurl(req):
return req.full_url
def gethost(req):
return req.host
def getselector(req):
return req.selector
def getdata(req):
return req.data
def hasdata(req):
return req.data is not None
else:
import BaseHTTPServer
import CGIHTTPServer
import SimpleHTTPServer
import urllib
import urllib2
import urlparse
urlreq._registeraliases(
urllib,
(
"addclosehook",
"addinfourl",
"ftpwrapper",
"pathname2url",
"quote",
"splitattr",
"splitpasswd",
"splitport",
"splituser",
"unquote",
"url2pathname",
"urlencode",
),
)
urlreq._registeraliases(
urllib2,
(
"AbstractHTTPHandler",
"BaseHandler",
"build_opener",
"FileHandler",
"FTPHandler",
"HTTPBasicAuthHandler",
"HTTPDigestAuthHandler",
"HTTPHandler",
"HTTPPasswordMgrWithDefaultRealm",
"HTTPSHandler",
"install_opener",
"ProxyHandler",
"Request",
"urlopen",
),
)
urlreq._registeraliases(urlparse, ("urlparse", "urlunparse"))
urlerr._registeraliases(urllib2, ("HTTPError", "URLError"))
httpserver._registeraliases(
BaseHTTPServer, ("HTTPServer", "BaseHTTPRequestHandler")
)
httpserver._registeraliases(SimpleHTTPServer, ("SimpleHTTPRequestHandler",))
httpserver._registeraliases(CGIHTTPServer, ("CGIHTTPRequestHandler",))
def gethost(req):
return req.get_host()
def getselector(req):
return req.get_selector()
def getfullurl(req):
return req.get_full_url()
def getdata(req):
return req.get_data()
def hasdata(req):
return req.has_data()
quote = urllib.quote
unquote = urllib.unquote
|
facebookexperimental/eden
|
eden/scm/edenscm/mercurial/urllibcompat.py
|
Python
|
gpl-2.0
| 5,254
|
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'env',
'recipe_engine/path',
]
|
endlessm/chromium-browser
|
third_party/skia/infra/bots/recipe_modules/git/__init__.py
|
Python
|
bsd-3-clause
| 209
|
#!/usr/bin/env python3
import logging
import warnings
try:
import httplib
except ImportError:
import http.client
warnings.filterwarnings("ignore")
# Hijack the HTTP lib logger message and Log only once
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.CRITICAL)
requests_log.propagate = False
class disable_warning_urllib():
    def do_nothing(self):
        return
|
openbmc/openbmc-test-automation
|
lib/disable_warning_urllib.py
|
Python
|
apache-2.0
| 416
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-07-06 09:26
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("gallery", "0012_auto_20180619_1106")]
operations = [migrations.RemoveField(model_name="image", name="phash")]
|
manti-by/M2-Blog-Engine
|
manti_by/apps/gallery/migrations/0013_remove_image_phash.py
|
Python
|
bsd-3-clause
| 286
|
"""Create Celery app instances used for testing."""
from __future__ import absolute_import, unicode_literals
import weakref
from contextlib import contextmanager
from copy import deepcopy
from kombu.utils.imports import symbol_by_name
from celery import Celery, _state
#: Contains the default configuration values for the test app.
DEFAULT_TEST_CONFIG = {
'worker_hijack_root_logger': False,
'worker_log_color': False,
'accept_content': {'json'},
'enable_utc': True,
'timezone': 'UTC',
'broker_url': 'memory://',
'result_backend': 'cache+memory://',
'broker_heartbeat': 0,
}
class Trap(object):
"""Trap that pretends to be an app but raises an exception instead.
    This is to protect against code that does not properly pass app
    instances and instead falls back to the current_app.
"""
def __getattr__(self, name):
raise RuntimeError('Test depends on current_app')
class UnitLogging(symbol_by_name(Celery.log_cls)):
"""Sets up logging for the test application."""
def __init__(self, *args, **kwargs):
super(UnitLogging, self).__init__(*args, **kwargs)
self.already_setup = True
def TestApp(name=None, config=None, enable_logging=False, set_as_current=False,
log=UnitLogging, backend=None, broker=None, **kwargs):
"""App used for testing."""
from . import tasks # noqa
config = dict(deepcopy(DEFAULT_TEST_CONFIG), **config or {})
if broker is not None:
config.pop('broker_url', None)
if backend is not None:
config.pop('result_backend', None)
log = None if enable_logging else log
test_app = Celery(
name or 'celery.tests',
set_as_current=set_as_current,
log=log,
broker=broker,
backend=backend,
**kwargs)
test_app.add_defaults(config)
return test_app
@contextmanager
def set_trap(app):
"""Contextmanager that installs the trap app.
The trap means that anything trying to use the current or default app
will raise an exception.
"""
trap = Trap()
prev_tls = _state._tls
_state.set_default_app(trap)
class NonTLS(object):
current_app = trap
_state._tls = NonTLS()
yield
_state._tls = prev_tls
@contextmanager
def setup_default_app(app, use_trap=False):
"""Setup default app for testing.
Ensures state is clean after the test returns.
"""
prev_current_app = _state.get_current_app()
prev_default_app = _state.default_app
prev_finalizers = set(_state._on_app_finalizers)
prev_apps = weakref.WeakSet(_state._apps)
if use_trap:
with set_trap(app):
yield
else:
yield
_state.set_default_app(prev_default_app)
_state._tls.current_app = prev_current_app
if app is not prev_current_app:
app.close()
_state._on_app_finalizers = prev_finalizers
_state._apps = prev_apps
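# Editor-added usage sketch: a test would typically combine the helpers above
# as
#   app = TestApp(set_as_current=False)
#   with setup_default_app(app, use_trap=True):
#       ...  # code under test sees `app`; touching current_app raises via Trap
# (package context assumed: this module relies on the relative `tasks` import).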
|
cloudera/hue
|
desktop/core/ext-py/celery-4.2.1/celery/contrib/testing/app.py
|
Python
|
apache-2.0
| 2,906
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
The main test runner script.
Usage: ::
python run_tests.py
Skip slow tests: ::
python run_tests.py fast
'''
from __future__ import unicode_literals
import nose
import sys
from textblob_fr.compat import PY2, PY26
def main():
args = get_argv()
success = nose.run(argv=args)
sys.exit(0) if success else sys.exit(1)
def get_argv():
args = [sys.argv[0], ]
attr_conditions = [] # Use nose's attribselect plugin to filter tests
if "force-all" in sys.argv:
# Don't exclude any tests
return args
if PY26:
# Exclude tests that don't work on python2.6
attr_conditions.append("not py27_only")
if not PY2:
# Exclude tests that only work on python2
attr_conditions.append("not py2_only")
if "fast" in sys.argv:
attr_conditions.append("not slow")
attr_expression = " and ".join(attr_conditions)
if attr_expression:
args.extend(["-A", attr_expression])
return args
if __name__ == '__main__':
main()
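# Editor-added note: on Python 3, `python run_tests.py fast` yields
# ['run_tests.py', '-A', 'not py2_only and not slow'], i.e. nose's
# attribselect plugin excludes py2-only and slow-marked tests.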
|
sloria/textblob-fr
|
run_tests.py
|
Python
|
mit
| 1,067
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import addons
import base64
import ir
import locale
import logging
import netsvc
import os
import platform
import pooler
import release
import security
import sql_db
import sys
import threading
import time
import tools
from tools.translate import _
from cStringIO import StringIO
#.apidoc title: Exported Service methods
#.apidoc module-mods: member-order: bysource
""" This python module defines the RPC methods available to remote clients.
Each 'Export Service' is a group of 'methods', which in turn are RPC
procedures to be called. Each method has its own arguments footprint.
"""
logging.basicConfig()
class baseExportService(netsvc.ExportService):
""" base class for the objects that implement the standardized
xmlrpc2 dispatch
"""
_auth_commands = { 'pub': [] , 'root': [], 'db': [] }
def new_dispatch(self, method, auth, params, auth_domain=None):
# Double check, that we have the correct authentication:
if not auth:
domain='pub'
else:
domain=auth.provider.domain
if method not in self._auth_commands[domain]:
raise Exception("Method not found: %s" % method)
fn = getattr(self, 'exp_'+method)
if domain == 'db':
u, p, db, uid = auth.auth_creds[auth.last_auth]
cr = pooler.get_db(db).cursor()
try:
res = fn(cr, uid, *params)
cr.commit()
return res
finally:
cr.close()
else:
return fn(*params)
class db(baseExportService):
_auth_commands = { 'root': [ 'create', 'get_progress', 'drop', 'dump',
'restore', 'rename',
'change_admin_password', 'migrate_databases' ],
'pub': [ 'db_exist', 'list', 'list_lang', 'server_version' ],
}
def __init__(self, name="db"):
netsvc.ExportService.__init__(self, name)
self.joinGroup("web-services")
self.actions = {}
self.id = 0
self.id_protect = threading.Semaphore()
        self._pg_psw_env_var_is_set = False # on win32, pg_dump needs the PGPASSWORD env var
def dispatch(self, method, auth, params):
if method in [ 'create', 'get_progress', 'drop', 'dump',
'restore', 'rename',
'change_admin_password', 'migrate_databases' ]:
passwd = params[0]
params = params[1:]
security.check_super(passwd)
elif method in [ 'db_exist', 'list', 'list_lang', 'server_version' ]:
# params = params
# No security check for these methods
pass
else:
raise KeyError("Method not found: %s" % method)
fn = getattr(self, 'exp_'+method)
return fn(*params)
def _create_empty_database(self, name):
db = sql_db.db_connect('template1')
cr = db.cursor()
try:
cr.autocommit(True) # avoid transaction block
cr.execute("""CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "template0" """ % name)
finally:
cr.close()
def exp_create(self, db_name, demo, lang, user_password='admin'):
self.id_protect.acquire()
self.id += 1
id = self.id
self.id_protect.release()
self.actions[id] = {'clean': False}
self._create_empty_database(db_name)
class DBInitialize(object):
def __call__(self, serv, id, db_name, demo, lang, user_password='admin'):
cr = None
try:
serv.actions[id]['progress'] = 0
cr = sql_db.db_connect(db_name).cursor()
tools.init_db(cr)
cr.commit()
cr.close()
cr = None
_langs = []
if lang:
_langs.append(lang)
pool = pooler.restart_pool(db_name, demo, serv.actions[id],
update_module=True, languages=_langs)[1]
cr = sql_db.db_connect(db_name).cursor()
if lang:
modobj = pool.get('ir.module.module')
mids = modobj.search(cr, 1, [('state', '=', 'installed')])
modobj.update_translations(cr, 1, mids, lang)
cr.execute('UPDATE res_users SET password=%s, context_lang=%s, active=True WHERE login=%s', (
user_password, lang, 'admin'))
cr.execute('SELECT login, password, name ' \
' FROM res_users ' \
' ORDER BY login')
serv.actions[id]['users'] = cr.dictfetchall()
serv.actions[id]['clean'] = True
cr.commit()
cr.close()
except Exception, e:
serv.actions[id]['clean'] = False
serv.actions[id]['exception'] = e
import traceback
e_str = StringIO()
traceback.print_exc(file=e_str)
traceback_str = e_str.getvalue()
e_str.close()
logging.getLogger('web-services').error('CREATE DATABASE\n%s' % (traceback_str))
serv.actions[id]['traceback'] = traceback_str
if cr:
cr.close()
logger = logging.getLogger('web-services')
logger.info('CREATE DATABASE: %s' % (db_name.lower()))
dbi = DBInitialize()
create_thread = threading.Thread(target=dbi,
args=(self, id, db_name, demo, lang, user_password))
create_thread.start()
self.actions[id]['thread'] = create_thread
return id
def exp_get_progress(self, id):
if self.actions[id]['thread'].isAlive():
# return addons.init_progress[db_name]
return (min(self.actions[id].get('progress', 0),0.95), [])
else:
clean = self.actions[id]['clean']
if clean:
users = self.actions[id]['users']
self.actions.pop(id)
return (1.0, users)
else:
e = self.actions[id]['exception']
self.actions.pop(id)
raise Exception, e
def exp_drop(self, db_name):
sql_db.close_db(db_name)
logger = logging.getLogger()
db = sql_db.db_connect('template1')
cr = db.cursor()
cr.autocommit(True) # avoid transaction block
if tools.config.get_misc('debug', 'drop_guard', False):
raise Exception("Not dropping database %s because guard is set!" % db_name)
try:
cr.execute('DROP DATABASE "%s"' % db_name)
logger.info('DROP DB: %s' % (db_name))
except Exception, e:
logger.exception('DROP DB: %s failed:' % (db_name,))
raise Exception("Couldn't drop database %s: %s" % (db_name, e))
finally:
cr.close()
return True
def _set_pg_psw_env_var(self):
if os.name == 'nt' and not os.environ.get('PGPASSWORD', ''):
os.environ['PGPASSWORD'] = tools.config['db_password']
self._pg_psw_env_var_is_set = True
def _unset_pg_psw_env_var(self):
if os.name == 'nt' and self._pg_psw_env_var_is_set:
os.environ['PGPASSWORD'] = ''
def exp_dump(self, db_name):
logger = logging.getLogger('web-services')
if tools.config.get_misc('databases', 'dump_guard', False):
logger.error("Prevented dump of database %s, because guard is set!", db_name)
raise Exception("Not dropping database %s because guard is set!" % db_name)
allowed_res = tools.config.get_misc('databases', 'allowed')
if allowed_res:
dbs_allowed = [ x.strip() for x in allowed_res.split(' ')]
if not db_name in dbs_allowed:
logger.critical("Asked to dump illegal database: %s", db_name)
raise Exception("Database %s is not allowed to be dumped!" % db_name)
self._set_pg_psw_env_var()
cmd = ['pg_dump', '--format=c', '--no-owner' , '-w']
if tools.config['db_user']:
cmd.append('--username=' + tools.config['db_user'])
if tools.config['db_host']:
cmd.append('--host=' + tools.config['db_host'])
if tools.config['db_port']:
cmd.append('--port=' + str(tools.config['db_port']))
cmd.append(db_name)
stdin, stdout = tools.exec_pg_command_pipe(*tuple(cmd))
stdin.close()
data = stdout.read()
res = stdout.close()
if res:
logger.error('DUMP DB: %s failed\n%s' % (db_name, data))
raise Exception("Couldn't dump database")
logger.info('DUMP DB: %s' % (db_name))
self._unset_pg_psw_env_var()
return base64.encodestring(data)
def exp_restore(self, db_name, data):
logger = logging.getLogger('web-services')
self._set_pg_psw_env_var()
if self.exp_db_exist(db_name):
logger.warning('RESTORE DB: %s already exists' % (db_name,))
raise Exception("Database already exists")
self._create_empty_database(db_name)
cmd = ['pg_restore', '--no-owner', '-w']
if tools.config['db_user']:
cmd.append('--username=' + tools.config['db_user'])
if tools.config['db_host']:
cmd.append('--host=' + tools.config['db_host'])
if tools.config['db_port']:
cmd.append('--port=' + str(tools.config['db_port']))
cmd.append('--dbname=' + db_name)
args2 = tuple(cmd)
buf=base64.decodestring(data)
if os.name == "nt":
tmpfile = (os.environ['TMP'] or 'C:\\') + os.tmpnam()
file(tmpfile, 'wb').write(buf)
args2=list(args2)
args2.append(' ' + tmpfile)
args2=tuple(args2)
stdin, stdout = tools.exec_pg_command_pipe(*args2)
if not os.name == "nt":
stdin.write(base64.decodestring(data))
stdin.close()
res = stdout.close()
if res:
raise Exception, "Couldn't restore database"
logger.info('RESTORE DB: %s' % (db_name))
self._unset_pg_psw_env_var()
return True
def exp_rename(self, old_name, new_name):
sql_db.close_db(old_name)
logger = logging.getLogger('web-services')
allowed_res = tools.config.get_misc('databases', 'allowed')
if allowed_res:
            # When we have a restricted set of database names, renaming must
            # be totally forbidden. That is, we neither want a known db to be
            # renamed into an arbitrary name, nor an arbitrary db to be
            # renamed into a known name. The old/new names of the databases
            # are not expected to be present in the config file, so just tell
            # the admin that he has to temporarily change the conf file.
            logger.error("Renaming databases is not allowed. "\
                "Please turn off the databases.allowed setting at the conf file.")
            raise Exception("Database renaming is forbidden because the names are restricted")
db = sql_db.db_connect('template1')
cr = db.cursor()
cr.autocommit(True) # avoid transaction block
try:
try:
cr.execute('ALTER DATABASE "%s" RENAME TO "%s"' % (old_name, new_name))
except Exception, e:
logger.error('RENAME DB: %s -> %s failed:\n%s' % (old_name, new_name, e))
raise Exception("Couldn't rename database %s to %s: %s" % (old_name, new_name, e))
else:
fs = os.path.join(tools.config['root_path'], 'filestore')
if os.path.exists(os.path.join(fs, old_name)):
os.rename(os.path.join(fs, old_name), os.path.join(fs, new_name))
logger.info('RENAME DB: %s -> %s' % (old_name, new_name))
finally:
cr.close()
return True
def exp_db_exist(self, db_name):
        ## Not True: in fact, check if a connection to the database is possible. The database may exist.
return bool(sql_db.db_connect(db_name))
def exp_list(self, document=False):
if not tools.config['list_db'] and not document:
raise Exception('AccessDenied')
db = sql_db.db_connect('template1')
cr = db.cursor()
try:
try:
db_user = tools.config["db_user"]
if not db_user and os.name == 'posix':
import pwd
db_user = pwd.getpwuid(os.getuid())[0]
if not db_user:
cr.execute("select decode(usename, 'escape') from pg_user where usesysid=(select datdba from pg_database where datname=%s)", (tools.config["db_name"],))
res = cr.fetchone()
db_user = res and str(res[0])
if db_user:
cr.execute("select decode(datname, 'escape') from pg_database where datdba=(select usesysid from pg_user where usename=%s) and datname not in ('template0', 'template1', 'postgres') order by datname", (db_user,))
else:
cr.execute("select decode(datname, 'escape') from pg_database where datname not in('template0', 'template1','postgres') order by datname")
res = [str(name) for (name,) in cr.fetchall()]
except Exception:
res = []
finally:
cr.close()
allowed_res = tools.config.get_misc('databases', 'allowed')
if allowed_res:
dbs_allowed = [ x.strip() for x in allowed_res.split(' ')]
res_o = res
res = []
for s in res_o:
if s in dbs_allowed:
res.append(s)
res.sort()
return res
def exp_change_admin_password(self, new_password):
tools.config['admin_passwd'] = new_password
tools.config.save()
return True
def exp_list_lang(self):
return tools.scan_languages()
def exp_server_version(self):
""" Return the version of the server
Used by the client to verify the compatibility with its own version
"""
return release.version
def exp_migrate_databases(self,databases):
from osv.orm import except_orm
from osv.osv import except_osv
l = logging.getLogger('migration')
for db in databases:
try:
l.info('migrate database %s' % (db,))
tools.config['update']['base'] = True
pooler.restart_pool(db, force_demo=False, update_module=True)
except except_orm, inst:
self.abortResponse(1, inst.name, 'warning', inst.value)
except except_osv, inst:
self.abortResponse(1, inst.name, inst.exc_type, inst.value)
except Exception:
l.exception("Migrate database %s failed" % db)
raise
return True
db()
class _ObjectService(baseExportService):
"A common base class for those who have fn(db, uid, password,...) "
def common_dispatch(self, method, auth, params):
(db, uid, passwd ) = params[0:3]
params = params[3:]
security.check(db,uid,passwd)
cr = pooler.get_db(db).cursor()
fn = getattr(self, 'exp_'+method)
res = fn(cr, uid, *params)
cr.commit()
cr.close()
return res
class common(_ObjectService):
_auth_commands = { 'db-broken': [ 'ir_set','ir_del', 'ir_get' ],
'pub': ['about', 'timezone_get', 'get_server_environment',
'login_message','get_stats', 'check_connectivity',
'list_http_services', 'get_options'],
'root': ['get_available_updates', 'get_migration_scripts',
'set_loglevel', 'set_obj_debug', 'set_pool_debug',
'set_logger_level', 'get_pgmode', 'set_pgmode',
'get_loglevel', 'get_sqlcount', 'get_sql_stats',
'reset_sql_stats',
'get_garbage_stats',
'get_os_time']
}
def __init__(self,name="common"):
_ObjectService.__init__(self,name)
self.joinGroup("web-services")
def dispatch(self, method, auth, params):
logger = logging.getLogger('web-services')
if method in [ 'ir_set','ir_del', 'ir_get' ]:
return self.common_dispatch(method,auth,params)
if method == 'login':
# At this old dispatcher, we do NOT update the auth proxy
res = security.login(params[0], params[1], params[2])
msg = res and 'successful login' or 'bad login or password'
# TODO log the client ip address..
logger.info("%s from '%s' using database '%s'" % (msg, params[1], params[0].lower()))
return res or False
elif method == 'logout':
if auth:
auth.logout(params[1])
                logger.info('Logout %s from database %s' % (params[1], params[0]))
return True
elif method in self._auth_commands['pub']:
pass
elif method in self._auth_commands['root']:
passwd = params[0]
params = params[1:]
security.check_super(passwd)
else:
raise Exception("Method not found: %s" % method)
fn = getattr(self, 'exp_'+method)
return fn(*params)
def new_dispatch(self, method, auth, params, auth_domain=None):
# Double check, that we have the correct authentication:
if method == 'login':
if not (auth and auth.provider.domain == 'db'):
raise Exception("Method not found: %s" % method)
# By this time, an authentication should already be done at the
# http level
if not auth.last_auth:
return False
acds = auth.auth_creds[auth.last_auth]
assert(acds[0] == params[1])
assert(acds[1] == params[2])
assert(acds[2] == params[0])
assert acds[3] != False and acds[3] != None
log = logging.getLogger('web-service')
log.info("login from '%s' using database '%s'" % (params[1], params[0].lower()))
return acds[3]
else:
return super(common, self).new_dispatch(method, auth, params, auth_domain)
def exp_ir_set(self, cr, uid, keys, args, name, value, replace=True, isobject=False):
res = ir.ir_set(cr,uid, keys, args, name, value, replace, isobject)
return res
def exp_ir_del(self, cr, uid, id):
res = ir.ir_del(cr,uid, id)
return res
def exp_ir_get(self, cr, uid, keys, args=None, meta=None, context=None):
if not args:
args=[]
if not context:
context={}
res = ir.ir_get(cr,uid, keys, args, meta, context)
return res
def exp_about(self, extended=False):
"""Return information about the OpenERP Server.
@param extended: if True then return version info
@return string if extended is False else tuple
"""
info = _('''
OpenERP is an ERP+CRM program for small and medium businesses.
The whole source code is distributed under the terms of the
GNU Public Licence.
(c) 2003-TODAY, Fabien Pinckaers - Tiny sprl''')
if extended:
return info, release.version
return info
def exp_timezone_get(self, *args):
return tools.misc.get_server_timezone()
def exp_get_available_updates(self, contract_id, contract_password):
import tools.maintenance as tm
try:
rc = tm.remote_contract(contract_id, contract_password)
if not rc.id:
raise tm.RemoteContractException('This contract does not exist or is not active')
return rc.get_available_updates(rc.id, addons.get_modules_with_version())
except tm.RemoteContractException, e:
self.abortResponse(1, 'Migration Error', 'warning', str(e))
def exp_get_migration_scripts(self, contract_id, contract_password):
l = logging.getLogger('migration')
import tools.maintenance as tm
try:
rc = tm.remote_contract(contract_id, contract_password)
if not rc.id:
raise tm.RemoteContractException('This contract does not exist or is not active')
if rc.status != 'full':
raise tm.RemoteContractException('Can not get updates for a partial contract')
l.info('starting migration with contract %s' % (rc.name,))
zips = rc.retrieve_updates(rc.id, addons.get_modules_with_version())
from shutil import rmtree, copytree, copy
backup_directory = os.path.join(tools.config['root_path'], 'backup', time.strftime('%Y-%m-%d-%H-%M'))
if zips and not os.path.isdir(backup_directory):
                l.info('Create a new backup directory to store '
                       'the old modules: %s' % (backup_directory,))
os.makedirs(backup_directory)
for module in zips:
l.info('upgrade module %s' % (module,))
mp = addons.get_module_path(module)
if mp:
if os.path.isdir(mp):
copytree(mp, os.path.join(backup_directory, module))
if os.path.islink(mp):
os.unlink(mp)
else:
rmtree(mp)
else:
                        copy(mp + '.zip', backup_directory)
os.unlink(mp + '.zip')
try:
try:
base64_decoded = base64.decodestring(zips[module])
except Exception:
l.exception('unable to read the module %s' % (module,))
raise
zip_contents = StringIO(base64_decoded)
zip_contents.seek(0)
try:
try:
tools.extract_zip_file(zip_contents, tools.config['addons_path'] )
except Exception:
l.exception('unable to extract the module %s' % (module, ))
rmtree(module)
raise
finally:
zip_contents.close()
except Exception:
l.exception('restore the previous version of the module %s' % (module, ))
nmp = os.path.join(backup_directory, module)
if os.path.isdir(nmp):
copytree(nmp, tools.config['addons_path'])
else:
copy(nmp+'.zip', tools.config['addons_path'])
raise
return True
except tm.RemoteContractException, e:
self.abortResponse(1, 'Migration Error', 'warning', str(e))
except Exception, e:
l.exception("%s" % e)
raise
def exp_get_server_environment(self):
os_lang = '.'.join( [x for x in locale.getdefaultlocale() if x] )
if not os_lang:
os_lang = 'NOT SET'
environment = '\nEnvironment Information : \n' \
'System : %s\n' \
'OS Name : %s\n' \
%(platform.platform(), platform.os.name)
if os.name == 'posix':
if platform.system() == 'Linux':
lsbinfo = os.popen('lsb_release -a').read()
environment += '%s'%(lsbinfo)
else:
environment += 'Your System is not lsb compliant\n'
environment += 'Operating System Release : %s\n' \
'Operating System Version : %s\n' \
'Operating System Architecture : %s\n' \
'Operating System Locale : %s\n'\
'Python Version : %s\n'\
'OpenERP-Server Version : %s'\
%(platform.release(), platform.version(), platform.architecture()[0],
os_lang, platform.python_version(),release.version)
return environment
def exp_login_message(self):
return tools.config.get('login_message', False)
def exp_set_loglevel(self, loglevel, logger=None):
l = netsvc.Logger()
l.set_loglevel(loglevel, logger)
return True
def exp_set_logger_level(self, logger, loglevel):
l = netsvc.Logger()
l.set_logger_level(logger, loglevel)
return True
def exp_get_loglevel(self, logger=None):
l = netsvc.Logger()
return l.get_loglevel(logger)
def exp_get_pgmode(self):
return sql_db.Cursor.get_pgmode()
def exp_set_pgmode(self, pgmode):
assert pgmode in ['old', 'sql', 'pgsql', 'pg84', 'pg90', 'pg91', 'pg92']
sql_db.Cursor.set_pgmode(pgmode)
return True
def exp_set_obj_debug(self,db, obj, do_debug):
log = logging.getLogger('web-services')
log.info("setting debug for %s@%s to %s" %(obj, db, do_debug))
ls = netsvc.LocalService('object_proxy')
res = ls.set_debug(db, obj, do_debug)
return res
def exp_set_pool_debug(self,db, do_debug):
sql_db._Pool.set_pool_debug(do_debug)
return None
def exp_get_stats(self):
import threading
res = "OpenERP server: %d threads\n" % threading.active_count()
res += netsvc.Server.allStats()
res += "\n"
res += netsvc.ExportService.allStats()
try:
import gc
if gc.isenabled():
res += "\nPython GC enabled: %d:%d:%d objs." % \
gc.get_count()
except ImportError: pass
try:
from tools import lru
res += "\nLRU counts: LRU: %d, nodes: %d" % \
(sys.getrefcount(lru.LRU), sys.getrefcount(lru.LRUNode))
except Exception: pass
return res
def exp_list_http_services(self, *args):
from service import http_server
return http_server.list_http_services(*args)
def exp_check_connectivity(self):
return bool(sql_db.db_connect('template1'))
def exp_get_os_time(self):
return os.times()
def exp_get_sqlcount(self):
logger = logging.getLogger('db.cursor')
if not logger.isEnabledFor(logging.DEBUG_SQL):
logger.warning("Counters of SQL will not be reliable unless DEBUG_SQL is set at the server's config.")
return sql_db.sql_counter
def exp_get_sql_stats(self):
"""Retrieve the sql statistics from the pool.
Unfortunately, XML-RPC won't allow tuple indexes, so we have to
rearrange the dict.
"""
ret = {}
for skey, val in sql_db._Pool.sql_stats.items():
sk0 = skey[0]
if not isinstance(skey[0], str):
sk0 = str(skey[0])
ret.setdefault(sk0,{})
ret[sk0][skey[1]] = val
return ret
def exp_reset_sql_stats(self):
sql_db._Pool.sql_stats = {}
return True
def exp_get_garbage_stats(self):
import gc
garbage_count = {}
for garb in gc.garbage:
try:
name = '%s.%s' % (garb.__class__.__module__, garb.__class__.__name__)
garbage_count.setdefault(name, 0)
garbage_count[name] += 1
except Exception, e:
print "Exception:", e
continue
# Perhaps list the attributes of garb that are instances of object
return garbage_count
def exp_get_options(self, module=None):
"""Return a list of options, keywords, that the server supports.
Apart from the server version, which should be a linear number,
some server branches may support extra API functionality. By this
call, the server can advertise these extensions to compatible
clients.
"""
if module:
raise NotImplementedError('No module-specific options yet')
return release.server_options
common()
class objects_proxy(baseExportService):
_auth_commands = { 'db': ['execute','exec_workflow', 'exec_dict'], 'root': ['obj_list',] }
def __init__(self, name="object"):
netsvc.ExportService.__init__(self,name)
self.joinGroup('web-services')
self._ls = netsvc.LocalService('object_proxy')
def dispatch(self, method, auth, params):
if method in self._auth_commands['root']:
passwd = params[0]
params = params[1:]
security.check_super(passwd)
fn = getattr(self._ls, method)
res = fn(*params, auth_proxy=auth)
return res
(db, uid, passwd ) = params[0:3]
params = params[3:]
if method not in ['execute','exec_workflow', 'exec_dict', 'obj_list']:
raise KeyError("Method not supported %s" % method)
security.check(db,uid,passwd)
fn = getattr(self._ls, method)
res = fn(db, uid, *params, auth_proxy=auth)
return res
def new_dispatch(self, method, auth, params, auth_domain=None):
# Double check, that we have the correct authentication:
if not auth:
raise Exception("Not auth domain for object service")
if auth.provider.domain not in self._auth_commands:
raise Exception("Invalid domain for object service")
if method not in self._auth_commands[auth.provider.domain]:
raise Exception("Method not found: %s" % method)
fn = getattr(self._ls, method)
if auth.provider.domain == 'root':
res = fn(*params, auth_proxy=auth)
return res
acds = auth.auth_creds[auth.last_auth]
db, uid = (acds[2], acds[3])
res = fn(db, uid, *params, auth_proxy=auth)
return res
def stats(self, _pre_msg='No statistics'):
try:
from osv import orm
msg = ''
for klass in ('browse_record', 'browse_record_list', 'browse_null',
'orm_memory', 'orm'):
msg += "%s[%d] " % (klass, sys.getrefcount(getattr(orm,klass)))
except Exception, e:
msg = str(e)
return "%s (%s.%s): %s" % ('object',
self.__class__.__module__, self.__class__.__name__,
msg)
objects_proxy()
class dbExportDispatch:
""" Intermediate class for those ExportServices that call fn(db, uid, ...)
These classes don't need the cursor, but just the name of the db
"""
def new_dispatch(self, method, auth, params, auth_domain=None):
# Double check, that we have the correct authentication:
if not auth:
domain='pub'
else:
domain=auth.provider.domain
if method not in self._auth_commands[domain]:
raise Exception("Method not found: %s" % method)
fn = getattr(self, 'exp_'+method)
if domain == 'db':
u, p, db, uid = auth.auth_creds[auth.last_auth]
res = fn(db, uid, *params)
return res
else:
return fn(*params)
#
# Wizard ID: 1
# - None = end of wizard
#
# Wizard Type: 'form'
# - form
# - print
#
# Wizard datas: {}
# TODO: change local request to OSE request/reply pattern
#
class wizard(dbExportDispatch,baseExportService):
_auth_commands = { 'db': ['execute','create'] }
def __init__(self, name='wizard'):
netsvc.ExportService.__init__(self,name)
self.joinGroup('web-services')
        self.id = 0
        self.id_protect = threading.Semaphore()
        self.wiz_datas = {}
        self.wiz_name = {}
        self.wiz_uid = {}
def dispatch(self, method, auth, params):
(db, uid, passwd ) = params[0:3]
params = params[3:]
if method not in ['execute','create']:
raise KeyError("Method not supported %s" % method)
security.check(db,uid,passwd)
fn = getattr(self, 'exp_'+method)
res = fn(db, uid, *params)
return res
def _execute(self, db, uid, wiz_id, datas, action, context):
self.wiz_datas[wiz_id].update(datas)
wiz = netsvc.LocalService('wizard.'+self.wiz_name[wiz_id])
return wiz.execute(db, uid, self.wiz_datas[wiz_id], action, context)
def exp_create(self, db, uid, wiz_name, datas=None):
if not datas:
datas={}
        # Allocate a new wizard id under the semaphore; the bare increment
        # was not thread-safe.
        self.id_protect.acquire()
        self.id += 1
        wiz_id = self.id
        self.id_protect.release()
        self.wiz_datas[wiz_id] = {}
        self.wiz_name[wiz_id] = wiz_name
        self.wiz_uid[wiz_id] = uid
        return wiz_id
def exp_execute(self, db, uid, wiz_id, datas, action='init', context=None):
if not context:
context={}
if wiz_id in self.wiz_uid:
if self.wiz_uid[wiz_id] == uid:
return self._execute(db, uid, wiz_id, datas, action, context)
else:
raise Exception, 'AccessDenied'
else:
raise Exception, 'WizardNotFound'
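# Editor-added usage sketch (hypothetical wizard name): a client first calls
# create and then drives the state machine with execute until the wizard
# signals its end state:
#   wiz_id = srv.exp_create(db, uid, 'mywizard')
#   res = srv.exp_execute(db, uid, wiz_id, {}, 'init')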
wizard()
#
# TODO: set a maximum report number per user to avoid DOS attacks
#
# Report state:
# False -> True
#
class ExceptionWithTraceback(Exception):
def __init__(self, msg, tb):
self.message = msg
self.traceback = tb
self.args = (msg, tb)
class _report_spool_job(threading.Thread):
def __init__(self, id, db, uid, obj, ids, datas=None, context=None):
"""A report job, that should be spooled in the background
@param id the index at the parent spool list, shall not be trusted,
only useful for the repr()
@param db the database name
@param uid the calling user
@param obj the report orm object (string w/o the 'report.' prefix)
@param ids of the obj model
@param datas dictionary of input to report
"""
threading.Thread.__init__(self)
self.id = id
self.uid = uid
self.db = db
self.report_obj = obj
self.ids = ids
self.datas = datas
self.context = context
if self.context is None:
self.context = {}
self.result = False
self.format = None
self.state = False
self.exception = None
self.name = "report-%s-%s" % (self.report_obj, self.id)
def run(self):
try:
self.cr = pooler.get_db(self.db).cursor()
self.go()
self.cr.commit()
except Exception, e:
logger = logging.getLogger('web-services')
logger.exception('Exception: %s' % (e))
if hasattr(e, 'name') and hasattr(e, 'value'):
self.exception = ExceptionWithTraceback(tools.ustr(e.name), tools.ustr(e.value))
else:
self.exception = e
self.state = True
return
except KeyboardInterrupt, e:
tb = sys.exc_info()
logger = logging.getLogger('web-services')
logger.exception('Interrupt of report: %r' % self)
self.exception = ExceptionWithTraceback('KeyboardInterrupt of report: %r' % self, tb)
self.state = True
# we don't need to raise higher, because we already printed the tb
# and are exiting the thread loop.
return
finally:
if self.cr:
self.cr.close()
self.cr = None
return True
def stop(self):
"""Try to kill the job.
So far there is no genuinely good way to stop the thread (is there?),
so we can at least kill the cursor, so that the rest of the job borks.
"""
self.must_stop = True
if self.cr:
self.cr.rollback()
self.cr.close()
self.cr = None
def __repr__(self):
"""Readable name of report job
"""
return "<Report job #%s: %s.%s>" % (self.id, self.db, self.report_obj)
def go(self,):
cr = self.cr
obj = netsvc.LocalService('report.' + self.report_obj)
(result, format) = obj.create(cr, self.uid, self.ids, self.datas, self.context)
if not result:
tb = sys.exc_info()
self.exception = ExceptionWithTraceback('RML is not available at specified location or not enough data to print!', tb)
self.result = result
self.format = format
self.state = True
return True
class report_spool(dbExportDispatch, baseExportService):
_auth_commands = { 'db': ['report','report_get', 'report_stop'] }
def __init__(self, name='report'):
netsvc.ExportService.__init__(self, name)
self.joinGroup('web-services')
self._reports = {}
self.id = 0
self.id_protect = threading.Semaphore()
def dispatch(self, method, auth, params):
(db, uid, passwd ) = params[0:3]
params = params[3:]
if method not in ['report','report_get', 'report_stop']:
raise KeyError("Method not supported %s" % method)
security.check(db,uid,passwd)
fn = getattr(self, 'exp_' + method)
res = fn(db, uid, *params)
return res
def stats(self, _pre_msg=None):
ret = baseExportService.stats(self, _pre_msg='%d reports' % len(self._reports))
for id, r in self._reports.items():
if not r:
continue
ret += '\n [%d] ' % id
if r.is_alive() or not r.state:
ret += 'running '
else:
ret += 'finished '
ret += repr(r)
return ret
def exp_report(self, db, uid, object, ids, datas=None, context=None):
if not datas:
datas={}
if not context:
context={}
self.id_protect.acquire()
self.id += 1
id = self.id
self.id_protect.release()
self._reports[id] = _report_spool_job(id, db, uid, object, ids, datas=datas, context=context)
self._reports[id].start()
return id
def _check_report(self, report_id):
report = self._reports[report_id]
exc = report.exception
if exc:
self.abortResponse(1, exc.__class__.__name__, 'warning', exc.message)
res = {'state': report.state }
if res['state']:
if tools.config['reportgz']:
import zlib
res2 = zlib.compress(report.result)
res['code'] = 'zlib'
else:
#CHECKME: why is this needed???
if isinstance(report.result, unicode):
res2 = report.result.encode('latin1', 'replace')
else:
res2 = report.result
if res2:
res['result'] = base64.encodestring(res2)
res['format'] = report.format
self.id_protect.acquire()
del self._reports[report_id]
self.id_protect.release()
return res
def exp_report_get(self, db, uid, report_id):
if report_id in self._reports:
if self._reports[report_id].uid == uid:
return self._check_report(report_id)
else:
raise Exception, 'AccessDenied'
else:
raise Exception, 'ReportNotFound'
def exp_report_stop(self, db, uid, report_id, timeout=5.0):
""" Stop a running report, wait for it to finish
        @return True if stopped, False if already finished,
Exception('Timeout') if cannot stop
Note that after a "report_stop" request, the caller shall
do one more "report_get" to fetch the exception and free
the job object.
"""
if report_id in self._reports:
report = self._reports[report_id]
if report.uid == uid or uid == 1:
if report.is_alive() and not report.state:
report.stop()
report.join(timeout=timeout)
if report.is_alive():
raise Exception('Timeout')
return True
else:
return False
else:
raise Exception, 'AccessDenied'
else:
raise Exception, 'ReportNotFound'
report_spool()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
xrg/openerp-server
|
bin/service/web_services.py
|
Python
|
agpl-3.0
| 41,970
|
"""
timedflock
==========
`timedflock` module provides a file lock class `TimedFileLock` on Unix-like
platforms which uses `fcntl.flock` at its core and supports timeout.
`TimedFileLock` does not poll the file lock to support timeout. Instead, it
spawns a child process to do `fcntl.flock`. Because of this, the main process
does not actually hold the file lock. This means that `TimedFileLock` is not
re-entrant and behaves more like `threading.Lock`.
`TimedFileLock` supports both shared lock and exclusive lock. It can be used
as reader-writer lock.
Example
-------
```python
from timedflock import TimedFileLock
with TimedFileLock(lockfile, shared=False, timeout=5.5) as _lck:
if _lck.locked():
... # locked code here
else:
... # not locked
```
"""
from __future__ import print_function
import sys
import os
import fcntl
import signal
import json
import threading
import traceback
from subprocess import Popen, PIPE
try:
from thread import get_ident
except ImportError:
from _thread import get_ident
__all__ = ['TimedFileLock']
_PY_EXEC = sys.executable
_PY_FILE = os.path.realpath(__file__)
class TimedFileLock:
"""
The file lock wrapper class. Use with Python's context manager.
"""
def __init__(self, lockfile, shared=False, timeout=0, tag=None):
"""
Arguments:
`lockfile`
The path to the lock file. If the lock file does not exist, it
will be created automatically.
`shared`
Acquire shared lock if True. Otherwise acquire exclusive lock.
Exclusive lock is acquired by default.
`timeout`
Timeout value in fractional seconds. Set timeout=0 or omit the
argument to set the operation non-blocking. Set timeout=None to
set infinite timeout (with caution).
`tag`
A string to identify the lock. If tag is not set, the default tag
is "<function>@<filename>:<line>".
"""
if timeout is not None:
timeout = float(timeout)
if timeout < 0:
raise ValueError("Invalid timeout")
self._config = {
'lockfile': os.path.abspath(lockfile),
'shared': bool(shared),
'timeout': timeout,
}
if tag is not None:
self.tag = str(tag)
else:
_file, _line, _func, _text = traceback.extract_stack(limit=2)[0]
self.tag = '{}@{}:{}'.format(_func, os.path.basename(_file), _line)
self._subproc = None
def __enter__(self):
self._try_lock()
return self
def __exit__(self, exc_type, exc_value, traceback):
self._unlock()
return None
def _try_lock(self):
parent = 'ppid:{},tid:{}'.format(os.getpid(), get_ident())
config = json.dumps(self._config)
proc = None
try:
proc = Popen([_PY_EXEC, '-u', _PY_FILE, self.tag, parent, config],
stdin=PIPE, stdout=PIPE)
outline = proc.stdout.readline()
if outline != b'locked\n':
# not locked
proc.wait()
proc = None
except:
if proc is not None and proc.poll() is None:
proc.kill()
proc = None
self._subproc = proc
return None
def _unlock(self):
if self._subproc is not None and self._subproc.poll() is None:
self._subproc.communicate(b'quit')
self._subproc = None
return None
def locked(self):
"""Returns True if the file is locked."""
return self._subproc is not None
def _watcher():
_data = sys.stdin.read()
if _data == 'quit':
print('received quit command', file=sys.stderr)
exit_event.set()
else:
print('parent process has quit', file=sys.stderr)
os._exit(1)
def _handler(signum, frame):
# signal handler
print('received signal:', signum, file=sys.stderr)
if __name__ == '__main__':
# create global exit event
exit_event = threading.Event()
# set signal handler
signal.signal(signal.SIGALRM, _handler)
# debug: print tag
tag = sys.argv[1]
parent = sys.argv[2]
print('Created subprocess for lock', tag, 'by', parent, file=sys.stderr)
# load config
config = json.loads(sys.argv[3])
watcher = threading.Thread(target=_watcher)
watcher.daemon = True
watcher.start()
with open(config['lockfile'], 'ab') as _file:
lock_fd = _file.fileno()
lock_op = fcntl.LOCK_SH if config['shared'] else fcntl.LOCK_EX
timeout = config['timeout']
if timeout is not None:
if timeout > 0:
signal.setitimer(signal.ITIMER_REAL, timeout, 1)
else:
lock_op |= fcntl.LOCK_NB # non-blocking
locked = True
try:
fcntl.flock(lock_fd, lock_op)
except:
locked = False
# reset timer
signal.setitimer(signal.ITIMER_REAL, 0)
if locked:
sys.stdout.write('locked\n')
sys.stdout.flush()
        while not exit_event.wait(5):
            pass # hold the flock until the watcher signals quit
|
rkyoto/timedflock
|
timedflock2.py
|
Python
|
apache-2.0
| 5,273
|
'''
Image
=====
The :class:`Image` widget is used to display an image::
wimg = Image(source='mylogo.png')
Asynchronous Loading
--------------------
To load an image asynchronously (for example from an external webserver), use
the :class:`AsyncImage` subclass::
aimg = AsyncImage(source='http://mywebsite.com/logo.png')
Alignment
---------
By default, the image is centered and fit inside the widget bounding box.
If you don't want that, you can inherit from Image and create your own style.
For example, if you want your image to take the same size of your widget, you
can do::
class FullImage(Image):
pass
And in your kivy language file, you can do::
<FullImage>:
canvas:
Color:
rgb: (1, 1, 1)
Rectangle:
texture: self.texture
size: self.size
pos: self.pos
'''
__all__ = ('Image', 'AsyncImage')
from kivy.uix.widget import Widget
from kivy.core.image import Image as CoreImage
from kivy.resources import resource_find
from kivy.properties import StringProperty, ObjectProperty, ListProperty, \
AliasProperty, BooleanProperty, NumericProperty
from kivy.loader import Loader
from kivy.logger import Logger
class Image(Widget):
'''Image class, see module documentation for more information.
'''
source = StringProperty(None)
'''Filename / source of your image.
:data:`source` is a :class:`~kivy.properties.StringProperty`, default to
None.
'''
texture = ObjectProperty(None, allownone=True)
'''Texture object of the image.
Depending of the texture creation, the value will be a
:class:`~kivy.graphics.texture.Texture` or
:class:`~kivy.graphics.texture.TextureRegion` object.
:data:`texture` is a :class:`~kivy.properties.ObjectProperty`, default to
None.
'''
texture_size = ListProperty([0, 0])
'''Texture size of the image.
.. warning::
The texture size is set after the texture property. So if you listen on
the change to :data:`texture`, the property texture_size will not be
up-to-date. Use self.texture.size instead.
'''
def get_image_ratio(self):
if self.texture:
return self.texture.width / float(self.texture.height)
return 1.
mipmap = BooleanProperty(False)
'''Indicate if you want OpenGL mipmapping to be applied on the texture.
Read :ref:`mipmap` for more information.
.. versionadded:: 1.0.7
:data:`mipmap` is a :class:`~kivy.properties.BooleanProperty`, default to
False.
'''
image_ratio = AliasProperty(get_image_ratio, None, bind=('texture', ))
    '''Ratio of the image (width / float(height)).
:data:`image_ratio` is a :class:`~kivy.properties.AliasProperty`, and is
read-only.
'''
color = ListProperty([1, 1, 1, 1])
'''Image color, in the format (r, g, b, a). This attribute can be used to
'tint' an image. Be careful, if the source image is not gray/white, the
color will not really work as expected.
.. versionadded:: 1.0.6
:data:`color` is a :class:`~kivy.properties.ListProperty`, default to [1, 1,
1, 1].
'''
allow_stretch = BooleanProperty(False)
'''If True, the normalized image size will be maximized to fit in the image
box. Otherwise, if the box is too tall, the image will not be stretched more
than 1:1 pixels.
.. versionadded:: 1.0.7
:data:`allow_stretch` is a :class:`~kivy.properties.BooleanProperty`,
default to False
'''
keep_ratio = BooleanProperty(True)
'''If False along with allow_stretch being True, the normalized image
size will be maximized to fit in the image box, disregarding the aspect
ratio of the image.
Otherwise, if the box is too tall, the image will not be stretched more
than 1:1 pixels.
.. versionadded:: 1.0.8
:data:`keep_ratio` is a :class:`~kivy.properties.BooleanProperty`,
default to True
'''
keep_data = BooleanProperty(False)
    '''If True, the underlying _coreimage has to keep the raw image data.
    Useful to perform pixel-based collision detection.
.. versionadded:: 1.3.0
:data:`keep_data` is a :class:`~kivy.properties.BooleanProperty`, default
to False
'''
anim_delay = NumericProperty(.25)
'''Delay of animation if the image is sequenced (like an animated gif).
If the anim_delay is set to -1, the animation will be stopped.
.. versionadded:: 1.0.8
:data:`anim_delay` is a :class:`~kivy.properties.NumericProperty`, default
to .25 (4 FPS)
'''
nocache = BooleanProperty(False)
'''If this property is set True, the image will not be added to the
internal cache anymore. (the cache will simply ignore any calls trying to
append the core image)
.. versionadded:: 1.6.0
:data:`nocache` is a :class:`~kivy.properties.BooleanProperty`, default
to False
'''
def get_norm_image_size(self):
if not self.texture:
return self.size
ratio = self.image_ratio
w, h = self.size
tw, th = self.texture.size
        # ensure that the width is always maximized to the container width
if self.allow_stretch:
if not self.keep_ratio:
return w, h
iw = w
else:
iw = min(w, tw)
# calculate the appropriate height
ih = iw / ratio
        # if the height is too high, take the height of the container
        # and calculate the appropriate width. no need to test further. :)
if ih > h:
if self.allow_stretch:
ih = h
else:
ih = min(h, th)
iw = ih * ratio
return iw, ih
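    # Editor-added example (hypothetical numbers): with widget size (200, 100),
    # texture (400, 400), allow_stretch=False and keep_ratio=True:
    # iw = min(200, 400) = 200, ih = 200 / 1.0 = 200 > h, so ih is clamped to
    # min(100, 400) = 100 and iw = 100 * 1.0, giving (100, 100).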
norm_image_size = AliasProperty(get_norm_image_size, None, bind=(
'texture', 'size', 'image_ratio', 'allow_stretch'))
'''Normalized image size within the widget box.
This size will always be fit to the widget size, and will preserve the image
ratio.
:data:`norm_image_size` is a :class:`~kivy.properties.AliasProperty`, and is
read-only.
'''
def __init__(self, **kwargs):
self._coreimage = None
super(Image, self).__init__(**kwargs)
self.bind(source=self.texture_update,
mipmap=self.texture_update)
if self.source:
self.texture_update()
def texture_update(self, *largs):
if not self.source:
self.texture = None
else:
filename = resource_find(self.source)
if filename is None:
return Logger.error('Image: Error reading file {filename}'.
format(filename=self.source))
mipmap = self.mipmap
if self._coreimage is not None:
self._coreimage.unbind(on_texture=self._on_tex_change)
self._coreimage = ci = CoreImage(filename, mipmap=mipmap,
anim_delay=self.anim_delay, keep_data=self.keep_data,
nocache=self.nocache)
ci.bind(on_texture=self._on_tex_change)
self.texture = ci.texture
def on_anim_delay(self, instance, value):
if self._coreimage is None:
return
self._coreimage.anim_delay = value
if value < 0:
self._coreimage.anim_reset(False)
def on_texture(self, instance, value):
if value is not None:
self.texture_size = list(value.size)
def _on_tex_change(self, *largs):
# update texture from core image
self.texture = self._coreimage.texture
def reload(self):
        '''Reload the image from disk. This facilitates re-loading of an
        image whose contents have changed on disk.

        .. versionadded:: 1.3.0

        Usage::

            im = Image(source='1.jpg')
            # -- do something --
            im.reload()
            # image will be re-loaded from disk
        '''
self._coreimage.remove_from_cache()
        old_source = self.source
        self.source = ''
        self.source = old_source
def on_nocache(self, *args):
if self.nocache and self._coreimage:
self._coreimage.remove_from_cache()
self._coreimage._nocache = True
class AsyncImage(Image):
'''Asynchronous Image class. See module documentation for more information.
'''
def __init__(self, **kwargs):
self._coreimage = None
super(AsyncImage, self).__init__(**kwargs)
self.bind(source=self._load_source)
if self.source:
self._load_source()
def _load_source(self, *args):
source = self.source
if not source:
if self._coreimage is not None:
self._coreimage.unbind(on_texture=self._on_tex_change)
self.texture = None
self._coreimage = None
else:
if not self.is_uri(source):
source = resource_find(source)
self._coreimage = image = Loader.image(source,
nocache=self.nocache, mipmap=self.mipmap)
image.bind(on_load=self._on_source_load)
image.bind(on_texture=self._on_tex_change)
self.texture = image.texture
def _on_source_load(self, value):
image = self._coreimage.image
if not image:
return
self.texture = image.texture
def is_uri(self, filename):
proto = filename.split('://', 1)[0]
return proto in ('http', 'https', 'ftp', 'smb')
def _on_tex_change(self, *largs):
if self._coreimage:
self.texture = self._coreimage.texture
def texture_update(self, *largs):
pass
|
happy56/kivy
|
kivy/uix/image.py
|
Python
|
lgpl-3.0
| 9,715
|
#!/usr/bin/env python
from geophys2netcdf._geophys2netcdf import Geophys2NetCDF
#=========================================================================
# Copyright (c) 2014 Geoscience Australia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither Geoscience Australia nor the names of its contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#=========================================================================
'''
Geophys2NetCDF Class
Created on 08/03/2016
@author: Alex Ip
'''
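# Typical invocations (sketch; file names are hypothetical and assume
# ERS2NetCDF.FILE_EXTENSION == 'ers'):
#   python -m geophys2netcdf input.ers output.nc   # translate ERS to NetCDF
#   python -m geophys2netcdf existing.nc           # update NetCDF metadata in place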
import sys
import os
from geophys2netcdf import ERS2NetCDF, Zip2NetCDF
def main():
    assert len(sys.argv) >= 2, 'Must provide input file path and optional output file path'
input_path = os.path.abspath(sys.argv[1])
# If only NetCDF path given, then do update_nc_metadata
if len(sys.argv) == 2 and os.path.splitext(input_path)[1] == '.nc':
g2n_object = ERS2NetCDF()
g2n_object.update_nc_metadata(input_path)
# Kind of redundant, but possibly useful for debugging
g2n_object.check_json_metadata()
return
if len(sys.argv) == 3: # output_path specified
output_path = os.path.abspath(sys.argv[2])
else:
# Default output path is next to input path
output_path = os.path.abspath(os.path.splitext(input_path)[0] + '.nc')
g2n_object = None
for subclass in [ERS2NetCDF, Zip2NetCDF]:
if os.path.splitext(input_path)[1] == '.' + subclass.FILE_EXTENSION:
            print('Input file is of type %s' % subclass.FILE_EXTENSION)
# Perform translation
g2n_object = subclass(input_path, output_path)
break
assert g2n_object, 'Unrecognised input file extension'
if __name__ == '__main__':
    main()
|
alex-ip/geophys2netcdf
|
geophys2netcdf/__main__.py
|
Python
|
apache-2.0
| 3,071
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
try:
import ctypes
except MemoryError:
# selinux execmem denial
# https://bugzilla.redhat.com/show_bug.cgi?id=488396
ctypes = None
except ImportError:
# Python on Solaris compiled with Sun Studio doesn't have ctypes
ctypes = None
import fcntl
import os
import pkg_resources
import random
import resource
import socket
import sys
import textwrap
import time
import traceback
import inspect
import errno
import warnings
from gunicorn.six import text_type, string_types
MAXFD = 1024
if hasattr(os, "devnull"):
REDIRECT_TO = os.devnull
else:
REDIRECT_TO = "/dev/null"
timeout_default = object()
CHUNK_SIZE = (16 * 1024)
MAX_BODY = 1024 * 132
weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
monthname = [None,
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
# Server and Date aren't technically hop-by-hop
# headers, but they are in the purview of the
# origin server which the WSGI spec says we should
# act like. So we drop them and add our own.
#
# In the future, concatenating server header values
# might be better, but nothing else does it and
# dropping them is easier.
hop_headers = set("""
connection keep-alive proxy-authenticate proxy-authorization
te trailers transfer-encoding upgrade
server date
""".split())
try:
from setproctitle import setproctitle
def _setproctitle(title):
setproctitle("gunicorn: %s" % title)
except ImportError:
def _setproctitle(title):
return
try:
from importlib import import_module
except ImportError:
def _resolve_name(name, package, level):
"""Return the absolute name of the module to be imported."""
if not hasattr(package, 'rindex'):
raise ValueError("'package' not set to a string")
dot = len(package)
for x in range(level, 1, -1):
try:
dot = package.rindex('.', 0, dot)
except ValueError:
raise ValueError("attempted relative import beyond top-level "
"package")
return "%s.%s" % (package[:dot], name)
def import_module(name, package=None):
"""Import a module.
The 'package' argument is required when performing a relative import. It
specifies the package to use as the anchor point from which to resolve the
relative import to an absolute import.
"""
if name.startswith('.'):
if not package:
raise TypeError("relative imports require the 'package' argument")
level = 0
for character in name:
if character != '.':
break
level += 1
name = _resolve_name(name[level:], package, level)
__import__(name)
return sys.modules[name]
def load_class(uri, default="sync", section="gunicorn.workers"):
if inspect.isclass(uri):
return uri
if uri.startswith("egg:"):
# uses entry points
entry_str = uri.split("egg:")[1]
try:
dist, name = entry_str.rsplit("#", 1)
except ValueError:
dist = entry_str
name = default
try:
return pkg_resources.load_entry_point(dist, section, name)
except:
exc = traceback.format_exc()
raise RuntimeError("class uri %r invalid or not found: \n\n[%s]" % (uri,
exc))
else:
components = uri.split('.')
if len(components) == 1:
try:
if uri.startswith("#"):
uri = uri[1:]
return pkg_resources.load_entry_point("gunicorn",
section, uri)
except:
exc = traceback.format_exc()
raise RuntimeError("class uri %r invalid or not found: \n\n[%s]" % (uri,
exc))
klass = components.pop(-1)
try:
mod = __import__('.'.join(components))
except:
exc = traceback.format_exc()
raise RuntimeError("class uri %r invalid or not found: \n\n[%s]" % (uri,
exc))
for comp in components[1:]:
mod = getattr(mod, comp)
return getattr(mod, klass)
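# Examples (sketch; both forms should resolve the bundled sync worker):
#   load_class('sync')                              # via the gunicorn.workers entry points
#   load_class('gunicorn.workers.sync.SyncWorker')  # via a dotted path import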
def set_owner_process(uid, gid):
""" set user and group of workers processes """
if gid:
try:
os.setgid(gid)
except OverflowError:
if not ctypes:
raise
# versions of python < 2.6.2 don't manage unsigned int for
# groups like on osx or fedora
os.setgid(-ctypes.c_int(-gid).value)
if uid:
os.setuid(uid)
def chown(path, uid, gid):
try:
os.chown(path, uid, gid)
except OverflowError:
if not ctypes:
raise
os.chown(path, uid, -ctypes.c_int(-gid).value)
if sys.platform.startswith("win"):
def _waitfor(func, pathname, waitall=False):
        # Perform the operation
func(pathname)
# Now setup the wait loop
if waitall:
dirname = pathname
else:
dirname, name = os.path.split(pathname)
dirname = dirname or '.'
# Check for `pathname` to be removed from the filesystem.
# The exponential backoff of the timeout amounts to a total
# of ~1 second after which the deletion is probably an error
# anyway.
# Testing on a i7@4.3GHz shows that usually only 1 iteration is
# required when contention occurs.
timeout = 0.001
while timeout < 1.0:
            # Note we are only testing for the existence of the file(s) in
# the contents of the directory regardless of any security or
# access rights. If we have made it this far, we have sufficient
# permissions to do that much using Python's equivalent of the
# Windows API FindFirstFile.
# Other Windows APIs can fail or give incorrect results when
# dealing with files that are pending deletion.
L = os.listdir(dirname)
if not (L if waitall else name in L):
return
# Increase the timeout and try again
time.sleep(timeout)
timeout *= 2
warnings.warn('tests may fail, delete still pending for ' + pathname,
RuntimeWarning, stacklevel=4)
def _unlink(filename):
_waitfor(os.unlink, filename)
else:
_unlink = os.unlink
def unlink(filename):
try:
_unlink(filename)
except OSError as error:
# The filename need not exist.
if error.errno not in (errno.ENOENT, errno.ENOTDIR):
raise
def is_ipv6(addr):
try:
socket.inet_pton(socket.AF_INET6, addr)
except socket.error: # not a valid address
return False
return True
def parse_address(netloc, default_port=8000):
if netloc.startswith("unix:"):
return netloc.split("unix:")[1]
if netloc.startswith("unix://"):
return netloc.split("unix://")[1]
if netloc.startswith("tcp://"):
netloc = netloc.split("tcp://")[1]
# get host
if '[' in netloc and ']' in netloc:
host = netloc.split(']')[0][1:].lower()
elif ':' in netloc:
host = netloc.split(':')[0].lower()
elif netloc == "":
host = "0.0.0.0"
else:
host = netloc.lower()
    # get port
netloc = netloc.split(']')[-1]
if ":" in netloc:
port = netloc.split(':', 1)[1]
if not port.isdigit():
raise RuntimeError("%r is not a valid port number." % port)
port = int(port)
else:
port = default_port
return (host, port)
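# Examples (sketch):
#   parse_address('localhost:9000')            -> ('localhost', 9000)
#   parse_address('[::1]:9000')                -> ('::1', 9000)
#   parse_address('')                          -> ('0.0.0.0', 8000)
#   parse_address('unix:///tmp/gunicorn.sock') -> '/tmp/gunicorn.sock'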
def get_maxfd():
maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
    if maxfd == resource.RLIM_INFINITY:
maxfd = MAXFD
return maxfd
def close_on_exec(fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
flags |= fcntl.FD_CLOEXEC
fcntl.fcntl(fd, fcntl.F_SETFD, flags)
def set_non_blocking(fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
def close(sock):
try:
sock.close()
except socket.error:
pass
try:
from os import closerange
except ImportError:
def closerange(fd_low, fd_high):
# Iterate through and close all file descriptors.
for fd in range(fd_low, fd_high):
try:
os.close(fd)
except OSError: # ERROR, fd wasn't open to begin with (ignored)
pass
def write_chunk(sock, data):
if isinstance(data, text_type):
data = data.encode('utf-8')
chunk_size = "%X\r\n" % len(data)
chunk = b"".join([chunk_size.encode('utf-8'), data, b"\r\n"])
sock.sendall(chunk)
def write(sock, data, chunked=False):
if chunked:
return write_chunk(sock, data)
sock.sendall(data)
def write_nonblock(sock, data, chunked=False):
timeout = sock.gettimeout()
if timeout != 0.0:
try:
sock.setblocking(0)
return write(sock, data, chunked)
finally:
sock.setblocking(1)
else:
return write(sock, data, chunked)
def writelines(sock, lines, chunked=False):
for line in list(lines):
write(sock, line, chunked)
def write_error(sock, status_int, reason, mesg):
html = textwrap.dedent("""\
<html>
<head>
<title>%(reason)s</title>
</head>
<body>
<h1>%(reason)s</h1>
%(mesg)s
</body>
</html>
""") % {"reason": reason, "mesg": mesg}
http = textwrap.dedent("""\
HTTP/1.1 %s %s\r
Connection: close\r
Content-Type: text/html\r
Content-Length: %d\r
\r
%s
""") % (str(status_int), reason, len(html), html)
write_nonblock(sock, http.encode('latin1'))
def normalize_name(name):
return "-".join([w.lower().capitalize() for w in name.split("-")])
def import_app(module):
parts = module.split(":", 1)
if len(parts) == 1:
module, obj = module, "application"
else:
module, obj = parts[0], parts[1]
try:
__import__(module)
except ImportError:
if module.endswith(".py") and os.path.exists(module):
raise ImportError("Failed to find application, did "
"you mean '%s:%s'?" % (module.rsplit(".", 1)[0], obj))
else:
raise
mod = sys.modules[module]
app = eval(obj, mod.__dict__)
if app is None:
raise ImportError("Failed to find application object: %r" % obj)
if not callable(app):
raise TypeError("Application object must be callable.")
return app
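# Examples (sketch; module names are hypothetical):
#   import_app('myproject.wsgi')      -> myproject.wsgi.application
#   import_app('myproject.wsgi:app')  -> myproject.wsgi.app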
def http_date(timestamp=None):
"""Return the current date and time formatted for a message header."""
if timestamp is None:
timestamp = time.time()
year, month, day, hh, mm, ss, wd, y, z = time.gmtime(timestamp)
s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
weekdayname[wd],
day, monthname[month], year,
hh, mm, ss)
return s
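# Example (illustrative): the Unix epoch formats as
#   http_date(0) -> 'Thu, 01 Jan 1970 00:00:00 GMT'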
def is_hoppish(header):
return header.lower().strip() in hop_headers
def daemonize():
"""\
Standard daemonization of a process.
http://www.svbug.com/documentation/comp.unix.programmer-FAQ/faq_2.html#SEC16
"""
    if 'GUNICORN_FD' not in os.environ:
if os.fork():
os._exit(0)
os.setsid()
if os.fork():
os._exit(0)
os.umask(0)
maxfd = get_maxfd()
closerange(0, maxfd)
os.open(REDIRECT_TO, os.O_RDWR)
os.dup2(0, 1)
os.dup2(0, 2)
def seed():
try:
random.seed(os.urandom(64))
except NotImplementedError:
random.seed('%s.%s' % (time.time(), os.getpid()))
def check_is_writeable(path):
try:
f = open(path, 'a')
except IOError as e:
raise RuntimeError("Error: '%s' isn't writable [%r]" % (path, e))
f.close()
def to_bytestring(value):
"""Converts a string argument to a byte string"""
if isinstance(value, bytes):
return value
assert isinstance(value, text_type)
return value.encode("utf-8")
|
OneBitSoftware/jwtSample
|
src/Spa/env1/Lib/site-packages/gunicorn/util.py
|
Python
|
mit
| 12,319
|
__author__ = "Christian Kongsgaard"
__license__ = "MIT"
# -------------------------------------------------------------------------------------------------------------------- #
# IMPORTS
# Modules:
import os
import datetime
import time
import shutil
import typing
from mongoengine import Q
# RiBuild Modules:
from delphin_6_automation.database_interactions.db_templates import delphin_entry
from delphin_6_automation.database_interactions import general_interactions as general_interact
from delphin_6_automation.logging.ribuild_logger import ribuild_logger
from delphin_6_automation.backend import simulation_worker
# Logger
logger = ribuild_logger()
# -------------------------------------------------------------------------------------------------------------------- #
# RIBUILD SIMULATION FUNCTIONS AND CLASSES
def download_simulation_result(sim_id: str, download_path: str, raw_or_processed='raw') -> None:
"""
Downloads Delphin simulation results from the database.
:param sim_id: Delphin project ID
:param download_path: Path to download to
:param raw_or_processed: Whether to download the raw results or the processed ones
:return: None
"""
delphin_doc = delphin_entry.Delphin.objects(id=sim_id).first()
download_extended_path = download_path + '/' + str(sim_id)
os.mkdir(download_extended_path)
if raw_or_processed == 'raw':
result_id = delphin_doc.results_raw
logger.info(f'Downloads raw result with ID: {result_id} from Delphin project with ID: {sim_id}')
general_interact.download_raw_result(result_id.id, download_extended_path)
elif raw_or_processed == 'processed':
pass
# TODO - Download processed results from database
else:
raise ValueError('raw_or_processed has to be raw or processed. Value given was: ' + str(raw_or_processed))
return None
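# Example (sketch; the ID is a hypothetical MongoDB ObjectId string):
#   download_simulation_result('5af3098a1d89460001a5e968', '/tmp/results')
# creates /tmp/results/5af3098a1d89460001a5e968/ and downloads the raw
# result files into it.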
def find_next_sim_in_queue() -> typing.Optional[str]:
"""
    Finds the next entry in the simulation queue which is not yet simulated and has the highest queue priority.

    :return: If an entry is found, its ID is returned; otherwise None.
"""
try:
id_ = delphin_entry.Delphin.objects(simulating=False, simulated=None).order_by('-queue_priority').first().id
set_simulating(str(id_), True)
logger.debug(f'Found unsimulated Delphin project with ID: {id_}')
return str(id_)
except AttributeError:
logger.info('All Delphin Projects in the queue are simulated!')
time.sleep(60)
return None
def set_simulating(id_: str, set_to: bool) -> str:
"""
Set the simulating flag of an entry.
:param id_: ID of the entry
:param set_to: What to set simulating to. Should be either True or False.
:return: ID of the entry
"""
delphin_doc = delphin_entry.Delphin.objects(id=id_).first()
if set_to:
delphin_doc.update(set__simulating=datetime.datetime.now())
else:
delphin_doc.update(set__simulating=None)
logger.debug(f'For Delphin project with ID: {id_}, simulating was changed to: {set_to}')
return delphin_doc.id
def set_simulated(id_: str) -> str:
"""
Flags an entry for finishing the simulation.
:param id_: ID of the entry
:return: ID of the entry
"""
simulation = delphin_entry.Delphin.objects(id=id_).first()
simulation.update(set__simulated=datetime.datetime.now())
set_simulating(id_, False)
logger.debug(f'For Delphin project with ID: {id_}, simulated was changed to: {datetime.datetime.now()}')
return simulation.id
def clean_simulation_folder(path: str) -> bool:
"""
Cleans the simulation folder for content
:param path: Path to the simulation folder
:return: True on success
"""
shutil.rmtree(path)
logger.debug(f'Deleted {path}')
return True
def set_simulation_time(sim_id: str, computation_time: datetime.timedelta) -> str:
"""Sets the time it took to simulate Delphin project"""
delphin_doc = delphin_entry.Delphin.objects(id=sim_id).first()
delphin_doc.update(set__simulation_time=computation_time.total_seconds())
logger.debug(f'For Delphin project with ID: {sim_id}, '
f'simulation time was changed to: {computation_time.total_seconds()}')
return sim_id
def set_simulation_time_estimate(sim_id: str, computation_time: int) -> str:
"""Sets the estimate simulation time for a Delphin project"""
delphin_doc = delphin_entry.Delphin.objects(id=sim_id).first()
delphin_doc.update(set__estimated_simulation_time=computation_time)
logger.debug(f'For Delphin project with ID: {sim_id}, '
f'simulation time was changed to: {computation_time}')
return sim_id
def get_simulation_time_estimate(delphin_id: str) -> int:
"""Returns the estimated simulation time of Delphin project, given its ID"""
delphin_doc = delphin_entry.Delphin.objects(id=delphin_id).first()
if delphin_doc.estimated_simulation_time:
return delphin_doc.estimated_simulation_time
else:
return general_interact.compute_simulation_time(delphin_id)
def wait_until_simulated(delphin_ids: list, is_sampling_ahead: bool = False) -> bool:
"""
    Wait until all simulations in the given list are simulated.

    :param delphin_ids: List of Delphin database IDs
    :param is_sampling_ahead: If True, keep waiting even when 90% of the projects are done
    :return: True when all projects are simulated, False if 90% are done and sampling is not ahead
"""
simulated = [False] * len(delphin_ids)
    logger.info('Checking if Delphin projects have been simulated')
while not all(simulated):
for index, id_ in enumerate(delphin_ids):
entry = delphin_entry.Delphin.objects(id=id_).only('simulated').first()
if entry.simulated:
simulated[index] = True
        logger.debug(f'Waiting until all projects are simulated. {sum(simulated)}/{len(simulated)} are simulated')
if all(simulated):
logger.info('All projects are simulated')
return True
if sum(simulated) >= (len(simulated) * 0.9) and not is_sampling_ahead:
logger.info('90% of projects are simulated')
return False
if not all(simulated):
time.sleep(180)
logger.info('All projects are simulated')
return True
def find_exceeded() -> typing.Optional[str]:
"""
Finds a Delphin project which has exceeded the simulation run time limit.

    :return: If an entry is found, its ID is returned; otherwise None.
"""
try:
id_ = delphin_entry.Delphin.objects(simulating=False,
exceeded_time_limit=True).order_by('-queue_priority').first().id
set_simulating(str(id_), True)
logger.debug(f'Found exceeded Delphin project with ID: {id_}')
return str(id_)
except AttributeError:
logger.info('No exceeded Delphin Projects in the database!')
time.sleep(60)
return None
def check_simulations(auth_file: str, only_count=False) -> tuple:
"""Checks running simulations on HPC"""
terminal_call = f"bstat\n"
client = simulation_worker.connect_to_hpc(auth_file)
channel = client.invoke_shell()
time.sleep(0.5)
channel.send(terminal_call)
channel_data = get_command_results(channel)
simulation_data = channel_data.split('hpclogin3')[1]
channel.close()
client.close()
# Process string
simulation_data = simulation_data.split("\n")[1:]
count = 0
p_count = 0
for data in simulation_data:
data = data.strip()
if data and data != '~' and 'JOBID' not in data:
if "pend" in data.lower():
p_count += 1
else:
count += 1
if not only_count:
logger.info(data)
return count, p_count
def get_command_results(channel):
    # Adapted from http://joelinoff.com/blog/?p=905
interval = 0.1
maxseconds = 10
maxcount = maxseconds / interval
bufsize = 1024
# Poll until completion or timeout
# Note that we cannot directly use the stdout file descriptor
# because it stalls at 64K bytes (65536).
input_idx = 0
timeout_flag = False
start = datetime.datetime.now()
start_secs = time.mktime(start.timetuple())
output = ''
channel.setblocking(0)
while True:
if channel.recv_ready():
data = channel.recv(bufsize).decode('utf-8')
output += data
if channel.exit_status_ready():
break
# Timeout check
now = datetime.datetime.now()
now_secs = time.mktime(now.timetuple())
et_secs = now_secs - start_secs
if et_secs > maxseconds:
timeout_flag = True
break
rbuffer = output.rstrip(' ')
        if len(rbuffer) > 0 and (rbuffer[-1] == '#' or rbuffer[-1] == '>'):  # a shell prompt was detected
break
time.sleep(0.2)
return output
def check_simulating_projects(not_simulating: bool = False) -> None:
expiry_date = datetime.datetime.now() - datetime.timedelta(minutes=300)
projects = delphin_entry.Delphin.objects(simulating__lt=expiry_date)
    logger.info(f'There are {projects.count()} projects which have exceeded their simulation time.')
if not_simulating and projects.count() > 0:
logger.info('Setting exceed simulations to not simulating')
projects.update(simulating=None)
projects = delphin_entry.Delphin.objects(simulating__lt=expiry_date)
        logger.info(f'There are {projects.count()} projects which have exceeded their simulation time.')
|
thp44/delphin_6_automation
|
delphin_6_automation/database_interactions/simulation_interactions.py
|
Python
|
mit
| 9,558
|
def decoded_len(s):
chars = 0
# 0: not in an escape sequence
# 1: just saw a \
# 2: in a hex sequence (just saw \x)
# 3: passed the first of 2 hex chars (just saw \xd for some digit d)
state = 0
for c in s[1:-1]: # assume all strings are quoted
if state == 0 and c == "\\":
state = 1
continue
if state == 1 and (c == "\\" or c == '"'):
chars += 1
state = 0
continue
if state == 1 and c == 'x':
state = 2
continue
if state == 2:
state = 3
continue
if state == 3:
chars += 1
state = 0
continue
# just a normal char
chars += 1
continue
return chars
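# Quick checks (illustrative, from the AoC day 8 examples):
#   decoded_len(r'""') == 0
#   decoded_len(r'"abc"') == 3
#   decoded_len(r'"aaa\"aaa"') == 7
#   decoded_len(r'"\x27"') == 1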
total_raw_chars = 0
total_computed_chars = 0
with open('input_8.txt') as strings:
for s in strings:
s = s.strip()
raw_chars = len(s)
computed_chars = decoded_len(s)
        print('%s %d %d' % (s, raw_chars, computed_chars))
total_raw_chars += raw_chars
total_computed_chars += computed_chars
print(total_raw_chars - total_computed_chars)
#!/usr/bin/env python
def encoded_length(s):
total_length = 2 # we get "" for free
for c in s:
if c == "\\" or c == '"':
total_length += 2
else:
total_length += 1
return total_length
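# Quick check (illustrative): each " or \ costs 2 and the enclosing quotes
# add 2, so encoded_length(r'"aaa\"aaa"') == 16 for the 10-character
# literal, a difference of 6.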
total_diff = 0
with open('input_8.txt') as strings:
for s in strings:
s = s.strip()
total_diff += (encoded_length(s) - len(s))
print(total_diff)
|
aarestad/advent-of-code-2015
|
2015/8.py
|
Python
|
gpl-3.0
| 1,509
|
#!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example downloads activity tags for a given floodlight activity."""
import argparse
import sys
from apiclient import sample_tools
from oauth2client import client
# Declare command-line flags.
argparser = argparse.ArgumentParser(add_help=False)
argparser.add_argument(
'profile_id', type=int,
help='The ID of the profile to download tags for')
argparser.add_argument(
'activity_id', type=int,
help='The ID of the floodlight activity to download tags for')
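# Typical invocation (sketch; the profile and activity IDs are placeholders):
#   python download_floodlight_tag.py 12345 67890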
def main(argv):
# Authenticate and construct service.
service, flags = sample_tools.init(
argv, 'dfareporting', 'v2.1', __doc__, __file__, parents=[argparser],
scope=['https://www.googleapis.com/auth/dfareporting',
'https://www.googleapis.com/auth/dfatrafficking'])
profile_id = flags.profile_id
activity_id = flags.activity_id
try:
# Construct the request.
request = service.floodlightActivities().generatetag(
profileId=profile_id, floodlightActivityId=activity_id)
# Execute request and print response.
response = request.execute()
    print(response['floodlightActivityTag'])
except client.AccessTokenRefreshError:
    print('The credentials have been revoked or expired, please re-run the '
          'application to re-authorize')
if __name__ == '__main__':
main(sys.argv)
|
falbassini/googleads-dfa-reporting-samples
|
python/v2.1/download_floodlight_tag.py
|
Python
|
apache-2.0
| 1,952
|
# -*- coding: utf-8 -*-
# Copyright 2017 Stein & Gabelgaard ApS
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, fields, models, SUPERUSER_ID, _
import logging
_logger = logging.getLogger(__name__)
class CamposEventParticipant(models.Model):
_inherit = 'campos.event.participant'
fee_agegroup_id = fields.Many2one('campos.fee.agegroup', 'Fee Agegroup', compute='_compute_fee_agegroup')
nights = fields.Integer('Nights', compute='_compute_nights_product')
transport_co = fields.Integer('Transports', compute='_compute_nights_product')
camp_product_id = fields.Many2one('product.product', 'Camp Fee Product', compute='_compute_nights_product')
rent_product_id = fields.Many2one('product.product', 'Campsite Rent Product', compute='_compute_nights_product')
camp_price = fields.Float(related='camp_product_id.lst_price', string="Camp Fee", readonly=True)
rent_price = fields.Float(related='rent_product_id.lst_price', string="Campsite Rent", readonly=True)
transport_product_id = fields.Many2one('product.product', 'Transport Fee Product', compute='_compute_nights_product')
transport_price = fields.Float(related='transport_product_id.lst_price', string="Transport Fee", readonly=True)
transport_price_total = fields.Float("Transport Total", compute='_compute_nights_product' )
camp_price_total = fields.Float("Camp Total", compute='_compute_nights_product')
sspar_ids = fields.One2many('campos.fee.ss.participant', 'participant_id', 'Snapshot')
ssreginv_ids = fields.One2many(related='registration_id.ssreginv_ids')
no_invoicing = fields.Boolean('Suspend invoicing', groups='campos_event.group_campos_admin')
no_cancel_fee = fields.Boolean('No cancel fee', groups='campos_event.group_campos_admin')
    # Handle transport reimbursement calculations for foreign jobbers
group_entrypoint = fields.Many2one(related='registration_id.group_entrypoint')
group_exitpoint = fields.Many2one(related='registration_id.group_exitpoint')
group_country_code2 = fields.Char(related='registration_id.partner_id.country_id.code', string='Country Code2', readonly=True)
@api.multi
@api.depends('birthdate')
def _compute_fee_agegroup(self):
for par in self:
ag_id = self.env['campos.fee.agegroup'].search([('birthdate_from', '<=', par.birthdate), ('birthdate_to', '>=', par.birthdate)])
            if len(ag_id) != 1:
ag_id = self.env['campos.fee.agegroup'].search([('default_group', '=', True)])
par.fee_agegroup_id = ag_id
@api.multi
@api.depends('birthdate', 'camp_day_ids')
def _compute_nights_product(self):
for par in self:
par.rent_product_id = False
if par.state not in ['deregistered','rejected']:
camp_price = 0.0
days_ids = par.camp_day_ids.filtered(lambda r: r.will_participate and r.day_id.event_period == 'maincamp')
if len(days_ids) == 0:
nights = 8
if par.staff or par.jobber_child:
nights = 0
else:
nights = len(days_ids) - 1
if nights < 1:
nights = 1
pav_id = False
if self.env.uid == SUPERUSER_ID:
pav_id = self.env['product.attribute.value'].search([('attribute_id.name', '=', u'Døgn'),('name', '=', str(nights))])
else:
pav_id = self.env['product.attribute.value'].suspend_security().search([('attribute_id.name', '=', u'Døgn'),('name', '=', str(nights))])
if pav_id:
if self.env.uid == SUPERUSER_ID:
pp_id = self.env['product.product'].search([('product_tmpl_id', '=', par.fee_agegroup_id.template_id.id),('attribute_value_ids', 'in', pav_id.ids)])
else:
pp_id = self.env['product.product'].suspend_security().search([('product_tmpl_id', '=', par.fee_agegroup_id.template_id.id),('attribute_value_ids', 'in', pav_id.ids)])
if pp_id:
par.camp_product_id = pp_id[0]
camp_price = pp_id[0].lst_price
if not par.sudo().no_invoicing:
if self.env.uid == SUPERUSER_ID:
pp_id = self.env['product.product'].search([('product_tmpl_id', '=', par.fee_agegroup_id.rent_template_id.id),('attribute_value_ids', 'in', pav_id.ids)])
else:
pp_id = self.env['product.product'].suspend_security().search([('product_tmpl_id', '=', par.fee_agegroup_id.rent_template_id.id),('attribute_value_ids', 'in', pav_id.ids)])
if pp_id:
par.rent_product_id = pp_id[0]
transport_co = 0
transport_price_total = 0.0
if nights > 0:
if par.fee_agegroup_id.transport_incl:
if camp_price > 0.0:
if not par.transport_from_camp:
transport_co += 1
if not par.transport_to_camp:
transport_co += 1
else:
if par.transport_from_camp:
transport_co += 1
if par.transport_to_camp:
transport_co += 1
par.transport_co = transport_co
muni_prod_attr_ids = False
if par.registration_id.partner_id.municipality_id.product_attribute_id.id:
muni_prod_attr_ids = [par.registration_id.partner_id.municipality_id.product_attribute_id.id]
if not muni_prod_attr_ids:
if par.registration_id.group_entrypoint.municipality_id.product_attribute_id.id and par.registration_id.group_exitpoint.municipality_id.product_attribute_id.id:
muni_prod_attr_ids = [par.registration_id.group_entrypoint.municipality_id.product_attribute_id.id, par.registration_id.group_exitpoint.municipality_id.product_attribute_id.id]
_logger.info('Muni: %s', muni_prod_attr_ids)
if transport_co and muni_prod_attr_ids:
pp_id = False
if self.env.uid == SUPERUSER_ID:
pp_id = self.env['product.product'].search([('product_tmpl_id', '=', par.fee_agegroup_id.transport_tmpl_id.id),('attribute_value_ids', 'in', muni_prod_attr_ids)])
else:
pp_id = self.env['product.product'].suspend_security().search([('product_tmpl_id', '=', par.fee_agegroup_id.transport_tmpl_id.id),('attribute_value_ids', 'in', muni_prod_attr_ids)])
if pp_id:
pp_id = pp_id.sorted(key=lambda r: r.lst_price)
par.transport_product_id = pp_id[0]
transport_price_total = pp_id[0].lst_price * transport_co
par.nights = nights
par.transport_price_total = transport_price_total
par.camp_price_total = transport_price_total + camp_price
else:
par.nights = 0
par.camp_product_id = False
                par.transport_co = 0
par.transport_product_id = False
par.camp_price_total = 0
@api.multi
def do_snapshot(self, ssreg):
for par in self:
if par.jobber_child:
par.signup_state = par.parent_jobber_id.signup_state
sspar = self.env['campos.fee.ss.participant'].create({'ssreg_id': ssreg.id,
'participant_id': par.id,
'state': par.state,
'name': par.name,
'fee_agegroup_id': par.fee_agegroup_id.id,
'nights': par.nights,
'transport_co': par.transport_co,
'transport_to_camp': par.transport_to_camp,
'transport_from_camp': par.transport_from_camp,
'camp_product_id': par.camp_product_id.id,
'transport_product_id': par.transport_product_id.id,
'transport_price_total': par.transport_price_total,
'camp_price_total': par.camp_price_total,
'dates_summery': par.dates_summery,
'payreq_state': par.payreq_state,
'payreq_approved_date': par.payreq_approved_date,
'payreq_approved_user_id': par.payreq_approved_user_id.id,
'participant': par.participant,
'staff': par.staff,
#'jobber_child': par.jobber_child,
# Transportaion fields
'webtourususeridno': par.webtourususeridno,
'webtourusgroupidno': par.webtourusgroupidno,
'tocampfromdestination_id': par.tocampfromdestination_id.id,
'fromcamptodestination_id': par.fromcamptodestination_id.id,
'tocampdate': par.tocampdate,
                                                                  'fromcampdate': par.fromcampdate,
'tocampusneed_id': par.tocampusneed_id.id,
'fromcampusneed_id': par.fromcampusneed_id.id,
'signup_state': par.signup_state,
'no_invoicing': par.sudo().no_invoicing,
'no_cancel_fee': par.sudo().no_cancel_fee,
})
@api.multi
def set_cancel_date(self):
for par in self:
if par.state == 'deregistered' and not par.cancel_dt:
                # str.find() returns -1 (truthy) when a substring is absent, so
                # the original filter matched nearly every message; membership
                # tests give the intended behaviour
                msg_ids = par.message_ids.filtered(
                    lambda r: '→ Afmeldt' in r.body or '→ Deregistered' in r.body)
if msg_ids:
par.cancel_dt = msg_ids[0].date
@api.multi
def action_open_invoices(self):
self.ensure_one()
view = self.env.ref('account.invoice_form')
action = {
'name': _("Invoice for %s") % (self.name),
'view_mode': 'tree,form',
'view_type': 'form',
'views': [(False, 'tree'),(view.id, 'form')],
'res_model': 'account.invoice',
'type': 'ir.actions.act_window',
'nodestroy': True,
'domain': [('partner_id', '=', self.registration_id.partner_id.id)],
}
_logger.info('ACTION: %s', action)
return action
|
sl2017/campos
|
campos_fee/models/campos_event_participant.py
|
Python
|
agpl-3.0
| 12,468
|
#
# Configuration/Defaults file for SConscript.
# This is a Python file.
# Store frequently used command-line variables in this file rather than
# supplying them to scons at each invocation.
#build_mode = 'rel'
use_plat = 1
|
semihc/gsl
|
SConsCfg.py
|
Python
|
gpl-2.0
| 222
|
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Options/BoolOption.py 2013/03/03 09:48:35 garyo"
__doc__ = """Place-holder for the old SCons.Options module hierarchy
This is for backwards compatibility. The new equivalent is the Variables/
class hierarchy. These will have deprecation warnings added (some day),
and will then be removed entirely (some day).
"""
import SCons.Variables
import SCons.Warnings
warned = False
def BoolOption(*args, **kw):
global warned
if not warned:
msg = "The BoolOption() function is deprecated; use the BoolVariable() function instead."
SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg)
warned = True
return SCons.Variables.BoolVariable(*args, **kw)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
aubreyrjones/libesp
|
scons_local/scons-local-2.3.0/SCons/Options/BoolOption.py
|
Python
|
mit
| 2,015
|
from __future__ import print_function
import unittest2
from lldbsuite.test.decorators import *
from lldbsuite.test.concurrent_base import ConcurrentEventsBase
from lldbsuite.test.lldbtest import TestBase
@skipIfWindows
class ConcurrentTwoWatchpointsOneDelayBreakpoint(ConcurrentEventsBase):
mydir = ConcurrentEventsBase.compute_mydir(__file__)
@skipIfFreeBSD # timing out on buildbot
# Atomic sequences are not supported yet for MIPS in LLDB.
@skipIf(triple='^mips')
@expectedFailureNetBSD
@add_test_categories(["watchpoint"])
def test(self):
"""Test two threads that trigger a watchpoint and one (1 second delay) breakpoint thread. """
self.build(dictionary=self.getBuildFlags())
self.do_thread_actions(
num_watchpoint_threads=2,
num_delay_breakpoint_threads=1)
|
apple/swift-lldb
|
packages/Python/lldbsuite/test/functionalities/thread/concurrent_events/TestConcurrentTwoWatchpointsOneDelayBreakpoint.py
|
Python
|
apache-2.0
| 846
|
# -*- coding: utf-8 -*-
#
# Eryri documentation build configuration file, created by
# sphinx-quickstart on Tue Jan 14 09:53:41 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.pngmath',
'sphinx.ext.viewcode',
'sphinxcontrib.blockdiag',
'sphinxcontrib.actdiag',
'sphinxcontrib.seqdiag',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Eryri'
copyright = u'2014, Juti Noppornpitak, Panote Siriaraya'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# [Customization]
try:
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
except ImportError:
html_theme = 'default'
# [Original]
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Eryridoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Eryri.tex', u'Eryri Documentation',
u'Juti Noppornpitak, Panote Siriaraya', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'eryri', u'Eryri Documentation',
[u'Juti Noppornpitak, Panote Siriaraya'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Eryri', u'Eryri Documentation',
u'Juti Noppornpitak, Panote Siriaraya', 'Eryri', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'Eryri'
epub_author = u'Juti Noppornpitak, Panote Siriaraya'
epub_publisher = u'Juti Noppornpitak, Panote Siriaraya'
epub_copyright = u'2014, Juti Noppornpitak, Panote Siriaraya'
# The basename for the epub file. It defaults to the project name.
#epub_basename = u'Eryri'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
|
nepteam/documentation
|
docs/source/conf.py
|
Python
|
mit
| 10,696
|
import os
from setuptools import setup
PACKAGE_VERSION = '0.2.0'
PACKAGE_NAME = 'django-couchdb-cache'
EXAMPLES_TARGET_DIR = 'share/{}/'.format(PACKAGE_NAME)
EXAMPLES_LOCAL_DIR = 'examples'
def get_data_files():
data_files = [(os.path.join(EXAMPLES_TARGET_DIR, root), [os.path.join(root, f) for f in files]) for root, dirs, files in os.walk(EXAMPLES_LOCAL_DIR)]
return data_files
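# For example, with a file examples/app.py present, get_data_files() would
# yield entries like ('share/django-couchdb-cache/examples', ['examples/app.py']).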
setup(
name=PACKAGE_NAME,
author='ShuttleCloud Corp',
author_email='dev@shuttlecloud.com',
description='CouchDB cache application for Django',
url='https://github.com/shuttlecloud/django-couchdb-cache.git',
version=PACKAGE_VERSION,
packages=['couchdb_cache'],
include_package_data=True,
zip_safe=False,
scripts=[],
data_files=get_data_files(),
install_requires=[
'Django<=1.7.4',
'CouchDB==0.10',
'python-memcached'
],
dependency_links=[
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
'Programming Language :: Python :: 2',
]
)
|
shuttlecloud/django-couchdb-cache
|
setup.py
|
Python
|
gpl-3.0
| 1,196
|
"""
Django settings for training project.
Generated by 'django-admin startproject' using Django 1.11.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '=*p9ko#afwa@qmr%icu!v04=_jwyhb=na9-r0ji&3hh!nn)qwc'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'ticketing_system.apps.TicketingSystemConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'training.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'training.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'ticketing_system',
'HOST': 'localhost',
'USER': 'debian',
'PASSWORD': 'debian123',
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
|
aberon10/training
|
training/training/settings.py
|
Python
|
mit
| 3,221
|
"""Automatically download MLdata datasets."""
# Copyright (c) 2011 Pietro Berkes
# License: Simplified BSD
import os
from os.path import join, exists
import re
import numpy as np
import scipy as sp
from scipy import io
from shutil import copyfileobj
import urllib2
from .base import get_data_home, Bunch
MLDATA_BASE_URL = "http://mldata.org/repository/data/download/matlab/%s"
def mldata_filename(dataname):
"""Convert a raw name for a data set in a mldata.org filename."""
dataname = dataname.lower().replace(' ', '-')
return re.sub(r'[().]', '', dataname)
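# For example: mldata_filename('Whistler Daily Snowfall') == 'whistler-daily-snowfall'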
def fetch_mldata(dataname, target_name='label', data_name='data',
transpose_data=True, data_home=None):
"""Fetch an mldata.org data set
    If the file does not exist yet, it is downloaded from mldata.org.
mldata.org does not have an enforced convention for storing data or
naming the columns in a data set. The default behavior of this function
works well with the most common cases:
1) data values are stored in the column 'data', and target values in the
column 'label'
2) alternatively, the first column stores target values, and the second
data values
3) the data array is stored as `n_features x n_samples` , and thus needs
to be transposed to match the `sklearn` standard
    Keyword arguments allow you to adapt these defaults to specific data sets
(see parameters `target_name`, `data_name`, `transpose_data`, and
the examples below).
mldata.org data sets may have multiple columns, which are stored in the
Bunch object with their original name.
Parameters
----------
dataname:
Name of the data set on mldata.org,
e.g.: "leukemia", "Whistler Daily Snowfall", etc.
    The raw name is automatically converted to an mldata.org URL.
target_name: optional, default: 'label'
Name or index of the column containing the target values.
data_name: optional, default: 'data'
Name or index of the column containing the data.
transpose_data: optional, default: True
If True, transpose the downloaded data array.
data_home: optional, default: None
Specify another download and cache folder for the data sets. By default
all scikit learn data is stored in '~/scikit_learn_data' subfolders.
Returns
-------
data : Bunch
Dictionary-like object, the interesting attributes are:
'data', the data to learn, 'target', the classification labels,
'DESCR', the full description of the dataset, and
'COL_NAMES', the original names of the dataset columns.
Examples
--------
Load the 'iris' dataset from mldata.org:
>>> from sklearn.datasets.mldata import fetch_mldata
>>> iris = fetch_mldata('iris')
>>> iris.target[0]
1
>>> print(iris.data[0])
[-0.555556 0.25 -0.864407 -0.916667]
Load the 'leukemia' dataset from mldata.org, which needs to be transposed
    to respect the sklearn axes convention:
>>> leuk = fetch_mldata('leukemia', transpose_data=True)
>>> print(leuk.data.shape[0])
72
Load an alternative 'iris' dataset, which has different names for the
columns:
>>> iris2 = fetch_mldata('datasets-UCI iris', target_name=1,
... data_name=0)
>>> iris3 = fetch_mldata('datasets-UCI iris',
... target_name='class', data_name='double0')
"""
# normalize dataset name
dataname = mldata_filename(dataname)
# check if this data set has been already downloaded
data_home = get_data_home(data_home=data_home)
data_home = join(data_home, 'mldata')
if not exists(data_home):
os.makedirs(data_home)
matlab_name = dataname + '.mat'
filename = join(data_home, matlab_name)
# if the file does not exist, download it
if not exists(filename):
urlname = MLDATA_BASE_URL % urllib2.quote(dataname)
try:
mldata_url = urllib2.urlopen(urlname)
except urllib2.HTTPError as e:
if e.code == 404:
e.msg = "Dataset '%s' not found on mldata.org." % dataname
raise
# store Matlab file
try:
with open(filename, 'w+b') as matlab_file:
copyfileobj(mldata_url, matlab_file)
except:
os.remove(filename)
raise
mldata_url.close()
# load dataset matlab file
with open(filename, 'rb') as matlab_file:
matlab_dict = io.loadmat(matlab_file, struct_as_record=True)
# -- extract data from matlab_dict
# flatten column names
col_names = [str(descr[0])
for descr in matlab_dict['mldata_descr_ordering'][0]]
    # if target or data names are indices, transform them into names
if isinstance(target_name, (int, np.integer)):
target_name = col_names[target_name]
if isinstance(data_name, (int, np.integer)):
data_name = col_names[data_name]
# rules for making sense of the mldata.org data format
# (earlier ones have priority):
# 1) there is only one array => it is "data"
# 2) there are multiple arrays
# a) copy all columns in the bunch, using their column name
# b) if there is a column called `target_name`, set "target" to it,
# otherwise set "target" to first column
# c) if there is a column called `data_name`, set "data" to it,
# otherwise set "data" to second column
dataset = {'DESCR': 'mldata.org dataset: %s' % dataname,
'COL_NAMES': col_names}
# 1) there is only one array => it is considered data
if len(col_names) == 1:
data_name = col_names[0]
dataset['data'] = matlab_dict[data_name]
# 2) there are multiple arrays
else:
for name in col_names:
dataset[name] = matlab_dict[name]
if target_name in col_names:
del dataset[target_name]
dataset['target'] = matlab_dict[target_name]
else:
del dataset[col_names[0]]
dataset['target'] = matlab_dict[col_names[0]]
if data_name in col_names:
del dataset[data_name]
dataset['data'] = matlab_dict[data_name]
else:
del dataset[col_names[1]]
dataset['data'] = matlab_dict[col_names[1]]
# set axes to sklearn conventions
if transpose_data:
dataset['data'] = dataset['data'].T
if 'target' in dataset:
if not sp.sparse.issparse(dataset['target']):
dataset['target'] = dataset['target'].squeeze()
return Bunch(**dataset)
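# A hedged sketch of the column rules above, for a hypothetical .mat file
# whose columns are ['label', 'data', 'extra']:
#   bunch = fetch_mldata('some-dataset')  # dataset name is illustrative
#   bunch.target    # the 'label' column (rule 2b)
#   bunch.data      # the 'data' column (rule 2c), transposed by default
#   bunch['extra']  # other columns are kept under their original names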
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/sklearn/datasets/mldata.py
|
Python
|
agpl-3.0
| 6,651
|
from django import template
register = template.Library()
@register.filter
def make_id(name):
return "-".join(name.split())
@register.filter
def block_param_title(d):
    # 'd' rather than 'dict' to avoid shadowing the builtin
    return list(d.keys())[0].title()
@register.filter
def block_param_id(d):
    return "-".join(list(d.keys())[0].split())
|
OpenSourcePolicyCenter/PolicyBrain
|
webapp/apps/taxbrain/templatetags/strings.py
|
Python
|
mit
| 314
|
import pytest
import rpmlint.spellcheck
@pytest.mark.skipif(not rpmlint.spellcheck.ENCHANT, reason='Missing enchant bindings')
def test_spelldict(capsys):
"""
Check we can init dictionary spellchecker
"""
spell = rpmlint.spellcheck.Spellcheck()
spell._init_checker()
out, err = capsys.readouterr()
assert not out
assert not err
assert 'unable to load spellchecking dictionary' not in err
spell._init_checker('not-existing-language')
out, err = capsys.readouterr()
assert not out
assert 'unable to load spellchecking dictionary' in err
assert 'en_US' in spell._enchant_checkers
assert spell._enchant_checkers['en_US'] is not None
assert 'not-existing-language' not in spell._enchant_checkers
@pytest.mark.skipif(not rpmlint.spellcheck.ENCHANT, reason='Missing enchant bindings')
def test_spellchecking():
"""
Check if we can test the spelling
"""
spell = rpmlint.spellcheck.Spellcheck()
# correct text
text = 'I swear this text is proper English'
result = spell.spell_check(text, 'Description({}):')
assert not result
    # English text with two (intentional) typos
text = "I don't think tihs tetx is correct English"
result = spell.spell_check(text, 'Description({}):')
assert len(result) == 2
assert result['tihs'] == 'Description(en_US): tihs -> this, hits, ties'
# different language, one typo
text = 'Příčerně žluťoučký kůň'
result = spell.spell_check(text, 'Summary({}):', 'cs_CZ')
assert len(result) == 1
assert result['Příčerně'] == 'Summary(cs_CZ): Příčerně -> Příčetně, Příčeně, Příšerně'
# non-existing language, should return nothing:
text = 'Weird english text'
result = spell.spell_check(text, 'Summary({}):', 'de_CZ')
assert not result
@pytest.mark.skipif(not rpmlint.spellcheck.ENCHANT, reason='Missing enchant bindings')
def test_pkgname_spellchecking():
spell = rpmlint.spellcheck.Spellcheck()
pkgname = 'python-squeqe'
text = "This package is squeqe's framework helper"
result = spell.spell_check(text, 'Description({}):', 'en_US', pkgname)
assert not result
@pytest.mark.skipif(not rpmlint.spellcheck.ENCHANT, reason='Missing enchant bindings')
def test_ignorelist_spellchecking():
spell = rpmlint.spellcheck.Spellcheck()
ignore = ['wrod', 'žížala']
text = 'This package should not have any typos in wrod or žíŽala'
result = spell.spell_check(text, 'Description({}):', ignored_words=ignore)
assert not result
|
matwey/rpmlint
|
test/test_spellchecking.py
|
Python
|
gpl-2.0
| 2,542
|
import time
print('')
|
xNUTs/PTVS
|
Python/Tests/TestData/DebuggerProject/DebugReplTest5.py
|
Python
|
apache-2.0
| 26
|
"""
Tests for middleware for comprehensive themes.
"""
from __future__ import absolute_import
from django.contrib.messages.middleware import MessageMiddleware
from django.contrib.sites.models import Site
from django.test import RequestFactory, TestCase, override_settings
from openedx.core.djangoapps.theming.middleware import CurrentSiteThemeMiddleware
from student.tests.factories import UserFactory
from ..views import set_user_preview_site_theme
TEST_URL = '/test'
TEST_THEME_NAME = 'test-theme'
class TestCurrentSiteThemeMiddleware(TestCase):
"""
Test theming middleware.
"""
def setUp(self):
"""
Initialize middleware and related objects
"""
super(TestCurrentSiteThemeMiddleware, self).setUp()
self.site_theme_middleware = CurrentSiteThemeMiddleware()
self.user = UserFactory.create()
def create_mock_get_request(self, qs_theme=None):
"""
Returns a mock GET request.
"""
if qs_theme:
test_url = "{}?site_theme={}".format(TEST_URL, qs_theme)
else:
test_url = TEST_URL
request = RequestFactory().get(test_url)
self.initialize_mock_request(request)
return request
def initialize_mock_request(self, request):
"""
Initialize a test request.
"""
request.user = self.user
request.site, __ = Site.objects.get_or_create(domain='test', name='test')
request.session = {}
MessageMiddleware().process_request(request)
@override_settings(DEFAULT_SITE_THEME=TEST_THEME_NAME)
def test_default_site_theme(self):
"""
Test that request.site_theme returns theme defined by DEFAULT_SITE_THEME setting
when there is no theme associated with the current site.
"""
request = self.create_mock_get_request()
self.assertEqual(self.site_theme_middleware.process_request(request), None)
self.assertIsNotNone(request.site_theme)
self.assertEqual(request.site_theme.theme_dir_name, TEST_THEME_NAME)
@override_settings(DEFAULT_SITE_THEME=None)
def test_default_site_theme_2(self):
"""
Test that request.site_theme returns None when there is no theme associated with
the current site and DEFAULT_SITE_THEME is also None.
"""
request = self.create_mock_get_request()
self.assertEqual(self.site_theme_middleware.process_request(request), None)
self.assertIsNone(request.site_theme)
def test_preview_theme(self):
"""
        Verify that preview themes behave correctly.
"""
# First request a preview theme
post_request = RequestFactory().post('/test')
self.initialize_mock_request(post_request)
set_user_preview_site_theme(post_request, TEST_THEME_NAME)
# Next request a page and verify that the theme is returned
get_request = self.create_mock_get_request()
self.assertEqual(self.site_theme_middleware.process_request(get_request), None)
self.assertEqual(get_request.site_theme.theme_dir_name, TEST_THEME_NAME)
# Request to reset the theme
post_request = RequestFactory().post('/test')
self.initialize_mock_request(post_request)
set_user_preview_site_theme(post_request, None)
# Verify that no theme is returned now
get_request = self.create_mock_get_request()
self.assertEqual(self.site_theme_middleware.process_request(get_request), None)
self.assertIsNone(get_request.site_theme)
# Verify that we can still force the theme with a querystring arg
get_request = self.create_mock_get_request(qs_theme=TEST_THEME_NAME)
self.assertEqual(self.site_theme_middleware.process_request(get_request), None)
self.assertEqual(get_request.site_theme.theme_dir_name, TEST_THEME_NAME)
|
ESOedX/edx-platform
|
openedx/core/djangoapps/theming/tests/test_middleware.py
|
Python
|
agpl-3.0
| 3,897
|
import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from resources.datatables import FactionStatus
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('crackdown_imperial_corporal')
mobileTemplate.setLevel(16)
mobileTemplate.setDifficulty(Difficulty.NORMAL)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(True)
mobileTemplate.setScale(1)
mobileTemplate.setSocialGroup("imperial")
mobileTemplate.setAssistRange(6)
mobileTemplate.setStalker(True)
mobileTemplate.setFaction("imperial")
mobileTemplate.setFactionStatus(FactionStatus.Combatant)
templates = Vector()
templates.add('object/mobile/shared_dressed_npe_imperial_officer.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
    weaponTemplate = WeaponTemplate('object/weapon/ranged/carbine/shared_carbine_e11.iff', WeaponType.CARBINE, 1.0, 15, 'energy')
    weaponTemplates.add(weaponTemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
mobileTemplate.setDefaultAttack('rangedShot')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('imp_corporal_16', mobileTemplate)
return
|
agry/NGECore2
|
scripts/mobiles/generic/faction/imperial/imp_corporal_16.py
|
Python
|
lgpl-3.0
| 1,426
|
# -*- coding: utf-8 -*-
"""
logbook.base
~~~~~~~~~~~~
Base implementation for logbook.
:copyright: (c) 2010 by Armin Ronacher, Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
try:
import thread
except ImportError:
    # for Python 3.1, 3.2
import _thread as thread
import threading
import traceback
from itertools import chain
from weakref import ref as weakref
from datetime import datetime
from logbook.helpers import to_safe_json, parse_iso8601, cached_property, \
PY2, u, string_types, iteritems, integer_types
try:
from logbook._speedups import group_reflected_property, \
ContextStackManager, StackedObject
except ImportError:
from logbook._fallback import group_reflected_property, \
ContextStackManager, StackedObject
_datetime_factory = datetime.utcnow
def set_datetime_format(datetime_format):
"""
Set the format for the datetime objects created, which are then
made available as the :py:attr:`LogRecord.time` attribute of
:py:class:`LogRecord` instances.
:param datetime_format: Indicates how to generate datetime objects. Possible values are:
"utc"
:py:attr:`LogRecord.time` will be a datetime in UTC time zone (but not time zone aware)
"local"
:py:attr:`LogRecord.time` will be a datetime in local time zone (but not time zone aware)
This function defaults to creating datetime objects in UTC time,
using `datetime.utcnow()
<http://docs.python.org/3/library/datetime.html#datetime.datetime.utcnow>`_,
so that logbook logs all times in UTC time by default. This is
recommended in case you have multiple software modules or
instances running in different servers in different time zones, as
it makes it simple and less error prone to correlate logging
across the different servers.
On the other hand if all your software modules are running in the
same time zone and you have to correlate logging with third party
modules already logging in local time, it can be more convenient
to have logbook logging to local time instead of UTC. Local time
logging can be enabled like this::
import logbook
from datetime import datetime
logbook.set_datetime_format("local")
"""
global _datetime_factory
if datetime_format == "utc":
_datetime_factory = datetime.utcnow
elif datetime_format == "local":
_datetime_factory = datetime.now
else:
raise ValueError("Invalid value %r. Valid values are 'utc' and 'local'." % (datetime_format,))
# make sure to sync these up with _speedups.pyx
CRITICAL = 6
ERROR = 5
WARNING = 4
NOTICE = 3
INFO = 2
DEBUG = 1
NOTSET = 0
_level_names = {
CRITICAL: 'CRITICAL',
ERROR: 'ERROR',
WARNING: 'WARNING',
NOTICE: 'NOTICE',
INFO: 'INFO',
DEBUG: 'DEBUG',
NOTSET: 'NOTSET'
}
_reverse_level_names = dict((v, k) for (k, v) in iteritems(_level_names))
_missing = object()
# on Python 3 we can safely assume that frame filenames will be in
# unicode, on Python 2 we have to apply a trick.
if PY2:
def _convert_frame_filename(fn):
if isinstance(fn, unicode):
fn = fn.decode(sys.getfilesystemencoding() or 'utf-8',
'replace')
return fn
else:
def _convert_frame_filename(fn):
return fn
def level_name_property():
"""Returns a property that reflects the level as name from
the internal level attribute.
"""
def _get_level_name(self):
return get_level_name(self.level)
def _set_level_name(self, level):
self.level = lookup_level(level)
return property(_get_level_name, _set_level_name,
doc='The level as unicode string')
def lookup_level(level):
"""Return the integer representation of a logging level."""
if isinstance(level, integer_types):
return level
try:
return _reverse_level_names[level]
except KeyError:
raise LookupError('unknown level name %s' % level)
def get_level_name(level):
"""Return the textual representation of logging level 'level'."""
try:
return _level_names[level]
except KeyError:
raise LookupError('unknown level')
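# A quick sketch of the two lookup helpers above:
#   lookup_level('WARNING')  -> 4   (names map to integers)
#   lookup_level(4)          -> 4   (integers pass through unchanged)
#   get_level_name(4)        -> 'WARNING'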
class ExtraDict(dict):
"""A dictionary which returns ``u''`` on missing keys."""
if sys.version_info[:2] < (2, 5):
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
return u''
else:
def __missing__(self, key):
return u''
def copy(self):
return self.__class__(self)
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__,
dict.__repr__(self)
)
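# e.g. ExtraDict(ip='127.0.0.1')['ip'] -> '127.0.0.1', while a missing key
# such as ['user'] silently yields u'' instead of raising a KeyError.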
class _ExceptionCatcher(object):
"""Helper for exception caught blocks."""
def __init__(self, logger, args, kwargs):
self.logger = logger
self.args = args
self.kwargs = kwargs
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if exc_type is not None:
kwargs = self.kwargs.copy()
kwargs['exc_info'] = (exc_type, exc_value, tb)
self.logger.exception(*self.args, **kwargs)
return True
class ContextObject(StackedObject):
"""An object that can be bound to a context. It is managed by the
:class:`ContextStackManager`"""
    #: subclasses have to instantiate a :class:`ContextStackManager`
#: object on this attribute which is then shared for all the
#: subclasses of it.
stack_manager = None
def push_thread(self):
"""Pushes the context object to the thread stack."""
self.stack_manager.push_thread(self)
def pop_thread(self):
"""Pops the context object from the stack."""
popped = self.stack_manager.pop_thread()
assert popped is self, 'popped unexpected object'
def push_application(self):
"""Pushes the context object to the application stack."""
self.stack_manager.push_application(self)
def pop_application(self):
"""Pops the context object from the stack."""
popped = self.stack_manager.pop_application()
assert popped is self, 'popped unexpected object'
class NestedSetup(StackedObject):
"""A nested setup can be used to configure multiple handlers
and processors at once.
"""
def __init__(self, objects=None):
self.objects = list(objects or ())
def push_application(self):
for obj in self.objects:
obj.push_application()
def pop_application(self):
for obj in reversed(self.objects):
obj.pop_application()
def push_thread(self):
for obj in self.objects:
obj.push_thread()
def pop_thread(self):
for obj in reversed(self.objects):
obj.pop_thread()
class Processor(ContextObject):
"""Can be pushed to a stack to inject additional information into
a log record as necessary::
def inject_ip(record):
record.extra['ip'] = '127.0.0.1'
with Processor(inject_ip):
...
"""
stack_manager = ContextStackManager()
def __init__(self, callback=None):
#: the callback that was passed to the constructor
self.callback = callback
def process(self, record):
"""Called with the log record that should be overridden. The default
implementation calls :attr:`callback` if it is not `None`.
"""
if self.callback is not None:
self.callback(record)
class _InheritedType(object):
__slots__ = ()
def __repr__(self):
return 'Inherit'
def __reduce__(self):
return 'Inherit'
Inherit = _InheritedType()
class Flags(ContextObject):
"""Allows flags to be pushed on a flag stack. Currently two flags
are available:
`errors`
Can be set to override the current error behaviour. This value is
used when logging calls fail. The default behaviour is spitting
out the stacktrace to stderr but this can be overridden:
=================== ==========================================
``'silent'`` fail silently
``'raise'`` raise a catchable exception
``'print'`` print the stacktrace to stderr (default)
=================== ==========================================
`introspection`
Can be used to disable frame introspection. This can give a
speedup on production systems if you are using a JIT compiled
Python interpreter such as pypy. The default is `True`.
    Note that the default setup of some of the handlers (mail for
instance) includes frame dependent information which will
not be available when introspection is disabled.
Example usage::
with Flags(errors='silent'):
...
"""
stack_manager = ContextStackManager()
def __init__(self, **flags):
self.__dict__.update(flags)
@staticmethod
def get_flag(flag, default=None):
"""Looks up the current value of a specific flag."""
for flags in Flags.stack_manager.iter_context_objects():
val = getattr(flags, flag, Inherit)
if val is not Inherit:
return val
return default
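# Reading a flag back, mirroring the docstring example above:
#   with Flags(errors='silent'):
#       Flags.get_flag('errors', default='print')  # -> 'silent'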
def _create_log_record(cls, dict):
"""Extra function for reduce because on Python 3 unbound methods
can no longer be pickled.
"""
return cls.from_dict(dict)
class LogRecord(object):
"""A LogRecord instance represents an event being logged.
LogRecord instances are created every time something is logged. They
contain all the information pertinent to the event being logged. The
main information passed in is in msg and args
"""
_pullable_information = frozenset((
'func_name', 'module', 'filename', 'lineno', 'process_name', 'thread',
'thread_name', 'formatted_exception', 'message', 'exception_name',
'exception_message'
))
_noned_on_close = frozenset(('exc_info', 'frame', 'calling_frame'))
    #: can be overridden by a handler to not close the record. This could
#: lead to memory leaks so it should be used carefully.
keep_open = False
#: the time of the log record creation as :class:`datetime.datetime`
    #: object. This information is unavailable until the record has been
    #: heavy-initialized.
time = None
#: a flag that is `True` if the log record is heavy initialized which
#: is not the case by default.
heavy_initialized = False
#: a flag that is `True` when heavy initialization is no longer possible
late = False
    #: a flag that is `True` when all the information has been pulled from
    #: sources that become unavailable on close.
information_pulled = False
def __init__(self, channel, level, msg, args=None, kwargs=None,
exc_info=None, extra=None, frame=None, dispatcher=None):
#: the name of the logger that created it or any other textual
#: channel description. This is a descriptive name and can be
#: used for filtering.
self.channel = channel
#: The message of the log record as new-style format string.
self.msg = msg
#: the positional arguments for the format string.
self.args = args or ()
#: the keyword arguments for the format string.
self.kwargs = kwargs or {}
#: the level of the log record as integer.
self.level = level
#: optional exception information. If set, this is a tuple in the
#: form ``(exc_type, exc_value, tb)`` as returned by
#: :func:`sys.exc_info`.
#: This parameter can also be ``True``, which would cause the exception info tuple
#: to be fetched for you.
self.exc_info = exc_info
#: optional extra information as dictionary. This is the place
#: where custom log processors can attach custom context sensitive
#: data.
self.extra = ExtraDict(extra or ())
#: If available, optionally the interpreter frame that pulled the
#: heavy init. This usually points to somewhere in the dispatcher.
#: Might not be available for all calls and is removed when the log
#: record is closed.
self.frame = frame
#: the PID of the current process
self.process = None
if dispatcher is not None:
dispatcher = weakref(dispatcher)
self._dispatcher = dispatcher
def heavy_init(self):
"""Does the heavy initialization that could be expensive. This must
not be called from a higher stack level than when the log record was
created and the later the initialization happens, the more off the
date information will be for example.
This is internally used by the record dispatching system and usually
something not to worry about.
"""
if self.heavy_initialized:
return
assert not self.late, 'heavy init is no longer possible'
self.heavy_initialized = True
self.process = os.getpid()
self.time = _datetime_factory()
if self.frame is None and Flags.get_flag('introspection', True):
self.frame = sys._getframe(1)
if self.exc_info is True:
self.exc_info = sys.exc_info()
def pull_information(self):
"""A helper function that pulls all frame-related information into
the object so that this information is available after the log
record was closed.
"""
if self.information_pulled:
return
# due to how cached_property is implemented, the attribute access
# has the side effect of caching the attribute on the instance of
# the class.
for key in self._pullable_information:
getattr(self, key)
self.information_pulled = True
def close(self):
"""Closes the log record. This will set the frame and calling
frame to `None` and frame-related information will no longer be
available unless it was pulled in first (:meth:`pull_information`).
This makes a log record safe for pickling and will clean up
memory that might be still referenced by the frames.
"""
for key in self._noned_on_close:
setattr(self, key, None)
self.late = True
def __reduce_ex__(self, protocol):
return _create_log_record, (type(self), self.to_dict())
def to_dict(self, json_safe=False):
"""Exports the log record into a dictionary without the information
that cannot be safely serialized like interpreter frames and
tracebacks.
"""
self.pull_information()
rv = {}
for key, value in iteritems(self.__dict__):
if key[:1] != '_' and key not in self._noned_on_close:
rv[key] = value
# the extra dict is exported as regular dict
rv['extra'] = dict(rv['extra'])
if json_safe:
return to_safe_json(rv)
return rv
@classmethod
def from_dict(cls, d):
"""Creates a log record from an exported dictionary. This also
supports JSON exported dictionaries.
"""
rv = object.__new__(cls)
rv.update_from_dict(d)
return rv
def update_from_dict(self, d):
"""Like the :meth:`from_dict` classmethod, but will update the
instance in place. Helpful for constructors.
"""
self.__dict__.update(d)
for key in self._noned_on_close:
setattr(self, key, None)
self._information_pulled = True
self._channel = None
if isinstance(self.time, string_types):
self.time = parse_iso8601(self.time)
return self
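    # A minimal round-trip sketch of the dict export above (values are
    # illustrative):
    #   rec = LogRecord('app', INFO, 'hello {}', args=('world',))
    #   rec.heavy_init()
    #   data = rec.to_dict(json_safe=True)  # plain dict, safe to ship as JSON
    #   clone = LogRecord.from_dict(data)   # rebuilt e.g. in another process
    #   clone.message                       # -> 'hello world'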
@cached_property
def message(self):
"""The formatted message."""
if not (self.args or self.kwargs):
return self.msg
try:
try:
return self.msg.format(*self.args, **self.kwargs)
except UnicodeDecodeError:
                # Assume a unicode message but mixed-up args
msg = self.msg.encode('utf-8', 'replace')
return msg.format(*self.args, **self.kwargs)
except (UnicodeEncodeError, AttributeError):
# we catch AttributeError since if msg is bytes, it won't have the 'format' method
if sys.exc_info()[0] is AttributeError and (PY2 or not isinstance(self.msg, bytes)):
# this is not the case we thought it is...
raise
# Assume encoded message with unicode args.
# The assumption of utf8 as input encoding is just a guess,
# but this codepath is unlikely (if the message is a constant
# string in the caller's source file)
msg = self.msg.decode('utf-8', 'replace')
return msg.format(*self.args, **self.kwargs)
except Exception:
# this obviously will not give a proper error message if the
# information was not pulled and the log record no longer has
# access to the frame. But there is not much we can do about
# that.
e = sys.exc_info()[1]
errormsg = ('Could not format message with provided '
'arguments: {err}\n msg={msg!r}\n '
'args={args!r} \n kwargs={kwargs!r}.\n'
'Happened in file {file}, line {lineno}').format(
err=e, msg=self.msg, args=self.args,
kwargs=self.kwargs, file=self.filename,
lineno=self.lineno
)
if PY2:
errormsg = errormsg.encode('utf-8')
raise TypeError(errormsg)
level_name = level_name_property()
@cached_property
def calling_frame(self):
"""The frame in which the record has been created. This only
        exists for as long as the log record is not closed.
"""
frm = self.frame
globs = globals()
while frm is not None and frm.f_globals is globs:
frm = frm.f_back
return frm
@cached_property
def func_name(self):
"""The name of the function that triggered the log call if
available. Requires a frame or that :meth:`pull_information`
was called before.
"""
cf = self.calling_frame
if cf is not None:
return cf.f_code.co_name
@cached_property
def module(self):
"""The name of the module that triggered the log call if
available. Requires a frame or that :meth:`pull_information`
was called before.
"""
cf = self.calling_frame
if cf is not None:
return cf.f_globals.get('__name__')
@cached_property
def filename(self):
"""The filename of the module in which the record has been created.
Requires a frame or that :meth:`pull_information` was called before.
"""
cf = self.calling_frame
if cf is not None:
fn = cf.f_code.co_filename
if fn[:1] == '<' and fn[-1:] == '>':
return fn
return _convert_frame_filename(os.path.abspath(fn))
@cached_property
def lineno(self):
"""The line number of the file in which the record has been created.
Requires a frame or that :meth:`pull_information` was called before.
"""
cf = self.calling_frame
if cf is not None:
return cf.f_lineno
@cached_property
def thread(self):
"""The ident of the thread. This is evaluated late and means that
if the log record is passed to another thread, :meth:`pull_information`
was called in the old thread.
"""
return thread.get_ident()
@cached_property
def thread_name(self):
"""The name of the thread. This is evaluated late and means that
if the log record is passed to another thread, :meth:`pull_information`
was called in the old thread.
"""
return threading.currentThread().getName()
@cached_property
def process_name(self):
"""The name of the process in which the record has been created."""
# Errors may occur if multiprocessing has not finished loading
# yet - e.g. if a custom import hook causes third-party code
# to run when multiprocessing calls import. See issue 8200
# for an example
mp = sys.modules.get('multiprocessing')
if mp is not None: # pragma: no cover
try:
return mp.current_process().name
except Exception:
pass
@cached_property
def formatted_exception(self):
"""The formatted exception which caused this record to be created
in case there was any.
"""
if self.exc_info is not None:
rv = ''.join(traceback.format_exception(*self.exc_info))
if PY2:
rv = rv.decode('utf-8', 'replace')
return rv.rstrip()
@cached_property
def exception_name(self):
"""The name of the exception."""
if self.exc_info is not None:
cls = self.exc_info[0]
return u(cls.__module__ + '.' + cls.__name__)
@property
def exception_shortname(self):
"""An abbreviated exception name (no import path)"""
return self.exception_name.rsplit('.')[-1]
@cached_property
def exception_message(self):
"""The message of the exception."""
if self.exc_info is not None:
val = self.exc_info[1]
try:
return u(str(val))
except UnicodeError:
return str(val).decode('utf-8', 'replace')
@property
def dispatcher(self):
"""The dispatcher that created the log record. Might not exist because
a log record does not have to be created from a logger or other
dispatcher to be handled by logbook. If this is set, it will point to
an object that implements the :class:`~logbook.base.RecordDispatcher`
interface.
"""
if self._dispatcher is not None:
return self._dispatcher()
class LoggerMixin(object):
"""This mixin class defines and implements the "usual" logger
interface (i.e. the descriptive logging functions).
Classes using this mixin have to implement a :meth:`!handle` method which
takes a :class:`~logbook.LogRecord` and passes it along.
"""
#: The name of the minimium logging level required for records to be
#: created.
level_name = level_name_property()
def debug(self, *args, **kwargs):
"""Logs a :class:`~logbook.LogRecord` with the level set
to :data:`~logbook.DEBUG`.
"""
if not self.disabled and DEBUG >= self.level:
self._log(DEBUG, args, kwargs)
def info(self, *args, **kwargs):
"""Logs a :class:`~logbook.LogRecord` with the level set
to :data:`~logbook.INFO`.
"""
if not self.disabled and INFO >= self.level:
self._log(INFO, args, kwargs)
def warn(self, *args, **kwargs):
"""Logs a :class:`~logbook.LogRecord` with the level set
to :data:`~logbook.WARNING`. This function has an alias
named :meth:`warning`.
"""
if not self.disabled and WARNING >= self.level:
self._log(WARNING, args, kwargs)
def warning(self, *args, **kwargs):
"""Alias for :meth:`warn`."""
return self.warn(*args, **kwargs)
def notice(self, *args, **kwargs):
"""Logs a :class:`~logbook.LogRecord` with the level set
to :data:`~logbook.NOTICE`.
"""
if not self.disabled and NOTICE >= self.level:
self._log(NOTICE, args, kwargs)
def error(self, *args, **kwargs):
"""Logs a :class:`~logbook.LogRecord` with the level set
to :data:`~logbook.ERROR`.
"""
if not self.disabled and ERROR >= self.level:
self._log(ERROR, args, kwargs)
def exception(self, *args, **kwargs):
"""Works exactly like :meth:`error` just that the message
is optional and exception information is recorded.
"""
if self.disabled or ERROR < self.level:
return
if not args:
args = ('Uncaught exception occurred',)
        if 'exc_info' not in kwargs:
            exc_info = sys.exc_info()
            assert exc_info[0] is not None, 'no exception occurred'
            kwargs['exc_info'] = exc_info
return self.error(*args, **kwargs)
def critical(self, *args, **kwargs):
"""Logs a :class:`~logbook.LogRecord` with the level set
to :data:`~logbook.CRITICAL`.
"""
if not self.disabled and CRITICAL >= self.level:
self._log(CRITICAL, args, kwargs)
def log(self, level, *args, **kwargs):
"""Logs a :class:`~logbook.LogRecord` with the level set
to the `level` parameter. Because custom levels are not
supported by logbook, this method is mainly used to avoid
the use of reflection (e.g.: :func:`getattr`) for programmatic
logging.
"""
level = lookup_level(level)
if level >= self.level:
self._log(level, args, kwargs)
def catch_exceptions(self, *args, **kwargs):
"""A context manager that catches exceptions and calls
:meth:`exception` for exceptions caught that way. Example:
.. code-block:: python
with logger.catch_exceptions():
execute_code_that_might_fail()
"""
if not args:
args = ('Uncaught exception occurred',)
return _ExceptionCatcher(self, args, kwargs)
def _log(self, level, args, kwargs):
exc_info = kwargs.pop('exc_info', None)
extra = kwargs.pop('extra', None)
self.make_record_and_handle(level, args[0], args[1:], kwargs,
exc_info, extra)
class RecordDispatcher(object):
"""A record dispatcher is the internal base class that implements
the logic used by the :class:`~logbook.Logger`.
"""
#: If this is set to `True` the dispatcher information will be suppressed
#: for log records emitted from this logger.
suppress_dispatcher = False
def __init__(self, name=None, level=NOTSET):
#: the name of the record dispatcher
self.name = name
#: list of handlers specific for this record dispatcher
self.handlers = []
#: optionally the name of the group this logger belongs to
self.group = None
#: the level of the record dispatcher as integer
self.level = level
disabled = group_reflected_property('disabled', False)
level = group_reflected_property('level', NOTSET, fallback=NOTSET)
def handle(self, record):
"""Call the handlers for the specified record. This is
invoked automatically when a record should be handled.
The default implementation checks if the dispatcher is disabled
and if the record level is greater than the level of the
record dispatcher. In that case it will call the handlers
(:meth:`call_handlers`).
"""
if not self.disabled and record.level >= self.level:
self.call_handlers(record)
def make_record_and_handle(self, level, msg, args, kwargs, exc_info,
extra):
"""Creates a record from some given arguments and heads it
over to the handling system.
"""
# The channel information can be useful for some use cases which is
# why we keep it on there. The log record however internally will
# only store a weak reference to the channel, so it might disappear
# from one instruction to the other. It will also disappear when
# a log record is transmitted to another process etc.
channel = None
if not self.suppress_dispatcher:
channel = self
record = LogRecord(self.name, level, msg, args, kwargs, exc_info,
extra, None, channel)
        # after handling, the log record is closed, which removes some
        # references that would otherwise require a GC run on CPython. This
        # includes the current stack frame and exception information. However,
        # there are some use cases for keeping the record open a little longer.
# For example the test handler keeps log records open until the
# test handler is closed to allow assertions based on stack frames
# and exception information.
try:
self.handle(record)
finally:
record.late = True
if not record.keep_open:
record.close()
def call_handlers(self, record):
"""Pass a record to all relevant handlers in the following
order:
- per-dispatcher handlers are handled first
- afterwards all the current context handlers in the
order they were pushed
Before the first handler is invoked, the record is processed
(:meth:`process_record`).
"""
        # for performance reasons records are only heavy initialized
        # and processed if at least one of the handlers is actually
        # interested in the record and that handler is not a black hole.
record_initialized = False
# Both logger attached handlers as well as context specific
# handlers are handled one after another. The latter also
# include global handlers.
for handler in chain(self.handlers,
Handler.stack_manager.iter_context_objects()):
# skip records that this handler is not interested in based
# on the record and handler level or in case this method was
# overridden on some custom logic.
if not handler.should_handle(record):
continue
# if this is a blackhole handler, don't even try to
# do further processing, stop right away. Technically
# speaking this is not 100% correct because if the handler
# is bubbling we shouldn't apply this logic, but then we
# won't enter this branch anyways. The result is that a
# bubbling blackhole handler will never have this shortcut
# applied and do the heavy init at one point. This is fine
# however because a bubbling blackhole handler is not very
# useful in general.
if handler.blackhole:
break
# we are about to handle the record. If it was not yet
# processed by context-specific record processors we
            # have to do that now and remember that we processed
# the record already.
if not record_initialized:
record.heavy_init()
self.process_record(record)
record_initialized = True
# a filter can still veto the handling of the record. This
# however is already operating on an initialized and processed
# record. The impact is that filters are slower than the
# handler's should_handle function in case there is no default
# handler that would handle the record (delayed init).
if handler.filter is not None \
and not handler.filter(record, handler):
continue
# handle the record. If the record was handled and
# the record is not bubbling we can abort now.
if handler.handle(record) and not handler.bubble:
break
def process_record(self, record):
"""Processes the record with all context specific processors. This
        can be overridden to also inject additional information as necessary
that can be provided by this record dispatcher.
"""
if self.group is not None:
self.group.process_record(record)
for processor in Processor.stack_manager.iter_context_objects():
processor.process(record)
class Logger(RecordDispatcher, LoggerMixin):
"""Instances of the Logger class represent a single logging channel.
A "logging channel" indicates an area of an application. Exactly
how an "area" is defined is up to the application developer.
Names used by logbook should be descriptive and are intended for user
display, not for filtering. Filtering should happen based on the
context information instead.
A logger internally is a subclass of a
:class:`~logbook.base.RecordDispatcher` that implements the actual
logic. If you want to implement a custom logger class, have a look
at the interface of that class as well.
"""
class LoggerGroup(object):
"""A LoggerGroup represents a group of loggers. It cannot emit log
messages on its own but it can be used to set the disabled flag and
log level of all loggers in the group.
Furthermore the :meth:`process_record` method of the group is called
by any logger in the group which by default calls into the
:attr:`processor` callback function.
"""
def __init__(self, loggers=None, level=NOTSET, processor=None):
#: a list of all loggers on the logger group. Use the
#: :meth:`add_logger` and :meth:`remove_logger` methods to add
#: or remove loggers from this list.
self.loggers = []
if loggers is not None:
for logger in loggers:
self.add_logger(logger)
#: the level of the group. This is reflected to the loggers
#: in the group unless they overrode the setting.
self.level = lookup_level(level)
#: the disabled flag for all loggers in the group, unless
#: the loggers overrode the setting.
self.disabled = False
#: an optional callback function that is executed to process
#: the log records of all loggers in the group.
self.processor = processor
def add_logger(self, logger):
"""Adds a logger to this group."""
assert logger.group is None, 'Logger already belongs to a group'
logger.group = self
self.loggers.append(logger)
def remove_logger(self, logger):
"""Removes a logger from the group."""
self.loggers.remove(logger)
logger.group = None
def process_record(self, record):
"""Like :meth:`Logger.process_record` but for all loggers in
the group. By default this calls into the :attr:`processor`
        function if it's not `None`.
"""
if self.processor is not None:
self.processor(record)
_default_dispatcher = RecordDispatcher()
def dispatch_record(record):
"""Passes a record on to the handlers on the stack. This is useful when
log records are created programmatically and already have all the
information attached and should be dispatched independent of a logger.
"""
_default_dispatcher.call_handlers(record)
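# Hedged sketch: dispatching a hand-built record independent of any logger
# (channel and message are illustrative):
#   record = LogRecord('my.channel', WARNING, 'disk almost full')
#   record.heavy_init()
#   dispatch_record(record)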
# at that point we are safe to import the handler
from logbook.handlers import Handler
|
Rafiot/logbook
|
logbook/base.py
|
Python
|
bsd-3-clause
| 35,527
|
from __future__ import unicode_literals
import datetime
import warnings
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.utils import (
display_for_field, display_for_value, label_for_field, lookup_field,
)
from django.contrib.admin.views.main import (
ALL_VAR, ORDER_VAR, PAGE_VAR, SEARCH_VAR,
)
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.template import Library
from django.template.loader import get_template
from django.templatetags.static import static
from django.urls import NoReverseMatch
from django.utils import formats
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
from django.utils.translation import ugettext as _
register = Library()
DOT = '.'
@register.simple_tag
def paginator_number(cl, i):
"""
Generates an individual page index link in a paginated list.
"""
if i == DOT:
return '... '
elif i == cl.page_num:
return format_html('<span class="this-page">{}</span> ', i + 1)
else:
return format_html('<a href="{}"{}>{}</a> ',
cl.get_query_string({PAGE_VAR: i}),
mark_safe(' class="end"' if i == cl.paginator.num_pages - 1 else ''),
i + 1)
@register.inclusion_tag('admin/pagination.html')
def pagination(cl):
"""
Generates the series of links to the pages in a paginated list.
"""
paginator, page_num = cl.paginator, cl.page_num
pagination_required = (not cl.show_all or not cl.can_show_all) and cl.multi_page
if not pagination_required:
page_range = []
else:
ON_EACH_SIDE = 3
ON_ENDS = 2
# If there are 10 or fewer pages, display links to every page.
        # Otherwise, do some fancy pagination.
if paginator.num_pages <= 10:
page_range = range(paginator.num_pages)
else:
# Insert "smart" pagination links, so that there are always ON_ENDS
# links at either end of the list of pages, and there are always
# ON_EACH_SIDE links at either end of the "current page" link.
page_range = []
if page_num > (ON_EACH_SIDE + ON_ENDS):
page_range.extend(range(0, ON_ENDS))
page_range.append(DOT)
page_range.extend(range(page_num - ON_EACH_SIDE, page_num + 1))
else:
page_range.extend(range(0, page_num + 1))
if page_num < (paginator.num_pages - ON_EACH_SIDE - ON_ENDS - 1):
page_range.extend(range(page_num + 1, page_num + ON_EACH_SIDE + 1))
page_range.append(DOT)
page_range.extend(range(paginator.num_pages - ON_ENDS, paginator.num_pages))
else:
page_range.extend(range(page_num + 1, paginator.num_pages))
need_show_all_link = cl.can_show_all and not cl.show_all and cl.multi_page
return {
'cl': cl,
'pagination_required': pagination_required,
'show_all_url': need_show_all_link and cl.get_query_string({ALL_VAR: ''}),
'page_range': page_range,
'ALL_VAR': ALL_VAR,
'1': 1,
}
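# Illustrative sketch of the "smart" page_range above (0-based page indices,
# DOT == '.'), e.g. for 50 pages with page_num=25:
#   [0, 1, '.', 22, 23, 24, 25, 26, 27, 28, '.', 48, 49]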
def result_headers(cl):
"""
Generates the list column headers.
"""
ordering_field_columns = cl.get_ordering_field_columns()
for i, field_name in enumerate(cl.list_display):
text, attr = label_for_field(
field_name, cl.model,
model_admin=cl.model_admin,
return_attr=True
)
if attr:
field_name = _coerce_field_name(field_name, i)
# Potentially not sortable
# if the field is the action checkbox: no sorting and special class
if field_name == 'action_checkbox':
yield {
"text": text,
"class_attrib": mark_safe(' class="action-checkbox-column"'),
"sortable": False,
}
continue
admin_order_field = getattr(attr, "admin_order_field", None)
if not admin_order_field:
# Not sortable
yield {
"text": text,
"class_attrib": format_html(' class="column-{}"', field_name),
"sortable": False,
}
continue
# OK, it is sortable if we got this far
th_classes = ['sortable', 'column-{}'.format(field_name)]
order_type = ''
new_order_type = 'asc'
sort_priority = 0
sorted = False
# Is it currently being sorted on?
if i in ordering_field_columns:
sorted = True
order_type = ordering_field_columns.get(i).lower()
sort_priority = list(ordering_field_columns).index(i) + 1
th_classes.append('sorted %sending' % order_type)
new_order_type = {'asc': 'desc', 'desc': 'asc'}[order_type]
# build new ordering param
o_list_primary = [] # URL for making this field the primary sort
o_list_remove = [] # URL for removing this field from sort
o_list_toggle = [] # URL for toggling order type for this field
def make_qs_param(t, n):
return ('-' if t == 'desc' else '') + str(n)
for j, ot in ordering_field_columns.items():
if j == i: # Same column
param = make_qs_param(new_order_type, j)
# We want clicking on this header to bring the ordering to the
# front
o_list_primary.insert(0, param)
o_list_toggle.append(param)
# o_list_remove - omit
else:
param = make_qs_param(ot, j)
o_list_primary.append(param)
o_list_toggle.append(param)
o_list_remove.append(param)
if i not in ordering_field_columns:
o_list_primary.insert(0, make_qs_param(new_order_type, i))
yield {
"text": text,
"sortable": True,
"sorted": sorted,
"ascending": order_type == "asc",
"sort_priority": sort_priority,
"url_primary": cl.get_query_string({ORDER_VAR: '.'.join(o_list_primary)}),
"url_remove": cl.get_query_string({ORDER_VAR: '.'.join(o_list_remove)}),
"url_toggle": cl.get_query_string({ORDER_VAR: '.'.join(o_list_toggle)}),
"class_attrib": format_html(' class="{}"', ' '.join(th_classes)) if th_classes else '',
}
def _boolean_icon(field_val):
icon_url = static('admin/img/icon-%s.svg' %
{True: 'yes', False: 'no', None: 'unknown'}[field_val])
return format_html('<img src="{}" alt="{}" />', icon_url, field_val)
def _coerce_field_name(field_name, field_index):
"""
Coerce a field_name (which may be a callable) to a string.
"""
if callable(field_name):
if field_name.__name__ == '<lambda>':
return 'lambda' + str(field_index)
else:
return field_name.__name__
return field_name
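# e.g. a plain name passes through, while an anonymous callable gets a
# positional placeholder:
#   _coerce_field_name('title', 0)             -> 'title'
#   _coerce_field_name(lambda obj: obj.pk, 2)  -> 'lambda2'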
def items_for_result(cl, result, form):
"""
Generates the actual list of data.
"""
def link_in_col(is_first, field_name, cl):
if cl.list_display_links is None:
return False
if is_first and not cl.list_display_links:
return True
return field_name in cl.list_display_links
first = True
pk = cl.lookup_opts.pk.attname
for field_index, field_name in enumerate(cl.list_display):
empty_value_display = cl.model_admin.get_empty_value_display()
row_classes = ['field-%s' % _coerce_field_name(field_name, field_index)]
try:
f, attr, value = lookup_field(field_name, result, cl.model_admin)
except ObjectDoesNotExist:
result_repr = empty_value_display
else:
empty_value_display = getattr(attr, 'empty_value_display', empty_value_display)
if f is None or f.auto_created:
if field_name == 'action_checkbox':
row_classes = ['action-checkbox']
allow_tags = getattr(attr, 'allow_tags', False)
boolean = getattr(attr, 'boolean', False)
result_repr = display_for_value(value, empty_value_display, boolean)
if allow_tags:
warnings.warn(
"Deprecated allow_tags attribute used on field {}. "
"Use django.utils.html.format_html(), format_html_join(), "
"or django.utils.safestring.mark_safe() instead.".format(field_name),
RemovedInDjango20Warning
)
result_repr = mark_safe(result_repr)
if isinstance(value, (datetime.date, datetime.time)):
row_classes.append('nowrap')
else:
if isinstance(f.remote_field, models.ManyToOneRel):
field_val = getattr(result, f.name)
if field_val is None:
result_repr = empty_value_display
else:
result_repr = field_val
else:
result_repr = display_for_field(value, f, empty_value_display)
if isinstance(f, (models.DateField, models.TimeField, models.ForeignKey)):
row_classes.append('nowrap')
if force_text(result_repr) == '':
result_repr = mark_safe(' ')
row_class = mark_safe(' class="%s"' % ' '.join(row_classes))
# If list_display_links not defined, add the link tag to the first field
if link_in_col(first, field_name, cl):
table_tag = 'th' if first else 'td'
first = False
# Display link to the result's change_view if the url exists, else
# display just the result's representation.
try:
url = cl.url_for_result(result)
except NoReverseMatch:
link_or_text = result_repr
else:
url = add_preserved_filters({'preserved_filters': cl.preserved_filters, 'opts': cl.opts}, url)
                # Convert the pk to something that can be used in JavaScript.
                # Problem cases are long ints (23L) and non-ASCII strings.
if cl.to_field:
attr = str(cl.to_field)
else:
attr = pk
value = result.serializable_value(attr)
link_or_text = format_html(
'<a href="{}"{}>{}</a>',
url,
format_html(
' data-popup-opener="{}"', value
) if cl.is_popup else '',
result_repr)
yield format_html('<{}{}>{}</{}>',
table_tag,
row_class,
link_or_text,
table_tag)
else:
# By default the fields come from ModelAdmin.list_editable, but if we pull
# the fields out of the form instead of list_editable custom admins
# can provide fields on a per request basis
if (form and field_name in form.fields and not (
field_name == cl.model._meta.pk.name and
form[cl.model._meta.pk.name].is_hidden)):
bf = form[field_name]
result_repr = mark_safe(force_text(bf.errors) + force_text(bf))
yield format_html('<td{}>{}</td>', row_class, result_repr)
if form and not form[cl.model._meta.pk.name].is_hidden:
yield format_html('<td>{}</td>', force_text(form[cl.model._meta.pk.name]))
class ResultList(list):
# Wrapper class used to return items in a list_editable
# changelist, annotated with the form object for error
# reporting purposes. Needed to maintain backwards
# compatibility with existing admin templates.
def __init__(self, form, *items):
self.form = form
super(ResultList, self).__init__(*items)
def results(cl):
if cl.formset:
for res, form in zip(cl.result_list, cl.formset.forms):
yield ResultList(form, items_for_result(cl, res, form))
else:
for res in cl.result_list:
yield ResultList(None, items_for_result(cl, res, None))
def result_hidden_fields(cl):
if cl.formset:
for res, form in zip(cl.result_list, cl.formset.forms):
if form[cl.model._meta.pk.name].is_hidden:
yield mark_safe(force_text(form[cl.model._meta.pk.name]))
@register.inclusion_tag("admin/change_list_results.html")
def result_list(cl):
"""
Displays the headers and data list together
"""
headers = list(result_headers(cl))
num_sorted_fields = 0
for h in headers:
if h['sortable'] and h['sorted']:
num_sorted_fields += 1
return {'cl': cl,
'result_hidden_fields': list(result_hidden_fields(cl)),
'result_headers': headers,
'num_sorted_fields': num_sorted_fields,
'results': list(results(cl))}
@register.inclusion_tag('admin/date_hierarchy.html')
def date_hierarchy(cl):
"""
Displays the date hierarchy for date drill-down functionality.
"""
if cl.date_hierarchy:
field_name = cl.date_hierarchy
field = cl.opts.get_field(field_name)
dates_or_datetimes = 'datetimes' if isinstance(field, models.DateTimeField) else 'dates'
year_field = '%s__year' % field_name
month_field = '%s__month' % field_name
day_field = '%s__day' % field_name
field_generic = '%s__' % field_name
year_lookup = cl.params.get(year_field)
month_lookup = cl.params.get(month_field)
day_lookup = cl.params.get(day_field)
def link(filters):
return cl.get_query_string(filters, [field_generic])
if not (year_lookup or month_lookup or day_lookup):
# select appropriate start level
date_range = cl.queryset.aggregate(first=models.Min(field_name),
last=models.Max(field_name))
if date_range['first'] and date_range['last']:
if date_range['first'].year == date_range['last'].year:
year_lookup = date_range['first'].year
if date_range['first'].month == date_range['last'].month:
month_lookup = date_range['first'].month
if year_lookup and month_lookup and day_lookup:
day = datetime.date(int(year_lookup), int(month_lookup), int(day_lookup))
return {
'show': True,
'back': {
'link': link({year_field: year_lookup, month_field: month_lookup}),
'title': capfirst(formats.date_format(day, 'YEAR_MONTH_FORMAT'))
},
'choices': [{'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))}]
}
elif year_lookup and month_lookup:
days = cl.queryset.filter(**{year_field: year_lookup, month_field: month_lookup})
days = getattr(days, dates_or_datetimes)(field_name, 'day')
return {
'show': True,
'back': {
'link': link({year_field: year_lookup}),
'title': str(year_lookup)
},
'choices': [{
'link': link({year_field: year_lookup, month_field: month_lookup, day_field: day.day}),
'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))
} for day in days]
}
elif year_lookup:
months = cl.queryset.filter(**{year_field: year_lookup})
months = getattr(months, dates_or_datetimes)(field_name, 'month')
return {
'show': True,
'back': {
'link': link({}),
'title': _('All dates')
},
'choices': [{
'link': link({year_field: year_lookup, month_field: month.month}),
'title': capfirst(formats.date_format(month, 'YEAR_MONTH_FORMAT'))
} for month in months]
}
else:
years = getattr(cl.queryset, dates_or_datetimes)(field_name, 'year')
return {
'show': True,
'choices': [{
'link': link({year_field: str(year.year)}),
'title': str(year.year),
} for year in years]
}
@register.inclusion_tag('admin/search_form.html')
def search_form(cl):
"""
Displays a search form for searching the list.
"""
return {
'cl': cl,
'show_result_count': cl.result_count != cl.full_result_count,
'search_var': SEARCH_VAR
}
@register.simple_tag
def admin_list_filter(cl, spec):
tpl = get_template(spec.template)
return tpl.render({
'title': spec.title,
'choices': list(spec.choices(cl)),
'spec': spec,
})
@register.inclusion_tag('admin/actions.html', takes_context=True)
def admin_actions(context):
"""
Track the number of times the action field has been rendered on the page,
so we know which value to use.
"""
context['action_index'] = context.get('action_index', -1) + 1
return context
|
KrzysztofStachanczyk/Sensors-WWW-website
|
www/env/lib/python2.7/site-packages/django/contrib/admin/templatetags/admin_list.py
|
Python
|
gpl-3.0
| 17,797
|
# Copyright (C) 2014 Johnny Vestergaard <jkv@unixcluster.dk>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import logging
from . import messages
import copy
from lxml import etree
from .register import KamstrupRegister
logger = logging.getLogger(__name__)
class CommandResponder(object):
def __init__(self, template):
        # key: register name (int), value: KamstrupRegister instance
self.registers = {}
dom = etree.parse(template)
registers = dom.xpath("//kamstrup_meter/registers/*")
self.communication_address = int(
dom.xpath("//kamstrup_meter/config/communication_address/text()")[0]
)
for register in registers:
name = int(register.attrib["name"])
length = int(register.attrib["length"])
units = int(register.attrib["units"])
unknown = int(register.attrib["unknown"])
databuskey = register.xpath("./value/text()")[0]
kamstrup_register = KamstrupRegister(
name, units, length, unknown, databuskey
)
assert name not in self.registers
self.registers[name] = kamstrup_register
def respond(self, request):
if request.communication_address != self.communication_address:
logger.warning(
"Kamstrup request received with wrong communication address, got {} but expected {}.".format(
request.communication_address, self.communication_address
)
)
return None
elif isinstance(request, messages.KamstrupRequestGetRegisters):
response = messages.KamstrupResponseRegister(self.communication_address)
for register in request.registers:
if register in self.registers:
response.add_register(copy.deepcopy(self.registers[register]))
return response
else:
            assert False, 'unsupported Kamstrup request type'
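# Hedged usage sketch (the template path and request object are hypothetical;
# requests are normally parsed from wire data elsewhere in conpot):
#   responder = CommandResponder('kamstrup_template.xml')
#   response = responder.respond(parsed_request)  # None on address mismatch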
|
mushorg/conpot
|
conpot/protocols/kamstrup_meter/command_responder.py
|
Python
|
gpl-2.0
| 2,610
|
#
# This file defines global variables that will always be
# available in a view context without having to repeatedly
# include it. For this to work, this file is included in
# the settings file, in the TEMPLATE_CONTEXT_PROCESSORS
# tuple.
#
from django.conf import settings
from evennia.utils.utils import get_evennia_version
# Determine the site name and server version
try:
GAME_NAME = settings.SERVERNAME.strip()
except AttributeError:
GAME_NAME = "Evennia"
SERVER_VERSION = get_evennia_version()
# Set up lists of the most relevant apps so
# the admin site becomes more readable.
PLAYER_RELATED = ['Players']
GAME_ENTITIES = ['Objects', 'Scripts', 'Comms', 'Help']
GAME_SETUP = ['Permissions', 'Config']
CONNECTIONS = ['Irc', 'Imc2']
WEBSITE = ['Flatpages', 'News', 'Sites']
# The main context processor function
WEBCLIENT_ENABLED = settings.WEBCLIENT_ENABLED
WEBSOCKET_CLIENT_ENABLED = settings.WEBSOCKET_CLIENT_ENABLED
WEBSOCKET_PORT = settings.WEBSOCKET_CLIENT_PORT
WEBSOCKET_URL = settings.WEBSOCKET_CLIENT_URL
def general_context(request):
"""
Returns common Evennia-related context stuff, which
is automatically added to context of all views.
"""
return {
'game_name': GAME_NAME,
'game_slogan': SERVER_VERSION,
'evennia_userapps': PLAYER_RELATED,
'evennia_entityapps': GAME_ENTITIES,
'evennia_setupapps': GAME_SETUP,
'evennia_connectapps': CONNECTIONS,
        'evennia_websiteapps': WEBSITE,
        "webclient_enabled": WEBCLIENT_ENABLED,
        "websocket_enabled": WEBSOCKET_CLIENT_ENABLED,
        "websocket_port": WEBSOCKET_PORT,
        "websocket_url": WEBSOCKET_URL
}
|
shollen/evennia
|
evennia/web/utils/general_context.py
|
Python
|
bsd-3-clause
| 1,680
|
#!/usr/bin/env python
import os
import sys
def whereis_python():
import json
print('sys.path =', json.dumps(sys.path, indent=4))
print(sys.executable)
if __name__ == "__main__":
#whereis_python() # debug
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "maio.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
|
jonmsawyer/maio
|
manage.py
|
Python
|
mit
| 954
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-10 12:18
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('study_material', '0002_auto_20170510_1601'),
]
operations = [
migrations.AddField(
model_name='studymaterial',
name='user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
|
dhavalmanjaria/dma-student-information-system
|
study_material/migrations/0003_studymaterial_user.py
|
Python
|
gpl-2.0
| 665
|
'''
@author: Victor Barrera
Description: This script uses the siq module to obtain the methylation value for each CpG island region.
'''
import sys
import pysam
import re
from siq import *
# Obtain the CpG island sequence file,
# the BAM file and the minimum read-count filter
cpgi_sec_path=sys.argv[1]
sam_path=sys.argv[2]
filter=int(sys.argv[3])
cpgi_sec_file=open(cpgi_sec_path,'r')
samfile = pysam.Samfile(sam_path, "rb" )
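# Illustrative invocation (hypothetical file names; the positional arguments
# are the ones parsed above):
#   python CpGMethStatus.py cpg_islands.txt aligned_reads.bam 5
# Each output line holds: id, number of CGs, chromosome, start, end, followed
# by the four values returned by Meth_region.methcoef_sd().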
for cpgi in cpgi_sec_file:
    # For each CpG island, the structural data and the relative
    # positions of the CG dinucleotides are extracted.
    id = cpgi.split()[0]
    chr = str(cpgi.split()[1])
    startPosition = int(cpgi.split()[2])
    endPosition = int(cpgi.split()[3])
    cpgisec = cpgi.split()[4]
    cpgisec = cpgisec.upper()
    starts = [match.start() for match in re.finditer('CG', cpgisec)]
CG_coord=[]
nCG=0
for i in starts:
        # The absolute positions of the CG dinucleotide are obtained.
c_pos=str(chr)+"\t"+str(int(i)+startPosition-1)
g_pos=str(chr)+"\t"+str(int(i)+startPosition)
CG_coord.append(c_pos)
CG_coord.append(g_pos)
nCG+=1
# An object of the class Meth_region is generated.
cgi=Meth_region(CG_coord,samfile)
print str(id)+"\t"+str(nCG)+"\t"+str(chr)+"\t"+str(startPosition)+"\t"+str(endPosition)+"\t",
# A call to obtain the mean and the standard deviation is done.
print "%i\t%.2f\t%.2f\t%i" %(cgi.methcoef_sd(filter))
|
vbarrera/thesis
|
Genomic_Evaluation_of_individual_CpG_Methylation/Python/CpGMethStatus.py
|
Python
|
gpl-2.0
| 1,440
|
from __future__ import absolute_import, print_function
from pony.py23compat import PY2, imap, basestring, unicode
import re, os.path, sys, inspect, types, warnings
from datetime import datetime
from itertools import count as _count
from inspect import isfunction
from time import strptime
from collections import defaultdict
from functools import update_wrapper
from xml.etree import cElementTree
import pony
from pony import options
from pony.thirdparty.compiler import ast
from pony.thirdparty.decorator import decorator as _decorator
if pony.MODE.startswith('GAE-'): localbase = object
else: from threading import local as localbase
class PonyDeprecationWarning(DeprecationWarning):
pass
def deprecated(stacklevel, message):
warnings.warn(message, PonyDeprecationWarning, stacklevel)
warnings.simplefilter('once', PonyDeprecationWarning)
def _improved_decorator(caller, func):
if isfunction(func):
return _decorator(caller, func)
def pony_wrapper(*args, **kwargs):
return caller(func, *args, **kwargs)
return pony_wrapper
def decorator(caller, func=None):
if func is not None:
return _improved_decorator(caller, func)
def new_decorator(func):
return _improved_decorator(caller, func)
if isfunction(caller):
update_wrapper(new_decorator, caller)
return new_decorator
def decorator_with_params(dec):
def parameterized_decorator(*args, **kwargs):
if len(args) == 1 and isfunction(args[0]) and not kwargs:
return decorator(dec(), args[0])
return decorator(dec(*args, **kwargs))
return parameterized_decorator
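# Illustrative usage of decorator_with_params (an added sketch, not part of
# the original module): the decorated factory can be applied both with and
# without arguments.
#
#   @decorator_with_params
#   def tag(name='div'):
#       def caller(func, *args, **kwargs):
#           return '<%s>%s</%s>' % (name, func(*args, **kwargs), name)
#       return caller
#
#   @tag             # bare form: uses the default name
#   def hello(): return 'hi'        # hello() -> '<div>hi</div>'
#
#   @tag('span')     # parameterized form
#   def bye(): return 'bye'         # bye() -> '<span>bye</span>'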
@decorator
def cut_traceback(func, *args, **kwargs):
if not (pony.MODE == 'INTERACTIVE' and options.CUT_TRACEBACK):
return func(*args, **kwargs)
try: return func(*args, **kwargs)
except AssertionError: raise
except Exception:
exc_type, exc, tb = sys.exc_info()
last_pony_tb = None
try:
while tb.tb_next:
module_name = tb.tb_frame.f_globals['__name__']
if module_name == 'pony' or (module_name is not None # may be None during import
and module_name.startswith('pony.')):
last_pony_tb = tb
tb = tb.tb_next
if last_pony_tb is None: raise
if tb.tb_frame.f_globals.get('__name__') == 'pony.utils' and tb.tb_frame.f_code.co_name == 'throw':
reraise(exc_type, exc, last_pony_tb)
raise exc # Set "pony.options.CUT_TRACEBACK = False" to see full traceback
finally:
del exc, tb, last_pony_tb
if PY2:
exec('''def reraise(exc_type, exc, tb):
try: raise exc_type, exc, tb
finally: del tb''')
else:
def reraise(exc_type, exc, tb):
try: raise exc.with_traceback(tb)
finally: del exc, tb
def throw(exc_type, *args, **kwargs):
if isinstance(exc_type, Exception):
assert not args and not kwargs
exc = exc_type
else: exc = exc_type(*args, **kwargs)
exc.__cause__ = None
try:
if not (pony.MODE == 'INTERACTIVE' and options.CUT_TRACEBACK):
raise exc
else:
raise exc # Set "pony.options.CUT_TRACEBACK = False" to see full traceback
finally: del exc
def truncate_repr(s, max_len=100):
s = repr(s)
return s if len(s) <= max_len else s[:max_len-3] + '...'
lambda_args_cache = {}
def get_lambda_args(func):
names = lambda_args_cache.get(func)
if names is not None: return names
if type(func) is types.FunctionType:
if hasattr(inspect, 'signature'):
            names, argsname, kwname, defaults = [], None, None, []
for p in inspect.signature(func).parameters.values():
if p.default is not p.empty:
defaults.append(p.default)
if p.kind == p.POSITIONAL_OR_KEYWORD:
names.append(p.name)
elif p.kind == p.VAR_POSITIONAL:
argsname = p.name
elif p.kind == p.VAR_KEYWORD:
kwname = p.name
elif p.kind == p.POSITIONAL_ONLY:
throw(TypeError, 'Positional-only arguments like %s are not supported' % p.name)
elif p.kind == p.KEYWORD_ONLY:
throw(TypeError, 'Keyword-only arguments like %s are not supported' % p.name)
else: assert False
else:
names, argsname, kwname, defaults = inspect.getargspec(func)
elif isinstance(func, ast.Lambda):
names = func.argnames
if func.kwargs: names, kwname = names[:-1], names[-1]
else: kwname = None
if func.varargs: names, argsname = names[:-1], names[-1]
else: argsname = None
defaults = func.defaults
else: assert False # pragma: no cover
if argsname: throw(TypeError, '*%s is not supported' % argsname)
if kwname: throw(TypeError, '**%s is not supported' % kwname)
if defaults: throw(TypeError, 'Defaults are not supported')
lambda_args_cache[func] = names
return names
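# e.g. (added illustration): get_lambda_args(lambda x, y: x + y) -> ['x', 'y'];
# lambdas with *args, **kwargs or default values are rejected with TypeError.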
def error_method(*args, **kwargs):
raise TypeError()
_ident_re = re.compile(r'^[A-Za-z_]\w*\Z')
# is_ident = _ident_re.match
def is_ident(string):
'is_ident(string) -> bool'
return bool(_ident_re.match(string))
_name_parts_re = re.compile(r'''
[A-Z][A-Z0-9]+(?![a-z]) # ACRONYM
| [A-Z][a-z]* # Capitalized or single capital
| [a-z]+ # all-lowercase
| [0-9]+ # numbers
| _+ # underscores
''', re.VERBOSE)
def split_name(name):
"split_name('Some_FUNNYName') -> ['Some', 'FUNNY', 'Name']"
if not _ident_re.match(name):
raise ValueError('Name is not correct Python identifier')
list = _name_parts_re.findall(name)
if not (list[0].strip('_') and list[-1].strip('_')):
        raise ValueError('Name must not start or end with underscores')
return [ s for s in list if s.strip('_') ]
def uppercase_name(name):
"uppercase_name('Some_FUNNYName') -> 'SOME_FUNNY_NAME'"
return '_'.join(s.upper() for s in split_name(name))
def lowercase_name(name):
"uppercase_name('Some_FUNNYName') -> 'some_funny_name'"
return '_'.join(s.lower() for s in split_name(name))
def camelcase_name(name):
"uppercase_name('Some_FUNNYName') -> 'SomeFunnyName'"
return ''.join(s.capitalize() for s in split_name(name))
def mixedcase_name(name):
"mixedcase_name('Some_FUNNYName') -> 'someFunnyName'"
list = split_name(name)
return list[0].lower() + ''.join(s.capitalize() for s in list[1:])
def import_module(name):
"import_module('a.b.c') -> <module a.b.c>"
mod = sys.modules.get(name)
if mod is not None: return mod
mod = __import__(name)
components = name.split('.')
for comp in components[1:]: mod = getattr(mod, comp)
return mod
if sys.platform == 'win32':
_absolute_re = re.compile(r'^(?:[A-Za-z]:)?[\\/]')
else: _absolute_re = re.compile(r'^/')
def is_absolute_path(filename):
return bool(_absolute_re.match(filename))
def absolutize_path(filename, frame_depth):
if is_absolute_path(filename): return filename
code_filename = sys._getframe(frame_depth+1).f_code.co_filename
if not is_absolute_path(code_filename):
if code_filename.startswith('<') and code_filename.endswith('>'):
if pony.MODE == 'INTERACTIVE': raise ValueError(
'When in interactive mode, please provide absolute file path. Got: %r' % filename)
raise EnvironmentError('Unexpected module filename, which is not absolute file path: %r' % code_filename)
code_path = os.path.dirname(code_filename)
return os.path.join(code_path, filename)
def current_timestamp():
return datetime2timestamp(datetime.now())
def datetime2timestamp(d):
result = d.isoformat(' ')
if len(result) == 19: return result + '.000000'
return result
def timestamp2datetime(t):
time_tuple = strptime(t[:19], '%Y-%m-%d %H:%M:%S')
microseconds = int((t[20:26] + '000000')[:6])
return datetime(*(time_tuple[:6] + (microseconds,)))
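# Round-trip sketch (added illustration): the two helpers above invert each
# other down to microsecond precision, e.g.
#   datetime2timestamp(datetime(2020, 1, 2, 3, 4, 5)) == '2020-01-02 03:04:05.000000'
#   timestamp2datetime('2020-01-02 03:04:05.000000') == datetime(2020, 1, 2, 3, 4, 5)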
expr1_re = re.compile(r'''
([A-Za-z_]\w*) # identifier (group 1)
| ([(]) # open parenthesis (group 2)
''', re.VERBOSE)
expr2_re = re.compile(r'''
\s*(?:
(;) # semicolon (group 1)
| (\.\s*[A-Za-z_]\w*) # dot + identifier (group 2)
| ([([]) # open parenthesis or braces (group 3)
)
''', re.VERBOSE)
expr3_re = re.compile(r"""
[()[\]] # parenthesis or braces (group 1)
| '''(?:[^\\]|\\.)*?''' # '''triple-quoted string'''
| \"""(?:[^\\]|\\.)*?\""" # \"""triple-quoted string\"""
| '(?:[^'\\]|\\.)*?' # 'string'
| "(?:[^"\\]|\\.)*?" # "string"
""", re.VERBOSE)
def parse_expr(s, pos=0):
z = 0
match = expr1_re.match(s, pos)
if match is None: raise ValueError()
start = pos
i = match.lastindex
if i == 1: pos = match.end() # identifier
elif i == 2: z = 2 # "("
else: assert False # pragma: no cover
while True:
match = expr2_re.match(s, pos)
if match is None: return s[start:pos], z==1
pos = match.end()
i = match.lastindex
if i == 1: return s[start:pos], False # ";" - explicit end of expression
elif i == 2: z = 2 # .identifier
elif i == 3: # "(" or "["
pos = match.end()
counter = 1
open = match.group(i)
if open == '(': close = ')'
elif open == '[': close = ']'; z = 2
else: assert False # pragma: no cover
while True:
match = expr3_re.search(s, pos)
if match is None: raise ValueError()
pos = match.end()
x = match.group()
if x == open: counter += 1
elif x == close:
counter -= 1
if not counter: z += 1; break
else: assert False # pragma: no cover
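# Behaviour sketch for parse_expr (an added illustration, not part of the
# original module): the second element of the result is True when the parsed
# expression is a single trailing call.
def _parse_expr_demo():  # pragma: no cover
    assert parse_expr('foo(x) + 1') == ('foo(x)', True)
    assert parse_expr('obj.attr rest') == ('obj.attr', False)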
def tostring(x):
if isinstance(x, basestring): return x
if hasattr(x, '__unicode__'):
try: return unicode(x)
except: pass
if hasattr(x, 'makeelement'): return cElementTree.tostring(x)
try: return str(x)
except: pass
try: return repr(x)
except: pass
    if type(x) == types.InstanceType: return '<%s instance at 0x%X>' % (x.__class__.__name__, id(x))
    return '<%s object at 0x%X>' % (x.__class__.__name__, id(x))
def strjoin(sep, strings, source_encoding='ascii', dest_encoding=None):
"Can join mix of unicode and byte strings in different encodings"
strings = list(strings)
try: return sep.join(strings)
except UnicodeDecodeError: pass
for i, s in enumerate(strings):
if isinstance(s, str):
strings[i] = s.decode(source_encoding, 'replace').replace(u'\ufffd', '?')
result = sep.join(strings)
if dest_encoding is None: return result
return result.encode(dest_encoding, 'replace')
def count(*args, **kwargs):
if kwargs: return _count(*args, **kwargs)
if len(args) != 1: return _count(*args)
arg = args[0]
if hasattr(arg, 'count'): return arg.count()
try: it = iter(arg)
except TypeError: return _count(arg)
return len(set(it))
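# Dispatch sketch for count() (added illustration):
#   count(iter([1, 1, 2]))  -> 2                     # distinct items of an iterable
#   count(10)               -> itertools.count(10)   # non-iterable falls through
#   count(query)            -> query.count()         # objects exposing a count() method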
def avg(iter):
count = 0
sum = 0.0
for elem in iter:
if elem is None: continue
sum += elem
count += 1
if not count: return None
return sum / count
def distinct(iter):
d = defaultdict(int)
for item in iter:
d[item] = d[item] + 1
return d
def concat(*args):
return ''.join(tostring(arg) for arg in args)
def is_utf8(encoding):
return encoding.upper().replace('_', '').replace('-', '') in ('UTF8', 'UTF', 'U8')
|
Ahmad31/Web_Flask_Cassandra
|
flask/lib/python2.7/site-packages/pony/utils/utils.py
|
Python
|
apache-2.0
| 12,252
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2015, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""This file builds and installs the NuPIC Core Python bindings."""
import glob
import os
import shutil
import subprocess
import sys
import tempfile
from setuptools import Command, find_packages, setup
from setuptools.command.test import test as BaseTestCommand
from distutils.core import Extension
PY_BINDINGS = os.path.dirname(os.path.realpath(__file__))
REPO_DIR = os.path.abspath(os.path.join(PY_BINDINGS, os.pardir, os.pardir))
DARWIN_PLATFORM = "darwin"
LINUX_PLATFORM = "linux"
UNIX_PLATFORMS = [LINUX_PLATFORM, DARWIN_PLATFORM]
WINDOWS_PLATFORMS = ["windows"]
def getVersion():
"""
Get version from local file.
"""
with open(os.path.join(REPO_DIR, "VERSION"), "r") as versionFile:
return versionFile.read().strip()
class CleanCommand(Command):
"""Command for cleaning up intermediate build files."""
description = "Command for cleaning up generated extension files."
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
platform = getPlatformInfo()
files = getExtensionFileNames(platform)
for f in files:
try:
os.remove(f)
except OSError:
pass
def fixPath(path):
"""
Ensures paths are correct for linux and windows
"""
path = os.path.abspath(os.path.expanduser(path))
if path.startswith("\\"):
return "C:" + path
return path
def findRequirements(platform):
"""
Read the requirements.txt file and parse into requirements for setup's
install_requirements option.
"""
includePycapnp = platform not in WINDOWS_PLATFORMS
requirementsPath = fixPath(os.path.join(PY_BINDINGS, "requirements.txt"))
return [
line.strip()
for line in open(requirementsPath).readlines()
if not line.startswith("#") and (not line.startswith("pycapnp") or includePycapnp)
]
class TestCommand(BaseTestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to py.test")]
def initialize_options(self):
BaseTestCommand.initialize_options(self)
self.pytest_args = [] # pylint: disable=W0201
def finalize_options(self):
BaseTestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
cwd = os.getcwd()
try:
os.chdir("tests")
errno = pytest.main(self.pytest_args)
finally:
os.chdir(cwd)
sys.exit(errno)
def getPlatformInfo():
"""Identify platform."""
if "linux" in sys.platform:
platform = "linux"
elif "darwin" in sys.platform:
platform = "darwin"
# win32
elif sys.platform.startswith("win"):
platform = "windows"
else:
raise Exception("Platform '%s' is unsupported!" % sys.platform)
return platform
def getExtensionFileNames(platform):
if platform in WINDOWS_PLATFORMS:
libExtension = "pyd"
else:
libExtension = "so"
libNames = ("algorithms", "engine_internal", "math")
swigPythonFiles = ["{}.py".format(name) for name in libNames]
swigLibFiles = ["_{}.{}".format(name, libExtension) for name in libNames]
files = [os.path.join(PY_BINDINGS, "src", "nupic", "bindings", name)
for name in list(swigPythonFiles + swigLibFiles)]
return files
def getExtensionFiles(platform):
files = getExtensionFileNames(platform)
for f in files:
if not os.path.exists(f):
generateExtensions()
break
return files
def generateExtensions():
tmpDir = tempfile.mkdtemp()
cwd = os.getcwd()
try:
scriptsDir = os.path.join(tmpDir, "scripts")
releaseDir = os.path.join(tmpDir, "release")
pyExtensionsDir = os.path.join(PY_BINDINGS, "src", "nupic", "bindings")
os.mkdir(scriptsDir)
os.chdir(scriptsDir)
subprocess.check_call(
["cmake", REPO_DIR, "-DCMAKE_INSTALL_PREFIX={}".format(releaseDir),
"-DPY_EXTENSIONS_DIR={}".format(pyExtensionsDir)])
subprocess.check_call(["make", "-j3"])
subprocess.check_call(["make", "install"])
finally:
shutil.rmtree(tmpDir, ignore_errors=True)
os.chdir(cwd)
if __name__ == "__main__":
platform = getPlatformInfo()
    if platform == DARWIN_PLATFORM and "ARCHFLAGS" not in os.environ:
raise Exception("To build NuPIC Core bindings in OS X, you must "
"`export ARCHFLAGS=\"-arch x86_64\"`.")
# Run CMake if extension files are missing.
getExtensionFiles(platform)
# Copy the proto files into the proto Python package.
destDir = os.path.relpath(os.path.join("src", "nupic", "proto"))
for protoPath in glob.glob(os.path.relpath(os.path.join(
"..", "..", "src", "nupic", "proto", "*.capnp"))):
shutil.copy(protoPath, destDir)
print "\nSetup SWIG Python module"
setup(
name="nupic.bindings",
version=getVersion(),
# This distribution contains platform-specific C++ libraries, but they are not
# built with distutils. So we must create a dummy Extension object so when we
# create a binary file it knows to make it platform-specific.
ext_modules=[Extension('nupic.dummy', sources = ['dummy.c'])],
package_dir = {"": "src"},
packages=find_packages("src"),
namespace_packages=["nupic"],
install_requires=findRequirements(platform),
package_data={
"nupic.proto": ["*.capnp"],
"nupic.bindings": ["*.so", "*.pyd"],
"nupic.bindings.tools": ["*.capnp"],
},
extras_require = {"capnp": ["pycapnp==0.5.8"]},
zip_safe=False,
cmdclass={
"clean": CleanCommand,
"test": TestCommand,
},
description="Numenta Platform for Intelligent Computing - bindings",
author="Numenta",
author_email="help@numenta.org",
url="https://github.com/numenta/nupic.core",
long_description = "Python bindings for nupic core.",
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
# It has to be "5 - Production/Stable" or else pypi rejects it!
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Science/Research",
"Topic :: Scientific/Engineering :: Artificial Intelligence"
],
entry_points = {
"console_scripts": [
"nupic-bindings-check = nupic.bindings.check:checkMain",
],
},
)
|
rhyolight/nupic.core
|
bindings/py/setup.py
|
Python
|
agpl-3.0
| 7,451
|
import unittest, random, sys, time, os
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_hosts, h2o_browse as h2b, h2o_import as h2i, h2o_exec as h2e
def write_syn_dataset(csvPathname, rowCount, SEED):
    # 8 random generators, 1 per column (column 7 is written as a constant 0, so r7 goes unused)
r1 = random.Random(SEED)
r2 = random.Random(SEED)
r3 = random.Random(SEED)
r4 = random.Random(SEED)
r5 = random.Random(SEED)
r6 = random.Random(SEED)
r7 = random.Random(SEED)
r8 = random.Random(SEED)
dsf = open(csvPathname, "w+")
for i in range(rowCount):
rowData = "%s,%s,%s,%s,%s,%s,%s,%s" % (
r1.randint(0,1),
r2.randint(0,2),
r3.randint(-4,4),
r4.randint(0,8),
r5.randint(-16,16),
r6.randint(-32,32),
0,
r8.randint(0,1))
dsf.write(rowData + "\n")
dsf.close()
zeroList = [
'Result0 = 0',
'Result.hex = 0',
]
exprList = [
'Result<n> = max(<keyX>[,<col1>])',
]
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
global SEED, localhost
SEED = h2o.setup_random_seed()
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(2,java_heap_GB=1)
else:
h2o_hosts.build_cloud_with_hosts()
@classmethod
def tearDownClass(cls):
# wait while I inspect things
# time.sleep(1500)
h2o.tear_down_cloud()
def test_dkv(self):
SYNDATASETS_DIR = h2o.make_syn_dir()
csvFilenameAll = [
("syn_10x8.csv", 'cA', 5),
]
### csvFilenameList = random.sample(csvFilenameAll,1)
csvFilenameList = csvFilenameAll
### h2b.browseTheCloud()
lenNodes = len(h2o.nodes)
for (csvFilename, hex_key, timeoutSecs) in csvFilenameList:
SEEDPERFILE = random.randint(0, sys.maxint)
csvPathname = SYNDATASETS_DIR + '/' + csvFilename
print "Creating random 10x8 csv"
write_syn_dataset(csvPathname, 10, SEEDPERFILE)
# creates csvFilename.hex from file in importFolder dir
parseResult = h2i.import_parse(path=csvPathname, schema='put', hex_key=hex_key, timeoutSecs=2000)
print csvFilename, 'parse time:', parseResult['response']['time']
print "Parse result['destination_key']:", parseResult['destination_key']
# We should be able to see the parse result?
inspect = h2o_cmd.runInspect(None, parseResult['destination_key'])
print "\n" + csvFilename
h2e.exec_zero_list(zeroList)
# does n+1 so use maxCol 6
h2e.exec_expr_list_rand(lenNodes, exprList, hex_key,
maxCol=6, maxRow=400000, maxTrials=100, timeoutSecs=timeoutSecs)
if __name__ == '__main__':
h2o.unit_main()
|
janezhango/BigDataMachineLearning
|
py/testdir_multi_jvm_fvec/test_exec2_dkv.py
|
Python
|
apache-2.0
| 2,933
|
from hypergan.gans.standard_gan import StandardGAN
from hypergan.gan_component import GANComponent
def gan_factory(*args, **kw_args):
if 'config' in kw_args:
config = kw_args['config']
elif len(args) > 0:
config = args[0]
else:
config = None
if config and 'class' in config:
return GANComponent.lookup_function(None, config['class'])(*args, **kw_args)
else:
return StandardGAN(*args, **kw_args)
GAN = gan_factory
|
255BITS/HyperGAN
|
hypergan/gan.py
|
Python
|
mit
| 473
|
#!/usr/bin/env python3
"""
Created on 23 Jun 2019
@author: Bruno Beloff (bruno.beloff@southcoastscience.com)
(cat /home/pi/SCS/pipes/display_pipe &) | ./display.py -v
"""
import sys
from scs_core.data.datetime import LocalizedDatetime
from scs_core.sync.interval_timer import IntervalTimer
# --------------------------------------------------------------------------------------------------------------------
fifo = None
try:
fifo = open('/home/pi/SCS/pipes/display_pipe', 'w')
timer = IntervalTimer(10)
while timer.true():
now = LocalizedDatetime.now()
message = "test: %s" % now.as_iso8601()
print(message, file=fifo)
fifo.flush()
print(message, file=sys.stderr)
sys.stderr.flush()
except KeyboardInterrupt:
print("KeyboardInterrupt", file=sys.stderr)
finally:
print("exiting", file=sys.stderr)
if fifo:
fifo.close()
|
south-coast-science/scs_dev
|
tests/display/display_pipe_test.py
|
Python
|
mit
| 916
|
# See screenshots here: https://github.com/dmroeder/pylogix/issues/156
# This example allows reading either a single tag or multiple tags separated by semicolon (';').
# Single tag example: CT_2D_DINTArray[0,0] or CT_STRING or CT_BOOLArray[252].
# Multi tag example: CT_DINT; CT_REAL; CT_3D_DINTArray[0,3,1].
# It also allows reading multiple elements of an array with the following tag format: tagName[startIndex]{elementCount}
# where 'startIndex' is the starting array index (x or x,y or x,y,z) and 'elementCount' is the number of consecutive elements to read.
# Example: CT_REALArray[0]{15} or CT_DINTArray[0,1,0]{7}
# Multi tag example: CT_DINT; CT_DINT.0{5}; CT_REAL; CT_3D_DINTArray[0,3,1]; CT_DINTArray[0,1,0]{7}.
# Enabling logging will log the values of the current tags.
# Enabling logging forces bool/bit values to always be logged as True/False, for uniformity of the log file.
# Changing tags while logging is enabled will always overwrite the log file (save it manually if needed).
# Keeping the same tags while logging is enabled lets you stop/start updating the values and keep appending them to the log.
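# A minimal sketch (added illustration, not used by the GUI below) of how a
# tag spec such as "CT_DINTArray[0,1,0]{7}" splits into its base tag and the
# element count, mirroring the parsing done in startUpdateValue():
def _split_tag_spec(spec):
    spec = spec.strip()
    if spec.endswith('}') and '{' in spec:
        base, _, count = spec[:-1].rpartition('{')
        try:
            return base, max(1, int(count))
        except ValueError:
            return base, 1
    return spec, 1
# _split_tag_spec('CT_DINTArray[0,1,0]{7}') -> ('CT_DINTArray[0,1,0]', 7)
# _split_tag_spec('CT_REAL')                -> ('CT_REAL', 1)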
'''
the following import is only necessary because eip.py is not in this directory
'''
import sys
sys.path.append('..')
'''
Create a simple Tkinter window to display discovered devices, tags and a single variable.
Tkinter doesn't come preinstalled on all Linux distributions, so you may need to install it.
For Ubuntu: sudo apt-get install python-tk
Tkinter vs tkinter:
Reference: https://stackoverflow.com/questions/17843596/difference-between-tkinter-and-tkinter
Window/widget resizing:
Reference: https://stackoverflow.com/questions/22835289/how-to-get-tkinter-canvas-to-dynamically-resize-to-window-width
'''
import os.path
import platform
import threading
import datetime
import pylogix
from pylogix import PLC
try:
# Python 2
from Tkinter import *
except ImportError:
# Python 3
from tkinter import *
import tkinter.font as tkfont
pythonVersion = platform.python_version()
# width wise resizing of the tag label (window)
class LabelResizing(Label):
def __init__(self,parent,**kwargs):
Label.__init__(self,parent,**kwargs)
self.bind("<Configure>", self.on_resize)
self.width = self.winfo_reqwidth()
def on_resize(self,event):
if self.width > 0:
self.width = int(event.width)
self.config(width=self.width, wraplength=self.width)
# width wise resizing of the tag entry box (window)
class EntryResizing(Entry):
def __init__(self,parent,**kwargs):
Entry.__init__(self,parent,**kwargs)
self.bind("<Configure>", self.on_resize)
self.width = self.winfo_reqwidth()
def on_resize(self,event):
if self.width > 0:
self.width = int(event.width)
self.config(width=self.width)
class device_discovery_thread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
def run(self):
discoverDevices()
class get_tags_thread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
def run(self):
getTags()
class connection_thread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
def run(self):
comm_check()
class update_thread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
def run(self):
startUpdateValue()
# startup default values
myTag, ipAddress, processorSlot = ['CT_STRING', 'CT_REAL', 'CT_DINT', 'CT_DINT.2{7}'], '192.168.1.15', 3
headerAdded = False
ver = pylogix.__version__
def main():
'''
Create our window and comm driver
'''
global root
global comm
global checkVarMicro800
global checkVarSaveTags
global checkVarLogTagValues
global checkVarBoolDisplay
global selectedIPAddress
global selectedProcessorSlot
global chbMicro800
global chbSaveTags
global chbLogTagValues
global chbBoolDisplay
global selectedTag
global connected
global updateRunning
global connectionInProgress
global changePLC
global btnStart
global btnStop
global lbDevices
global lbTags
global lbConnectionMessage
global lbErrorMessage
global tbIPAddress
global sbProcessorSlot
global tbTag
global tagValue
global popup_menu_tbTag
global popup_menu_tbIPAddress
global popup_menu_save_tags_list
global regularTags
global arrayTags
global tagsSet
global previousLogHeader
global app_closing
root = Tk()
root.config(background='black')
root.title('Pylogix GUI Test - Python v' + pythonVersion)
root.geometry('800x600')
root.bind('<Destroy>', on_exit)
app_closing = False
connectionInProgress, connected, updateRunning = False, False, True
regularTags = []
arrayTags = dict()
previousLogHeader = ''
tagsSet = False
changePLC = IntVar()
changePLC.set(0)
# bind the 'q' keyboard key to quit
root.bind('q', lambda event:root.destroy())
# add a frame to hold top widgets
frame1 = Frame(root, background='black')
frame1.pack(side='top', fill=X)
# add list boxes for Device Discovery and Get Tags
lbDevices = Listbox(frame1, height=11, width=45, bg='lightblue')
lbTags = Listbox(frame1, height=11, width=45, bg='lightgreen')
lbDevices.pack(anchor='n', side='left', padx=3, pady=3)
# add a scrollbar for the Devices list box
scrollbarDevices = Scrollbar(frame1, orient='vertical', command=lbDevices.yview)
scrollbarDevices.pack(anchor='n', side='left', pady=3, ipady=65)
lbDevices.config(yscrollcommand = scrollbarDevices.set)
# copy selected IP Address to the clipboard on the mouse double-click
# this is currently set to work for IP Address only
lbDevices.bind('<Double-Button-1>', lambda event: ip_copy())
# add the Discover Devices button
btnDiscoverDevices = Button(frame1, text = 'Discover Devices', fg ='green', height=1, width=14, command=start_discover_devices)
btnDiscoverDevices.pack(anchor='n', side='left', padx=3, pady=3)
# add a scrollbar for the Tags list box
scrollbarTags = Scrollbar(frame1, orient='vertical', command=lbTags.yview)
scrollbarTags.pack(anchor='n', side='right', padx=3, pady=3, ipady=65)
lbTags.config(yscrollcommand = scrollbarTags.set)
# copy selected tag to the clipboard on the mouse double-click
lbTags.bind('<Double-Button-1>', lambda event: tag_copy())
lbTags.pack(anchor='n', side='right', pady=3)
# add the Get Tags button
btnGetTags = Button(frame1, text = 'Get Tags', fg ='green', height=1, width=14, command=start_get_tags)
btnGetTags.pack(anchor='n', side='right', padx=3, pady=3)
# add a frame to hold the label for pylogix version and Log tag values/Save tags/Micro800 checkboxes
frame2 = Frame(root, background='black')
frame2.pack(fill=X)
# create a label to show pylogix version
lblVersion = Label(frame2, text='pylogix v' + ver, fg='grey', bg='black', font='Helvetica 9')
lblVersion.pack(side='left', padx=3, pady=5)
# add 'Log tag values' checkbox
checkVarLogTagValues = IntVar()
chbLogTagValues = Checkbutton(frame2, text='Log tags values', variable=checkVarLogTagValues, command=setBoolDisplayForLogging)
checkVarLogTagValues.set(0)
chbLogTagValues.pack(side='left', padx=95, pady=4)
# add Micro800 checkbox
checkVarMicro800 = IntVar()
chbMicro800 = Checkbutton(frame2, text='PLC is Micro800', variable=checkVarMicro800, command=check_micro800)
checkVarMicro800.set(0)
chbMicro800.pack(side='right', padx=5, pady=4)
# add 'Save tags' checkbox
checkVarSaveTags = IntVar()
chbSaveTags = Checkbutton(frame2, text='Save tags list', variable=checkVarSaveTags)
checkVarSaveTags.set(0)
chbSaveTags.pack(side='right', padx=80, pady=4)
# add the tooltip menu on the mouse right-click
popup_menu_save_tags_list = Menu(chbSaveTags, bg='lightblue', tearoff=0)
popup_menu_save_tags_list.add_command(label='Click the Get Tags button to save the list', command=set_checkbox_state)
chbSaveTags.bind('<Button-1>', lambda event: save_tags_list(event, chbSaveTags))
# add a frame to hold connection and error messages listboxes
frame3 = Frame(root, background='black')
frame3.pack(side='bottom', fill=X)
# add a list box for connection messages
lbConnectionMessage = Listbox(frame3, height=1, width=45, fg='blue', bg='lightgrey')
lbConnectionMessage.pack(anchor=S, side='left', padx=3, pady=3)
# add a listbox for error messages
lbErrorMessage = Listbox(frame3, height=1, width=45, fg='red', bg='lightgrey')
lbErrorMessage.pack(anchor=S, side='right', padx=3, pady=3)
# add a frame to hold the tag label, tag entry box and the update buttons
frame4 = Frame(root, background='black')
frame4.pack(fill=X)
# create a label for the Tag entry
lblTag = Label(frame4, text='Tags To Read (separate with semicolon)', fg='white', bg='black', font='Helvetica 8')
lblTag.pack(anchor='center', pady=10)
# add button to start updating tag value
btnStart = Button(frame4, text = 'Start Update', state='disabled', bg='lightgrey', fg ='blue', height=1, width=10, command=start_update)
btnStart.pack(side='left', padx=5, pady=1)
# add button to stop updating tag value
btnStop = Button(frame4, text = 'Stop Update', state='disabled', fg ='blue', height=1, width=10, command=stopUpdateValue)
btnStop.pack(side='right', padx=5, pady=1)
# create a text box for the Tag entry
char_width = 5
if int(pythonVersion[0]) > 2:
fnt = tkfont.Font(family="Helvetica", size=11, weight="normal")
char_width = fnt.measure("0")
selectedTag = StringVar()
tbTag = EntryResizing(frame4, justify='center', textvariable=selectedTag, font='Helvetica 11', width=(int(800 / char_width) - 24))
selectedTag.set((str(myTag).replace(',', ';'))[1:-1].replace('\'', ''))
# add the 'Paste' menu on the mouse right-click
popup_menu_tbTag = Menu(tbTag, tearoff=0)
popup_menu_tbTag.add_command(label='Paste', command=tag_paste)
tbTag.bind('<Button-3>', lambda event: tag_menu(event, tbTag))
tbTag.pack(side='left', fill=X)
# add a frame to hold the label displaying the tag value
frame5 = Frame(root, background='black')
frame5.pack(fill=X)
# create a label to display the tag value
if int(pythonVersion[0]) > 2:
fnt = tkfont.Font(family="Helvetica", size=11, weight="normal")
char_width = fnt.measure("0")
tagValue = LabelResizing(frame5, text='~', justify='left', fg='yellow', bg='navy', font='Helvetica 18', width=(int(800 / char_width - 4.5)), wraplength=800, relief=SUNKEN)
tagValue.pack(anchor='center', padx=3, pady=5)
# add a frame to hold the IPAddress / Slot labels
frameIPSlotLabels = Frame(root, background='black')
frameIPSlotLabels.place(anchor='center', relx=0.5, rely=0.09)
# create a label for the IPAddress entry
lblIPAddress = Label(frameIPSlotLabels, text='IP Address', fg='white', bg='black', font='Helvetica 8')
lblIPAddress.pack(side='left', anchor='n', padx=32)
# create a label for the processor Slot entry
lblProcessorSlot = Label(frameIPSlotLabels, text='Slot', fg='white', bg='black', font='Helvetica 8')
lblProcessorSlot.pack(side='left', anchor='n', padx=8)
# add a frame to hold the IPAddress / Slot entry boxes
frameIPSlotBoxes = Frame(root, background='black')
frameIPSlotBoxes.place(anchor='center', relx=0.5, rely=0.12)
# create a text box for the IPAddress entry
selectedIPAddress = StringVar()
tbIPAddress = Entry(frameIPSlotBoxes, justify='center', textvariable=selectedIPAddress)
selectedIPAddress.set(ipAddress)
# add the 'Paste' menu on the mouse right-click
popup_menu_tbIPAddress = Menu(tbIPAddress, tearoff=0)
popup_menu_tbIPAddress.add_command(label='Paste', command=ip_paste)
tbIPAddress.bind('<Button-3>', lambda event: ip_menu(event, tbIPAddress))
tbIPAddress.pack(side='left', anchor='n', padx=1, pady=1)
# create a spinbox for the processor Slot entry
selectedProcessorSlot = StringVar()
sbProcessorSlot = Spinbox(frameIPSlotBoxes, width=4, justify='center', from_ = 0, to = 17, increment=1, textvariable=selectedProcessorSlot, state='readonly')
selectedProcessorSlot.set(processorSlot)
sbProcessorSlot.pack(side='right', anchor='n', padx=1, pady=1)
# add a frame to hold the Boolean Display checkbox
frameBoolDisplay = Frame(root, background='black')
frameBoolDisplay.place(anchor='center', relx=0.5, rely=0.2)
# add 'Boolean Display' checkbox
checkVarBoolDisplay = IntVar()
chbBoolDisplay = Checkbutton(frameBoolDisplay, text='Boolean Display 1 : 0', variable=checkVarBoolDisplay)
checkVarBoolDisplay.set(0)
chbBoolDisplay.pack(side='top', anchor='center', pady=3)
# add Exit button
btnExit = Button(root, text = 'Exit', fg ='red', height=1, width=10, command=root.destroy)
btnExit.place(anchor='center', relx=0.5, rely=0.98)
# set the minimum window size to the current size
root.update()
root.minsize(root.winfo_width(), root.winfo_height())
comm = None
start_connection()
root.mainloop()
try:
if not comm is None:
comm.Close()
comm = None
except:
pass
def on_exit(*args):
global app_closing
app_closing = True
def start_connection():
try:
thread1 = connection_thread()
thread1.setDaemon(True)
thread1.start()
except Exception as e:
print('unable to start connection_thread, ' + str(e))
def start_discover_devices():
try:
thread2 = device_discovery_thread()
thread2.setDaemon(True)
thread2.start()
except Exception as e:
print('unable to start device_discovery_thread, ' + str(e))
def start_get_tags():
try:
thread3 = get_tags_thread()
thread3.setDaemon(True)
thread3.start()
except Exception as e:
print('unable to start get_tags_thread, ' + str(e))
def start_update():
try:
thread4 = update_thread()
thread4.setDaemon(True)
thread4.start()
except Exception as e:
print('unable to start update_thread, ' + str(e))
def check_micro800():
if checkVarMicro800.get() == 1:
sbProcessorSlot['state'] = 'disabled'
else:
sbProcessorSlot['state'] = 'normal'
changePLC.set(1)
lbDevices.delete(0, 'end')
lbTags.delete(0, 'end')
start_connection()
def setBoolDisplayForLogging():
global checkVarBoolDisplay
if checkVarLogTagValues.get() == 1: # force logging bool/bit values as True/False for uniformity
checkVarBoolDisplay.set(0)
chbBoolDisplay['state'] = 'disabled'
else:
chbBoolDisplay['state'] = 'normal'
def discoverDevices():
try:
lbDevices.delete(0, 'end')
commDD = PLC()
devices = commDD.Discover()
if str(devices) == 'None [] Success':
lbDevices.insert(1, 'No Devices Discovered')
else:
i = 0
for device in devices.Value:
lbDevices.insert(i * 12 + 1, 'IP Address: ' + device.IPAddress)
lbDevices.insert(i * 12 + 2, 'Product Name: ' + device.ProductName)
lbDevices.insert(i * 12 + 3, 'Product Code: ' + str(device.ProductCode))
lbDevices.insert(i * 12 + 4, 'Vendor: ' + device.Vendor)
lbDevices.insert(i * 12 + 5, 'Vendor ID: ' + str(device.VendorID))
lbDevices.insert(i * 12 + 6, 'Device Type: ' + str(device.DeviceType))
lbDevices.insert(i * 12 + 7, 'Device ID: ' + str(device.DeviceID))
lbDevices.insert(i * 12 + 8, 'Revision: ' + device.Revision)
lbDevices.insert(i * 12 + 9, 'Serial: ' + str(int(device.SerialNumber, 0)))
lbDevices.insert(i * 12 + 10, 'State: ' + str(device.State))
lbDevices.insert(i * 12 + 11, 'Status: ' + str(device.Status))
lbDevices.insert(i * 12 + 12, '----------------------------------')
i += 1
for device in devices.Value:
if device.DeviceID == 14:
lbDevices.insert(i * 12 + 1, 'Modules at ' + device.IPAddress)
'''
Query each slot for a module
'''
with PLC() as c:
c.IPAddress = device.IPAddress
for j in range(17):
x = c.GetModuleProperties(j)
lbDevices.insert(i * 12 + 2 + j, 'Slot ' + str(j) + ' ' + x.Value.ProductName + ' rev: ' + x.Value.Revision)
c.Close()
c = None
i += 1
commDD.Close()
commDD = None
except Exception as e:
if not commDD is None:
commDD.Close()
commDD = None
if app_closing:
pass
else:
print(str(e))
def getTags():
try:
lbTags.delete(0, 'end')
commGT = PLC()
commGT.IPAddress = selectedIPAddress.get()
if checkVarMicro800.get() == 0:
commGT.ProcessorSlot = int(selectedProcessorSlot.get())
tags = commGT.GetTagList()
if not tags is None:
if not tags.Value is None:
# save tags to a file
if checkVarSaveTags.get() == 1:
with open('tags_list.txt', 'w') as f:
for t in tags.Value:
if t.DataType == '':
f.write(t.TagName + '\n')
else:
f.write(t.TagName + ' (DataType - ' + t.DataType + ')\n')
for t in tags.Value:
j = 1
if t.DataType == '':
lbTags.insert(j, t.TagName)
else:
lbTags.insert(j, t.TagName + ' (DataType - ' + t.DataType + ')')
j = j + 1
else:
lbTags.insert(1, 'No Tags Retrieved')
else:
lbTags.insert(1, 'No Tags Retrieved')
commGT.Close()
commGT = None
except Exception as e:
if not commGT is None:
commGT.Close()
commGT = None
if app_closing:
pass
else:
print(str(e))
def comm_check():
global comm
global updateRunning
global connected
global connectionInProgress
try:
connectionInProgress = True
ip = selectedIPAddress.get()
port = int(selectedProcessorSlot.get())
if (not connected or comm.IPAddress != ip or comm.ProcessorSlot != port or changePLC.get() == 1):
if not comm is None:
comm.Close()
comm = None
comm = PLC()
comm.IPAddress = ip
if checkVarMicro800.get() == 0:
comm.ProcessorSlot = port
comm.Micro800 = False
else:
comm.Micro800 = True
plcTime = comm.GetPLCTime()
lbConnectionMessage.delete(0, 'end')
lbErrorMessage.delete(0, 'end')
if plcTime.Value is None:
if btnStop['state'] == 'disabled':
btnStart['state'] = 'disabled'
btnStart['bg'] = 'lightgrey'
lbConnectionMessage.insert(1, ' Not Connected')
lbErrorMessage.insert(1, ' ' + plcTime.Status)
connected = False
root.after(5000, start_connection)
else:
lbConnectionMessage.insert(1, ' Connected')
if not updateRunning:
updateRunning = True
connected = True
connectionInProgress = False
if btnStop['state'] == 'disabled':
btnStart['state'] = 'normal'
btnStart['bg'] = 'lightgreen'
else:
start_update()
changePLC.set(0)
except Exception as e:
if app_closing:
pass
else:
print(str(e))
def startUpdateValue():
global comm
global updateRunning
global connected
global checkVarLogTagValues
global previousLogHeader
global headerAdded
global regularTags
global arrayTags
global tagsSet
'''
Call ourself to update the screen
'''
try:
tagsChanged = False
arrayElementCount = 0
if not connected:
if not connectionInProgress:
start_connection()
else:
if not updateRunning:
updateRunning = True
else:
# remove all the spaces
displayTag = (selectedTag.get()).replace(' ', '')
allValues = ''
logHeader = ''
logValues = ''
if displayTag != '':
chbLogTagValues['state'] = 'disabled'
if not tagsSet:
regularTags = []
arrayTags = dict()
if ';' in displayTag:
tags = displayTag.split(';')
for tag in tags:
t = str(tag)
if not t == '':
if t.endswith('}') and '{' in t: # 1 or 2 or 3 dimensional array tag
try:
arrayElementCount = int(t[t.index('{') + 1:t.index('}')])
if arrayElementCount < 2:
regularTags.append(t[:t.index('{')])
else:
t = t[:t.index('{')]
arrayTags.update( {t : arrayElementCount} )
except:
regularTags.append(t[:t.index('{')])
else:
regularTags.append(t)
elif displayTag.endswith('}') and '{' in displayTag: # 1 or 2 or 3 dimensional array tag
try:
arrayElementCount = int(displayTag[displayTag.index('{') + 1:displayTag.index('}')])
if arrayElementCount < 2:
regularTags.append(displayTag[:displayTag.index('{')])
else:
readArray = True
arrayTags.update( {displayTag[:displayTag.index('{')] : arrayElementCount} )
except:
regularTags.append(displayTag[:displayTag.index('{')])
else:
regularTags.append(displayTag)
if len(regularTags) > 0:
for i in range(0, len(regularTags)):
logHeader += regularTags[i] + ', '
if len(arrayTags) > 0:
for key in arrayTags:
logHeader += key + '{' + str(arrayTags[key]) + '}, '
tagsSet = True
if previousLogHeader != logHeader:
tagsChanged = True
try:
if len(regularTags) > 0:
response = comm.Read(regularTags)
if not response[0].Value is None:
for i in range(0, len(response)):
allValues += response[i].TagName + ' : '
if (checkVarBoolDisplay.get() == 1) and (str(response[i].Value) == 'True' or str(response[i].Value) == 'False'):
if checkVarLogTagValues.get() == 1:
logValues += '1, ' if str(response[i].Value) == 'True' else '0, '
allValues += '1, ' if str(response[i].Value) == 'True' else '0, '
else:
if str(response[i].Value) == '':
if checkVarLogTagValues.get() == 1:
logValues += '{}, '
allValues += '{}, '
else:
if checkVarLogTagValues.get() == 1:
logValues += str(response[i].Value) + ', '
allValues += str(response[i].Value)
allValues += '\n'
if len(arrayTags) > 0:
for tg in arrayTags:
response = comm.Read(tg, arrayTags[tg])
if not response.Value is None:
allValues += response.TagName + '{' + str(arrayTags[tg]) + '} : '
if (checkVarBoolDisplay.get() == 1) and (str(response.Value[0]) == 'True' or str(response.Value[0]) == 'False'):
newBoolArray = []
for val in range(0, len(response.Value)):
newBoolArray.append(1 if str(response.Value[val]) == 'True' else 0)
if checkVarLogTagValues.get() == 1:
logValues += str(newBoolArray).replace(',', ';') + ', '
allValues += str(newBoolArray)
else:
if checkVarLogTagValues.get() == 1:
logValues += str(response.Value).replace(',', ';') + ', '
allValues += str(response.Value)
allValues += '\n'
except Exception as e:
tagValue['text'] = str(e)
connected = False
response = None
setWidgetState()
start_connection()
return
if allValues != '':
tagValue['text'] = allValues[:-1]
if checkVarLogTagValues.get() == 1:
if not os.path.exists('tag_values_log.txt') or tagsChanged:
headerAdded = False
if headerAdded:
with open('tag_values_log.txt', 'a') as log_file:
strValue = str(datetime.datetime.now()).replace(' ', '/') + ', ' + logValues[:-2] + '\n'
log_file.write(strValue)
else:
with open('tag_values_log.txt', 'w') as log_file:
previousLogHeader = logHeader
# add header with 'Date / Time' and all the tags being read
header = 'Date / Time, ' + logHeader[:-2] + '\n'
log_file.write(header)
headerAdded = True
else:
plcTime = comm.GetPLCTime()
if plcTime.Value is None:
tagValue['text'] = 'Connection Lost'
if not connectionInProgress:
connected = False
start_connection()
else:
tagValue['text'] = 'Check Tag(s)'
setWidgetState()
root.after(500, startUpdateValue)
except Exception as e:
if app_closing:
pass
else:
print(str(e))
def setWidgetState():
try:
if btnStart['state'] == 'normal':
btnStart['state'] = 'disabled'
btnStart['bg'] = 'lightgrey'
btnStop['state'] = 'normal'
btnStop['bg'] = 'lightgreen'
tbIPAddress['state'] = 'disabled'
if checkVarMicro800.get() == 0:
sbProcessorSlot['state'] = 'disabled'
chbMicro800['state'] = 'disabled'
tbTag['state'] = 'disabled'
except Exception as e:
if app_closing:
pass
else:
print(str(e))
def stopUpdateValue():
global updateRunning
global tagsSet
try:
if updateRunning:
updateRunning = False
tagValue['text'] = '~'
chbLogTagValues['state'] = 'normal'
if not connectionInProgress:
btnStart['state'] = 'normal'
btnStart['bg'] = 'lightgreen'
btnStop['state'] = 'disabled'
btnStop['bg'] = 'lightgrey'
tbIPAddress['state'] = 'normal'
chbMicro800['state'] = 'normal'
if checkVarMicro800.get() == 0:
sbProcessorSlot['state'] = 'normal'
tbTag['state'] = 'normal'
tagsSet = False
except Exception as e:
if app_closing:
pass
else:
print(str(e))
def save_tags_list(event, chbSaveTags):
if checkVarSaveTags.get() == 0:
popup_menu_save_tags_list.post(event.x_root, event.y_root)
# Windows users can also click outside of the popup so set the checkbox state here
if platform.system() == 'Windows':
chbSaveTags.select()
def set_checkbox_state():
chbSaveTags.select()
def tag_copy():
root.clipboard_clear()
listboxSelectedTag = (lbTags.get(ANCHOR)).split(' ')[0]
root.clipboard_append(listboxSelectedTag)
def tag_menu(event, tbTag):
try:
old_clip = root.clipboard_get()
except:
old_clip = None
if (not old_clip is None) and (type(old_clip) is str) and tbTag['state'] == 'normal':
tbTag.select_range(0, 'end')
popup_menu_tbTag.post(event.x_root, event.y_root)
def tag_paste():
# user clicked the 'Paste' option so paste the tag from the clipboard
selectedTag.set(root.clipboard_get())
tbTag.select_range(0, 'end')
tbTag.icursor('end')
def ip_copy():
if (lbDevices.get(ANCHOR)).split(' ')[0] == 'IP':
root.clipboard_clear()
listboxSelectedIPAddress = (lbDevices.get(ANCHOR)).split(' ')[2]
root.clipboard_append(listboxSelectedIPAddress)
def ip_menu(event, tbIPAddress):
try:
old_clip = root.clipboard_get()
except:
old_clip = None
if (not old_clip is None) and (type(old_clip) is str) and tbIPAddress['state'] == 'normal':
tbIPAddress.select_range(0, 'end')
popup_menu_tbIPAddress.post(event.x_root, event.y_root)
def ip_paste():
# user clicked the 'Paste' option so paste the IP Address from the clipboard
selectedIPAddress.set(root.clipboard_get())
tbIPAddress.select_range(0, 'end')
tbIPAddress.icursor('end')
if __name__=='__main__':
main()
|
dmroeder/pylogix
|
examples/81_simple_gui.py
|
Python
|
apache-2.0
| 32,706
|
import warnings
from scrapy.exceptions import ScrapyDeprecationWarning
warnings.warn("Module `scrapy.contrib.throttle` is deprecated, "
"use `scrapy.extensions.throttle` instead",
ScrapyDeprecationWarning, stacklevel=2)
from scrapy.extensions.throttle import *
|
bdh1011/wau
|
venv/lib/python2.7/site-packages/scrapy/contrib/throttle.py
|
Python
|
mit
| 290
|
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the cloud preprocessing module."""
import os
from absl.testing import absltest
import mock
from tensorflow_cloud.core import machine_config
from tensorflow_cloud.core import preprocess
class TestPreprocess(absltest.TestCase):
def setup_py(self):
self.entry_point_name = "sample_compile_fit.py"
self.entry_point = "sample_compile_fit.py"
def setup_ipython(self):
self.entry_point_name = "mnist_example_using_fit.ipynb"
self.entry_point = "mnist_example_using_fit.ipynb"
def get_preprocessed_entry_point(
self,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["CPU"],
worker_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_count=0,
distribution_strategy="auto",
called_from_notebook=False,
):
self.wrapped_entry_point = preprocess.get_preprocessed_entry_point(
self.entry_point,
chief_config,
worker_config,
worker_count,
distribution_strategy,
called_from_notebook,
)
with open(self.wrapped_entry_point, "r") as f:
script_lines = f.readlines()
return script_lines
def assert_and_cleanup(self, expected_lines, script_lines):
self.assertListEqual(expected_lines, script_lines)
os.remove(self.wrapped_entry_point)
def test_auto_one_device_strategy(self):
self.setup_py()
script_lines = self.get_preprocessed_entry_point()
expected_lines = [
"import os\n",
"import tensorflow as tf\n",
'os.environ["TF_KERAS_RUNNING_REMOTELY"]="1"\n',
"import faulthandler\n",
"faulthandler.enable()\n",
"import sys\n",
"for flag in sys.argv[1:]:\n",
' if flag.startswith("TUNER_ID"):\n',
' os.environ["KERASTUNER_TUNER_ID"]=flag\n',
"strategy = tf.distribute.OneDeviceStrategy(device='/gpu:0')\n",
"tf.distribute.experimental_set_strategy(strategy)\n",
'exec(open("{}").read())\n'.format(self.entry_point_name),
]
self.assert_and_cleanup(expected_lines, script_lines)
def test_auto_mirrored_strategy(self):
self.setup_py()
chief_config = machine_config.COMMON_MACHINE_CONFIGS["K80_4X"]
script_lines = self.get_preprocessed_entry_point(
chief_config=chief_config)
expected_lines = [
"import os\n",
"import tensorflow as tf\n",
'os.environ["TF_KERAS_RUNNING_REMOTELY"]="1"\n',
"import faulthandler\n",
"faulthandler.enable()\n",
"import sys\n",
"for flag in sys.argv[1:]:\n",
' if flag.startswith("TUNER_ID"):\n',
' os.environ["KERASTUNER_TUNER_ID"]=flag\n',
"strategy = tf.distribute.MirroredStrategy()\n",
"tf.distribute.experimental_set_strategy(strategy)\n",
'exec(open("{}").read())\n'.format(self.entry_point_name),
]
self.assert_and_cleanup(expected_lines, script_lines)
def test_auto_multi_worker_strategy(self):
self.setup_py()
script_lines = self.get_preprocessed_entry_point(worker_count=2)
expected_lines = [
"import os\n",
"import tensorflow as tf\n",
'os.environ["TF_KERAS_RUNNING_REMOTELY"]="1"\n',
"import faulthandler\n",
"faulthandler.enable()\n",
"import sys\n",
"for flag in sys.argv[1:]:\n",
' if flag.startswith("TUNER_ID"):\n',
' os.environ["KERASTUNER_TUNER_ID"]=flag\n',
("strategy = tf.distribute.experimental."
"MultiWorkerMirroredStrategy()\n"),
"tf.distribute.experimental_set_strategy(strategy)\n",
'exec(open("{}").read())\n'.format(self.entry_point_name),
]
self.assert_and_cleanup(expected_lines, script_lines)
def test_auto_tpu_strategy(self):
self.setup_py()
worker_config = machine_config.COMMON_MACHINE_CONFIGS["TPU"]
script_lines = self.get_preprocessed_entry_point(
worker_config=worker_config, worker_count=1
)
expected_lines = [
"import os\n",
"import tensorflow as tf\n",
'os.environ["TF_KERAS_RUNNING_REMOTELY"]="1"\n',
"import sys\n",
"for flag in sys.argv[1:]:\n",
' if flag.startswith("TUNER_ID"):\n',
' os.environ["KERASTUNER_TUNER_ID"]=flag\n',
"import json\n",
"import logging\n",
"import time\n",
"logger = logging.getLogger(__name__)\n",
"logging.basicConfig(level=logging.INFO)\n",
"def wait_for_tpu_cluster_resolver_ready():\n",
" tpu_config_env = os.environ.get('TPU_CONFIG')\n",
" if not tpu_config_env:\n",
(" logging.info('Missing TPU_CONFIG, "
"use CPU/GPU for training.')\n"),
" return None\n",
" tpu_node = json.loads(tpu_config_env)\n",
" logging.info('Waiting for TPU to be ready: %s.', tpu_node)\n",
" num_retries = 40\n",
" for i in range(num_retries):\n",
" try:\n",
" tpu_cluster_resolver = (\n",
" tf.distribute.cluster_resolver.TPUClusterResolver(\n",
" tpu=[tpu_node['tpu_node_name']],\n",
" zone=tpu_node['zone'],\n",
" project=tpu_node['project'],\n",
" job_name='worker'))\n",
" tpu_cluster_resolver_dict = "
"tpu_cluster_resolver.cluster_spec().as_dict()\n",
" if 'worker' in tpu_cluster_resolver_dict:\n",
(" logging.info('Found TPU worker: %s', "
"tpu_cluster_resolver_dict)\n"),
" return tpu_cluster_resolver\n",
" except Exception as e:\n",
" if i < num_retries - 1:\n",
" logging.info('Still waiting for provisioning of TPU VM"
" instance.')\n",
" else:\n",
" # Preserves the traceback.\n",
(" raise RuntimeError('Failed to schedule TPU: "
"{}'.format(e))\n"),
" time.sleep(10)\n",
" raise RuntimeError('Failed to schedule TPU.')\n",
"resolver = wait_for_tpu_cluster_resolver_ready()\n",
"tf.config.experimental_connect_to_cluster(resolver)\n",
"tf.tpu.experimental.initialize_tpu_system(resolver)\n",
"strategy = tf.distribute.experimental.TPUStrategy(resolver)\n",
"tf.distribute.experimental_set_strategy(strategy)\n",
'exec(open("{}").read())\n'.format(self.entry_point_name),
]
self.assert_and_cleanup(expected_lines, script_lines)
@mock.patch("tensorflow_cloud.core.preprocess.PythonExporter") # pylint: disable=line-too-long
def test_ipython_notebook(self, mock_python_exporter):
file_contents = (
"num_train_examples = info.splits['train'].num_examples\n"
"eval_dataset = mnist_test.map(scale).batch(BATCH_SIZE)\n"
)
mock_python_exporter.return_value.from_filename.return_value = (
file_contents,
None,
)
self.setup_ipython()
script_lines = self.get_preprocessed_entry_point()
expected_lines = [
"import os\n",
"import tensorflow as tf\n",
'os.environ["TF_KERAS_RUNNING_REMOTELY"]="1"\n',
"import faulthandler\n",
"faulthandler.enable()\n",
"import sys\n",
"for flag in sys.argv[1:]:\n",
' if flag.startswith("TUNER_ID"):\n',
' os.environ["KERASTUNER_TUNER_ID"]=flag\n',
"strategy = tf.distribute.OneDeviceStrategy(device='/gpu:0')\n",
"tf.distribute.experimental_set_strategy(strategy)\n",
]
for el in expected_lines:
self.assertIn(el, script_lines)
self.assertIn(
"num_train_examples = info.splits['train'].num_examples\n",
script_lines
)
self.assertIn(
"eval_dataset = mnist_test.map(scale).batch(BATCH_SIZE)\n",
script_lines
)
if __name__ == "__main__":
absltest.main()
|
tensorflow/cloud
|
src/python/tensorflow_cloud/core/tests/unit/preprocess_test.py
|
Python
|
apache-2.0
| 9,116
|
# -*- test-case-name: twisted.test.test_persisted -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Utility classes for dealing with circular references.
"""
import types
from twisted.python import log, reflect
class NotKnown:
def __init__(self):
self.dependants = []
self.resolved = 0
def addDependant(self, mutableObject, key):
assert not self.resolved
self.dependants.append( (mutableObject, key) )
resolvedObject = None
def resolveDependants(self, newObject):
self.resolved = 1
self.resolvedObject = newObject
for mut, key in self.dependants:
mut[key] = newObject
if isinstance(newObject, NotKnown):
newObject.addDependant(mut, key)
def __hash__(self):
assert 0, "I am not to be used as a dictionary key."
class _Container(NotKnown):
"""
Helper class to resolve circular references on container objects.
"""
def __init__(self, l, containerType):
"""
        @param l: The list of objects which may contain some not-yet-referenced
            objects.
@param containerType: A type of container objects (e.g., C{tuple} or
C{set}).
"""
NotKnown.__init__(self)
self.containerType = containerType
self.l = l
self.locs = range(len(l))
for idx in xrange(len(l)):
if not isinstance(l[idx], NotKnown):
self.locs.remove(idx)
else:
l[idx].addDependant(self, idx)
if not self.locs:
self.resolveDependants(self.containerType(self.l))
def __setitem__(self, n, obj):
"""
Change the value of one contained objects, and resolve references if
all objects have been referenced.
"""
self.l[n] = obj
if not isinstance(obj, NotKnown):
self.locs.remove(n)
if not self.locs:
self.resolveDependants(self.containerType(self.l))
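# Illustrative scenario (an added note, not in the original source): when
# unpickling something like t = ([t],), the inner reference to t is a NotKnown
# at first, so _Container holds the partially-built list and only constructs
# the real tuple once every NotKnown slot has been filled via __setitem__.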
class _Tuple(_Container):
"""
    Manage a tuple containing circular references. Deprecated: use
    C{_Container} instead.
"""
def __init__(self, l):
"""
        @param l: The list of objects, which may contain some
            not-yet-referenced objects.
"""
_Container.__init__(self, l, tuple)
class _InstanceMethod(NotKnown):
def __init__(self, im_name, im_self, im_class):
NotKnown.__init__(self)
self.my_class = im_class
self.name = im_name
        # im_self _must_ be a NotKnown, since it is still being resolved.
im_self.addDependant(self, 0)
def __call__(self, *args, **kw):
import traceback
log.msg('instance method %s.%s' % (reflect.qual(self.my_class), self.name))
log.msg('being called with %r %r' % (args, kw))
traceback.print_stack(file=log.logfile)
assert 0
def __setitem__(self, n, obj):
assert n == 0, "only zero index allowed"
if not isinstance(obj, NotKnown):
method = types.MethodType(self.my_class.__dict__[self.name],
obj, self.my_class)
self.resolveDependants(method)
class _DictKeyAndValue:
def __init__(self, dict):
self.dict = dict
def __setitem__(self, n, obj):
if n not in (1, 0):
raise RuntimeError("DictKeyAndValue should only ever be called with 0 or 1")
if n: # value
self.value = obj
else:
self.key = obj
if hasattr(self, "key") and hasattr(self, "value"):
self.dict[self.key] = self.value
class _Dereference(NotKnown):
def __init__(self, id):
NotKnown.__init__(self)
self.id = id
from twisted.internet.defer import Deferred
class _Catcher:
def catch(self, value):
self.value = value
class _Defer(Deferred, NotKnown):
def __init__(self):
Deferred.__init__(self)
NotKnown.__init__(self)
self.pause()
wasset = 0
def __setitem__(self, n, obj):
if self.wasset:
raise RuntimeError('setitem should only be called once, setting %r to %r' % (n, obj))
else:
self.wasset = 1
self.callback(obj)
def addDependant(self, dep, key):
# by the time I'm adding a dependant, I'm *not* adding any more
# callbacks
NotKnown.addDependant(self, dep, key)
self.unpause()
        resolved = self.result
        self.resolveDependants(resolved)
|
hlzz/dotfiles
|
graphics/VTK-7.0.0/ThirdParty/Twisted/twisted/persisted/crefutil.py
|
Python
|
bsd-3-clause
| 4,659
|
# name=Monitoring plots
# displayinmenu=true
# displaytouser=true
# displayinselector=true
from monitoring import plot
import toolbox as tb
from voluptuous import Schema, All, Any, Range, Datetime, Required, Optional, Lower
class PlotTool(tb.Tool):
defaultColours = [
[166, 206, 227],
[ 31, 120, 180],
[178, 223, 138],
[ 51, 160, 44],
[251, 154, 153],
[227, 26, 28],
[253, 191, 111],
[255, 127, 0],
[202, 178, 214],
[106, 61, 154]
]
defaultLineWidth = 1.25
schema = Schema({
'site': unicode,
'locations': [
unicode
],
'interval': unicode,
'version': unicode,
'output_folder': unicode,
'period': {
'start': Datetime("%d%b%Y %H:%M", msg="Start date must be formatted like this: 01JAN2000 00:00"),
'end': Datetime("%d%b%Y %H:%M", msg="End date must be formatted like this: 01JAN2000 00:00")
},
'params': {
unicode: Any({
Optional('scale'): All(Lower, Any('lin', 'log'))
}, None)
},
Required('width', default=1200): All(int, Range(min=100, max=3000)),
Required('height', default=800): All(int, Range(min=100, max=3000)),
Required('line', default={'width': defaultLineWidth,
'colours': defaultColours}): {
Required('width', default=defaultLineWidth):
All(float, Range(min=0.5, max=2.0)),
Required('colours', default=defaultColours): [
[All(int, Range(min=0, max=255)),
All(int, Range(min=0, max=255)),
All(int, Range(min=0, max=255))]
]
}
}, required=True)
def main(self):
plotted, messages = plot.onePerParam(self.config, self.dssFilePath)
messages.insert(0, "{} Timeseries plots exported.".format(plotted))
self.message += "\n".join(messages)
tool = PlotTool()
tool.run()
|
jprine/monitoring-module
|
src/monitoring_plots.py
|
Python
|
mit
| 2,046
|
import tkinter as tk
import tkinter.ttk
# adapted from http://tkinter.unpythonic.net/wiki/VerticalScrolledFrame
class VerticalScrolledFrame(tk.Frame):
"""A pure Tkinter scrollable frame that actually works!
* Use the 'interior' attribute to place widgets inside the scrollable frame
* Construct and pack/place/grid normally
* This frame only allows vertical scrolling
"""
def __init__(self, parent, *args, **kw):
tk.Frame.__init__(self, parent, *args, **kw)
self.config(borderwidth=2, relief='sunken')
# create a canvas object and a vertical scrollbar for scrolling it
vscrollbar = tk.ttk.Scrollbar(self, orient=tk.VERTICAL)
vscrollbar.pack(fill=tk.Y, side=tk.RIGHT, expand=tk.FALSE)
canvas = tk.Canvas(self, bd=0, highlightthickness=0,
yscrollcommand=vscrollbar.set)
canvas.pack(side=tk.LEFT, fill=tk.BOTH, expand=tk.TRUE)
vscrollbar.config(command=canvas.yview)
# reset the view
canvas.xview_moveto(0)
canvas.yview_moveto(0)
# create a frame inside the canvas which will be scrolled with it
self.interior = interior = tk.Frame(canvas)
interior_id = canvas.create_window(0, 0, window=interior,
anchor=tk.NW)
self.prev_size = (interior.winfo_reqwidth(), max(interior.winfo_reqheight(), canvas.winfo_reqheight()))
# track changes to the canvas and frame width and sync them,
# also updating the scrollbar
def _configure_interior(event):
# update the scrollbars to match the size of the inner frame
size = (interior.winfo_reqwidth(), max(interior.winfo_reqheight(), canvas.winfo_height()))
if self.prev_size != size:
canvas.config(scrollregion="0 0 %s %s" % size)
self.prev_size = size
# update the canvas's width to fit the inner frame
if canvas.winfo_reqwidth() != interior.winfo_reqwidth():
canvas.config(width=interior.winfo_reqwidth())
interior.bind('<Configure>', _configure_interior)
def _configure_canvas(event):
# update the inner frame's width to fill the canvas
if canvas.winfo_width() != interior.winfo_width():
canvas.itemconfigure(interior_id, width=canvas.winfo_width())
canvas.bind('<Configure>', _configure_canvas)
        self.canvas = canvas
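# A minimal usage sketch (added for illustration; not part of the original
# module):
if __name__ == '__main__':
    root = tk.Tk()
    frame = VerticalScrolledFrame(root)
    frame.pack(fill=tk.BOTH, expand=True)
    for i in range(30):
        tk.Label(frame.interior, text='row %d' % i).pack(fill=tk.X)
    root.mainloop()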
|
JonathanTaquet/Oe2sSLE
|
VerticalScrolledFrame.py
|
Python
|
gpl-2.0
| 2,525
|
import unittest
import time
from datetime import datetime
from app import create_app, db
from app.models import User, AnonymousUser, Role, Permission, Follow
class UserModelTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
Role.insert_roles()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_password_setter(self):
u = User(password='cat')
self.assertTrue(u.password_hash is not None)
def test_no_password_getter(self):
u = User(password='cat')
with self.assertRaises(AttributeError):
u.password
def test_password_verification(self):
u = User(password='cat')
self.assertTrue(u.verify_password('cat'))
self.assertFalse(u.verify_password('dog'))
def test_password_salts_are_random(self):
u = User(password='cat')
u2 = User(password='cat')
self.assertTrue(u.password_hash != u2.password_hash)
def test_valid_confirmation_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_confirmation_token()
self.assertTrue(u.confirm(token))
def test_invalid_confirmation_token(self):
u1 = User(password='cat')
u2 = User(password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_confirmation_token()
self.assertFalse(u2.confirm(token))
def test_expired_confirmation_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_confirmation_token(1)
time.sleep(2)
self.assertFalse(u.confirm(token))
def test_valid_reset_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_reset_token()
self.assertTrue(u.reset_password(token, 'dog'))
self.assertTrue(u.verify_password('dog'))
def test_invalid_reset_token(self):
u1 = User(password='cat')
u2 = User(password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_reset_token()
self.assertFalse(u2.reset_password(token, 'horse'))
self.assertTrue(u2.verify_password('dog'))
def test_valid_email_change_token(self):
u = User(email='john@example.com', password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_email_change_token('susan@example.org')
self.assertTrue(u.change_email(token))
self.assertTrue(u.email == 'susan@example.org')
def test_invalid_email_change_token(self):
u1 = User(email='john@example.com', password='cat')
u2 = User(email='susan@example.org', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_email_change_token('david@example.net')
self.assertFalse(u2.change_email(token))
self.assertTrue(u2.email == 'susan@example.org')
def test_duplicate_email_change_token(self):
u1 = User(email='john@example.com', password='cat')
u2 = User(email='susan@example.org', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u2.generate_email_change_token('john@example.com')
self.assertFalse(u2.change_email(token))
self.assertTrue(u2.email == 'susan@example.org')
def test_roles_and_permissions(self):
u = User(email='john@example.com', password='cat')
self.assertTrue(u.can(Permission.WRITE_ARTICLES))
self.assertFalse(u.can(Permission.MODERATE_COMMENTS))
def test_anonymous_user(self):
u = AnonymousUser()
self.assertFalse(u.can(Permission.FOLLOW))
def test_timestamps(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
self.assertTrue(
(datetime.utcnow() - u.member_since).total_seconds() < 3)
self.assertTrue(
(datetime.utcnow() - u.last_seen).total_seconds() < 3)
def test_ping(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
time.sleep(2)
last_seen_before = u.last_seen
u.ping()
self.assertTrue(u.last_seen > last_seen_before)
def test_gravatar(self):
u = User(email='john@example.com', password='cat')
with self.app.test_request_context('/'):
gravatar = u.gravatar()
gravatar_256 = u.gravatar(size=256)
gravatar_pg = u.gravatar(rating='pg')
gravatar_retro = u.gravatar(default='retro')
with self.app.test_request_context('/', base_url='https://example.com'):
gravatar_ssl = u.gravatar()
        self.assertTrue('http://www.gravatar.com/avatar/' +
                        'd4c74594d841139328695756648b6bd6' in gravatar)
self.assertTrue('s=256' in gravatar_256)
self.assertTrue('r=pg' in gravatar_pg)
self.assertTrue('d=retro' in gravatar_retro)
self.assertTrue('https://secure.gravatar.com/avatar/' +
'd4c74594d841139328695756648b6bd6' in gravatar_ssl)
def test_follows(self):
u1 = User(email='john@example.com', password='cat')
u2 = User(email='susan@example.org', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
self.assertFalse(u1.is_following(u2))
self.assertFalse(u1.is_followed_by(u2))
timestamp_before = datetime.utcnow()
u1.follow(u2)
db.session.add(u1)
db.session.commit()
timestamp_after = datetime.utcnow()
self.assertTrue(u1.is_following(u2))
self.assertFalse(u1.is_followed_by(u2))
self.assertTrue(u2.is_followed_by(u1))
self.assertTrue(u1.followed.count() == 2)
self.assertTrue(u2.followers.count() == 2)
f = u1.followed.all()[-1]
self.assertTrue(f.followed == u2)
        # This assert appears to fail when using MySQL vs. SQLite
# self.assertTrue(timestamp_before <= f.timestamp <= timestamp_after)
f = u2.followers.all()[-1]
# self.assertTrue(f.follower == u1)
u1.unfollow(u2)
db.session.add(u1)
db.session.commit()
self.assertTrue(u1.followed.count() == 1)
self.assertTrue(u2.followers.count() == 1)
self.assertTrue(Follow.query.count() == 2)
u2.follow(u1)
db.session.add(u1)
db.session.add(u2)
db.session.commit()
db.session.delete(u2)
db.session.commit()
self.assertTrue(Follow.query.count() == 1)
|
russomi/flasky-appengine
|
tests/test_user_model.py
|
Python
|
mit
| 6,921
|
from pywps.app.Service import Service
def make_app(processes=None, cfgfiles=None):
app = Service(processes=processes, cfgfiles=cfgfiles)
return app
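# Example (an illustrative sketch, assuming the Service object exposes a WSGI
# interface; werkzeug is not a dependency of this module):
#   from werkzeug.serving import run_simple
#   application = make_app(processes=[...])
#   run_simple('localhost', 5000, application)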
|
bird-house/PyWPS
|
pywps/application.py
|
Python
|
mit
| 158
|
from __future__ import unicode_literals
from django.contrib.auth.models import Permission, User
from django.utils import six
from djblets.avatars.services.gravatar import GravatarService
from djblets.testing.decorators import add_fixtures
from djblets.webapi.testing.decorators import webapi_test_template
from kgb import SpyAgency
from reviewboard.accounts.backends import (AuthBackend,
get_enabled_auth_backends)
from reviewboard.accounts.models import Profile
from reviewboard.avatars import avatar_services
from reviewboard.avatars.testcase import AvatarServicesTestMixin
from reviewboard.site.models import LocalSite
from reviewboard.webapi.resources import resources
from reviewboard.webapi.tests.base import BaseWebAPITestCase
from reviewboard.webapi.tests.mimetypes import (user_item_mimetype,
user_list_mimetype)
from reviewboard.webapi.tests.mixins import BasicTestsMetaclass
from reviewboard.webapi.tests.urls import (get_user_item_url,
get_user_list_url)
@six.add_metaclass(BasicTestsMetaclass)
class ResourceListTests(SpyAgency, BaseWebAPITestCase):
"""Testing the UserResource list API tests."""
fixtures = ['test_users']
sample_api_url = 'users/'
resource = resources.user
test_http_methods = ('GET',)
def setup_http_not_allowed_list_test(self, user):
return get_user_list_url()
def compare_item(self, item_rsp, obj):
self.assertEqual(item_rsp['id'], obj.pk)
self.assertEqual(item_rsp['username'], obj.username)
#
# HTTP GET tests
#
def setup_basic_get_test(self, user, with_local_site, local_site_name,
populate_items):
if not populate_items:
items = []
elif with_local_site:
local_site = self.get_local_site(name=local_site_name)
items = list(local_site.users.all())
else:
items = list(User.objects.all())
return (get_user_list_url(local_site_name),
user_list_mimetype,
items)
@webapi_test_template
def test_get_filter_inactive(self):
"""Testing the GET <URL> API filters out inactive users by default"""
dopey = User.objects.get(username='dopey')
dopey.is_active = False
dopey.save()
rsp = self.api_get(get_user_list_url(),
expected_mimetype=user_list_mimetype)
self.assertEqual(rsp['stat'], 'ok')
user_pks = [user['id'] for user in rsp['users']]
returned_users = set(User.objects.filter(pk__in=user_pks))
expected_users = set(User.objects.filter(is_active=True))
self.assertEqual(returned_users, expected_users)
@webapi_test_template
def test_get_include_inactive(self):
"""Testing the GET <URL>/?include-inactive=1 API includes inactive
users
"""
dopey = User.objects.get(username='dopey')
dopey.is_active = False
dopey.save()
rsp = self.api_get(get_user_list_url(), {'include-inactive': '1'},
expected_mimetype=user_list_mimetype)
self.assertEqual(rsp['stat'], 'ok')
user_pks = [user['id'] for user in rsp['users']]
self.assertEqual(set(User.objects.filter(pk__in=user_pks)),
set(User.objects.all()))
@webapi_test_template
def test_get_include_inactive_true(self):
"""Testing the GET <URL>/?include-inactive=true API includes inactive
users
"""
dopey = User.objects.get(username='dopey')
dopey.is_active = False
dopey.save()
rsp = self.api_get(get_user_list_url(), {'include-inactive': 'true'},
expected_mimetype=user_list_mimetype)
self.assertEqual(rsp['stat'], 'ok')
user_pks = [user['id'] for user in rsp['users']]
self.assertEqual(set(User.objects.filter(pk__in=user_pks)),
set(User.objects.all()))
def test_get_with_q(self):
"""Testing the GET users/?q= API"""
rsp = self.api_get(get_user_list_url(), {'q': 'gru'},
expected_mimetype=user_list_mimetype)
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(len(rsp['users']), 1) # grumpy
def test_query_users_auth_backend(self):
"""Testing the GET users/?q= API
with AuthBackend.query_users failure
"""
class SandboxAuthBackend(AuthBackend):
backend_id = 'test-id'
name = 'test'
def query_users(self, query, request):
raise Exception
backend = SandboxAuthBackend()
self.spy_on(get_enabled_auth_backends, call_fake=lambda: [backend])
self.spy_on(backend.query_users)
rsp = self.api_get(get_user_list_url(), {'q': 'gru'},
expected_mimetype=user_list_mimetype)
self.assertEqual(rsp['stat'], 'ok')
self.assertTrue(backend.query_users.called)
def test_search_users_auth_backend(self):
"""Testing the GET users/?q= API
with AuthBackend.search_users failure
"""
class SandboxAuthBackend(AuthBackend):
backend_id = 'test-id'
name = 'test'
def search_users(self, query, request):
raise Exception
backend = SandboxAuthBackend()
self.spy_on(get_enabled_auth_backends, call_fake=lambda: [backend])
self.spy_on(backend.search_users)
rsp = self.api_get(get_user_list_url(), {'q': 'gru'},
expected_mimetype=user_list_mimetype)
self.assertEqual(rsp['stat'], 'ok')
self.assertTrue(backend.search_users.called)
#
# HTTP POST tests
#
@webapi_test_template
def test_post_anonymous(self):
"""Testing the POST <URL> API as an anonymous user"""
self.client.logout()
rsp = self.api_post(
get_user_list_url(),
{
'username': 'username',
'password': 'password',
'email': 'email@example.com',
},
expected_status=401)
self.assertIn('stat', rsp)
self.assertEqual(rsp['stat'], 'fail')
self.assertIn('err', rsp)
self.assertIn('code', rsp['err'])
self.assertEqual(rsp['err']['code'], 103)
@webapi_test_template
def test_post(self):
"""Testing the POST <URL> API as a regular user"""
rsp = self.api_post(
get_user_list_url(),
{
'username': 'username',
'password': 'password',
'email': 'email@example.com'
},
expected_status=403)
self.assertIn('stat', rsp)
self.assertEqual(rsp['stat'], 'fail')
self.assertIn('err', rsp)
self.assertIn('code', rsp['err'])
self.assertEqual(rsp['err']['code'], 101)
@webapi_test_template
def test_post_superuser(self):
"""Testing the POST <URL> API as a superuser"""
self.client.login(username='admin', password='admin')
rsp = self.api_post(
get_user_list_url(),
{
'username': 'username',
'password': 'password',
'email': 'email@example.com',
},
expected_mimetype=user_item_mimetype)
self.assertIn('stat', rsp)
self.assertEqual(rsp['stat'], 'ok')
self.compare_item(rsp['user'], User.objects.get(username='username'))
@webapi_test_template
def test_post_auth_add_user_perm(self):
"""Testing the POST <URL> API as a user with the auth.add_user
permission
"""
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='auth',
codename='add_user'))
rsp = self.api_post(
get_user_list_url(),
{
'username': 'username',
'password': 'password',
'email': 'email@example.com',
},
expected_mimetype=user_item_mimetype)
self.assertIn('stat', rsp)
self.assertEqual(rsp['stat'], 'ok')
self.compare_item(rsp['user'], User.objects.get(username='username'))
@webapi_test_template
def test_post_local_site(self):
"""Testing the POST <URL> API with a local site"""
local_site = LocalSite.objects.create(name='test', public=True)
self.client.login(username='admin', password='admin')
rsp = self.api_post(
get_user_list_url(local_site.name),
{
'username': 'username',
'password': 'password',
'email': 'email@example.com'
},
expected_status=403)
self.assertIn('stat', rsp)
self.assertEqual(rsp['stat'], 'fail')
self.assertIn('err', rsp)
self.assertIn('code', rsp['err'])
self.assertEqual(rsp['err']['code'], 101)
@webapi_test_template
def test_post_duplicate_username(self):
"""Testing the POST <URL> API for a username that already exists"""
self.client.login(username='admin', password='admin')
rsp = self.api_post(
get_user_list_url(),
{
'username': 'doc',
'password': 'password',
'email': 'doc@example.com'
},
expected_status=400)
self.assertIn('stat', rsp)
self.assertEqual(rsp['stat'], 'fail')
self.assertIn('fields', rsp)
self.assertIn('username', rsp['fields'])
@webapi_test_template
def test_post_invalid_email(self):
"""Testing the POST <URL> API for an invalid e-mail address"""
self.client.login(username='admin', password='admin')
rsp = self.api_post(
get_user_list_url(),
{
'username': 'username',
'password': 'password',
'email': 'invalid e-mail',
},
expected_status=400)
self.assertIn('stat', rsp)
self.assertEqual(rsp['stat'], 'fail')
self.assertIn('fields', rsp)
self.assertIn('email', rsp['fields'])
@six.add_metaclass(BasicTestsMetaclass)
class ResourceItemTests(AvatarServicesTestMixin, BaseWebAPITestCase):
"""Testing the UserResource item API tests."""
fixtures = ['test_users']
sample_api_url = 'users/<username>/'
resource = resources.user
def setUp(self):
super(ResourceItemTests, self).setUp()
avatar_services.enable_service(GravatarService, save=False)
def setup_http_not_allowed_item_test(self, user):
return get_user_item_url(user.username)
def compare_item(self, item_rsp, user):
self.assertEqual(item_rsp['id'], user.pk)
self.assertEqual(item_rsp['username'], user.username)
self.assertEqual(item_rsp['first_name'], user.first_name)
self.assertEqual(item_rsp['last_name'], user.last_name)
self.assertEqual(item_rsp['email'], user.email)
# There's no simple way to test the specific URLs that are returned,
# but we can at least make sure everything we expect to be present is
# present.
self.assertIn('avatar_url', item_rsp)
self.assertIn('1x', item_rsp['avatar_urls'])
self.assertIn('2x', item_rsp['avatar_urls'])
#
# HTTP GET tests
#
def setup_basic_get_test(self, user, with_local_site, local_site_name):
return (get_user_item_url(user.username, local_site_name),
user_item_mimetype,
user)
def test_get_not_modified(self):
"""Testing the GET users/<username>/ API with Not Modified response"""
self._testHttpCaching(get_user_item_url('doc'),
check_etags=True)
@add_fixtures(['test_site'])
def test_get_with_site_and_profile_private(self):
"""Testing the GET users/<username>/ API
with a local site and private profile
"""
self._login_user(local_site=True)
username = 'admin'
user = User.objects.get(username=username)
profile, is_new = Profile.objects.get_or_create(user=user)
profile.is_private = True
profile.save()
rsp = self.api_get(get_user_item_url(username, self.local_site_name),
expected_mimetype=user_item_mimetype)
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(rsp['user']['username'], user.username)
self.assertNotIn('first_name', rsp['user'])
self.assertNotIn('last_name', rsp['user'])
self.assertNotIn('email', rsp['user'])
@add_fixtures(['test_site'])
def test_get_missing_user_with_site(self):
"""Testing the GET users/<username>/ API with a local site"""
self._login_user(local_site=True)
self.api_get(get_user_item_url('dopey', self.local_site_name),
expected_status=404)
@webapi_test_template
def test_get_with_profile_private_and_only_fields(self):
"""Testing the GET <URL> API with a private profile and ?only-fields=
"""
username = 'dopey'
user = User.objects.get(username=username)
profile, is_new = Profile.objects.get_or_create(user=user)
profile.is_private = True
profile.save()
rsp = self.api_get(
'%s?only-fields=username' % get_user_item_url(username),
expected_mimetype=user_item_mimetype)
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(rsp['user']['username'], user.username)
self.assertNotIn('first_name', rsp['user'])
self.assertNotIn('last_name', rsp['user'])
self.assertNotIn('email', rsp['user'])
@webapi_test_template
def test_get_inactive_user(self):
"""Testing the GET <URL> API for an inactive user"""
dopey = User.objects.get(username='dopey')
dopey.is_active = False
dopey.save()
rsp = self.api_get(get_user_item_url('dopey'),
expected_mimetype=user_item_mimetype)
self.assertEqual(rsp['stat'], 'ok')
self.assertEqual(rsp['user']['is_active'], False)
|
brennie/reviewboard
|
reviewboard/webapi/tests/test_user.py
|
Python
|
mit
| 14,393
|
import os
import re
from setuptools import find_packages, setup
def get_version(package):
"""
Return package version as listed in `__version__` in `version.py`.
"""
init_py = open(os.path.join(package, 'version.py')).read()
return re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
setup(
name='docker-utils',
version=get_version('docker_utils'),
description='Utilities for Docker',
author='Andy McKay',
author_email='andym@mozilla.com',
license='BSD',
install_requires=['docker-compose', 'requests==2.5.3'],
packages=find_packages(),
entry_points={
'console_scripts': [
'docker-utils = docker_utils.entry:entry'
]
},
url='https://github.com/andymckay/docker-utils',
zip_safe=True,
)
|
kumar303/docker-utils
|
setup.py
|
Python
|
apache-2.0
| 798
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest.common import tempest_fixtures as fixtures
from tempest import test
class HostsAdminV3Test(base.BaseV3ComputeAdminTest):
"""
Tests hosts API using admin privileges.
"""
_interface = 'json'
@classmethod
def setUpClass(cls):
super(HostsAdminV3Test, cls).setUpClass()
cls.client = cls.hosts_admin_client
@test.attr(type='gate')
def test_list_hosts(self):
resp, hosts = self.client.list_hosts()
self.assertEqual(200, resp.status)
self.assertTrue(len(hosts) >= 2, str(hosts))
@test.attr(type='gate')
def test_list_hosts_with_zone(self):
self.useFixture(fixtures.LockFixture('availability_zone'))
resp, hosts = self.client.list_hosts()
host = hosts[0]
zone_name = host['zone']
params = {'zone': zone_name}
resp, hosts = self.client.list_hosts(params)
self.assertEqual(200, resp.status)
self.assertTrue(len(hosts) >= 1)
self.assertIn(host, hosts)
@test.attr(type='gate')
def test_list_hosts_with_a_blank_zone(self):
        # If the request is sent with a blank zone, it will succeed and
        # return the full list of hosts
params = {'zone': ''}
resp, hosts = self.client.list_hosts(params)
self.assertNotEqual(0, len(hosts))
self.assertEqual(200, resp.status)
@test.attr(type='gate')
def test_list_hosts_with_nonexistent_zone(self):
        # If the request is sent with a nonexistent zone, it will succeed
        # and no hosts will be returned
params = {'zone': 'xxx'}
resp, hosts = self.client.list_hosts(params)
self.assertEqual(0, len(hosts))
self.assertEqual(200, resp.status)
@test.attr(type='gate')
def test_show_host_detail(self):
resp, hosts = self.client.list_hosts()
self.assertEqual(200, resp.status)
hosts = [host for host in hosts if host['service'] == 'compute']
self.assertTrue(len(hosts) >= 1)
for host in hosts:
hostname = host['host_name']
resp, resources = self.client.show_host_detail(hostname)
self.assertEqual(200, resp.status)
self.assertTrue(len(resources) >= 1)
host_resource = resources[0]['resource']
self.assertIsNotNone(host_resource)
self.assertIsNotNone(host_resource['cpu'])
self.assertIsNotNone(host_resource['disk_gb'])
self.assertIsNotNone(host_resource['memory_mb'])
self.assertIsNotNone(host_resource['project'])
self.assertEqual(hostname, host_resource['host'])
|
ntymtsiv/tempest
|
tempest/api/compute/v3/admin/test_hosts.py
|
Python
|
apache-2.0
| 3,304
|
#!/usr/bin/python3
# Copyright 2019 by Jeff Woods
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import random
import sys
# ### Entity Generator ###
# The EntityGenerator class serves as a container for Elements. When the
# create() method is called on this object, it walks through its list of
# children calling create() on each of them in turn.
# ### Elements ###
# There are three main types of elements. These elements are all derived
# from the EntityElement class, and each may be added as a child to the
# EntityGenerator object (above). These Element types include:
#
# - A SimpleElement. This is an Element which creates nothing more than
# a simple value, such as a string or integer.
# - An ArrayElement. This Element is implemented as a list of homogeneous
# items. The items INSIDE this container, which are created by the
# generator passed in the object initialization, may be of any supported
# Element type (Simple, Array, or Dict). The number of items generated
#   inside the array is determined by the count_fn param.
# - A DictElement. This Element returns a dict populated with key/value
# pairs. This is the most sophisticated Element, supporting the nesting
# of child Elements (see below).
#
# Each type of Element supports one or more generators. A sample of
# generators might include gender (simple), dob (simple), name (dict),
# address (dict), and ssn (simple). An entity would almost certainly have
# more than one address, so we could employ an ArrayElement to enclose the
# address dict.
# ### Child Elements ###
# Outside of the EntityGenerator class, only Elements based on the DictElement
# class may have children. This makes sense because:
# - the entity to which the child is added must be a container of some
# sort. Of the three Element types, only Dict and Array qualify.
# - An ArrayElement gets filled with a consistent type of element (names,
# addresses, trades, etc).
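# A minimal wiring sketch (illustrative; GenderGenerator and AddressGenerator
# are hypothetical generator classes exposing a create() method, and are not
# part of this module):
#
#   gen = EntityGenerator()
#   gen.addElement(SimpleElement(name='gender', generator=GenderGenerator()))
#   gen.addElement(ArrayElement(name='addresses',
#                               generator=AddressGenerator(),
#                               count=2))
#   entity = gen.create()  # e.g. {'gender': 'F', 'addresses': [{...}, {...}]}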
class EntityGenerator(object):
'''
EntityGenerator maintains the structured relationships between initialized
EntityElements. It is responsible for initializing the root data element
and walking the tree of generator Entities. EntityElements are added to
the EntityGenerator much like nodes might be added to an XML document when
working with DOM.
'''
def __init__(self):
self.data = None # the current data object being built
# the fact that children is an array is VERY IMPORTANT. The order
# in which the elements are created must be guaranteed so that
# generators which reference other elements can be guaranteed that
        # referenced values exist. As an example, assume that we have name
# and gender elements, with the generation of the name depending on
# the value selected for gender. It wouldn't do much good to generate
# the name before the gender.
self.children = [] # a list of child generators (populate data)
return
def addElement(self, elem, label = None):
if label is None:
label = elem.name
elem.setRoot(self)
x = (label, elem)
self.children.append(x)
return
def create(self, **kwargs):
self.data = {}
for elem in self.children:
e_nam = elem[0]
e_val = elem[1].create(**kwargs)
self.data[e_nam] = e_val
return self.data
def getValueByPath(self, path):
'''
Traverse the data element being generated and return the value
matching the given path. If the path cannot be parsed, we will assume
that the given value was intended to be a literal value. If we
encounter an array during our traversal, we will always select or
navigate through the last element in the list (assuming it is the
most recently generated).
'''
parts = path.split('/')
if len(parts[0]) == 0:
element = self.data
for i in parts[1:]: # skip the first (empty) part
if type(element) is list:
element = element[-1]
element = element[i]
return element
return path # can't parse? return the whole dang thing as literal
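    # For example (illustrative, assuming 'gender' and 'addresses' elements
    # have been generated): getValueByPath('/gender') returns the generated
    # gender, while getValueByPath('/addresses/street') walks into the most
    # recently generated address dict. A value without a leading '/' is
    # returned unchanged, as a literal.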
@staticmethod
def defaultDataPath():
p = os.path.dirname(__file__)
datapath = os.path.join(p, 'data')
return datapath
class EntityElement(object):
'''
Base class for all Entity Elements. An EntityElement may be added to
either an Entity (as an element on the root of the element) or
as a child of another EntityElement.
Constructor accepts the arguments:
name - the name of the element in generated output (dict key)
count - the number of times this element will be repeated. This
may be a callable (function) or an integer value.
generator - the generator class used to create data
params - parameters to be passed to each create() call. The list
of valid parameters is relative to the generator being used
root - a reference to EntityGenerator object used to create this
data entity.
'''
def __init__(self, name = None,
generator = None,
params = None,
root = None):
self.name = name
self.root = root
self.params = params
        self.generator = generator  # used by create() in subclasses
self.mods = None
return
def setRoot(self, root):
self.root = root
@staticmethod
def count_const_fn(x):
return lambda: x
@staticmethod
def count_rand_fn(max, min=0):
if max <= min:
raise ValueError('min must be less than max in count_rand_fn()')
return lambda: int(((max - min) * random.random()) + min)
@staticmethod
def count_norm_fn(mean=0.0, stdev=1.0, integer=False):
if integer is False:
return lambda: random.normalvariate(mu=mean, sigma=stdev)
return lambda: int(random.normalvariate(mu=mean, sigma=stdev))
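# Illustrative uses of the count helpers above (pass the returned callable as
# the count_fn argument of ArrayElement below):
#   EntityElement.count_const_fn(3)                      # always 3
#   EntityElement.count_rand_fn(5, 1)                    # uniform int in [1, 5)
#   EntityElement.count_norm_fn(2.0, 0.5, integer=True)  # int(N(mu=2.0, sigma=0.5))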
class ArrayElement(EntityElement):
    '''
    An ArrayElement produces a list of homogeneous items, each created by the
    configured generator; the number of items is drawn from count_fn on every
    create() call.
    '''
def __init__(self, generator = None,
count_fn = None,
count = None,
**kwargs):
EntityElement.__init__(self, **kwargs)
# TODO - an ArrayElement may not have children
# count must be a callable function, but we can also accept an integer.
# If given an integer, we'll convert it using a lambda function which
# returns the appropriate value.
if not callable(count_fn):
if type(count) is int:
count_fn = lambda: count
else:
raise ValueError('Invalid type for element count')
self.count = count_fn
self.count_fn = count_fn
self.generator = generator
return
def create(self, **kwargs):
data = []
if self.count_fn is None: return data
c = self.count_fn()
while c > 0:
e = self.generator.create(root = data)
data.append(e)
c -= 1
return data
class DictElement(EntityElement):
'''
A DictElement may have children of any type.
'''
def __init__(self, **kwargs):
EntityElement.__init__(self, **kwargs)
self.children = None
return
def addElement(self, elem, label = None):
if not isinstance(elem, EntityElement):
raise ValueError('element not EntityElement type in addElement')
if label is None:
label = elem.name
# the fact that children is an array is VERY IMPORTANT. The order
# in which the elements are created must be guaranteed so that
# parameters which reference other elements can be guaranteed that
        # referenced values exist.
        if self.children is None:
            self.children = []
elem.setRoot(self.root)
x = (label, elem)
self.children.append(x)
return
def addChildren(self, data, **kwargs):
if self.children is None: return None
for child in self.children:
enam = child[0]
egen = child[1]
data[enam] = egen.create()
return
class SimpleElement(EntityElement):
'''
A SimpleElement cannot have children.
'''
def __init__(self, **kwargs):
EntityElement.__init__(self, **kwargs)
# TODO - a SimpleElement may not have children
return
    def create(self, **kwargs):
        # Minimal sketch (an assumption): delegate to the configured
        # generator, mirroring how ArrayElement uses its generator.
        if self.generator is not None:
            return self.generator.create(**kwargs)
|
jcwoods/datagen
|
datagen/entitygenerator.py
|
Python
|
apache-2.0
| 9,231
|
"""
Django settings for drf_bench project.
Generated by 'django-admin startproject' using Django 1.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'CHANGE THIS!!!'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'drf_bench.core',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'drf_bench.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'drf_bench.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
|
xordoquy/django-rest-framework-benchmark
|
drf_bench/settings.py
|
Python
|
mit
| 2,632
|
# pylint: disable=E1101
import time
import itertools as it
import numpy as np
from bl.core.io import MessageStreamWriter
import bl.core.gt.messages.SnpCall as SnpCall
import bl.vl.utils as vlu
from kb_object_creator import KBObjectCreator
from bl.vl.kb.drivers.omero.proxy_core import convert_from_numpy
from bl.vl.kb.drivers.omero.genomics import MARKER_LABEL_SIZE, MARKER_MASK_SIZE
PAYLOAD_MSG_TYPE = 'core.gt.messages.SampleSnpCall'
MSET_TABLE_COLS_DTYPE = [('label', '|S%d' % MARKER_LABEL_SIZE),
('index', 'i8'),
('mask', '|S%d' % MARKER_MASK_SIZE),
('permutation', '?')]
class UTCommon(KBObjectCreator):
def create_markers_set_from_stream(self, N):
label = 'ams-%f' % time.time()
maker, model, release = 'FOO', 'FOO1', '%f' % time.time()
rows = np.array([('M%d' % i, i, 'AC[A/G]GT', False)
for i in xrange(N)],
dtype=MSET_TABLE_COLS_DTYPE)
def stream(rows):
dtype = rows.dtype
for r in rows:
yield dict([(k, convert_from_numpy(r[k])) for k in dtype.names])
mset = self.kb.genomics.create_markers_array(
label, maker, model, release, stream(rows), self.action
)
return mset, rows
def create_markers_set(self, N):
label = 'ams-%f' % time.time()
maker, model, release = 'FOO', 'FOO1', '%f' % time.time()
rows = np.array([('M%d' % i, i, 'AC[A/G]GT', False)
for i in xrange(N)],
dtype=MSET_TABLE_COLS_DTYPE)
mset = self.kb.genomics.create_markers_array(
label, maker, model, release, rows, self.action
)
return mset, rows
def create_reference_genome(self, action):
conf = {'nChroms' : 10,
'maker': vlu.make_random_str(),
'model': vlu.make_random_str(),
'release' : vlu.make_random_str(),
'label': vlu.make_random_str(),
'status' : self.kb.DataSampleStatus.USABLE,
'action': action}
reference_genome = self.kb.factory.create(self.kb.ReferenceGenome,
conf).save()
return reference_genome
@staticmethod
def make_fake_data(n, add_nan=False):
probs = 0.5 * np.cast[np.float32](np.random.random((2, n)))
confs = np.cast[np.float32](np.random.random(n))
if add_nan:
rand_indices = np.random.random_integers(
0, len(probs[0]) - 1, len(probs[0]) / 2
)
for x in set(rand_indices):
probs[0][x] = np.nan
probs[1][x] = np.nan
return probs, confs
@staticmethod
def make_fake_ssc(mset, labels, sample_id, probs, conf, fn):
header = {'markers_set' : mset.label, 'sample_id': sample_id}
stream = MessageStreamWriter(fn, PAYLOAD_MSG_TYPE, header)
for l, p_AA, p_BB, c in it.izip(labels, probs[0], probs[1], conf):
p_AB = 1.0 - (p_AA + p_BB)
w_aa, w_ab, w_bb = p_AA, p_AB, p_BB
stream.write({
'sample_id': sample_id,
'snp_id': l,
'call': SnpCall.NOCALL, # we will not test this anyway
'confidence': float(c),
'sig_A': float(p_AA),
'sig_B': float(p_BB),
'w_AA': float(w_aa),
'w_AB': float(w_ab),
'w_BB': float(w_bb),
})
stream.close()
def create_data_sample(self, mset, label, action):
conf = {
'label': label,
'status': self.kb.DataSampleStatus.USABLE,
'action': action,
'snpMarkersSet': mset,
}
data_sample = self.kb.factory.create(self.kb.GenotypeDataSample,
conf).save()
return data_sample
def create_data_object(self, data_sample, action, add_nan=False):
n = self.kb.genomics.get_number_of_markers(data_sample.snpMarkersSet)
probs, confs = self.make_fake_data(n, add_nan)
do = self.kb.genomics.add_gdo_data_object(action,
data_sample, probs, confs).save()
return do, probs, confs
def create_variant_call_support(self, mset, reference_genome, action,
chromosome=1):
VariantCallSupport = self.kb.VariantCallSupport
N = self.kb.genomics.get_number_of_markers(mset)
mset_vid = mset.id
nodes = np.array([(chromosome, 10 * i) for i in xrange(N)],
dtype=VariantCallSupport.NODES_DTYPE)
field = np.array([(i, mset_vid, i) for i in range(len(nodes))],
dtype=VariantCallSupport.ATTR_ORIGIN_DTYPE)
label = vlu.make_random_str()
conf = {'referenceGenome' : reference_genome,
'label' : label,
'status' : self.kb.DataSampleStatus.USABLE,
'action': action}
vcs = self.kb.factory.create(VariantCallSupport, conf)
vcs.define_support(nodes)
vcs.define_field('origin', field)
vcs.save()
return vcs
|
crs4/omero.biobank
|
test/genotype/common.py
|
Python
|
gpl-2.0
| 5,300
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes import _
no_cache = True
def get_context():
from portal.utils import get_transaction_context
context = get_transaction_context("Sales Order", webnotes.form_dict.name)
modify_status(context.get("doc"))
context.update({
"parent_link": "orders",
"parent_title": "My Orders"
})
return context
def modify_status(doc):
doc.status = []
if 0 < doc.per_billed < 100:
doc.status.append(("label-warning", "icon-ok", _("Partially Billed")))
elif doc.per_billed == 100:
doc.status.append(("label-success", "icon-ok", _("Billed")))
if 0 < doc.per_delivered < 100:
doc.status.append(("label-warning", "icon-truck", _("Partially Delivered")))
elif doc.per_delivered == 100:
doc.status.append(("label-success", "icon-truck", _("Delivered")))
doc.status = " " + " ".join(('<span class="label %s"><i class="icon-fixed-width %s"></i> %s</span>' % s
for s in doc.status))
|
saurabh6790/test-med-app
|
selling/doctype/sales_order/templates/pages/order.py
|
Python
|
agpl-3.0
| 1,086
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -*- Python -*-
"""
@file WiiRemoteTest.py
@brief Test Component
@date $Date$
"""
import sys
import time
sys.path.append(".")
# Import RTM module
import RTC
import OpenRTM_aist
import os
def cls():
os.system(['clear','cls'][os.name == 'nt'])
# Import Service implementation class
# <rtc-template block="service_impl">
# </rtc-template>
# Import Service stub modules
# <rtc-template block="consumer_import">
# </rtc-template>
# This module's specification
# <rtc-template block="module_spec">
wiiremotetest_spec = ["implementation_id", "WiiRemoteTest",
"type_name", "WiiRemoteTest",
"description", "Test Component",
"version", "1.0.0",
"vendor", "Sugar Sweet Robotics",
"category", "Tutorial",
"activity_type", "DataFlowComponent",
"max_instance", "1",
"language", "Python",
"lang_type", "SCRIPT",
""]
# </rtc-template>
##
# @class WiiRemoteTest
# @brief Test Component
#
#
class WiiRemoteTest(OpenRTM_aist.DataFlowComponentBase):
##
# @brief constructor
    # @param manager Manager object
#
def __init__(self, manager):
OpenRTM_aist.DataFlowComponentBase.__init__(self, manager)
self._d_buttons = RTC.TimedBooleanSeq(RTC.Time(0,0),[])
"""
"""
self._buttonsIn = OpenRTM_aist.InPort("buttons", self._d_buttons)
self._d_accel = RTC.TimedAngularAcceleration3D(RTC.Time(0,0),0)
"""
"""
self._accelIn = OpenRTM_aist.InPort("accel", self._d_accel)
self._d_orientation = RTC.TimedOrientation3D(RTC.Time(0,0),0)
"""
"""
self._orientationIn = OpenRTM_aist.InPort("orientation", self._d_orientation)
self._d_cursor = RTC.TimedPoint2D(RTC.Time(0,0),0)
"""
"""
self._cursorIn = OpenRTM_aist.InPort("cursor", self._d_cursor)
self._d_distance = RTC.TimedLong(RTC.Time(0,0),0)
"""
"""
self._distanceIn = OpenRTM_aist.InPort("distance", self._d_distance)
self._d_ir = RTC.TimedPoint2D(RTC.Time(0,0),0)
"""
"""
self._irIn = OpenRTM_aist.InPort("ir", self._d_ir)
self._d_rumble = RTC.TimedBoolean(RTC.Time(0,0),0)
"""
"""
self._rumbleOut = OpenRTM_aist.OutPort("rumble", self._d_rumble)
self._d_leds = RTC.TimedBooleanSeq(RTC.Time(0,0),[])
"""
"""
self._ledsOut = OpenRTM_aist.OutPort("leds", self._d_leds)
"""
self.buttons = [False] * 12
self.cursor = [0, 0]
self.ir = [0, 0]
self.distance = 0
self.accel = [0, 0, 0]
self.orientation = [0, 0, 0]
"""
self.rumble = False
self.leds = [False] * 4
# initialize of configuration-data.
# <rtc-template block="init_conf_param">
# </rtc-template>
##
#
# The initialize action (on CREATED->ALIVE transition)
    # former rtc_init_entry()
#
# @return RTC::ReturnCode_t
#
#
def onInitialize(self):
# Bind variables and configuration variable
# Set InPort buffers
self.addInPort("buttons",self._buttonsIn)
self.addInPort("accel",self._accelIn)
self.addInPort("orientation",self._orientationIn)
self.addInPort("cursor",self._cursorIn)
self.addInPort("distance",self._distanceIn)
self.addInPort("ir",self._irIn)
# Set OutPort buffers
self.addOutPort("rumble",self._rumbleOut)
self.addOutPort("leds",self._ledsOut)
# Set service provider to Ports
# Set service consumers to Ports
# Set CORBA Service Ports
return RTC.RTC_OK
# ##
# #
# # The finalize action (on ALIVE->END transition)
    # # former rtc_exiting_entry()
# #
# # @return RTC::ReturnCode_t
#
# #
#def onFinalize(self, ec_id):
#
# return RTC.RTC_OK
# ##
# #
# # The startup action when ExecutionContext startup
# # former rtc_starting_entry()
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onStartup(self, ec_id):
#
# return RTC.RTC_OK
# ##
# #
# # The shutdown action when ExecutionContext stop
# # former rtc_stopping_entry()
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onShutdown(self, ec_id):
#
# return RTC.RTC_OK
# ##
# #
# # The activated action (Active state entry action)
# # former rtc_active_entry()
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
def onActivated(self, ec_id):
self.buttons = [False] * 11
self.accel = RTC.AngularAcceleration3D(0, 0, 0)
self.orientation = RTC.Orientation3D(0, 0, 0)
self.cursor = RTC.Point2D(0, 0)
self.distance = 0
self.ir = RTC.Point2D(0, 0)
return RTC.RTC_OK
# ##
# #
# # The deactivated action (Active state exit action)
# # former rtc_active_exit()
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onDeactivated(self, ec_id):
#
# return RTC.RTC_OK
##
#
# The execution action that is invoked periodically
# former rtc_active_do()
#
# @param ec_id target ExecutionContext Id
#
# @return RTC::ReturnCode_t
#
#
def onExecute(self, ec_id):
if self._buttonsIn.isNew():
data = self._buttonsIn.read()
self.buttons = data.data
if self._accelIn.isNew():
data = self._accelIn.read()
self.accel = data.data
if self._orientationIn.isNew():
data = self._orientationIn.read()
self.orientation = data.data
if self._irIn.isNew():
data = self._irIn.read()
self.ir = data.data
if self._cursorIn.isNew():
data = self._cursorIn.read()
self.cursor = data.data
sys.stdout.write('[WiiRemoteTest] buttons:')
for b in self.buttons:
sys.stdout.write('T' if b else 'F')
sys.stdout.write('\n')
sys.stdout.write('[WiiRemoteTest] accel: aax=%3.3f, aay=%3.3f, aaz=%3.3f\n' % (self.accel.aax, self.accel.aay, self.accel.aaz))
sys.stdout.write('[WiiRemoteTest] orientation: r=%3.3f, p=%3.3f, y=%3.3f\n' % (self.orientation.r, self.orientation.p, self.orientation.y))
sys.stdout.write('[WiiRemoteTest] cursor: x=%3.3f, y=%3.3f\n' % (self.cursor.x, self.cursor.y))
sys.stdout.write('[WiiRemoteTest] distance: d=%3.3f\n' % self.distance)
sys.stdout.write('[WiiRemoteTest] ir: x=%3.3f, y=%3.3f\n' % (self.ir.x, self.ir.y))
return RTC.RTC_OK
# ##
# #
# # The aborting action when main logic error occurred.
# # former rtc_aborting_entry()
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onAborting(self, ec_id):
#
# return RTC.RTC_OK
# ##
# #
# # The error action in ERROR state
# # former rtc_error_do()
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onError(self, ec_id):
#
# return RTC.RTC_OK
# ##
# #
# # The reset action that is invoked resetting
    # # This is similar to, but different from, the former rtc_init_entry()
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onReset(self, ec_id):
#
# return RTC.RTC_OK
# ##
# #
# # The state update action that is invoked after onExecute() action
    # # no corresponding operation exists in OpenRTM-aist-0.2.0
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onStateUpdate(self, ec_id):
#
# return RTC.RTC_OK
# ##
# #
# # The action that is invoked when execution context's rate is changed
    # # no corresponding operation exists in OpenRTM-aist-0.2.0
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onRateChanged(self, ec_id):
#
# return RTC.RTC_OK
def WiiRemoteTestInit(manager):
profile = OpenRTM_aist.Properties(defaults_str=wiiremotetest_spec)
manager.registerFactory(profile,
WiiRemoteTest,
OpenRTM_aist.Delete)
def MyModuleInit(manager):
WiiRemoteTestInit(manager)
# Create a component
comp = manager.createComponent("WiiRemoteTest")
def main():
mgr = OpenRTM_aist.Manager.init(sys.argv)
mgr.setModuleInitProc(MyModuleInit)
mgr.activateManager()
mgr.runManager()
if __name__ == "__main__":
main()
|
sugarsweetrobotics/WiiRemoteTest
|
WiiRemoteTest.py
|
Python
|
gpl-3.0
| 8,534
|
#!/usr/bin/env python3
import argparse
import sys
import numpy as np
#from translate.evaluation import corpus_bleu, corpus_ter
parser = argparse.ArgumentParser()
# parser.add_argument('source1')
# parser.add_argument('source2')
# parser.add_argument('target')
#
# parser.add_argument('--bleu', action='store_true')
# parser.add_argument('--max-size', type=int)
# parser.add_argument('--case-insensitive', '-i', action='store_true')
#
# parser.add_argument('--draws', type=int, default=1000)
# parser.add_argument('--sample-size', type=int, default=0)
# parser.add_argument('-p', type=float, default=0.05)
parser.add_argument('--set-type')
parser.add_argument('--set-id')
args = parser.parse_args()
if args.set_type is not None:
if args.set_id is None:
args.set_id = 'dummy'
print('<{} setid="{}" srclang="any" trglang="any">'.format(args.set_type, args.set_id))
print('<doc docid="dummy" sysid="{}">'.format(args.set_type))
for i, line in enumerate(sys.stdin, 1):
print('<seg id="{}">{}</seg>'.format(i, line.strip()))
print('</doc>')
if args.set_type is not None:
print('</{}>'.format(args.set_type))
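# Example usage (illustrative): wrap a plain-text output file as an SGML test
# set:
#   ./to-sgm.py --set-type tstset --set-id newstest < output.txt > output.sgm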
|
eske/seq2seq
|
scripts/post_editing/to-sgm.py
|
Python
|
apache-2.0
| 1,133
|
"""Unit test for trace websocket API.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
import jsonschema
import mock
from treadmill.trace.app import events
from treadmill.websocket.api import trace
class WSTraceAPITest(unittest.TestCase):
"""Tests for trace websocket API."""
def setUp(self):
self.api = trace.TraceAPI()
def test_subscribe(self):
"""Test subscription registration."""
self.assertEqual(
self.api.subscribe({'topic': '/trace',
'filter': 'foo.bar#1234'}),
[('/trace/*', 'foo.bar#1234,*')]
)
self.assertEqual(
self.api.subscribe({'topic': '/trace',
'filter': 'foo.bar'}),
[('/trace/*', 'foo.bar#*,*')]
)
self.assertEqual(
self.api.subscribe({'topic': '/trace',
'filter': 'foo.bar*'}),
[('/trace/*', 'foo.bar*#*,*')]
)
self.assertEqual(
self.api.subscribe({'topic': '/trace',
'filter': 'foo.*'}),
[('/trace/*', 'foo.*#*,*')]
)
with self.assertRaisesRegex(jsonschema.exceptions.ValidationError,
r"'\*' is not valid"):
self.api.subscribe({'topic': '/trace',
'filter': '*'})
self.assertEqual(
self.api.subscribe({'topic': '/trace',
'filter': '*@foo.*'}),
[('/trace/*', '*@foo.*#*,*')]
)
self.assertEqual(
self.api.subscribe({'topic': '/trace',
'filter': 'foo@bar.baz#1234'}),
[('/trace/*', 'foo@bar.baz#1234,*')]
)
with self.assertRaisesRegex(jsonschema.exceptions.ValidationError,
r"'\*@\*' is not valid"):
self.api.subscribe({'topic': '/trace',
'filter': '*@*'})
with self.assertRaisesRegex(jsonschema.exceptions.ValidationError,
r"'\*@\*\.\*' is not valid"):
self.api.subscribe({'topic': '/trace',
'filter': '*@*.*'})
self.assertEqual(
self.api.subscribe({
'sub-id': 'aae32dcf-4fbd-4831-b7c7-49d5afac57fa',
'topic': '/trace',
'filter': 'foo.bar#1234'
}),
[('/trace/*', 'foo.bar#1234,*')]
)
with self.assertRaisesRegex(jsonschema.exceptions.ValidationError,
"'invalid' does not match"):
self.api.subscribe({'sub-id': 'invalid',
'topic': '/trace',
'filter': 'foo.bar#1234'})
@mock.patch('treadmill.trace.app.events.AppTraceEvent',
mock.Mock(set_spec=True))
def test_on_event(self):
"""Tests payload generation."""
mock_event = events.AppTraceEvent.from_data.return_value
self.assertEqual(
self.api.on_event(
'/trace/00C2/foo.bar#1234,123.04,b,c,d',
None,
'xxx'
),
{
'topic': '/trace',
'event': mock_event.to_dict.return_value
}
)
events.AppTraceEvent.from_data.assert_called_with(
timestamp=123.04,
source='b',
instanceid='foo.bar#1234',
event_type='c',
event_data='d',
payload='xxx'
)
if __name__ == '__main__':
unittest.main()
|
Morgan-Stanley/treadmill
|
lib/python/treadmill/tests/websocket/api/trace_test.py
|
Python
|
apache-2.0
| 3,800
|
import time
import uuid
from datetime import timedelta
from golem.core.keysauth import EllipticalKeysAuth
from mock import Mock, patch
from golem.core.common import get_current_time, timeout_to_deadline
from golem.network.p2p.node import Node
from golem.task.taskbase import Task, TaskHeader, ComputeTaskDef, TaskEventListener
from golem.task.taskclient import TaskClient
from golem.task.taskmanager import TaskManager, logger
from golem.task.taskstate import SubtaskStatus, SubtaskState, TaskState, TaskStatus, ComputerState
from golem.tools.assertlogs import LogTestCase
from golem.tools.testdirfixture import TestDirFixture
class TestTaskManager(LogTestCase, TestDirFixture):
def setUp(self):
super(TestTaskManager, self).setUp()
self.tm = TaskManager("ABC", Node(), Mock(), root_path=self.path)
self.tm.key_id = "KEYID"
self.tm.listen_address = "10.10.10.10"
self.tm.listen_port = 2222
self.addr_return = ("10.10.10.10", 1111, "Full NAT")
@staticmethod
def _get_task_mock(task_id="xyz", subtask_id="xxyyzz", timeout=120, subtask_timeout=120):
task_mock = Mock()
task_mock.header.task_id = task_id
task_mock.header.resource_size = 2 * 1024
task_mock.header.estimated_memory = 3 * 1024
task_mock.header.max_price = 10000
task_mock.header.deadline = timeout_to_deadline(timeout)
task_mock.header.subtask_timeout = subtask_timeout
extra_data = Mock()
extra_data.ctd = ComputeTaskDef()
extra_data.ctd.task_id = task_id
extra_data.ctd.subtask_id = subtask_id
extra_data.ctd.environment = "DEFAULT"
extra_data.ctd.deadline = timeout_to_deadline(subtask_timeout)
extra_data.should_wait = False
task_mock.query_extra_data.return_value = extra_data
task_mock.get_progress.return_value = 0.3
return task_mock
@patch("golem.task.taskmanager.get_external_address")
def test_get_next_subtask(self, mock_addr):
mock_addr.return_value = self.addr_return
assert isinstance(self.tm, TaskManager)
subtask, wrong_task, wait = self.tm.get_next_subtask("DEF", "DEF", "xyz", 1000, 10, 5, 10, 2, "10.10.10.10")
assert subtask is None
assert wrong_task
task_mock = self._get_task_mock()
# Task's initial state is set to 'waiting' (found in activeStatus)
self.tm.add_new_task(task_mock)
subtask, wrong_task, wait = self.tm.get_next_subtask("DEF", "DEF", "xyz", 1000, 10, 5, 10, 2, "10.10.10.10")
assert subtask is not None
assert not wrong_task
self.tm.tasks_states["xyz"].status = self.tm.activeStatus[0]
subtask, wrong_task, wait = self.tm.get_next_subtask("DEF", "DEF", "xyz", 1000, 10, 1, 10, 2, "10.10.10.10")
assert subtask is None
assert not wrong_task
subtask, wrong_task, wait = self.tm.get_next_subtask("DEF", "DEF", "xyz", 1000, 10, 5, 2, 2, "10.10.10.10")
assert subtask is None
assert not wrong_task
subtask, wrong_task, wait = self.tm.get_next_subtask("DEF", "DEF", "xyz", 1000, 10, 5, 10, 2, "10.10.10.10")
assert subtask is None
assert not wrong_task
task_mock.query_extra_data.return_value.ctd.subtask_id = "xyzxyz"
subtask, wrong_task, wait = self.tm.get_next_subtask("DEF", "DEF", "xyz", 1000, 10, 5, 10, 2, "10.10.10.10")
assert isinstance(subtask, ComputeTaskDef)
assert not wrong_task
assert self.tm.tasks_states["xyz"].subtask_states[subtask.subtask_id].computer.price == 10
task_mock.query_extra_data.return_value.ctd.subtask_id = "xyzxyz2"
subtask, wrong_task, wait = self.tm.get_next_subtask("DEF", "DEF", "xyz", 1000, 20000, 5, 10, 2, "10.10.10.10")
assert subtask is None
assert not wrong_task
subtask, wrong_task, wait = self.tm.get_next_subtask("DEF", "DEF", "xyz", 1000, 10, 5, 10, 2, "10.10.10.10")
assert isinstance(subtask, ComputeTaskDef)
assert not wrong_task
del self.tm.subtask2task_mapping["xyzxyz2"]
subtask, wrong_task, wait = self.tm.get_next_subtask("DEF", "DEF", "xyz", 1000, 10, 5, 10, 2, "10.10.10.10")
assert subtask is None
del self.tm.tasks_states["xyz"].subtask_states["xyzxyz2"]
subtask, wrong_task, wait = self.tm.get_next_subtask("DEF", "DEF", "xyz", 1000, 10, 5, 10, 2, "10.10.10.10")
assert isinstance(subtask, ComputeTaskDef)
task_mock.query_extra_data.return_value.ctd.subtask_id = None
subtask, wrong_task, wait = self.tm.get_next_subtask("DEF", "DEF", "xyz", 1000, 10, 5, 10, 2, "10.10.10.10")
assert subtask is None
self.tm.delete_task("xyz")
assert self.tm.tasks.get("xyz") is None
assert self.tm.tasks_states.get("xyz") is None
@patch("golem.task.taskmanager.get_external_address")
def test_get_and_set_value(self, mock_addr):
mock_addr.return_value = self.addr_return
with self.assertLogs(logger, level="WARNING") as l:
self.tm.set_value("xyz", "xxyyzz", 13)
assert any("not my task" in log for log in l.output)
with self.assertLogs(logger, level="WARNING"):
self.tm.get_value("xxyyzz")
with self.assertLogs(logger, level="WARNING"):
self.tm.set_computation_time("xxyyzz", 12)
task_mock = self._get_task_mock()
self.tm.add_new_task(task_mock)
with self.assertLogs(logger, level="WARNING") as l:
self.tm.set_value("xyz", "xxyyzz", 13)
assert any("not my subtask" in log for log in l.output)
self.tm.tasks_states["xyz"].status = self.tm.activeStatus[0]
subtask, wrong_task, wait = self.tm.get_next_subtask("DEF", "DEF", "xyz", 1000, 10, 5, 10, 2, "10.10.10.10")
self.assertIsInstance(subtask, ComputeTaskDef)
self.assertEqual(wrong_task, False)
self.tm.set_value("xyz", "xxyyzz", 13)
self.assertEqual(self.tm.tasks_states["xyz"].subtask_states["xxyyzz"].value, 13)
self.assertEqual(self.tm.get_value("xxyyzz"), 13)
self.tm.set_computation_time("xxyyzz", 3601)
self.assertEqual(self.tm.tasks_states["xyz"].subtask_states["xxyyzz"].value, 11)
def test_change_config(self):
self.assertTrue(self.tm.use_distributed_resources)
self.tm.change_config(self.path, False)
self.assertFalse(self.tm.use_distributed_resources)
@patch("golem.task.taskmanager.get_external_address")
def test_get_resources(self, mock_addr):
mock_addr.return_value = self.addr_return
task_id = "xyz"
resources = ['first', 'second']
def get_resources(*args):
return resources
task_mock = self._get_task_mock()
task_mock.get_resources = get_resources
self.tm.add_new_task(task_mock)
assert self.tm.get_resources(task_id, task_mock.header) is resources
assert not self.tm.get_resources(task_id + "2", task_mock.header)
@patch("golem.task.taskmanager.get_external_address")
def test_computed_task_received(self, mock_addr):
mock_addr.return_value = self.addr_return
self.tm.listeners.append(Mock())
th = TaskHeader("ABC", "xyz", "10.10.10.10", 1024, "key_id", "DEFAULT")
th.max_price = 50
class TestTask(Task):
def __init__(self, header, src_code, subtasks_id, verify_subtasks):
super(TestTask, self).__init__(header, src_code)
self.finished = {k: False for k in subtasks_id}
self.restarted = {k: False for k in subtasks_id}
self.verify_subtasks = verify_subtasks
self.subtasks_id = subtasks_id
def query_extra_data(self, perf_index, num_cores=1, node_id=None, node_name=None):
ctd = ComputeTaskDef()
ctd.task_id = self.header.task_id
ctd.subtask_id = self.subtasks_id[0]
ctd.environment = "DEFAULT"
ctd.should_wait = False
self.subtasks_id = self.subtasks_id[1:]
e = self.ExtraData(False, ctd)
return e
def needs_computation(self):
return sum(self.finished.values()) != len(self.finished)
def computation_finished(self, subtask_id, task_result, result_type=0):
if not self.restarted[subtask_id]:
self.finished[subtask_id] = True
def verify_subtask(self, subtask_id):
return self.verify_subtasks[subtask_id]
def finished_computation(self):
return not self.needs_computation()
def verify_task(self):
return self.finished_computation()
def restart_subtask(self, subtask_id):
self.restarted[subtask_id] = True
t = TestTask(th, "print 'Hello world'", ["xxyyzz"], verify_subtasks={"xxyyzz": True})
self.tm.add_new_task(t)
ctd, wrong_task, should_wait = self.tm.get_next_subtask("DEF", "DEF", "xyz", 1030, 10, 10000, 10000, 10000)
assert not wrong_task
assert ctd.subtask_id == "xxyyzz"
assert not should_wait
task_id = self.tm.subtask2task_mapping["xxyyzz"]
assert task_id == "xyz"
ss = self.tm.tasks_states["xyz"].subtask_states["xxyyzz"]
assert ss.subtask_status == SubtaskStatus.starting
assert self.tm.computed_task_received("xxyyzz", [], 0)
assert t.finished["xxyyzz"]
assert ss.subtask_progress == 1.0
assert ss.subtask_rem_time == 0.0
assert ss.subtask_status == SubtaskStatus.finished
assert self.tm.tasks_states["xyz"].status == TaskStatus.finished
th.task_id = "abc"
t2 = TestTask(th, "print 'Hello world'", ["aabbcc"], verify_subtasks={"aabbcc": True})
self.tm.add_new_task(t2)
ctd, wrong_task, should_wait = self.tm.get_next_subtask("DEF", "DEF", "abc", 1030, 10, 10000, 10000, 10000)
assert not wrong_task
assert ctd.subtask_id == "aabbcc"
assert not should_wait
self.tm.restart_subtask("aabbcc")
ss = self.tm.tasks_states["abc"].subtask_states["aabbcc"]
assert ss.subtask_status == SubtaskStatus.restarted
assert not self.tm.computed_task_received("aabbcc", [], 0)
assert ss.subtask_progress == 0.0
assert ss.subtask_status == SubtaskStatus.restarted
assert not t2.finished["aabbcc"]
th.task_id = "qwe"
t3 = TestTask(th, "print 'Hello world!'", ["qqwwee", "rrttyy"], {"qqwwee": True, "rrttyy": True})
self.tm.add_new_task(t3)
ctd, wrong_task, should_wait = self.tm.get_next_subtask("DEF", "DEF", "qwe", 1030, 10, 10000, 10000, 10000)
assert not wrong_task
assert ctd.subtask_id == "qqwwee"
self.tm.task_computation_failure("qqwwee", "something went wrong")
ss = self.tm.tasks_states["qwe"].subtask_states["qqwwee"]
assert ss.subtask_status == SubtaskStatus.failure
assert ss.subtask_progress == 1.0
assert ss.subtask_rem_time == 0.0
assert ss.stderr == "something went wrong"
with self.assertLogs(logger, level="WARNING"):
assert not self.tm.computed_task_received("qqwwee", [], 0)
th.task_id = "task4"
t2 = TestTask(th, "print 'Hello world!'", ["ttt4", "sss4"], {'ttt4': False, 'sss4': True})
self.tm.add_new_task(t2)
ctd, wrong_task, should_wait = self.tm.get_next_subtask("DEF", "DEF", "task4", 1000, 10, 5, 10, 2,
"10.10.10.10")
assert not wrong_task
assert ctd.subtask_id == "ttt4"
assert not self.tm.computed_task_received("ttt4", [], 0)
self.tm.listeners[0].task_status_updated.assert_called_with("task4")
assert self.tm.tasks_states["task4"].subtask_states["ttt4"].subtask_status == SubtaskStatus.failure
prev_call = self.tm.listeners[0].task_status_updated.call_count
assert not self.tm.computed_task_received("ttt4", [], 0)
assert self.tm.listeners[0].task_status_updated.call_count == prev_call + 1
ctd, wrong_task, should_wait = self.tm.get_next_subtask("DEF", "DEF", "task4", 1000, 10, 5, 10, 2, "10.10.10.10")
assert not wrong_task
assert ctd.subtask_id == "sss4"
assert self.tm.computed_task_received("sss4", [], 0)
@patch("golem.task.taskmanager.get_external_address")
def test_task_result_incoming(self, mock_addr):
mock_addr.return_value = self.addr_return
subtask_id = "xxyyzz"
node_id = 'node'
task_mock = self._get_task_mock()
task_mock.counting_nodes = {}
self.tm.task_result_incoming(subtask_id)
assert not task_mock.result_incoming.called
task_mock.subtasks_given = dict()
task_mock.subtasks_given[subtask_id] = TaskClient(node_id)
subtask_state = SubtaskState()
subtask_state.status = SubtaskStatus.waiting
subtask_state.subtask_id = subtask_id
subtask_state.computer = Mock()
subtask_state.computer.node_id = node_id
task_state = TaskState()
task_state.computer = Mock()
task_state.subtask_states[subtask_id] = subtask_state
self.tm.add_new_task(task_mock)
self.tm.subtask2task_mapping[subtask_id] = "xyz"
self.tm.tasks_states["xyz"] = task_state
self.tm.task_result_incoming(subtask_id)
assert task_mock.result_incoming.called
task_mock.result_incoming.called = False
self.tm.tasks = []
self.tm.task_result_incoming(subtask_id)
assert not task_mock.result_incoming.called
@patch("golem.task.taskmanager.get_external_address")
def test_get_subtasks(self, mock_addr):
mock_addr.return_value = self.addr_return
assert self.tm.get_subtasks("Task 1") is None
task_mock = self._get_task_mock()
self.tm.add_new_task(task_mock)
task_mock2 = self._get_task_mock("TASK 1", "SUBTASK 1")
self.tm.add_new_task(task_mock2)
assert self.tm.get_subtasks("xyz") == []
assert self.tm.get_subtasks("TASK 1") == []
self.tm.get_next_subtask("NODEID", "NODENAME", "xyz", 1000, 100, 10000, 10000)
self.tm.get_next_subtask("NODEID", "NODENAME", "TASK 1", 1000, 100, 10000, 10000)
task_mock.query_extra_data.return_value.ctd.subtask_id = "aabbcc"
self.tm.get_next_subtask("NODEID2", "NODENAME", "xyz", 1000, 100, 10000, 10000)
task_mock.query_extra_data.return_value.ctd.subtask_id = "ddeeff"
self.tm.get_next_subtask("NODEID3", "NODENAME", "xyz", 1000, 100, 10000, 10000)
assert set(self.tm.get_subtasks("xyz")) == {"xxyyzz", "aabbcc", "ddeeff"}
assert self.tm.get_subtasks("TASK 1") == ["SUBTASK 1"]
@patch("golem.task.taskmanager.get_external_address")
def test_resource_send(self, mock_addr):
mock_addr.return_value = self.addr_return
self.tm.listeners.append(Mock())
t = Task(TaskHeader("ABC", "xyz", "10.10.10.10", 1023, "abcde",
"DEFAULT"), "print 'hello world'")
self.tm.add_new_task(t)
self.tm.resources_send("xyz")
self.tm.listeners[0].notice_task_updated.assert_called_with("xyz")
@patch("golem.task.taskmanager.get_external_address")
def test_check_timeouts(self, mock_addr):
mock_addr.return_value = self.addr_return
self.tm.listeners.append(Mock())
# Task with timeout
t = self._get_task_mock(timeout=0.1)
self.tm.add_new_task(t)
assert self.tm.tasks_states["xyz"].status in self.tm.activeStatus
time.sleep(0.1)
self.tm.check_timeouts()
assert self.tm.tasks_states['xyz'].status == TaskStatus.timeout
# Task with subtask timeout
t2 = self._get_task_mock(task_id="abc", subtask_id="aabbcc", timeout=10, subtask_timeout=0.1)
self.tm.add_new_task(t2)
self.tm.get_next_subtask("ABC", "ABC", "abc", 1000, 10, 5, 10, 2, "10.10.10.10")
time.sleep(0.1)
self.tm.check_timeouts()
assert self.tm.tasks_states["abc"].status == TaskStatus.waiting
assert self.tm.tasks_states["abc"].subtask_states["aabbcc"].subtask_status == SubtaskStatus.failure
# Task with task and subtask timeout
t3 = self._get_task_mock(task_id="qwe", subtask_id="qwerty", timeout=0.1, subtask_timeout=0.1)
self.tm.add_new_task(t3)
self.tm.get_next_subtask("ABC", "ABC", "qwe", 1000, 10, 5, 10, 2, "10.10.10.10")
time.sleep(0.1)
self.tm.check_timeouts()
assert self.tm.tasks_states["qwe"].status == TaskStatus.timeout
assert self.tm.tasks_states["qwe"].subtask_states["qwerty"].subtask_status == SubtaskStatus.failure
def test_task_event_listener(self):
self.tm.notice_task_updated = Mock()
assert isinstance(self.tm, TaskEventListener)
self.tm.notify_update_task("xyz")
self.tm.notice_task_updated.assert_called_with("xyz")
@patch("golem.task.taskmanager.get_external_address")
def test_query_task_state(self, mock_addr):
mock_addr.return_value = self.addr_return
with self.assertLogs(logger, level="WARNING"):
assert self.tm.query_task_state("xyz") is None
t = self._get_task_mock()
self.tm.add_new_task(t)
with self.assertNoLogs(logger, level="WARNING"):
ts = self.tm.query_task_state("xyz")
assert ts is not None
assert ts.progress == 0.3
@patch("golem.task.taskmanager.get_external_address")
def test_resume_task(self, mock_addr):
mock_addr.return_value = self.addr_return
with self.assertLogs(logger, level="WARNING"):
assert self.tm.resume_task("xyz") is None
t = self._get_task_mock()
self.tm.add_new_task(t)
with self.assertNoLogs(logger, level="WARNING"):
self.tm.resume_task("xyz")
assert self.tm.tasks["xyz"].task_status == TaskStatus.starting
assert self.tm.tasks_states["xyz"].status == TaskStatus.starting
@patch("golem.task.taskmanager.get_external_address")
def test_restart_task(self, mock_addr):
mock_addr.return_value = self.addr_return
with self.assertLogs(logger, level="WARNING"):
assert self.tm.restart_task("xyz") is None
t = self._get_task_mock()
self.tm.add_new_task(t)
with self.assertNoLogs(logger, level="WARNING"):
self.tm.restart_task("xyz")
assert self.tm.tasks["xyz"].task_status == TaskStatus.waiting
assert self.tm.tasks_states["xyz"].status == TaskStatus.waiting
self.tm.get_next_subtask("NODEID", "NODENAME", "xyz", 1000, 100, 10000, 10000)
t.query_extra_data.return_value.ctd.subtask_id = "xxyyzz2"
self.tm.get_next_subtask("NODEID2", "NODENAME2", "xyz", 1000, 100, 10000, 10000)
assert len(self.tm.tasks_states["xyz"].subtask_states) == 2
with self.assertNoLogs(logger, level="WARNING"):
self.tm.restart_task("xyz")
assert self.tm.tasks["xyz"].task_status == TaskStatus.waiting
assert self.tm.tasks_states["xyz"].status == TaskStatus.waiting
assert len(self.tm.tasks_states["xyz"].subtask_states) == 2
for ss in self.tm.tasks_states["xyz"].subtask_states.values():
assert ss.subtask_status == SubtaskStatus.restarted
@patch("golem.task.taskmanager.get_external_address")
def test_abort_task(self, mock_addr):
mock_addr.return_value = self.addr_return
with self.assertLogs(logger, level="WARNING"):
assert self.tm.abort_task("xyz") is None
t = self._get_task_mock()
self.tm.add_new_task(t)
with self.assertNoLogs(logger, level="WARNING"):
self.tm.abort_task("xyz")
assert self.tm.tasks["xyz"].task_status == TaskStatus.aborted
assert self.tm.tasks_states["xyz"].status == TaskStatus.aborted
@patch("golem.task.taskmanager.get_external_address")
def test_pause_task(self, mock_addr):
mock_addr.return_value = self.addr_return
with self.assertLogs(logger, level="WARNING"):
assert self.tm.pause_task("xyz") is None
t = self._get_task_mock()
self.tm.add_new_task(t)
with self.assertNoLogs(logger, level="WARNING"):
self.tm.pause_task("xyz")
assert self.tm.tasks["xyz"].task_status == TaskStatus.paused
assert self.tm.tasks_states["xyz"].status == TaskStatus.paused
@patch('golem.network.p2p.node.Node.collect_network_info')
def test_get_tasks(self, _):
tm = TaskManager("ABC", Node(), Mock(), root_path=self.path)
count = 3
tasks, tasks_states, task_id, subtask_id = self.__build_tasks(count)
tm.tasks = tasks
tm.tasks_states = tasks_states
tm.subtask2task_mapping = self.__build_subtask2task(tasks)
one_task = tm.get_dict_task(task_id)
assert one_task
assert isinstance(one_task, dict)
assert len(one_task)
all_tasks = tm.get_dict_tasks()
assert all_tasks
assert isinstance(all_tasks, list)
assert len(all_tasks) == count
assert all([isinstance(t, dict) for t in all_tasks])
one_subtask = tm.get_dict_subtask(subtask_id)
assert one_subtask
assert isinstance(one_subtask, dict)
assert len(one_subtask)
task_subtasks = tm.get_dict_subtasks(task_id)
assert task_subtasks
assert isinstance(task_subtasks, list)
assert all([isinstance(t, dict) for t in task_subtasks])
@patch("golem.task.taskmanager.get_external_address")
def test_change_timeouts(self, mock_addr):
mock_addr.return_value = self.addr_return
t = self._get_task_mock(timeout=20, subtask_timeout=40)
self.tm.add_new_task(t)
assert get_current_time() + timedelta(seconds=15) <= t.header.deadline
assert t.header.deadline <= get_current_time() + timedelta(seconds=20)
assert t.header.subtask_timeout == 40
self.tm.change_timeouts("xyz", 60, 10)
assert get_current_time() + timedelta(seconds=55) <= t.header.deadline
assert t.header.deadline <= get_current_time() + timedelta(seconds=60)
assert t.header.subtask_timeout == 10
@patch("golem.task.taskmanager.get_external_address", side_effect=lambda *a, **k: ('1.2.3.4', 40103, None))
def test_update_signatures(self, _):
node = Node("node", "key_id", "10.0.0.10", 40103, "1.2.3.4", 40103, None, 40102, 40102)
task = Mock()
task.header = TaskHeader("node", "task_id", "1.2.3.4", 1234, "key_id", "environment",
task_owner=node)
self.tm.keys_auth = EllipticalKeysAuth(self.path)
self.tm.add_new_task(task)
sig = task.header.signature
self.tm.update_task_signatures()
assert task.header.signature == sig
task.header.task_owner.pub_port = 40104
self.tm.update_task_signatures()
assert task.header.signature != sig
@classmethod
def __build_tasks(cls, n):
tasks = dict()
tasks_states = dict()
task_id = None
subtask_id = None
for i in xrange(0, n):
task = Mock()
task.header.task_id = str(uuid.uuid4())
task.get_total_tasks.return_value = i + 2
task.get_progress.return_value = i * 10
state = Mock()
state.status = 'waiting'
state.remaining_time = 100 - i
subtask_states, subtask_id = cls.__build_subtasks(n)
state.subtask_states = subtask_states
task.subtask_states = subtask_states
task_id = task.header.task_id
tasks[task.header.task_id] = task
tasks_states[task.header.task_id] = state
return tasks, tasks_states, task_id, subtask_id
@staticmethod
def __build_subtasks(n):
subtasks = dict()
subtask_id = None
for i in xrange(0, n):
subtask = Mock()
subtask.subtask_id = str(uuid.uuid4())
subtask.computer = ComputerState()
subtask.computer.node_name = 'node_{}'.format(i)
subtask.computer.node_id = 'deadbeef0{}'.format(i)
subtask_id = subtask.subtask_id
subtasks[subtask.subtask_id] = subtask
return subtasks, subtask_id
@staticmethod
def __build_subtask2task(tasks):
subtask2task = dict()
for k, t in tasks.items():
for sk, st in t.subtask_states.items():
subtask2task[st.subtask_id] = t.header.task_id
return subtask2task
|
imapp-pl/golem
|
tests/golem/task/test_taskmanager.py
|
Python
|
gpl-3.0
| 24,910
|
# This is a copy of the Python logging.config.dictconfig module,
# reproduced with permission. It is provided here for backwards
# compatibility for Python versions prior to 2.7.
#
# Copyright 2009-2010 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import logging.handlers
import re
import sys
import types
IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)
def valid_ident(s):
m = IDENTIFIER.match(s)
if not m:
raise ValueError('Not a valid Python identifier: %r' % s)
return True
#
# This function is defined in logging only in recent versions of Python
#
try:
from logging import _checkLevel
except ImportError:
def _checkLevel(level):
if isinstance(level, int):
rv = level
elif str(level) == level:
if level not in logging._levelNames:
raise ValueError('Unknown level: %r' % level)
rv = logging._levelNames[level]
else:
raise TypeError('Level not an integer or a '
'valid string: %r' % level)
return rv
# The ConvertingXXX classes are wrappers around standard Python containers,
# and they serve to convert any suitable values in the container. The
# conversion converts base dicts, lists and tuples to their wrapped
# equivalents, whereas strings which match a conversion format are converted
# appropriately.
#
# Each wrapper should have a configurator attribute holding the actual
# configurator to use for conversion.
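# Illustrative example (not part of the original module): once a
# configurator is attached, string values with a conversion prefix are
# resolved on access. Assuming BaseConfigurator (defined below):
#
#   d = ConvertingDict({'stream': 'ext://sys.stdout'})
#   d.configurator = BaseConfigurator({})
#   d['stream'] is sys.stdout  # -> True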
class ConvertingDict(dict):
"""A converting dictionary wrapper."""
def __getitem__(self, key):
value = dict.__getitem__(self, key)
result = self.configurator.convert(value)
#If the converted value is different, save for next time
if value is not result:
self[key] = result
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
def get(self, key, default=None):
value = dict.get(self, key, default)
result = self.configurator.convert(value)
#If the converted value is different, save for next time
if value is not result:
self[key] = result
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
def pop(self, key, default=None):
value = dict.pop(self, key, default)
result = self.configurator.convert(value)
if value is not result:
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
class ConvertingList(list):
"""A converting list wrapper."""
def __getitem__(self, key):
value = list.__getitem__(self, key)
result = self.configurator.convert(value)
#If the converted value is different, save for next time
if value is not result:
self[key] = result
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
def pop(self, idx=-1):
value = list.pop(self, idx)
result = self.configurator.convert(value)
if value is not result:
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
return result
class ConvertingTuple(tuple):
"""A converting tuple wrapper."""
def __getitem__(self, key):
value = tuple.__getitem__(self, key)
result = self.configurator.convert(value)
if value is not result:
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
class BaseConfigurator(object):
"""
The configurator base class which defines some useful defaults.
"""
CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
DIGIT_PATTERN = re.compile(r'^\d+$')
value_converters = {
'ext' : 'ext_convert',
'cfg' : 'cfg_convert',
}
# We might want to use a different one, e.g. importlib
importer = __import__
def __init__(self, config):
self.config = ConvertingDict(config)
self.config.configurator = self
def resolve(self, s):
"""
Resolve strings to objects using standard import and attribute
syntax.
"""
name = s.split('.')
used = name.pop(0)
try:
found = self.importer(used)
for frag in name:
used += '.' + frag
try:
found = getattr(found, frag)
except AttributeError:
self.importer(used)
found = getattr(found, frag)
return found
except ImportError:
e, tb = sys.exc_info()[1:]
v = ValueError('Cannot resolve %r: %s' % (s, e))
v.__cause__, v.__traceback__ = e, tb
raise v
def ext_convert(self, value):
"""Default converter for the ext:// protocol."""
return self.resolve(value)
def cfg_convert(self, value):
"""Default converter for the cfg:// protocol."""
rest = value
m = self.WORD_PATTERN.match(rest)
if m is None:
raise ValueError("Unable to convert %r" % value)
else:
rest = rest[m.end():]
d = self.config[m.groups()[0]]
#print d, rest
while rest:
m = self.DOT_PATTERN.match(rest)
if m:
d = d[m.groups()[0]]
else:
m = self.INDEX_PATTERN.match(rest)
if m:
idx = m.groups()[0]
if not self.DIGIT_PATTERN.match(idx):
d = d[idx]
else:
try:
n = int(idx) # try as number first (most likely)
d = d[n]
except TypeError:
d = d[idx]
if m:
rest = rest[m.end():]
else:
raise ValueError('Unable to convert '
'%r at %r' % (value, rest))
#rest should be empty
return d
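# Illustrative example (assumed config, not part of the original module):
# with self.config == {'handlers': {'console': {'level': 'DEBUG'}}}, both
# cfg_convert('handlers.console.level') and
# cfg_convert('handlers[console].level') return 'DEBUG'; a purely numeric
# index such as 'loggers[0]' is first tried as an integer key.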
def convert(self, value):
"""
Convert values to an appropriate type. dicts, lists and tuples are
replaced by their converting alternatives. Strings are checked to
see if they have a conversion format and are converted if they do.
"""
if not isinstance(value, ConvertingDict) and isinstance(value, dict):
value = ConvertingDict(value)
value.configurator = self
elif not isinstance(value, ConvertingList) and isinstance(value, list):
value = ConvertingList(value)
value.configurator = self
elif not isinstance(value, ConvertingTuple) and\
isinstance(value, tuple):
value = ConvertingTuple(value)
value.configurator = self
elif isinstance(value, basestring): # str for py3k
m = self.CONVERT_PATTERN.match(value)
if m:
d = m.groupdict()
prefix = d['prefix']
converter = self.value_converters.get(prefix, None)
if converter:
suffix = d['suffix']
converter = getattr(self, converter)
value = converter(suffix)
return value
def configure_custom(self, config):
"""Configure an object with a user-supplied factory."""
c = config.pop('()')
if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
c = self.resolve(c)
props = config.pop('.', None)
# Check for valid identifiers
kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
result = c(**kwargs)
if props:
for name, value in props.items():
setattr(result, name, value)
return result
def as_tuple(self, value):
"""Utility function which converts lists to tuples."""
if isinstance(value, list):
value = tuple(value)
return value
class DictConfigurator(BaseConfigurator):
"""
Configure logging using a dictionary-like object to describe the
configuration.
"""
def configure(self):
"""Do the configuration."""
config = self.config
if 'version' not in config:
raise ValueError("dictionary doesn't specify a version")
if config['version'] != 1:
raise ValueError("Unsupported version: %s" % config['version'])
incremental = config.pop('incremental', False)
EMPTY_DICT = {}
logging._acquireLock()
try:
if incremental:
handlers = config.get('handlers', EMPTY_DICT)
# incremental handler config only if handler name
# ties in to logging._handlers (Python 2.7)
if sys.version_info[:2] == (2, 7):
for name in handlers:
if name not in logging._handlers:
raise ValueError('No handler found with '
'name %r' % name)
else:
try:
handler = logging._handlers[name]
handler_config = handlers[name]
level = handler_config.get('level', None)
if level:
handler.setLevel(_checkLevel(level))
except StandardError, e:
raise ValueError('Unable to configure handler '
'%r: %s' % (name, e))
loggers = config.get('loggers', EMPTY_DICT)
for name in loggers:
try:
self.configure_logger(name, loggers[name], True)
except StandardError, e:
raise ValueError('Unable to configure logger '
'%r: %s' % (name, e))
root = config.get('root', None)
if root:
try:
self.configure_root(root, True)
except StandardError, e:
raise ValueError('Unable to configure root '
'logger: %s' % e)
else:
disable_existing = config.pop('disable_existing_loggers', True)
logging._handlers.clear()
del logging._handlerList[:]
# Do formatters first - they don't refer to anything else
formatters = config.get('formatters', EMPTY_DICT)
for name in formatters:
try:
formatters[name] = self.configure_formatter(
formatters[name])
except StandardError, e:
raise ValueError('Unable to configure '
'formatter %r: %s' % (name, e))
# Next, do filters - they don't refer to anything else, either
filters = config.get('filters', EMPTY_DICT)
for name in filters:
try:
filters[name] = self.configure_filter(filters[name])
except StandardError, e:
raise ValueError('Unable to configure '
'filter %r: %s' % (name, e))
# Next, do handlers - they refer to formatters and filters
# As handlers can refer to other handlers, sort the keys
# to allow a deterministic order of configuration
handlers = config.get('handlers', EMPTY_DICT)
for name in sorted(handlers):
try:
handler = self.configure_handler(handlers[name])
handler.name = name
handlers[name] = handler
except StandardError, e:
raise ValueError('Unable to configure handler '
'%r: %s' % (name, e))
# Next, do loggers - they refer to handlers and filters
#we don't want to lose the existing loggers,
#since other threads may have pointers to them.
#existing is set to contain all existing loggers,
#and as we go through the new configuration we
#remove any which are configured. At the end,
#what's left in existing is the set of loggers
#which were in the previous configuration but
#which are not in the new configuration.
root = logging.root
existing = root.manager.loggerDict.keys()
#The list needs to be sorted so that we can
#avoid disabling child loggers of explicitly
#named loggers. With a sorted list it is easier
#to find the child loggers.
existing.sort()
#We'll keep the list of existing loggers
#which are children of named loggers here...
child_loggers = []
#now set up the new ones...
loggers = config.get('loggers', EMPTY_DICT)
for name in loggers:
if name in existing:
i = existing.index(name)
prefixed = name + "."
pflen = len(prefixed)
num_existing = len(existing)
i = i + 1 # look at the entry after name
while (i < num_existing) and\
(existing[i][:pflen] == prefixed):
child_loggers.append(existing[i])
i = i + 1
existing.remove(name)
try:
self.configure_logger(name, loggers[name])
except StandardError, e:
raise ValueError('Unable to configure logger '
'%r: %s' % (name, e))
#Disable any old loggers. There's no point deleting
#them as other threads may continue to hold references
#and by disabling them, you stop them doing any logging.
#However, don't disable children of named loggers, as that's
#probably not what was intended by the user.
for log in existing:
logger = root.manager.loggerDict[log]
if log in child_loggers:
logger.level = logging.NOTSET
logger.handlers = []
logger.propagate = True
elif disable_existing:
logger.disabled = True
# And finally, do the root logger
root = config.get('root', None)
if root:
try:
self.configure_root(root)
except StandardError, e:
raise ValueError('Unable to configure root '
'logger: %s' % e)
finally:
logging._releaseLock()
def configure_formatter(self, config):
"""Configure a formatter from a dictionary."""
if '()' in config:
factory = config['()'] # for use in exception handler
try:
result = self.configure_custom(config)
except TypeError, te:
if "'format'" not in str(te):
raise
#Name of parameter changed from fmt to format.
#Retry with old name.
#This is so that code can be used with older Python versions
#(e.g. by Django)
config['fmt'] = config.pop('format')
config['()'] = factory
result = self.configure_custom(config)
else:
fmt = config.get('format', None)
dfmt = config.get('datefmt', None)
result = logging.Formatter(fmt, dfmt)
return result
def configure_filter(self, config):
"""Configure a filter from a dictionary."""
if '()' in config:
result = self.configure_custom(config)
else:
name = config.get('name', '')
result = logging.Filter(name)
return result
def add_filters(self, filterer, filters):
"""Add filters to a filterer from a list of names."""
for f in filters:
try:
filterer.addFilter(self.config['filters'][f])
except StandardError, e:
raise ValueError('Unable to add filter %r: %s' % (f, e))
def configure_handler(self, config):
"""Configure a handler from a dictionary."""
formatter = config.pop('formatter', None)
if formatter:
try:
formatter = self.config['formatters'][formatter]
except StandardError, e:
raise ValueError('Unable to set formatter '
'%r: %s' % (formatter, e))
level = config.pop('level', None)
filters = config.pop('filters', None)
if '()' in config:
c = config.pop('()')
if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
c = self.resolve(c)
factory = c
else:
klass = self.resolve(config.pop('class'))
#Special case for handler which refers to another handler
if issubclass(klass, logging.handlers.MemoryHandler) and\
'target' in config:
try:
config['target'] = self.config['handlers'][config['target']]
except StandardError, e:
raise ValueError('Unable to set target handler '
'%r: %s' % (config['target'], e))
elif issubclass(klass, logging.handlers.SMTPHandler) and\
'mailhost' in config:
config['mailhost'] = self.as_tuple(config['mailhost'])
elif issubclass(klass, logging.handlers.SysLogHandler) and\
'address' in config:
config['address'] = self.as_tuple(config['address'])
factory = klass
kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
try:
result = factory(**kwargs)
except TypeError, te:
if "'stream'" not in str(te):
raise
#The argument name changed from strm to stream
#Retry with old name.
#This is so that code can be used with older Python versions
#(e.g. by Django)
kwargs['strm'] = kwargs.pop('stream')
result = factory(**kwargs)
if formatter:
result.setFormatter(formatter)
if level is not None:
result.setLevel(_checkLevel(level))
if filters:
self.add_filters(result, filters)
return result
def add_handlers(self, logger, handlers):
"""Add handlers to a logger from a list of names."""
for h in handlers:
try:
logger.addHandler(self.config['handlers'][h])
except StandardError, e:
raise ValueError('Unable to add handler %r: %s' % (h, e))
def common_logger_config(self, logger, config, incremental=False):
"""
Perform configuration which is common to root and non-root loggers.
"""
level = config.get('level', None)
if level is not None:
logger.setLevel(_checkLevel(level))
if not incremental:
#Remove any existing handlers
for h in logger.handlers[:]:
logger.removeHandler(h)
handlers = config.get('handlers', None)
if handlers:
self.add_handlers(logger, handlers)
filters = config.get('filters', None)
if filters:
self.add_filters(logger, filters)
def configure_logger(self, name, config, incremental=False):
"""Configure a non-root logger from a dictionary."""
logger = logging.getLogger(name)
self.common_logger_config(logger, config, incremental)
propagate = config.get('propagate', None)
if propagate is not None:
logger.propagate = propagate
def configure_root(self, config, incremental=False):
"""Configure a root logger from a dictionary."""
root = logging.getLogger()
self.common_logger_config(root, config, incremental)
dictConfigClass = DictConfigurator
def dictConfig(config):
"""Configure logging using a dictionary."""
dictConfigClass(config).configure()
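# Minimal usage sketch (illustrative; the handler and formatter names below
# are arbitrary and not part of the original module): configure a console
# handler on the root logger from a version-1 dict.
if __name__ == '__main__':
    _example = {
        'version': 1,
        'formatters': {'simple': {'format': '%(levelname)s %(message)s'}},
        'handlers': {'console': {'class': 'logging.StreamHandler',
                                 'formatter': 'simple',
                                 'level': 'DEBUG'}},
        'root': {'handlers': ['console'], 'level': 'DEBUG'},
    }
    dictConfig(_example)
    logging.getLogger('dictconfig.example').debug('configured via dictConfig')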
|
ychen820/microblog
|
y/google-cloud-sdk/platform/google_appengine/lib/django-1.4/django/utils/dictconfig.py
|
Python
|
bsd-3-clause
| 22,939
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import auth
from tempest.common import cred_provider
from tempest import config
from tempest import exceptions
CONF = config.CONF
class Manager(object):
"""
Base manager class
Manager objects are responsible for providing a configuration object
and a client object for a test case to use in performing actions.
"""
def __init__(self, credentials=None):
"""
We allow overriding of the credentials used within the various
client classes managed by the Manager object. Left as None, the
standard username/password/tenant_name[/domain_name] is used.
:param credentials: Override of the credentials
"""
self.auth_version = CONF.identity.auth_version
if credentials is None:
self.credentials = cred_provider.get_configured_credentials('user')
else:
self.credentials = credentials
# Check if passed or default credentials are valid
if not self.credentials.is_valid():
raise exceptions.InvalidCredentials()
# Tenant isolation creates TestResources, but Accounts and some tests
# create Credentials
if isinstance(credentials, cred_provider.TestResources):
creds = self.credentials.credentials
else:
creds = self.credentials
# Creates an auth provider for the credentials
self.auth_provider = get_auth_provider(creds)
# FIXME(andreaf) unused
self.client_attr_names = []
def get_auth_provider_class(credentials):
if isinstance(credentials, auth.KeystoneV3Credentials):
return auth.KeystoneV3AuthProvider, CONF.identity.uri_v3
else:
return auth.KeystoneV2AuthProvider, CONF.identity.uri
def get_auth_provider(credentials):
default_params = {
'disable_ssl_certificate_validation':
CONF.identity.disable_ssl_certificate_validation,
'ca_certs': CONF.identity.ca_certificates_file,
'trace_requests': CONF.debug.trace_requests
}
if credentials is None:
raise exceptions.InvalidCredentials(
'Credentials must be specified')
auth_provider_class, auth_url = get_auth_provider_class(
credentials)
return auth_provider_class(credentials, auth_url, **default_params)
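# Usage sketch (illustrative, not part of the original module): build an
# auth provider from the configured default user credentials.
#
#   creds = cred_provider.get_configured_credentials('user')
#   provider = get_auth_provider(creds)
#   # provider is a KeystoneV2AuthProvider or KeystoneV3AuthProvider,
#   # depending on the credential type.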
|
fengbeihong/tempest_automate_ironic
|
tempest/manager.py
|
Python
|
apache-2.0
| 2,949
|
from django.shortcuts import render_to_response
from django.core.context_processors import csrf
from django.conf import settings
def my_render(request, template, context=None):
# Avoid a shared mutable default argument across calls.
if context is None:
context = {}
context.update(csrf(request))
context['STATIC_URL'] = settings.STATIC_URL
context['flash'] = request.get_flash()
context['user'] = request.user
context['user_perfil'] = request.get_perfil()
context['credenciales'] = set(request.get_credenciales())
context['settings'] = settings
return render_to_response(template, context)
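# Usage sketch (illustrative; the view and template names are hypothetical):
#
#   def home(request):
#       return my_render(request, 'home.html', {'titulo': 'Inicio'})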
|
MERegistro/meregistro
|
meregistro/shortcuts.py
|
Python
|
bsd-3-clause
| 533
|
def hash_index_terms(filepath):
index_terms = {}
with open(filepath) as index_dict:
while True:
pos = index_dict.tell()
try:
term, docs = index_dict.readline().split(' ')
except ValueError: # nothing left to split: EOF
break
else:
num_docs = len(docs.split(','))
index_terms[term] = (pos, num_docs)
return index_terms
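# Illustrative example (assumed on-disk format, one 'term doc1,doc2,...' pair
# per line): for an index file containing
#   apple 1,5,9
#   banana 2
# hash_index_terms(path) returns {'apple': (0, 3), 'banana': (12, 1)},
# i.e. each term mapped to (byte offset of its line, number of documents).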
|
moritzschaefer/the-search-engine
|
tsg/ranker/hasher.py
|
Python
|
mit
| 453
|
# -*- coding: utf-8 -*-
# MouseTrap
#
# Copyright 2009 Flavio Percoco Premoli
#
# This file is part of mouseTrap.
#
# MouseTrap is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License v2 as published
# by the Free Software Foundation.
#
# mouseTrap is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with mouseTrap. If not, see <http://www.gnu.org/licenses/>.
"""MouseTrap's main script."""
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2008 Flavio Percoco Premoli"
__license__ = "GPLv2"
####################### TAKEN FROM ORCA'S CODE ###################
# We're going to force the name of the app to "mousetrap" so pygtk
# will end up showing us as "mousetrap" to the AT-SPI. If we don't
# do this, the name can end up being "-c". See Orca's bug 364452 at
# http://bugzilla.gnome.org/show_bug.cgi?id=364452 for more
# information.
import sys
sys.argv[0] = "mousetrap"
from gi.repository import GObject
import debug
import getopt
import environment as env
from mousetrap.ocvfw import pocv
from .ui.i18n import _
from .ui.main import MainGui
from .ui.scripts import get_script_class
from .lib import httpd, dbusd, settings
class Controller():
"""
MouseTrap's Controller Class
"""
def __init__(self):
"""
The MouseTrap controller init class
Arguments:
- self: The main object pointer.
"""
# We don't want to load the settings each time we need them. do we?
self.cfg = None
self.loop = GObject.MainLoop()
self.httpd = httpd.HttpdServer(20433)
self.dbusd = dbusd.DbusServer()
def start(self):
"""
Starts the modules, views classes.
Arguments:
- self: The main object pointer.
"""
conf_created = False
if self.cfg is None:
conf_created, self.cfg = settings.load()
self.proc_args()
if not self.dbusd.start():
self.httpd.start()
if self.cfg.getboolean("main", "startCam"):
# Lets start the module
idm = pocv.get_idm(self.cfg.get("main", "algorithm"))
self.idm = idm.Module(self)
self.idm.set_capture(self.cfg.getint("cam", "inputDevIndex"))
#Will return false when cap.image() is false in ui/main
GObject.timeout_add(500, self.update_frame) #Thread that updates the image on the screen
GObject.timeout_add(50, self.update_pointers) #Thread that moves the mouse
debug.info("mousetrap", "Idm loaded and started")
# Lets build the interface
self.itf = MainGui(self)
self.itf.build_interface()
self.itf.load_addons()
if conf_created:
from .ui import settings_gui
settings_gui.showPreffGui(self)
debug.info("mousetrap", "MouseTrap's Interface Built and Loaded")
GObject.threads_init()
self.loop.run()
def proc_args(self):
"""
Process the startup flags
Arguments:
- self: The main object pointer.
"""
arguments = sys.argv[1:]
if len(arguments) == 1:
arguments = arguments[0].split()
env.flags = dict((key[0], {"section" : sec}) for sec in self.cfg.sections()
for key in self.cfg.items(sec))
try:
# ? / h for help
# v for version
# e for enable
# d for disable
# s for set (non-boolean options)
opts, args = getopt.getopt(
arguments,
"?hve:d:s:",
["help",
"version",
"enable=",
"disable=",
"set="])
for opt, val in opts:
key = False
# This will change the default video device input
if opt in ("-s", "--set"):
key, value = val.strip().split("-")
if opt in ("-e", "--enable"):
key, value = [val.strip(), "True"]
if opt in ("-d", "--disable"):
key, value = [val.strip(), "False"]
if key in env.flags:
self.cfg.set(env.flags[key]["section"], key, value)
elif key:
self.usage()
self.quit(2)
if opt in ("-v", "--version"):
print(env.version)
self.quit(0)
# This will show the usage of mouseTrap
if opt in ("-?", "-h", "--help"):
self.usage()
self.quit(0)
except getopt.GetoptError as err:
print(str(err))
self.usage()
self.quit(2)
pass
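# Illustrative invocations (derived from the flags handled above):
#   mousetrap --enable=cam          # force-enable the camera module
#   mousetrap --set=inputDevIndex-1 # set a non-boolean option to 1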
def usage(self):
"""
Prints the usage
Arguments:
- self: The main object pointer
"""
print(( _("Usage: mouseTrap [OPTION...]")))
# '-?, --help' that is used to display usage information.
#
print(( "-?, -h, --help " + \
_(" Show this help message")))
# Option:
# '-i' that is used to set the input camera index. E.g: -i 0
print(( "-s, --set " + \
_(" Sets new value to Non Boolean options E.g -s inputDevIndex-1")))
# Options:
# -e, --enable Allow the users to enable modules not permantly
print(( "-e, --enable=[" \
+ "main-window" + "|" \
+ "cam") + "]")
print(( _(" Enable the selected options")))
# Options:
# -d, --disable Allow the users to disable modules not permanently.
print(( "-d, --disable=[" \
+ "main-window" + "|" \
+ "cam" + "]"))
print(( _(" Disable the selected options")))
# Options:
# -t --timeout To change the mouse timeout not permanently.
print(( "-v, --version " + \
_(" Shows mouseTrap version")))
print(( _("\nReport bugs to flaper87@flaper87.org")))
def script(self):
"""
Returns the main script class object.
Arguments:
- self: The main object pointer.
"""
return get_script_class(self.cfg.get("scripts", "name"))()
def update_frame(self):
"""
Updates the User Interface frame with the latest capture.
Arguments:
- self: The main object pointer.
"""
self.itf.update_frame(self.idm.get_capture(), self.idm.get_pointer())
return True
def update_pointers(self):
"""
Gets the new mouse pointer position based on the last calculations.
Arguments:
- self: The main object pointer.
"""
self.itf.script.update_items(self.idm.get_pointer())
return True
def quit(self, exitcode=1):
"""
Quits mouseTrap and all its process
Arguments:
- self: The main object pointer.
- exitcode: The exitcode number. It helps to handle some quit events.
"""
sys.exit(exitcode)
|
lhotchkiss/mousetrap
|
src/mousetrap/app/main.py
|
Python
|
gpl-2.0
| 7,724
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains model definitions for versions of the Oxford VGG network.
These model definitions were introduced in the following technical report:
Very Deep Convolutional Networks For Large-Scale Image Recognition
Karen Simonyan and Andrew Zisserman
arXiv technical report, 2015
PDF: http://arxiv.org/pdf/1409.1556.pdf
ILSVRC 2014 Slides: http://www.robots.ox.ac.uk/~karen/pdf/ILSVRC_2014.pdf
CC-BY-4.0
More information can be obtained from the VGG website:
www.robots.ox.ac.uk/~vgg/research/very_deep/
Usage:
with slim.arg_scope(vgg.vgg_arg_scope()):
outputs, end_points = vgg.vgg_a(inputs)
with slim.arg_scope(vgg.vgg_arg_scope()):
outputs, end_points = vgg.vgg_16(inputs)
@@vgg_a
@@vgg_16
@@vgg_19
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
slim = tf.contrib.slim
def vgg_arg_scope(weight_decay=0.0005):
"""Defines the VGG arg scope.
Args:
weight_decay: The l2 regularization coefficient.
Returns:
An arg_scope.
"""
with slim.arg_scope([slim.conv2d, slim.fully_connected],
activation_fn=tf.nn.relu,
weights_regularizer=slim.l2_regularizer(weight_decay),
biases_initializer=tf.zeros_initializer()):
with slim.arg_scope([slim.conv2d], padding='SAME') as arg_sc:
return arg_sc
def vgg_base_arg_scope(weight_decay=0.0005, data_format='NHWC'):
with slim.arg_scope([slim.conv2d, slim.fully_connected],
activation_fn=tf.nn.relu,
weights_regularizer=slim.l2_regularizer(weight_decay),
weights_initializer=tf.contrib.layers.xavier_initializer(),
biases_initializer=tf.zeros_initializer()):
with slim.arg_scope([slim.conv2d, slim.max_pool2d],
padding='SAME',
data_format=data_format) as sc:
return sc
def vgg_a_base(inputs, scope='vgg_a'):
with tf.variable_scope(scope, 'vgg_a', [inputs]) as sc:
end_points_collection = sc.name + '_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.max_pool2d],
outputs_collections=end_points_collection):
net = slim.repeat(inputs, 1, slim.conv2d, 64, [3, 3], scope='conv1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.repeat(net, 1, slim.conv2d, 128, [3, 3], scope='conv2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.repeat(net, 2, slim.conv2d, 256, [3, 3], scope='conv3')
net = slim.max_pool2d(net, [2, 2], scope='pool3')
net = slim.repeat(net, 2, slim.conv2d, 512, [3, 3], scope='conv4')
net = slim.max_pool2d(net, [2, 2], scope='pool4')
net = slim.repeat(net, 2, slim.conv2d, 512, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [2, 2], scope='pool5')
# Convert end_points_collection into an end_point dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
return net, end_points
def vgg_a(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
scope='vgg_a',
fc_conv_padding='VALID'):
"""Oxford Net VGG 11-Layers version A Example.
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
spatial_squeeze: whether or not the spatial dimensions of the outputs
should be squeezed. Useful to remove unnecessary dimensions for classification.
scope: Optional scope for the variables.
fc_conv_padding: the type of padding to use for the fully connected layer
that is implemented as a convolutional layer. Use 'SAME' padding if you
are applying the network in a fully convolutional manner and want to
get a prediction map downsampled by a factor of 32 as an output.
Otherwise, the output prediction map will be (input / 32) - 6 in case of
'VALID' padding.
Returns:
the last op containing the log predictions and end_points dict.
"""
with tf.variable_scope(scope, 'vgg_a', [inputs]) as sc:
end_points_collection = sc.name + '_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.max_pool2d],
outputs_collections=end_points_collection):
net = slim.repeat(inputs, 1, slim.conv2d, 64, [3, 3], scope='conv1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.repeat(net, 1, slim.conv2d, 128, [3, 3], scope='conv2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.repeat(net, 2, slim.conv2d, 256, [3, 3], scope='conv3')
net = slim.max_pool2d(net, [2, 2], scope='pool3')
net = slim.repeat(net, 2, slim.conv2d, 512, [3, 3], scope='conv4')
net = slim.max_pool2d(net, [2, 2], scope='pool4')
net = slim.repeat(net, 2, slim.conv2d, 512, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [2, 2], scope='pool5')
# Use conv2d instead of fully_connected layers.
net = slim.conv2d(net, 4096, [7, 7], padding=fc_conv_padding, scope='fc6')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout6')
net = slim.conv2d(net, 4096, [1, 1], scope='fc7')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout7')
net = slim.conv2d(net, num_classes, [1, 1],
activation_fn=None,
normalizer_fn=None,
scope='fc8')
# Convert end_points_collection into an end_point dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
if spatial_squeeze:
net = tf.squeeze(net, [1, 2], name='fc8/squeezed')
end_points[sc.name + '/fc8'] = net
return net, end_points
vgg_a.default_image_size = 224
def vgg_16_base(inputs, scope='vgg_16'):
with tf.variable_scope(scope, 'vgg_16', [inputs]) as sc:
end_points_collection = sc.name + '_end_points'
with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d],
outputs_collections=end_points_collection):
net = slim.repeat(inputs, 2, slim.conv2d, 64, [3, 3], scope='conv1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.repeat(net, 2, slim.conv2d, 128, [3, 3], scope='conv2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.repeat(net, 3, slim.conv2d, 256, [3, 3], scope='conv3')
net = slim.max_pool2d(net, [2, 2], scope='pool3')
net = slim.repeat(net, 3, slim.conv2d, 512, [3, 3], scope='conv4')
net = slim.max_pool2d(net, [2, 2], scope='pool4')
net = slim.repeat(net, 3, slim.conv2d, 512, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [3, 3], stride=1, scope='pool5')
# Convert end_points_collection into an end_point dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
return net, end_points
def vgg_16(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
scope='vgg_16',
fc_conv_padding='VALID'):
"""Oxford Net VGG 16-Layers version D Example.
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
spatial_squeeze: whether or not the spatial dimensions of the outputs
should be squeezed. Useful to remove unnecessary dimensions for classification.
scope: Optional scope for the variables.
fc_conv_padding: the type of padding to use for the fully connected layer
that is implemented as a convolutional layer. Use 'SAME' padding if you
are applying the network in a fully convolutional manner and want to
get a prediction map downsampled by a factor of 32 as an output.
Otherwise, the output prediction map will be (input / 32) - 6 in case of
'VALID' padding.
Returns:
the last op containing the log predictions and end_points dict.
"""
with tf.variable_scope(scope, 'vgg_16', [inputs]) as sc:
end_points_collection = sc.name + '_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d],
outputs_collections=end_points_collection):
net = slim.repeat(inputs, 2, slim.conv2d, 64, [3, 3], scope='conv1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.repeat(net, 2, slim.conv2d, 128, [3, 3], scope='conv2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.repeat(net, 3, slim.conv2d, 256, [3, 3], scope='conv3')
net = slim.max_pool2d(net, [2, 2], scope='pool3')
net = slim.repeat(net, 3, slim.conv2d, 512, [3, 3], scope='conv4')
net = slim.max_pool2d(net, [2, 2], scope='pool4')
net = slim.repeat(net, 3, slim.conv2d, 512, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [2, 2], scope='pool5')
# Use conv2d instead of fully_connected layers.
net = slim.conv2d(net, 4096, [7, 7], padding=fc_conv_padding, scope='fc6')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout6')
net = slim.conv2d(net, 4096, [1, 1], scope='fc7')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout7')
net = slim.conv2d(net, num_classes, [1, 1],
activation_fn=None,
normalizer_fn=None,
scope='fc8')
# Convert end_points_collection into an end_point dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
if spatial_squeeze:
net = tf.squeeze(net, [1, 2], name='fc8/squeezed')
end_points[sc.name + '/fc8'] = net
return net, end_points
vgg_16.default_image_size = 224
def vgg_19_base(inputs, scope='vgg_19'):
with tf.variable_scope(scope, 'vgg_19', [inputs]) as sc:
end_points_collection = sc.name + '_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d],
outputs_collections=end_points_collection):
net = slim.repeat(inputs, 2, slim.conv2d, 64, [3, 3], scope='conv1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.repeat(net, 2, slim.conv2d, 128, [3, 3], scope='conv2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.repeat(net, 4, slim.conv2d, 256, [3, 3], scope='conv3')
net = slim.max_pool2d(net, [2, 2], scope='pool3')
net = slim.repeat(net, 4, slim.conv2d, 512, [3, 3], scope='conv4')
net = slim.max_pool2d(net, [2, 2], scope='pool4')
net = slim.repeat(net, 4, slim.conv2d, 512, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [2, 2], scope='pool5')
# Convert end_points_collection into an end_point dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
return net, end_points
def vgg_19(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
scope='vgg_19',
fc_conv_padding='VALID'):
"""Oxford Net VGG 19-Layers version E Example.
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
spatial_squeeze: whether or not the spatial dimensions of the outputs
should be squeezed. Useful to remove unnecessary dimensions for classification.
scope: Optional scope for the variables.
fc_conv_padding: the type of padding to use for the fully connected layer
that is implemented as a convolutional layer. Use 'SAME' padding if you
are applying the network in a fully convolutional manner and want to
get a prediction map downsampled by a factor of 32 as an output.
Otherwise, the output prediction map will be (input / 32) - 6 in case of
'VALID' padding.
Returns:
the last op containing the log predictions and end_points dict.
"""
with tf.variable_scope(scope, 'vgg_19', [inputs]) as sc:
end_points_collection = sc.name + '_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d],
outputs_collections=end_points_collection):
net = slim.repeat(inputs, 2, slim.conv2d, 64, [3, 3], scope='conv1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.repeat(net, 2, slim.conv2d, 128, [3, 3], scope='conv2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.repeat(net, 4, slim.conv2d, 256, [3, 3], scope='conv3')
net = slim.max_pool2d(net, [2, 2], scope='pool3')
net = slim.repeat(net, 4, slim.conv2d, 512, [3, 3], scope='conv4')
net = slim.max_pool2d(net, [2, 2], scope='pool4')
net = slim.repeat(net, 4, slim.conv2d, 512, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [2, 2], scope='pool5')
# Use conv2d instead of fully_connected layers.
net = slim.conv2d(net, 4096, [7, 7], padding=fc_conv_padding, scope='fc6')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout6')
net = slim.conv2d(net, 4096, [1, 1], scope='fc7')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout7')
net = slim.conv2d(net, num_classes, [1, 1],
activation_fn=None,
normalizer_fn=None,
scope='fc8')
# Convert end_points_collection into an end_points dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
if spatial_squeeze:
net = tf.squeeze(net, [1, 2], name='fc8/squeezed')
end_points[sc.name + '/fc8'] = net
return net, end_points
vgg_19.default_image_size = 224
# Alias
vgg_d = vgg_16
vgg_e = vgg_19
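# The aliases follow the configuration letters of the original VGG paper
# (Simonyan & Zisserman, 2014): "D" is the 16-layer net and "E" the
# 19-layer net.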
| fisheess/modular_SSD_tensorflow | nets/vgg.py | Python | mit | 17,014 |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for custom training loops that involves advanced optimizer usage."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.python.distribute import combinations as ds_combinations
from tensorflow.python.distribute import strategy_combinations
from tensorflow.python.distribute import values
from tensorflow.python.eager import def_function
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_combinations as combinations
from tensorflow.python.keras.optimizer_v2 import gradient_descent
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class OptimizerTest(test.TestCase, parameterized.TestCase):
@ds_combinations.generate(
combinations.times(
combinations.combine(
distribution=strategy_combinations.multidevice_strategies,
mode=["eager"],
),
combinations.combine(
experimental_aggregate_gradients=True,
expected=[[[-0.3, -0.3], [-0.3, -0.3]]]) +
combinations.combine(
experimental_aggregate_gradients=False,
expected=[[[-0.1, -0.1], [-0.2, -0.2]]])
))
def test_custom_aggregation(self, distribution,
experimental_aggregate_gradients, expected):
with distribution.scope():
v = variables.Variable([0., 0.])
optimizer = gradient_descent.SGD(0.1)
@def_function.function
def optimize():
grads = values.PerReplica([
ops.convert_to_tensor_v2_with_dispatch([1., 1.]),
ops.convert_to_tensor_v2_with_dispatch([2., 2.]),
])
def step_fn(grads):
optimizer.apply_gradients(
[(grads, v)],
experimental_aggregate_gradients=experimental_aggregate_gradients)
return v.read_value()
return distribution.experimental_local_results(
distribution.run(step_fn, args=(grads,)))
self.assertAllClose(optimize(), expected)
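# Why these expectations hold (illustrative note): with SGD(0.1) and
# experimental_aggregate_gradients=True, the per-replica gradients [1., 1.]
# and [2., 2.] are summed to [3., 3.] before the update, so every replica
# ends at v = -0.1 * 3 = -0.3; with aggregation disabled, each replica
# applies only its local gradient, yielding -0.1 and -0.2 respectively.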
@ds_combinations.generate(
combinations.combine(
distribution=strategy_combinations.one_device_strategy,
mode=["eager"],
experimental_aggregate_gradients=[True, False]))
def test_custom_aggregation_one_device(self, distribution,
experimental_aggregate_gradients):
with distribution.scope():
v = variables.Variable([0., 0.])
optimizer = gradient_descent.SGD(0.1)
@def_function.function
def optimize():
grads = ops.convert_to_tensor_v2_with_dispatch([1., 1.])
def step_fn(grads):
optimizer.apply_gradients(
[(grads, v)],
experimental_aggregate_gradients=experimental_aggregate_gradients)
return v.read_value()
return distribution.experimental_local_results(
distribution.run(step_fn, args=(grads,)))
self.assertAllClose(optimize(), [[-0.1, -0.1]])
@ds_combinations.generate(
combinations.combine(distribution=[
strategy_combinations.central_storage_strategy_with_gpu_and_cpu
]))
def test_custom_aggregation_central_storage(self, distribution):
with distribution.scope():
v = variables.Variable([0., 0.])
optimizer = gradient_descent.SGD(0.1)
grads = ops.convert_to_tensor_v2_with_dispatch([1., 1.])
def step_fn(grads):
with self.assertRaises(NotImplementedError):
optimizer.apply_gradients([(grads, v)],
experimental_aggregate_gradients=False)
return distribution.run(step_fn, args=(grads,))
if __name__ == "__main__":
test.main()
| karllessard/tensorflow | tensorflow/python/keras/distribute/custom_training_loop_optimizer_test.py | Python | apache-2.0 | 4,404 |
#!/usr/bin/env python
#
# SessionStorage.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
from .Storage import Storage
class SessionStorage(Storage):
pass
| buffer/thug | thug/DOM/SessionStorage.py | Python | gpl-2.0 | 756 |
"""Physical constants."""
import numpy as np
k_B = 1.380649e-16
"""Exact value of the Boltzmann constant in cgs units."""
h = 6.62607015e-27
"""Exact value of Planck's constant in cgs units."""
c_light = 2.99792458e10
"""Exact value of the speed of light in cgs units."""
sigma_SB = 2.*np.pi**5*k_B**4/(15.*h**3*c_light**2)
"""Exact value of the Stefan-Boltzmann constant in cgs units."""
# IAU B3
GMsun = 1.3271244e26
"""Solar standard gravitational parameter in cgs units (IAU B3)."""
Rsun = 695700e5
"""Solar radius in cgs units (IAU B3)."""
Lsun = 3.828e33
"""Solar luminosity in cgs units (IAU B3)."""
Teff_sun = (Lsun/(4.*np.pi*Rsun**2*sigma_SB))**0.25
"""Solar effective temperature in K (IAU B3)."""
# solar nu_max from text (not abstract) of Kiefer et al. (2019) [1]
# agrees with 3078 +/- 13 uHz by Lund et al. (2017) [2]
# solar Dnu from Lund et al. (2017) [2]
# [1] https://ui.adsabs.harvard.edu/abs/2018SoPh..293..151K
# [2] https://ui.adsabs.harvard.edu/abs/2017ApJ...835..172L
nu_max_sun = 3079.76 # +/- 0.17 uHz
Dnu_sun = 134.91 # +/- 0.02 uHz
# G (uncertainty)
G_CODATA_2018 = 6.67430e-8 # (15) # MESA since r12934
G_CODATA_2014 = 6.67408e-8 # (31)
G_CODATA_2010 = 6.67384e-8 # (80)
G_CODATA_2006 = 6.67428e-8 # (67) # MESA until r12934
G_CODATA_2002 = 6.6742e-8 # (10)
G_CODATA_1998 = 6.673e-8 # (10)
G_CODATA_1986 = 6.67259e-8 # (85)
G_CODATA_1973 = 6.6720e-8 # (41)
G_MODEL_S = 6.67232e-8
G_DEFAULT = G_CODATA_2018
Msun = GMsun/G_DEFAULT
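# Illustrative check (added comment): 1.3271244e26 / 6.67430e-8 is about
# 1.9884e33 g, the familiar solar mass.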
| warrickball/tomso | tomso/constants.py | Python | mit | 1,473 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'GeoSemanticsSpecification.geocode_address'
db.add_column('semanticizer_geosemanticsspecification', 'geocode_address',
self.gf('django.db.models.fields.BooleanField')(default=False),
keep_default=False)
def backwards(self, orm):
# Deleting field 'GeoSemanticsSpecification.geocode_address'
db.delete_column('semanticizer_geosemanticsspecification', 'geocode_address')
models = {
'postdoc.datamodel': {
'Meta': {'object_name': 'DataModel'},
'concept': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'container': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'contains'", 'null': 'True', 'to': "orm['postdoc.DataModel']"}),
'geo_representation': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_base': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'super': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['postdoc.DataModel']", 'null': 'True', 'blank': 'True'})
},
'postdoc.datamodelattribute': {
'Meta': {'object_name': 'DataModelAttribute'},
'data_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['postdoc.DataModel']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attributes'", 'to': "orm['postdoc.DataModel']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'semanticizer.dataset': {
'Meta': {'object_name': 'DataSet'},
'file': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'format': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['semanticizer.DataSetFormat']"}),
'format_configuration': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'refresh_period': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'semanticizer.datasetcolumn': {
'Meta': {'unique_together': "(('dataset', 'name'),)", 'object_name': 'DataSetColumn'},
'dataset': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'columns'", 'to': "orm['semanticizer.DataSet']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'semanticizer.datasetformat': {
'Meta': {'object_name': 'DataSetFormat'},
'configuration_requirements': ('django.db.models.fields.TextField', [], {}),
'geographic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_api': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'module': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
'semanticizer.geosemanticsspecification': {
'Meta': {'object_name': 'GeoSemanticsSpecification'},
'column': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'data_transformation': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'geocode_address': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_geo_x': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_geo_y': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'semantics': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'geo_associations'", 'to': "orm['semanticizer.Semantics']"})
},
'semanticizer.semantics': {
'Meta': {'object_name': 'Semantics'},
'data_model': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['postdoc.DataModel']"}),
'dataset': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'semantics'", 'to': "orm['semanticizer.DataSet']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'semanticizer.semanticsspecification': {
'Meta': {'object_name': 'SemanticsSpecification'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['postdoc.DataModelAttribute']"}),
'column': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'data_transformation': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'semantics': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'associations'", 'to': "orm['semanticizer.Semantics']"}),
'via': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'path'", 'null': 'True', 'to': "orm['semanticizer.SemanticsSpecificationPath']"})
},
'semanticizer.semanticsspecificationpath': {
'Meta': {'object_name': 'SemanticsSpecificationPath'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['postdoc.DataModelAttribute']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'next': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'previous'", 'null': 'True', 'to': "orm['semanticizer.SemanticsSpecificationPath']"})
}
}
complete_apps = ['semanticizer']
| citizennerd/VivaCity | semanticizer/migrations/0005_auto__add_field_geosemanticsspecification_geocode_address.py | Python | mit | 6,563 |
import distutils.sysconfig
import getopt
import glob
import os
import platform
import shutil
import subprocess
import stat
import sys
sys.path.append(os.path.join("..", "ScintillaEdit"))
import WidgetGen
scintillaDirectory = "../.."
scintillaScriptsDirectory = os.path.join(scintillaDirectory, "scripts")
sys.path.append(scintillaScriptsDirectory)
from FileGenerator import GenerateFile
# Decide up front which platform, treat anything other than Windows or OS X as Linux
PLAT_WINDOWS = platform.system() == "Windows"
PLAT_DARWIN = platform.system() == "Darwin"
PLAT_LINUX = not (PLAT_DARWIN or PLAT_WINDOWS)
def IsFileNewer(name1, name2):
""" Returns whether file with name1 is newer than file with name2. Returns 1
if name2 doesn't exist. """
if not os.path.exists(name1):
return 0
if not os.path.exists(name2):
return 1
mod_time1 = os.stat(name1)[stat.ST_MTIME]
mod_time2 = os.stat(name2)[stat.ST_MTIME]
return (mod_time1 > mod_time2)
def textFromRun(args):
(stdoutdata, stderrdata) = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE).communicate()
return stdoutdata
def runProgram(args, exitOnFailure):
print(" ".join(args))
retcode = subprocess.call(" ".join(args), shell=True, stderr=subprocess.STDOUT)
if retcode:
print("Failed in " + " ".join(args) + " return code = " + str(retcode))
if exitOnFailure:
sys.exit()
def usage():
print("sepbuild.py [-h|--help][-c|--clean][-u|--underscore-names]")
print("")
print("Generate PySide wappers and build them.")
print("")
print("options:")
print("")
print("-c --clean remove all object and generated files")
print("-b --pyside-base Location of the PySide+Qt4 sandbox to use")
print("-h --help display this text")
print("-d --debug=yes|no force debug build (or non-debug build)")
print("-u --underscore-names use method_names consistent with GTK+ standards")
modifyFunctionElement = """ <modify-function signature="%s">%s
</modify-function>"""
injectCode = """
<inject-code class="target" position="beginning">%s
</inject-code>"""
injectCheckN = """
if (!cppArg%d) {
PyErr_SetString(PyExc_ValueError, "Null string argument");
return 0;
}"""
def methodSignature(name, v, options):
argTypes = ""
p1Type = WidgetGen.cppAlias(v["Param1Type"])
if p1Type == "int":
p1Type = "sptr_t"
if p1Type:
argTypes = argTypes + p1Type
p2Type = WidgetGen.cppAlias(v["Param2Type"])
if p2Type == "int":
p2Type = "sptr_t"
if p2Type and v["Param2Type"] != "stringresult":
if p1Type:
argTypes = argTypes + ", "
argTypes = argTypes + p2Type
methodName = WidgetGen.normalisedName(name, options, v["FeatureType"])
constDeclarator = " const" if v["FeatureType"] == "get" else ""
return methodName + "(" + argTypes + ")" + constDeclarator
def printTypeSystemFile(f, options):
out = []
for name in f.order:
v = f.features[name]
if v["Category"] != "Deprecated":
feat = v["FeatureType"]
if feat in ["fun", "get", "set"]:
checks = ""
if v["Param1Type"] == "string":
checks = checks + (injectCheckN % 0)
if v["Param2Type"] == "string":
if v["Param1Type"] == "": # Only arg 2 -> treat as first
checks = checks + (injectCheckN % 0)
else:
checks = checks + (injectCheckN % 1)
if checks:
inject = injectCode % checks
out.append(modifyFunctionElement % (methodSignature(name, v, options), inject))
#if v["Param1Type"] == "string":
# out.append("<string-xml>" + name + "</string-xml>\n")
return out
def doubleBackSlashes(s):
# Quote backslashes so qmake does not produce warnings
return s.replace("\\", "\\\\")
class SepBuilder:
def __init__(self):
# Discover configuration parameters
self.ScintillaEditIncludes = [".", "../ScintillaEdit", "../ScintillaEditBase", "../../include"]
if PLAT_WINDOWS:
self.MakeCommand = "nmake"
self.MakeTarget = "release"
else:
self.MakeCommand = "make"
self.MakeTarget = ""
if PLAT_DARWIN:
self.QMakeOptions = "-spec macx-g++"
else:
self.QMakeOptions = ""
# Default to debug build if running in a debug build interpreter
self.DebugBuild = hasattr(sys, 'getobjects')
# Python
self.PyVersion = "%d.%d" % sys.version_info[:2]
self.PyVersionSuffix = distutils.sysconfig.get_config_var("VERSION")
self.PyIncludes = distutils.sysconfig.get_python_inc()
self.PyPrefix = distutils.sysconfig.get_config_var("prefix")
self.PyLibDir = distutils.sysconfig.get_config_var(
("LIBDEST" if sys.platform == 'win32' else "LIBDIR"))
# Scintilla
with open("../../version.txt") as f:
version = f.read()
self.ScintillaVersion = version[0] + '.' + version[1] + '.' + version[2]
# Find out what qmake is called
self.QMakeCommand = "qmake"
if not PLAT_WINDOWS:
# On Unix, qmake may not be present but qmake-qt4 may be, so check
pathToQMake = textFromRun("which qmake-qt4 || which qmake").rstrip()
self.QMakeCommand = os.path.basename(pathToQMake)
# Qt default location from qmake
self._SetQtIncludeBase(textFromRun(self.QMakeCommand + " -query QT_INSTALL_HEADERS").rstrip())
# PySide default location
# No standard for installing PySide development headers and libs on Windows so
# choose /usr to be like Linux
self._setPySideBase('\\usr' if PLAT_WINDOWS else '/usr')
self.ProInclude = "sepbuild.pri"
self.qtStyleInterface = True
def _setPySideBase(self, base):
self.PySideBase = base
if PLAT_LINUX:
self.PySideTypeSystem = textFromRun("pkg-config --variable=typesystemdir pyside").rstrip()
self.PySideIncludeBase = textFromRun("pkg-config --variable=includedir pyside").rstrip()
self.ShibokenIncludeBase = textFromRun("pkg-config --variable=includedir shiboken").rstrip()
else:
self.PySideTypeSystem = os.path.join(self.PySideBase, "share", "PySide", "typesystems")
self.ShibokenIncludeBase = os.path.join(self.PySideBase, "include", "shiboken")
self.PySideIncludeBase = os.path.join(self.PySideBase, "include", "PySide")
self.PySideIncludes = [
self.ShibokenIncludeBase,
self.PySideIncludeBase,
os.path.join(self.PySideIncludeBase, "QtCore"),
os.path.join(self.PySideIncludeBase, "QtGui")]
self.PySideLibDir = os.path.join(self.PySideBase, "lib")
self.AllIncludes = os.pathsep.join(self.QtIncludes + self.ScintillaEditIncludes + self.PySideIncludes)
self.ShibokenGenerator = "shiboken"
# Is this still needed? It doesn't work with latest shiboken sources
#if PLAT_DARWIN:
# # On OS X, can not automatically find Shiboken dylib so provide a full path
# self.ShibokenGenerator = os.path.join(self.PySideLibDir, "generatorrunner", "shiboken")
def generateAPI(self, args):
os.chdir(os.path.join("..", "ScintillaEdit"))
if not self.qtStyleInterface:
args.insert(0, '--underscore-names')
WidgetGen.main(args)
f = WidgetGen.readInterface(False)
os.chdir(os.path.join("..", "ScintillaEditPy"))
options = {"qtStyle": self.qtStyleInterface}
GenerateFile("typesystem_ScintillaEdit.xml.template", "typesystem_ScintillaEdit.xml",
"<!-- ", True, printTypeSystemFile(f, options))
def runGenerator(self):
generatorrunner = "shiboken"
for name in ('shiboken', 'generatorrunner'):
if PLAT_WINDOWS:
name += '.exe'
name = os.path.join(self.PySideBase, "bin", name)
if os.path.exists(name):
generatorrunner = name
break
args = [
generatorrunner,
"--generator-set=" + self.ShibokenGenerator,
"global.h ",
"--avoid-protected-hack",
"--enable-pyside-extensions",
"--include-paths=" + self.AllIncludes,
"--typesystem-paths=" + self.PySideTypeSystem,
"--output-directory=.",
"typesystem_ScintillaEdit.xml"]
print(" ".join(args))
retcode = subprocess.call(" ".join(args), shell=True, stderr=subprocess.STDOUT)
if retcode:
print("Failed in generatorrunner", retcode)
sys.exit()
def writeVariables(self):
# Write the needed variables into a file included by the project so it does not have to discover much
with open(self.ProInclude, "w") as f:
f.write("SCINTILLA_VERSION=" + self.ScintillaVersion + "\n")
f.write("PY_VERSION=" + self.PyVersion + "\n")
f.write("PY_VERSION_SUFFIX=" + self.PyVersionSuffix + "\n")
f.write("PY_PREFIX=" + doubleBackSlashes(self.PyPrefix) + "\n")
f.write("PY_INCLUDES=" + doubleBackSlashes(self.PyIncludes) + "\n")
f.write("PY_LIBDIR=" + doubleBackSlashes(self.PyLibDir) + "\n")
f.write("PYSIDE_INCLUDES=" + doubleBackSlashes(self.PySideIncludeBase) + "\n")
f.write("PYSIDE_LIB=" + doubleBackSlashes(self.PySideLibDir) + "\n")
f.write("SHIBOKEN_INCLUDES=" + doubleBackSlashes(self.ShibokenIncludeBase) + "\n")
if self.DebugBuild:
f.write("CONFIG += debug\n")
else:
f.write("CONFIG += release\n")
def make(self):
runProgram([self.QMakeCommand, self.QMakeOptions], exitOnFailure=True)
runProgram([self.MakeCommand, self.MakeTarget], exitOnFailure=True)
def cleanEverything(self):
self.generateAPI(["--clean"])
runProgram([self.MakeCommand, "distclean"], exitOnFailure=False)
filesToRemove = [self.ProInclude, "typesystem_ScintillaEdit.xml",
"../../bin/ScintillaEditPy.so", "../../bin/ScintillaConstants.py"]
for file in filesToRemove:
try:
os.remove(file)
except OSError:
pass
for logFile in glob.glob("*.log"):
try:
os.remove(logFile)
except OSError:
pass
shutil.rmtree("debug", ignore_errors=True)
shutil.rmtree("release", ignore_errors=True)
shutil.rmtree("ScintillaEditPy", ignore_errors=True)
def buildEverything(self):
cleanGenerated = False
opts, args = getopt.getopt(sys.argv[1:], "hcdub",
["help", "clean", "debug=",
"underscore-names", "pyside-base="])
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
elif opt in ("-c", "--clean"):
cleanGenerated = True
elif opt in ("-d", "--debug"):
self.DebugBuild = (arg == '' or arg.lower() == 'yes')
if self.DebugBuild and sys.platform == 'win32':
self.MakeTarget = 'debug'
elif opt in ("-b", '--pyside-base'):
self._SetQtIncludeBase(os.path.join(os.path.normpath(arg), 'include'))
self._setPySideBase(os.path.normpath(arg))
elif opt in ("-u", "--underscore-names"):
self.qtStyleInterface = False
if cleanGenerated:
self.cleanEverything()
else:
self.writeVariables()
self.generateAPI([""])
self.runGenerator()
self.make()
self.copyScintillaConstants()
def copyScintillaConstants(self):
orig = 'ScintillaConstants.py'
dest = '../../bin/' + orig
if IsFileNewer(dest, orig):
return
f = open(orig, 'r')
contents = f.read()
f.close()
f = open(dest, 'w')
f.write(contents)
f.close()
def _SetQtIncludeBase(self, base):
self.QtIncludeBase = base
self.QtIncludes = [self.QtIncludeBase] + [os.path.join(self.QtIncludeBase, sub) for sub in ["QtCore", "QtGui"]]
# Set path so correct qmake is found
path = os.environ.get('PATH', '').split(os.pathsep)
qt_bin_dir = os.path.join(os.path.dirname(base), 'bin')
if qt_bin_dir not in path:
path.insert(0, qt_bin_dir)
os.environ['PATH'] = os.pathsep.join(path)
if __name__ == "__main__":
sepBuild = SepBuilder()
sepBuild.buildEverything()
| dmpas/e8-scintilla-patch | qt/ScintillaEditPy/sepbuild.py | Python | lgpl-3.0 | 11,490 |
from openerp import api, models, fields, SUPERUSER_ID
class product_template(models.Model):
_name = 'product.template'
_inherit = ['product.template', 'website_seo_url']
seo_url = fields.Char('SEO URL', translate=True, index=True)
| bmya/website-addons | website_seo_url_product/models.py | Python | lgpl-3.0 | 246 |
from __future__ import absolute_import
"""Midi processing for segmented midi taggers.
"""
"""
============================== License ========================================
Copyright (C) 2008, 2010-12 University of Edinburgh, Mark Granroth-Wilding
This file is part of The Jazz Parser.
The Jazz Parser is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The Jazz Parser is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with The Jazz Parser. If not, see <http://www.gnu.org/licenses/>.
============================ End license ======================================
"""
__author__ = "Mark Granroth-Wilding <mark.granroth-wilding@ed.ac.uk>"
from midi import NoteOnEvent
def midi_to_emission_stream(segmidi, metric=True, remove_empty=True, unique_notes=False):
"""
Get a list of emissions from the midi stream's note on events.
Returns a 2-tuple of the list of emissions and their
corresponding start times in midi ticks.
@type segmidi: L{jazzparser.data.input.SegmentedMidiInput}
@param segmidi: midi input
@type metric: bool
@param metric: include a metrical model. Each observation has a metrical
value associated with it. If this is False, the value will always be 0
@type remove_empty: bool
@param remove_empty: remove any chunks that have no observations in them
(default True)
@type unique_notes: bool
@param unique_notes: remove duplicate (pitch class, rhythm) pairs within each
chunk, collapsing octave doublings (default False)
"""
chunks = []
start_times = []
tick_unit = segmidi.tick_unit
for segment in segmidi:
segment_start = segment.segment_start
start_times.append(segment_start)
note_ons = [ev for ev in segment.trackpool if isinstance(ev, NoteOnEvent)]
# Produce an observation for every event
chunk = []
for ev in note_ons:
if metric:
# Compute the metrical prominence value
bar_time = ev.tick - segment_start
if bar_time == 0:
rhythm = 0
elif bar_time == tick_unit/2:
rhythm = 1
elif bar_time == tick_unit/4 or bar_time == tick_unit*3/4:
rhythm = 2
else:
rhythm = 3
else:
# No metrical values: always 0
rhythm = 0
pc = ev.pitch % 12
chunk.append((pc, rhythm))
chunks.append(chunk)
# Get rid of duplicate values in the chunks (octaves)
if unique_notes:
chunks = [list(set(c)) for c in chunks]
if remove_empty:
# Remove chunks that have no observations in them
chunks = [ems for ems in chunks if len(ems) > 0]
chunks_times = zip(chunks, start_times)
# Return a tuple of the chunks and the start times
return zip(*chunks_times)
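# Minimal usage sketch (illustrative; assumes `segmidi` is a
# jazzparser.data.input.SegmentedMidiInput instance):
#   emissions, start_times = midi_to_emission_stream(segmidi, metric=True)
# Each emission is a list of (pitch_class, metrical_prominence) pairs, where
# the prominence is 0 on the segment start, 1 halfway through the tick unit,
# 2 on the quarter positions and 3 elsewhere.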
| markgw/jazzparser | src/jazzparser/taggers/segmidi/midi.py | Python | gpl-3.0 | 3,221 |
# -*- coding: utf-8 -*-
################################################################################
## Form generated from reading UI file 'queue_overview_form.ui'
##
## Created by: Qt User Interface Compiler version 5.15.0
##
## WARNING! All changes made in this file will be lost when recompiling UI file!
################################################################################
from Qt.QtCore import (QCoreApplication, QDate, QDateTime, QMetaObject,
QObject, QPoint, QRect, QSize, QTime, QUrl, Qt)
from Qt.QtGui import (QBrush, QColor, QConicalGradient, QCursor, QFont,
QFontDatabase, QIcon, QKeySequence, QLinearGradient, QPalette, QPainter,
QPixmap, QRadialGradient)
from Qt.QtWidgets import *
class Ui_Form(object):
def setupUi(self, Form):
if not Form.objectName():
Form.setObjectName(u"Form")
Form.resize(449, 86)
self.verticalLayout = QVBoxLayout(Form)
self.verticalLayout.setObjectName(u"verticalLayout")
self.label = QLabel(Form)
self.label.setObjectName(u"label")
self.label.setAlignment(Qt.AlignCenter)
self.verticalLayout.addWidget(self.label)
self.horizontalLayout = QHBoxLayout()
self.horizontalLayout.setObjectName(u"horizontalLayout")
self.lapsed_time_label = QLabel(Form)
self.lapsed_time_label.setObjectName(u"lapsed_time_label")
self.horizontalLayout.addWidget(self.lapsed_time_label)
self.horizontalSpacer = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
self.horizontalLayout.addItem(self.horizontalSpacer)
self.estimated_time_label = QLabel(Form)
self.estimated_time_label.setObjectName(u"estimated_time_label")
self.horizontalLayout.addWidget(self.estimated_time_label)
self.verticalLayout.addLayout(self.horizontalLayout)
self.general_progress = QProgressBar(Form)
self.general_progress.setObjectName(u"general_progress")
self.general_progress.setValue(24)
self.general_progress.setAlignment(Qt.AlignCenter)
self.verticalLayout.addWidget(self.general_progress)
self.retranslateUi(Form)
QMetaObject.connectSlotsByName(Form)
# setupUi
def retranslateUi(self, Form):
Form.setWindowTitle(QCoreApplication.translate("Form", u"Form", None))
self.label.setText(QCoreApplication.translate("Form", u"Overall Progress", None))
self.lapsed_time_label.setText(QCoreApplication.translate("Form", u"Lapsed time: 00:00:00", None))
self.estimated_time_label.setText(QCoreApplication.translate("Form", u"Estimated time remaining: 00:00:00", None))
# retranslateUi
| hasielhassan/AgnosticQueue | python/ui/queue_overview_form.py | Python | mit | 2,703 |
from django.conf.urls import include, url
from django.contrib.auth import views as auth_views
from rest_framework import routers
from . import views
from linky import settings
router = routers.DefaultRouter()
router.register(r'users', views.UserViewSet)
router.register(r'links', views.LinkViewSet, base_name='links')
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^rest-auth/', include('rest_auth.urls')),
url(r'^rest-auth/registration/', include('rest_auth.registration.urls')),
url(r'^reset/confirm/(?P<uidb64>[0-9A-Za-z]+)-(?P<token>.+)/$', auth_views.password_reset_confirm, name='password_reset_confirm'),
]
| sbdchd/linky | backend/core/urls.py | Python | bsd-2-clause | 647 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2018: Alignak team, see AUTHORS.txt file for contributors
#
# This file is part of Alignak.
#
# Alignak is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Alignak is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Alignak. If not, see <http://www.gnu.org/licenses/>.
"""
This file contains classes and utilities for Alignak tests modules
"""
import os
import sys
import signal
import time
import string
import re
import locale
import traceback
import requests
import configparser
from six import string_types
import shutil
import psutil
import subprocess
import threading
from copy import deepcopy
import unittest2
import logging
from logging import Handler, Formatter
from logging.handlers import TimedRotatingFileHandler
import requests_mock
import alignak
from alignak.log import setup_logger, ALIGNAK_LOGGER_NAME, ColorStreamHandler, CollectorHandler
from alignak.bin.alignak_environment import AlignakConfigParser
from alignak.objects.config import Config
from alignak.objects.command import Command
from alignak.objects.module import Module
from alignak.dispatcher import Dispatcher
from alignak.scheduler import Scheduler
from alignak.macroresolver import MacroResolver
from alignak.external_command import ExternalCommandManager, ExternalCommand
from alignak.check import Check
from alignak.message import Message
from alignak.misc.serialization import serialize, unserialize
from alignak.objects.arbiterlink import ArbiterLink
from alignak.objects.schedulerlink import SchedulerLink
from alignak.objects.pollerlink import PollerLink
from alignak.objects.reactionnerlink import ReactionnerLink
from alignak.objects.brokerlink import BrokerLink
from alignak.objects.satellitelink import SatelliteLink
from alignak.notification import Notification
from alignak.modulesmanager import ModulesManager
from alignak.basemodule import BaseModule
from alignak.brok import Brok
from alignak.misc.common import DICT_MODATTR
from alignak.daemons.schedulerdaemon import Alignak
from alignak.daemons.brokerdaemon import Broker
from alignak.daemons.arbiterdaemon import Arbiter
from alignak.daemons.receiverdaemon import Receiver
class AlignakTest(unittest2.TestCase):
if sys.version_info < (2, 7):
def assertRegex(self, *args, **kwargs):
# Delegate to the pre-2.7 API; calling assertRegex here would recurse forever.
return self.assertRegexpMatches(*args, **kwargs)
def setUp(self):
"""All tests initialization:
- output test identifier
- setup test logger
- track running Alignak daemons
- output system cpu/memory
"""
self.my_pid = os.getpid()
print("\n" + self.id())
print("-" * 80)
self._launch_dir = os.getcwd()
print("Test current working directory: %s" % self._launch_dir)
# Configure Alignak logger with test configuration
logger_configuration_file = os.path.join(os.getcwd(), './etc/alignak-logger.json')
print("Logger configuration: %s" % logger_configuration_file)
# try:
# os.makedirs('/tmp/monitoring-log')
# except OSError as exp:
# pass
self.former_log_level = None
# Call with empty parameters to force log file truncation!
setup_logger(logger_configuration_file, log_dir=None, process_name='', log_file='')
self.logger_ = logging.getLogger(ALIGNAK_LOGGER_NAME)
self.logger_.warning("Test: %s", self.id())
# To make sure that no running daemon exist
print("Checking Alignak running daemons...")
running_daemons = False
for daemon in ['broker', 'poller', 'reactionner', 'receiver', 'scheduler', 'arbiter']:
for proc in psutil.process_iter():
if 'alignak' in proc.name() and daemon in proc.name():
running_daemons = True
if running_daemons:
self._stop_alignak_daemons(arbiter_only=False)
# assert False, "*** Found a running Alignak daemon: %s" % (proc.name())
print("System information:")
perfdatas = []
cpu_count = psutil.cpu_count()
perfdatas.append("'cpu_count'=%d" % cpu_count)
cpu_percents = psutil.cpu_percent(percpu=True)
cpu = 1
for percent in cpu_percents:
perfdatas.append("'cpu_%d_percent'=%.2f%%" % (cpu, percent))
cpu += 1
print("-> cpu: %s" % " ".join(perfdatas))
perfdatas = []
virtual_memory = psutil.virtual_memory()
for key in virtual_memory._fields:
if 'percent' in key:
perfdatas.append("'mem_percent_used_%s'=%.2f%%"
% (key, getattr(virtual_memory, key)))
swap_memory = psutil.swap_memory()
for key in swap_memory._fields:
if 'percent' in key:
perfdatas.append("'swap_used_%s'=%.2f%%"
% (key, getattr(swap_memory, key)))
print("-> memory: %s" % " ".join(perfdatas))
print(("-" * 80) + "\n")
def tearDown(self):
"""Test ending:
- restore initial log level if it got changed
"""
# Clear Alignak unit tests log list
logger_ = logging.getLogger(ALIGNAK_LOGGER_NAME)
for handler in logger_.handlers:
if getattr(handler, '_name', None) == 'unit_tests':
print("Log handler %s, stored %d logs" % (handler._name, len(handler.collector)))
handler.collector = []
# Restore the collector logger log level
if self.former_log_level:
handler.level = self.former_log_level
break
def set_unit_tests_logger_level(self, log_level=logging.DEBUG):
"""Set the test logger at the provided level -
useful for some tests that check debug logs
"""
# Change the logger and its handlers' log level
print("Set unit_tests logger: %s" % log_level)
logger_ = logging.getLogger(ALIGNAK_LOGGER_NAME)
logger_.setLevel(log_level)
for handler in logger_.handlers:
# print("- handler: %s" % handler)
handler.setLevel(log_level)
if getattr(handler, '_name', None) == 'unit_tests':
self.former_log_level = handler.level
handler.setLevel(log_level)
print("Unit tests handler is set at %s" % log_level)
break
def _prepare_hosts_configuration(self, cfg_folder, hosts_count=10,
target_file_name=None, realms=None):
"""Prepare the Alignak configuration
:return: the count of errors raised in the log files
"""
start = time.time()
if realms is None:
realms = ['All']
filename = cfg_folder + '/test-templates/host.tpl'
if os.path.exists(filename):
with open(filename, "r") as pattern_file:
host_pattern = pattern_file.read()
if isinstance(host_pattern, bytes):  # decode only if bytes were read (Python 2)
host_pattern = host_pattern.decode('utf-8')
else:
host_pattern = """
define host {
# Variable defined
use test-host
contact_groups admins
#hostgroups allhosts
host_name host-%s-%s
address 127.0.0.1
realm %s
}
"""
hosts = ""
hosts_set = 0
for realm in realms:
for index in range(hosts_count):
hosts = hosts + (host_pattern % (realm.lower(), index, realm)) + "\n"
hosts_set += 1
filename = os.path.join(cfg_folder, 'many_hosts_%d.cfg' % hosts_count)
if target_file_name is not None:
filename = os.path.join(cfg_folder, target_file_name)
if os.path.exists(filename):
os.remove(filename)
with open(filename, 'w') as outfile:
outfile.write(hosts)
print("Prepared a configuration with %d hosts, duration: %d seconds"
% (hosts_set, (time.time() - start)))
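# Usage sketch (illustrative; the folder path is hypothetical):
#   self._prepare_hosts_configuration('/tmp/alignak/etc', hosts_count=100)
# writes a many_hosts_100.cfg file defining hosts host-all-0 .. host-all-99
# in the default 'All' realm.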
def _prepare_configuration(self, copy=True, cfg_folder='/tmp/alignak', daemons_list=None):
if daemons_list is None:
daemons_list = ['arbiter-master', 'scheduler-master', 'broker-master',
'poller-master', 'reactionner-master', 'receiver-master']
cfg_folder = os.path.join(os.path.dirname(os.path.abspath(__file__)), cfg_folder)
# Copy the default Alignak shipped configuration to the run directory
if copy:
print("Copy default configuration (../etc) to %s..." % cfg_folder)
if os.path.exists('%s/etc' % cfg_folder):
shutil.rmtree('%s/etc' % cfg_folder)
shutil.copytree('../etc', '%s/etc' % cfg_folder)
# Load and update the configuration
for f in ['alignak.log', 'alignak-events.log']:
if os.path.exists('%s/log/%s' % (cfg_folder, f)):
os.remove('%s/log/%s' % (cfg_folder, f))
# Clean the former existing pid and log files
print("Cleaning pid and log files...")
for daemon in daemons_list:
if os.path.exists('%s/run/%s.pid' % (cfg_folder, daemon)):
print("- removing pid %s/run/%s.pid" % (cfg_folder, daemon))
os.remove('%s/run/%s.pid' % (cfg_folder, daemon))
if os.path.exists('%s/log/%s.log' % (cfg_folder, daemon)):
print("- removing log %s/log/%s.log" % (cfg_folder, daemon))
os.remove('%s/log/%s.log' % (cfg_folder, daemon))
# Update monitoring configuration parameters
files = ['%s/etc/alignak.ini' % cfg_folder,
'%s/etc/alignak.d/daemons.ini' % cfg_folder,
'%s/etc/alignak.d/modules.ini' % cfg_folder]
# Update monitoring configuration file variables
try:
cfg = configparser.ConfigParser()
cfg.read(files)
# Configuration directories
cfg.set('DEFAULT', '_dist', cfg_folder)
# Do not set a specific bin directory to use the default Alignak one
cfg.set('DEFAULT', '_dist_BIN', '')
cfg.set('DEFAULT', '_dist_ETC', '%s/etc' % cfg_folder)
cfg.set('DEFAULT', '_dist_VAR', '%s/var' % cfg_folder)
cfg.set('DEFAULT', '_dist_RUN', '%s/run' % cfg_folder)
cfg.set('DEFAULT', '_dist_LOG', '%s/log' % cfg_folder)
# Nagios legacy files
cfg.set('alignak-configuration', 'cfg', '%s/etc/alignak.cfg' % cfg_folder)
# Daemons launching and check
cfg.set('alignak-configuration', 'polling_interval', '1')
cfg.set('alignak-configuration', 'daemons_check_period', '1')
cfg.set('alignak-configuration', 'daemons_stop_timeout', '10')
cfg.set('alignak-configuration', 'daemons_start_timeout', '1')
cfg.set('alignak-configuration', 'daemons_new_conf_timeout', '1')
cfg.set('alignak-configuration', 'daemons_dispatch_timeout', '1')
# Poller/reactionner workers count limited to 1
cfg.set('alignak-configuration', 'min_workers', '1')
cfg.set('alignak-configuration', 'max_workers', '1')
with open('%s/etc/alignak.ini' % cfg_folder, "w") as modified:
cfg.write(modified)
except Exception as exp:
print("* parsing error in config file: %s" % exp)
assert False
def _files_update(self, files, replacements):
"""Update files content with the defined replacements
:param files: list of files to parse and replace
:param replacements: list of values to replace
:return:
"""
for filename in files:
lines = []
with open(filename) as infile:
for line in infile:
for src, target in replacements.items():
line = line.replace(src, target)
lines.append(line)
with open(filename, 'w') as outfile:
for line in lines:
outfile.write(line)
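# Usage sketch (illustrative; path and values are hypothetical):
#   self._files_update(['/tmp/alignak/etc/alignak.ini'],
#                      {'_dist=/usr/local/': '_dist=/tmp'})
# rewrites each listed file in place, replacing every occurrence of a source
# string with its target.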
def _stop_alignak_daemons(self, arbiter_only=True, request_stop_uri=''):
""" Stop the Alignak daemons started formerly
If request_stop is not set, the this function will try so stop the daemons with the
/stop_request API, else it will directly send a kill signal.
If some alignak- daemons are still running after the kill, force kill them.
:return: None
"""
print("Stopping the daemons...")
start = time.time()
if request_stop_uri:
req = requests.Session()
raw_data = req.get("%s/stop_request" % request_stop_uri, params={'stop_now': '1'})
data = raw_data.json()
# Let the process 20 seconds to exit
time.sleep(20)
no_daemons = True
for daemon in ['broker', 'poller', 'reactionner', 'receiver', 'scheduler', 'arbiter']:
for proc in psutil.process_iter():
try:
if daemon not in proc.name():
continue
if getattr(self, 'my_pid', None) and proc.pid == self.my_pid:
continue
print("- ***** remaining %s / %s" % (proc.name(), proc.status()))
if proc.status() == 'running':
no_daemons = False
except psutil.NoSuchProcess:
print("not existing!")
continue
except psutil.TimeoutExpired:
print("***** timeout 10 seconds, force-killing the daemon...")
# Do not assert because some processes are sometimes zombies that are
# removed by the Python GC
# assert no_daemons
return
if getattr(self, 'procs', None):
for name, proc in list(self.procs.items()):
if arbiter_only and name not in ['arbiter-master']:
continue
if getattr(self, 'my_pid', None) and proc.pid == self.my_pid:
print("- do not kill myself!")
continue
print("Asking %s (pid=%d) to end..." % (name, proc.pid))
try:
daemon_process = psutil.Process(proc.pid)
except psutil.NoSuchProcess:
print("not existing!")
continue
# children = daemon_process.children(recursive=True)
daemon_process.terminate()
try:
# The default arbiter / daemons stopping process is 30 seconds graceful ... so
# not really compatible with this default delay. The test must update the
# default delay or set a shorter delay than the default one
daemon_process.wait(10)
except psutil.TimeoutExpired:
print("***** stopping timeout 10 seconds, force-killing the daemon...")
daemon_process.kill()
except psutil.NoSuchProcess:
print("not existing!")
pass
print("%s terminated" % (name))
print("Stopping daemons duration: %d seconds" % (time.time() - start))
time.sleep(1.0)
print("Killing remaining processes...")
for daemon in ['broker', 'poller', 'reactionner', 'receiver', 'scheduler', 'arbiter']:
for proc in psutil.process_iter():
try:
if daemon not in proc.name():
continue
if getattr(self, 'my_pid', None) and proc.pid == self.my_pid:
continue
print("- killing %s" % (proc.name()))
daemon_process = psutil.Process(proc.pid)
daemon_process.terminate()
daemon_process.wait(10)
except psutil.AccessDenied:
print("-> access denied...")
continue
except psutil.NoSuchProcess:
print("-> not existing!")
continue
except psutil.TimeoutExpired:
print("-> timeout 10 seconds, force-killing the daemon...")
daemon_process.kill()
def _run_command_with_timeout(self, cmd, timeout_sec):
"""Execute `cmd` in a subprocess and enforce timeout `timeout_sec` seconds.
Return subprocess exit code on natural completion of the subprocess.
Returns None if timeout expires before subprocess completes."""
start = time.time()
proc = subprocess.Popen(cmd)
print("%s launched (pid=%d)" % (cmd, proc.pid))
timer = threading.Timer(timeout_sec, proc.kill)
timer.start()
proc.communicate()
if timer.is_alive():
# Process completed naturally - cancel timer and return exit code
timer.cancel()
print("-> exited with %s after %.2d seconds" % (proc.returncode, time.time() - start))
return proc.returncode
# Process killed by timer - raise exception
print('Process #%d killed after %f seconds' % (proc.pid, timeout_sec))
return None
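# Usage sketch (illustrative command):
#   ret = self._run_command_with_timeout(['sleep', '1'], timeout_sec=5)
# returns the exit code (0 here), or None if the timer killed the process
# before it completed.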
def _run_alignak_daemons(self, cfg_folder='/tmp/alignak', runtime=30,
daemons_list=None, spare_daemons=[], piped=False, run_folder='',
arbiter_only=True, update_configuration=True, verbose=False):
""" Run the Alignak daemons for a passive configuration
Let the daemons run for the number of seconds defined in the runtime parameter and
then kill the required daemons (list in the spare_daemons parameter)
Check that the run daemons did not raised any ERROR log
:return: None
"""
if daemons_list is None:
daemons_list = [
'scheduler-master', 'broker-master',
'poller-master', 'reactionner-master', 'receiver-master'
]
# Load and test the configuration
cfg_folder = os.path.join(os.path.dirname(os.path.abspath(__file__)), cfg_folder)
if not run_folder:
run_folder = cfg_folder
print("Running Alignak daemons, cfg_folder: %s, run_folder: %s" % (cfg_folder, run_folder))
for f in ['alignak.log', 'alignak-events.log']:
if os.path.exists('%s/log/%s' % (cfg_folder, f)):
os.remove('%s/log/%s' % (cfg_folder, f))
# Clean the former existing pid and log files
print("Cleaning pid and log files...")
for daemon in daemons_list + ['arbiter-master']:
if os.path.exists('%s/%s.pid' % (self._launch_dir, daemon)):
print("- removing pid %s/%s.pid" % (self._launch_dir, daemon))
os.remove('%s/%s.pid' % (self._launch_dir, daemon))
if os.path.exists('%s/run/%s.pid' % (run_folder, daemon)):
print("- removing pid %s/run/%s.pid" % (run_folder, daemon))
os.remove('%s/run/%s.pid' % (run_folder, daemon))
if os.path.exists('%s/%s.log' % (self._launch_dir, daemon)):
print("- removing log %s/%s.log" % (self._launch_dir, daemon))
os.remove('%s/%s.log' % (self._launch_dir, daemon))
if os.path.exists('%s/log/%s.log' % (run_folder, daemon)):
print("- removing log %s/log/%s.log" % (run_folder, daemon))
os.remove('%s/log/%s.log' % (run_folder, daemon))
# Update monitoring configuration parameters
if update_configuration:
files = ['%s/etc/alignak.ini' % cfg_folder,
'%s/etc/alignak.d/daemons.ini' % cfg_folder,
'%s/etc/alignak.d/modules.ini' % cfg_folder]
# Update monitoring configuration file variables
try:
cfg = configparser.ConfigParser()
cfg.read(files)
# Configuration directories
cfg.set('DEFAULT', '_dist', cfg_folder)
# Do not set a specific bin directory to use the default Alignak one
cfg.set('DEFAULT', '_dist_BIN', '')
cfg.set('DEFAULT', '_dist_ETC', '%s/etc' % cfg_folder)
cfg.set('DEFAULT', '_dist_VAR', '%s/var' % run_folder)
cfg.set('DEFAULT', '_dist_RUN', '%s/run' % run_folder)
cfg.set('DEFAULT', '_dist_LOG', '%s/log' % run_folder)
# Nagios legacy files
cfg.set('alignak-configuration', 'cfg', '%s/etc/alignak.cfg' % cfg_folder)
# Daemons launching and check
cfg.set('alignak-configuration', 'polling_interval', '1')
cfg.set('alignak-configuration', 'daemons_check_period', '1')
cfg.set('alignak-configuration', 'daemons_stop_timeout', '20')
cfg.set('alignak-configuration', 'daemons_start_timeout', '5')
cfg.set('alignak-configuration', 'daemons_new_conf_timeout', '1')
cfg.set('alignak-configuration', 'daemons_dispatch_timeout', '1')
# Poller/reactionner workers count limited to 1
cfg.set('alignak-configuration', 'min_workers', '1')
cfg.set('alignak-configuration', 'max_workers', '1')
with open('%s/etc/alignak.ini' % cfg_folder, "w") as modified:
cfg.write(modified)
except Exception as exp:
print("* parsing error in config file: %s" % exp)
assert False
# If some Alignak daemons are still running...
self._stop_alignak_daemons()
# # # Some script commands may exist in the test folder ...
# if os.path.exists(cfg_folder + '/dummy_command.sh'):
# shutil.copy(cfg_folder + '/dummy_command.sh', '/tmp/dummy_command.sh')
#
print("Launching the daemons...")
self.procs = {}
for name in daemons_list + ['arbiter-master']:
if arbiter_only and name not in ['arbiter-master']:
continue
args = ["../alignak/bin/alignak_%s.py" % name.split('-')[0], "-n", name,
"-e", "%s/etc/alignak.ini" % cfg_folder]
if verbose:
args.append("--debug")
print("- %s arguments: %s" % (name, args))
if piped:
print("- capturing stdout/stderr" % name)
self.procs[name] = \
subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
else:
self.procs[name] = subprocess.Popen(args)
time.sleep(0.1)
print("- %s launched (pid=%d)" % (name, self.procs[name].pid))
time.sleep(3)
print("Testing daemons start")
for name, proc in list(self.procs.items()):
ret = proc.poll()
if ret is not None:
print("*** %s exited on start!" % (name))
if os.path.exists("/tmp/alignak.log"):
with open("/tmp/alignak.log") as f:
for line in f:
print("xxx %s" % line)
if os.path.exists("%s/log/arbiter-master.log" % cfg_folder):
with open("%s/log/arbiter-master.log" % cfg_folder) as f:
for line in f:
print("... %s" % line)
if proc.stdout:
for line in iter(proc.stdout.readline, b''):
print(">>> " + str(line).rstrip())
else:
print("No stdout!")
if proc.stderr:
for line in iter(proc.stderr.readline, b''):
print(">>> " + str(line).rstrip())
else:
print("No stderr!")
assert ret is None, "Daemon %s not started!" % name
print("%s running (pid=%d)" % (name, self.procs[name].pid))
# Let the daemons start ...
time.sleep(3)
print("Testing pid files and log files...")
for name in daemons_list + ['arbiter-master']:
if arbiter_only and name not in ['arbiter-master']:
continue
print("- %s for %s" % ('%s/run/%s.pid' % (run_folder, name), name))
# Sometimes pid and log files may not exist ...
if not os.path.exists('%s/run/%s.pid' % (run_folder, name)):
print('%s/run/%s.pid does not exist!' % (run_folder, name))
print("- %s for %s" % ('%s/log/%s.log' % (run_folder, name), name))
if not os.path.exists('%s/log/%s.log' % (run_folder, name)):
print('%s/log/%s.log does not exist!' % (run_folder, name))
time.sleep(1)
# Let the arbiter build and dispatch its configuration
# Let the schedulers get their configuration and run the first checks
time.sleep(runtime)
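# Typical call from an integration test (illustrative; the folder is
# hypothetical):
#   self._run_alignak_daemons(cfg_folder='/tmp/alignak', runtime=30,
#                             arbiter_only=True)
# launches only the arbiter-master process and then waits `runtime` seconds
# for the configuration to be dispatched and the first checks to run.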
def _check_daemons_log_for_errors(self, daemons_list, run_folder='/tmp/alignak',
ignored_warnings=None, ignored_errors=None, dump_all=True):
"""
Check that the daemons all started correctly and that they got their configuration
ignored_warnings and ignored_errors are lists of strings that make a WARNING or ERROR log
not to be considered as a warning or error
:return:
"""
print("Get information from log files...")
travis_run = 'TRAVIS' in os.environ
if ignored_errors is None:
ignored_errors = []
if ignored_warnings is None:
ignored_warnings = []
ignored_warnings.extend([
u'Cannot call the additional groups setting ',
u'loop exceeded the maximum expected',
u'ignoring repeated file'
])
nb_errors = 0
nb_warnings = 0
for daemon in ['arbiter-master'] + daemons_list:
log_file = "/%s/log/%s.log" % (run_folder, daemon)
if not os.path.exists(log_file):
log_file = "/%s/run/%s.log" % (run_folder, daemon)
if not os.path.exists(log_file):
assert os.path.exists("%s/%s.log" % (self._launch_dir, daemon)), '%s/%s.log does not exist!' % (self._launch_dir, daemon)
log_file = "%s/%s.log" % (self._launch_dir, daemon)
daemon_errors = False
print("-----\n%s log file: %s\n-----\n" % (daemon,
'/%s/log/%s.log' % (run_folder, daemon)))
with open(log_file) as f:
for line in f:
if 'WARNING: ' in line or daemon_errors:
if dump_all and not travis_run:
print(line[:-1])
for ignore_line in ignored_warnings:
if ignore_line in line:
break
else:
nb_warnings += 1
print("-W-" + line[:-1])
if 'ERROR: ' in line or 'CRITICAL: ' in line:
if dump_all and not daemon_errors:
print(line[:-1])
for ignore_line in ignored_errors:
if ignore_line in line:
break
else:
nb_errors += 1
print("*E*" + line[:-1])
if nb_errors > 0:
daemon_errors = True
return (nb_errors, nb_warnings)
def setup_with_file(self, configuration_file=None, env_file=None,
verbose=False, unit_test=True):
"""
Load alignak with the provided configuration and environment files
If verbose is True the envirnment loading is printed out on the console.
If the configuration loading fails, a SystemExit exception is raised to the caller.
The conf_is_correct property indicates if the configuration loading succeeded or failed.
The configuration errors property contains a list of the error message that are normally
logged as ERROR by the arbiter.
If unit_test is True it will simulate the dispatcher configuration sending
to the declared satellites in the configuration. Set to False if you intend to run
real daemons that will receive their configuration!
:param configuration_file: path + file name of the main configuration file
:type configuration_file: str
:param env_file: path + file name of the alignak environment file
:type env_file: str
:param verbose: load Alignak environment in verbose mode (defaults to False)
:type verbose: bool
:return: None
"""
self.broks = []
# Our own satellites lists ...
self.arbiters = {}
self.schedulers = {}
self.brokers = {}
self.pollers = {}
self.receivers = {}
self.reactionners = {}
# Our own schedulers lists ...
# Indexed on the scheduler name
self._schedulers = {}
# The main arbiter and scheduler daemons
self._arbiter = None
self._scheduler_daemon = None
self._scheduler = None
self.conf_is_correct = False
self.configuration_warnings = []
self.configuration_errors = []
# # This to allow using a reference configuration if needed,
# # and to make some tests easier to set-up
# print("Preparing default configuration...")
# if os.path.exists('/tmp/etc/alignak'):
# shutil.rmtree('/tmp/etc/alignak')
#
# if os.path.exists('../etc'):
# shutil.copytree('../etc', '/tmp/etc/alignak')
# cfg_folder = '/tmp/etc/alignak'
# files = ['%s/alignak.ini' % cfg_folder,
# '%s/alignak.d/daemons.ini' % cfg_folder,
# '%s/alignak.d/modules.ini' % cfg_folder,
# '%s/alignak-logger.json' % cfg_folder]
# replacements = {
# '_dist=/usr/local/': '_dist=/tmp',
# 'user=alignak': ';user=alignak',
# 'group=alignak': ';group=alignak'
#
# }
# self._files_update(files, replacements)
# print("Prepared")
# Initialize the Arbiter with no daemon configuration file
assert configuration_file or env_file
current_dir = os.getcwd()
configuration_dir = current_dir
print("Current directory: %s" % current_dir)
if configuration_file:
configuration_dir = os.path.dirname(configuration_file)
print("Test configuration directory: %s, file: %s"
% (os.path.abspath(configuration_dir), configuration_file))
else:
configuration_dir = os.path.dirname(env_file)
print("Test configuration directory: %s, file: %s"
% (os.path.abspath(configuration_dir), env_file))
self.env_filename = None
if env_file is not None:
self.env_filename = env_file
else:
self.env_filename = os.path.join(configuration_dir, 'alignak.ini')
if os.path.exists(os.path.join(configuration_dir, 'alignak.ini')):
# alignak.ini in the same directory as the legacy configuration file
self.env_filename = os.path.join(configuration_dir, 'alignak.ini')
elif os.path.exists(os.path.join(current_dir, './etc/alignak.ini')):
# alignak.ini in the test/etc directory
self.env_filename = os.path.join(current_dir, './etc/alignak.ini')
else:
print("No Alignak configuration file found for the test: %s!" % self.env_filename)
raise SystemExit("No Alignak configuration file found for the test!")
self.env_filename = os.path.abspath(self.env_filename)
print("Found Alignak environment file: %s" % self.env_filename)
# Get Alignak environment
args = {'<cfg_file>': self.env_filename, '--verbose': verbose}
self.alignak_env = AlignakConfigParser(args)
self.alignak_env.parse()
arbiter_cfg = None
for daemon_section, daemon_cfg in list(self.alignak_env.get_daemons().items()):
if daemon_cfg['type'] == 'arbiter':
arbiter_cfg = daemon_cfg
arbiter_name = 'Default-Arbiter'
if arbiter_cfg:
arbiter_name = arbiter_cfg['name']
# Using default values that are usually provided by the command line parameters
args = {
'alignak_name': 'alignak-test', 'daemon_name': arbiter_name,
'env_file': self.env_filename
}
if configuration_file:
args.update({
'legacy_cfg_files': [configuration_file]
})
self._arbiter = Arbiter(**args)
try:
# Configure the logger
# self._arbiter.debug = True
self._arbiter.setup_alignak_logger()
# # Setup our modules manager
# self._arbiter.load_modules_manager()
# Load and initialize the arbiter configuration
self._arbiter.load_monitoring_config_file()
# If this assertion does not match, then there is a bug in the arbiter :)
self.assertTrue(self._arbiter.conf.conf_is_correct)
self.conf_is_correct = True
self.configuration_warnings = self._arbiter.conf.configuration_warnings
self.configuration_errors = self._arbiter.conf.configuration_errors
except SystemExit:
self.configuration_warnings = self._arbiter.conf.configuration_warnings
self.configuration_errors = self._arbiter.conf.configuration_errors
self.show_configuration_logs()
self.show_logs()
raise
# Prepare the configuration dispatching
for arbiter_link in self._arbiter.conf.arbiters:
if arbiter_link.get_name() == self._arbiter.arbiter_name:
self._arbiter.link_to_myself = arbiter_link
assert arbiter_link is not None, "There is no arbiter link in the configuration!"
if not unit_test:
return
# Prepare the configuration dispatching
self._arbiter.dispatcher = Dispatcher(self._arbiter.conf, self._arbiter.link_to_myself)
self._arbiter.dispatcher.prepare_dispatch()
# Create an Arbiter external commands manager in dispatcher mode
self._arbiter.external_commands_manager = ExternalCommandManager(self._arbiter.conf,
'dispatcher',
self._arbiter,
accept_unknown=True)
print("All daemons address: %s" % ["%s:%s" % (link.address, link.port) for link in self._arbiter.dispatcher.all_daemons_links])
# Simulate the daemons HTTP interface (very simple simulation!)
with requests_mock.mock() as mr:
for link in self._arbiter.dispatcher.all_daemons_links:
# mr.get('http://%s:%s/ping' % (link.address, link.port), json='pong')
mr.get('http://%s:%s/identity' % (link.address, link.port),
json={"running_id": 123456.123456})
mr.get('http://%s:%s/_wait_new_conf' % (link.address, link.port), json=True)
mr.post('http://%s:%s/_push_configuration' % (link.address, link.port), json=True)
mr.get('http://%s:%s/_initial_broks' % (link.address, link.port), json=[])
mr.get('http://%s:%s/managed_configurations' % (link.address, link.port), json={})
self._arbiter.dispatcher.check_reachable(test=True)
# self._arbiter.dispatcher.dispatch(test=True)
self._arbiter.dispatcher.check_dispatch()
print("-----\nConfiguration got dispatched.")
# Check that all the daemons links got a configuration
for sat_type in ('arbiters', 'schedulers', 'reactionners',
'brokers', 'receivers', 'pollers'):
if verbose:
print("- for %s:" % (sat_type))
for sat_link in getattr(self._arbiter.dispatcher, sat_type):
if verbose:
print(" - %s" % (sat_link))
pushed_configuration = getattr(sat_link, 'unit_test_pushed_configuration', None)
if pushed_configuration:
if verbose:
print(" pushed configuration, contains:")
for key in pushed_configuration:
print(" . %s = %s" % (key, pushed_configuration[key]))
# Update the test class satellites lists
getattr(self, sat_type).update({sat_link.name: pushed_configuration})
if verbose:
print("- my %s: %s" % (sat_type, list(getattr(self, sat_type).keys())))
self.eca = None
# Initialize a Scheduler daemon
for scheduler in self._arbiter.dispatcher.schedulers:
print("-----\nGot a scheduler: %s (%s)" % (scheduler.name, scheduler))
# Simulate the scheduler daemon start
args = {
'env_file': self.env_filename, 'daemon_name': scheduler.name,
}
self._scheduler_daemon = Alignak(**args)
# self._scheduler_daemon.load_modules_manager()
# Simulate the scheduler daemon receiving the configuration from its arbiter
pushed_configuration = scheduler.unit_test_pushed_configuration
self._scheduler_daemon.new_conf = pushed_configuration
self._scheduler_daemon.setup_new_conf()
assert self._scheduler_daemon.new_conf == {}
self._schedulers[scheduler.name] = self._scheduler_daemon.sched
# Store the last scheduler object to get used in some other functions!
# this is the real scheduler, not the scheduler daemon!
self._scheduler = self._scheduler_daemon.sched
self._scheduler.my_daemon = self._scheduler_daemon
print("Got a default scheduler: %s\n-----" % self._scheduler)
# Initialize a Broker daemon
for broker in self._arbiter.dispatcher.brokers:
print("-----\nGot a broker: %s (%s)" % (broker.name, broker))
# Simulate the broker daemon start
args = {
'env_file': self.env_filename, 'daemon_name': broker.name,
}
self._broker_daemon = Broker(**args)
# self._broker_daemon.load_modules_manager()
# Simulate the broker daemon receiving the configuration from its arbiter
pushed_configuration = broker.unit_test_pushed_configuration
self._broker_daemon.new_conf = pushed_configuration
self._broker_daemon.setup_new_conf()
assert self._broker_daemon.new_conf == {}
print("Got a default broker daemon: %s\n-----" % self._broker_daemon)
# Get my first broker link
self._main_broker = None
if self._scheduler.my_daemon.brokers:
self._main_broker = [b for b in list(self._scheduler.my_daemon.brokers.values())][0]
print("Main broker: %s" % self._main_broker)
# Initialize a Receiver daemon
self._receiver = None
for receiver in self._arbiter.dispatcher.receivers:
print("-----\nGot a receiver: %s (%s)" % (receiver.name, receiver))
# Simulate the receiver daemon start
args = {
'env_file': self.env_filename, 'daemon_name': receiver.name,
}
self._receiver_daemon = Receiver(**args)
# self._receiver_daemon.load_modules_manager()
# Simulate the receiver daemon receiving the configuration from its arbiter
pushed_configuration = receiver.unit_test_pushed_configuration
self._receiver_daemon.new_conf = pushed_configuration
self._receiver_daemon.setup_new_conf()
assert self._receiver_daemon.new_conf == {}
self._receiver = receiver
print("Got a default receiver: %s\n-----" % self._receiver)
# for scheduler in self._receiver_daemon.schedulers.values():
# scheduler.my_daemon = self._receiver_daemon
self.ecm_mode = 'applyer'
# Now we create an external commands manager in receiver mode
self.ecr = None
if self._receiver:
self.ecr = ExternalCommandManager(None, 'receiver', self._receiver_daemon,
accept_unknown=True)
self._receiver.external_commands_manager = self.ecr
# and an external commands manager in dispatcher mode for the arbiter
self.ecd = ExternalCommandManager(self._arbiter.conf, 'dispatcher', self._arbiter,
accept_unknown=True)
self._arbiter.modules_manager.stop_all()
self._broker_daemon.modules_manager.stop_all()
self._scheduler_daemon.modules_manager.stop_all()
if self._receiver:
self._receiver_daemon.modules_manager.stop_all()
def fake_check(self, ref, exit_status, output="OK"):
"""
Simulate a check execution and result
:param ref: host/service concerned by the check
:param exit_status: check exit status code (0, 1, ...).
If set to None, the check is simply scheduled but not "executed"
:param output: check output (output + perf data)
:return:
"""
now = time.time()
check = ref.schedule(self._scheduler.hosts,
self._scheduler.services,
self._scheduler.timeperiods,
self._scheduler.macromodulations,
self._scheduler.checkmodulations,
self._scheduler.checks,
force=True, force_time=None)
# now the check is scheduled and we get it in the action queue
self._scheduler.add(check) # check is now in sched.checks[]
# Allows forcing check scheduling without setting its status or output.
# Useful for manual business rules rescheduling, for instance.
if exit_status is None:
return
# fake execution
check.check_time = now
# and lie about when we will launch it: otherwise the schedule call
# for ref would not really reschedule it, because there is already
# a valid scheduling time in the future
ref.next_chk = now - 0.5
# Max plugin output length defaults to 8192
check.get_outputs(output, 8192)
check.exit_status = exit_status
check.execution_time = 0.001
check.status = 'waitconsume'
# Put the check result in the waiting results for the scheduler ...
self._scheduler.waiting_results.put(check)
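# Example usage (hedged sketch; host/service names are illustrative,
# not defined in this file):
# host = self._scheduler.hosts.find_by_name("test_host_0")
# self.fake_check(host, 2, "CRITICAL - host is down")
# self.scheduler_loop(1)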
def scheduler_loop(self, count, items=None, scheduler=None):
"""
Manage scheduler actions
:param count: number of loop turns to run
:type count: int
:param items: list of lists [[object, exit_status, output]]
:type items: list
:param scheduler: The scheduler
:type scheduler: None | object
:return: None
"""
if scheduler is None:
scheduler = self._scheduler
if items is None:
items = []
macroresolver = MacroResolver()
macroresolver.init(scheduler.my_daemon.sched.pushed_conf)
for num in range(count):
# print("Scheduler loop turn: %s" % num)
for (item, exit_status, output) in items:
print("- item checks creation turn: %s" % item)
if len(item.checks_in_progress) == 0:
# A first full scheduler loop turn to create the checks
# if they do not yet exist!
for i in scheduler.recurrent_works:
(name, fun, nb_ticks) = scheduler.recurrent_works[i]
if nb_ticks == 1:
try:
# print(" . %s ...running." % name)
fun()
except Exception as exp:
print("Exception: %s\n%s" % (exp, traceback.format_exc()))
# else:
# print(" . %s ...ignoring, period: %d" % (name, nb_ticks))
else:
print("*** check is still in progress for %s!" % (item.get_full_name()))
self.assertGreater(len(item.checks_in_progress), 0)
chk = scheduler.checks[item.checks_in_progress[0]]
chk.set_type_active()
chk.check_time = time.time()
chk.wait_time = 0.0001
chk.last_poll = chk.check_time
chk.output = output
chk.exit_status = exit_status
scheduler.waiting_results.put(chk)
# print("-----\n- results fetching turn:")
for i in scheduler.recurrent_works:
(name, fun, nb_ticks) = scheduler.recurrent_works[i]
if nb_ticks == 1:
try:
# print(" . %s ...running." % name)
fun()
except Exception as exp:
print("Exception: %s\n%s" % (exp, traceback.format_exc()))
assert False
# else:
# print(" . %s ...ignoring, period: %d" % (name, nb_ticks))
self.assert_no_log_match("External command Brok could not be sent to any daemon!")
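# Example usage (hedged): two loop turns forcing a host UP and a service
# CRITICAL, matching the [[object, exit_status, output]] items format:
# self.scheduler_loop(2, [[host, 0, 'UP'], [svc, 2, 'CRITICAL']])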
def manage_freshness_check(self, count=1, mysched=None):
"""Run the scheduler loop for freshness_check
:param count: number of scheduler loop turns
:type count: int
:param mysched: a specific scheduler to get used
:type mysched: None | object
:return: n/a
"""
checks = []
for num in range(count):
for i in self._scheduler.recurrent_works:
(name, fun, nb_ticks) = self._scheduler.recurrent_works[i]
if nb_ticks == 1:
fun()
if name == 'check_freshness':
checks = sorted(list(self._scheduler.checks.values()),
key=lambda x: x.creation_time)
checks = [chk for chk in checks if chk.freshness_expiry_check]
return len(checks)
def manage_external_command(self, external_command, run=True):
"""Manage an external command.
:return: result of external command resolution
"""
res = None
ext_cmd = ExternalCommand(external_command)
if self.ecm_mode == 'applyer':
res = None
self._scheduler.run_external_commands([external_command])
self.external_command_loop()
if self.ecm_mode == 'dispatcher':
res = self.ecd.resolve_command(ext_cmd)
if res and run:
self._arbiter.broks = []
self._arbiter.add(ext_cmd)
self._arbiter.push_external_commands_to_schedulers()
if self.ecm_mode == 'receiver':
res = self.ecr.resolve_command(ext_cmd)
if res and run:
self._receiver_daemon.broks = []
self._receiver_daemon.add(ext_cmd)
# self._receiver_daemon.push_external_commands_to_schedulers()
# # Our scheduler
# self._scheduler = self.schedulers['scheduler-master'].sched
# Give broks to our broker
for brok in self._receiver_daemon.broks:
print("Brok receiver: %s" % brok)
self._broker_daemon.external_broks.append(brok)
return res
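# Example usage (hedged; host/service names are illustrative), using the
# standard Nagios-style external command syntax:
# now = int(time.time())
# cmd = '[%d] PROCESS_SERVICE_CHECK_RESULT;test_host_0;test_ok_0;0;OK' % now
# self.manage_external_command(cmd)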
def external_command_loop(self, count=1):
"""Execute the scheduler actions for external commands.
The scheduler is not an ECM 'dispatcher' but an 'applyer' ... so this function is on
the external command execution side of the problem.
:return:
"""
self.scheduler_loop(count=count)
def worker_loop(self, verbose=True):
self._scheduler.delete_zombie_checks()
self._scheduler.delete_zombie_actions()
checks = self._scheduler.get_to_run_checks(True, False, worker_name='tester')
actions = self._scheduler.get_to_run_checks(False, True, worker_name='tester')
if verbose is True:
self.show_actions()
for a in actions:
a.status = u'in_poller'
a.check_time = time.time()
a.exit_status = 0
self._scheduler.put_results(a)
if verbose is True:
self.show_actions()
def launch_internal_check(self, svc_br):
""" Launch an internal check for the business rule service provided """
# Launch an internal check
now = time.time()
self._scheduler.add(svc_br.launch_check(now - 1,
self._scheduler.hosts,
self._scheduler.services,
self._scheduler.timeperiods,
self._scheduler.macromodulations,
self._scheduler.checkmodulations,
self._scheduler.checks))
c = svc_br.actions[0]
self.assertEqual(True, c.internal)
self.assertTrue(c.is_launchable(now))
# ask the scheduler to launch this check
# and ask 2 loops: one to launch the check
# and another to get the result
self.scheduler_loop(2, [])
# We should not have the check anymore
self.assertEqual(0, len(svc_br.actions))
def show_logs(self):
"""Show logs. Get logs collected by the unit tests collector handler and print them"""
logger_ = logging.getLogger(ALIGNAK_LOGGER_NAME)
for handler in logger_.handlers:
if isinstance(handler, CollectorHandler):
print("--- logs <<<----------------------------------")
for log in handler.collector:
# self.safe_print(log)
print(log)
print("--- logs >>>----------------------------------")
break
else:
assert False, "Alignak test Logger is not initialized correctly!"
def show_actions(self):
""""Show the inner actions"""
macroresolver = MacroResolver()
macroresolver.init(self._scheduler_daemon.sched.pushed_conf)
print("--- Scheduler: %s" % self._scheduler.my_daemon.name)
print("--- actions >>>")
actions = sorted(list(self._scheduler.actions.values()), key=lambda x: (x.t_to_go, x.creation_time))
for action in actions:
print("Time to launch action: %s, creation: %s, now: %s" % (action.t_to_go, action.creation_time, time.time()))
if action.is_a == 'notification':
item = self._scheduler.find_item_by_id(action.ref)
if item.my_type == "host":
ref = "host: %s" % item.get_name()
else:
hst = self._scheduler.find_item_by_id(item.host)
ref = "svc: %s/%s" % (hst.get_name(), item.get_name())
print("NOTIFICATION %s (%s - %s) [%s], created: %s for '%s': %s"
% (action.type, action.uuid, action.status, ref,
time.asctime(time.localtime(action.t_to_go)),
action.contact_name, action.command))
elif action.is_a == 'eventhandler':
print("EVENTHANDLER:", action)
else:
print("ACTION:", action)
print("<<< actions ---")
def show_checks(self):
"""
Show checks from the scheduler
:return:
"""
print("--- Scheduler: %s" % self._scheduler.my_daemon.name)
print("--- checks >>>")
checks = sorted(list(self._scheduler.checks.values()), key=lambda x: x.creation_time)
for check in checks:
print("- %s" % check)
print("<<< checks ---")
def show_events(self):
"""
Show the events
:return:
"""
my_broker = [b for b in list(self._scheduler.my_daemon.brokers.values())][0]
monitoring_logs = []
for event in self._scheduler_daemon.events:
data = unserialize(event.data)
monitoring_logs.append((data['level'], data['message']))
for log in monitoring_logs:
print(log)
def show_and_clear_actions(self):
self.show_actions()
self.clear_actions()
def count_logs(self):
"""Count the logs collected by the unit tests collector handler and print them"""
logger_ = logging.getLogger(ALIGNAK_LOGGER_NAME)
for handler in logger_.handlers:
if isinstance(handler, CollectorHandler):
return len(handler.collector)
else:
assert False, "Alignak test Logger is not initialized correctly!"
def count_actions(self):
"""
Count the actions in the scheduler's actions.
@verified
:return:
"""
return len(list(self._scheduler.actions.values()))
def clear_logs(self):
"""
Remove all the logs stored in the logs collector
@verified
:return:
"""
logger_ = logging.getLogger(ALIGNAK_LOGGER_NAME)
for handler in logger_.handlers:
if isinstance(handler, CollectorHandler):
handler.collector = []
break
# else:
# assert False, "Alignak test Logger is not initialized correctly!"
def clear_actions(self):
"""
Clear the actions in the scheduler's actions.
:return:
"""
self._scheduler.actions = {}
def clear_checks(self):
"""
Clear the checks in the scheduler's checks.
:return:
"""
self._scheduler.checks = {}
def clear_events(self, daemon=None):
"""
Clear the checks in the scheduler's checks.
:return:
"""
if daemon is None:
daemon = self._scheduler_daemon
daemon.events = []
def assert_actions_count(self, number):
"""
Check the number of actions
:param number: number of actions we must have
:type number: int
:return: None
"""
actions = []
# Only sort when the count differs, because sorting takes too much time
if number != len(self._scheduler.actions):
actions = sorted(list(self._scheduler.actions.values()), key=lambda x: x.creation_time)
self.assertEqual(number, len(self._scheduler.actions),
"Not found expected number of actions:\nactions_logs=[[[\n%s\n]]]" %
('\n'.join('\t%s = creation: %s, is_a: %s, type: %s, status: %s, '
'planned: %s, command: %s' %
(idx, b.creation_time, b.is_a, b.type,
b.status, b.t_to_go, b.command)
for idx, b in enumerate(sorted(self._scheduler.actions.values(),
key=lambda x: (x.creation_time,
x.t_to_go))))))
def assert_actions_match(self, index, pattern, field):
"""
Check that the pattern matches the given field (property) of the action at the given
index in the actions list
@verified
:param index: index in the actions list. If index is -1, all the actions in the list are
searched for a matching pattern
:type index: int
:param pattern: pattern to verify is in the action
:type pattern: str
:param field: name of the field (property) of the action
:type field: str
:return: None
"""
regex = re.compile(pattern)
actions = sorted(self._scheduler.actions.values(), key=lambda x: (x.t_to_go, x.creation_time))
if index != -1:
myaction = actions[index]
self.assertTrue(regex.search(getattr(myaction, field)),
"Not found a matching pattern in actions:\n"
"index=%s field=%s pattern=%r\n"
"action_line=creation: %s, is_a: %s, type: %s, "
"status: %s, planned: %s, command: %s" % (
index, field, pattern, myaction.creation_time, myaction.is_a,
myaction.type, myaction.status, myaction.t_to_go, myaction.command))
return
for myaction in actions:
if regex.search(getattr(myaction, field)):
return
self.assertTrue(False,
"Not found a matching pattern in actions:\nfield=%s pattern=%r\n" %
(field, pattern))
def assert_log_count(self, number):
"""
Check the number of logs
:param number: number of logs we must have
:type number: int
:return: None
"""
logger_ = logging.getLogger(ALIGNAK_LOGGER_NAME)
for handler in logger_.handlers:
if isinstance(handler, CollectorHandler):
self.assertEqual(number, len(handler.collector),
"Not found expected number of logs: %s vs %s"
% (number, len(handler.collector)))
break
else:
assert False, "Alignak test Logger is not initialized correctly!"
def assert_log_match(self, pattern, index=None):
"""
Search if the log at the given index matches the pattern in the Arbiter logs.
If index is None, all the collected logs are searched for the pattern.
Log numbering starts from 0 (the oldest stored log line).
This function asserts on the search result: if no log matches the search
criteria, an assertion is raised and the test stops on error.
:param pattern: string to search in log
:type pattern: str
:param index: index number
:type index: int
:return: None
"""
self.assertIsNotNone(pattern, "Searched pattern can not be None!")
logger_ = logging.getLogger(ALIGNAK_LOGGER_NAME)
for handler in logger_.handlers:
if not isinstance(handler, CollectorHandler):
continue
regex = re.compile(pattern)
log_num = 0
found = False
for log in handler.collector:
if index is None:
if regex.search(log):
found = True
break
elif index == log_num:
if regex.search(log):
found = True
break
log_num += 1
self.assertTrue(found,
"Not found a matching log line in logs:\nindex=%s pattern=%r\n"
"logs=[[[\n%s\n]]]"
% (index, pattern, '\n'.join('\t%s=%s' % (idx, b.strip())
for idx, b in
enumerate(handler.collector))))
break
else:
assert False, "Alignak test Logger is not initialized correctly!"
def assert_checks_count(self, number):
"""
Check the number of checks
@verified
:param number: number of actions we must have
:type number: int
:return: None
"""
checks = sorted(list(self._scheduler.checks.values()), key=lambda x: x.creation_time)
self.assertEqual(number, len(checks),
"Not found expected number of checks:\nchecks_logs=[[[\n%s\n]]]" %
('\n'.join('\t%s = creation: %s, is_a: %s, type: %s, status: %s, planned: %s, '
'command: %s' %
(idx, b.creation_time, b.is_a, b.type, b.status, b.t_to_go, b.command)
for idx, b in enumerate(checks))))
def assert_checks_match(self, index, pattern, field):
"""
Check that the pattern matches the given field (property) of the check at the given
index in the checks list
@verified
:param index: index number of checks list
:type index: int
:param pattern: pattern to verify is in the check
:type pattern: str
:param field: name of the field (property) of the check
:type field: str
:return: None
"""
regex = re.compile(pattern)
checks = sorted(list(self._scheduler.checks.values()), key=lambda x: x.creation_time)
mycheck = checks[index]
self.assertTrue(regex.search(getattr(mycheck, field)),
"Not found a matching pattern in checks:\nindex=%s field=%s pattern=%r\n"
"check_line=creation: %s, is_a: %s, type: %s, status: %s, planned: %s, "
"command: %s" % (
index, field, pattern, mycheck.creation_time, mycheck.is_a,
mycheck.type, mycheck.status, mycheck.t_to_go, mycheck.command))
def _any_check_match(self, pattern, field, assert_not):
"""
Search if any check matches the requested pattern
@verified
:param pattern:
:param field: field (property) to search with the pattern:
:param assert_not:
:return:
"""
regex = re.compile(pattern)
checks = sorted(list(self._scheduler.checks.values()), key=lambda x: x.creation_time)
for check in checks:
if re.search(regex, getattr(check, field)):
self.assertTrue(not assert_not,
"Found check:\nfield=%s pattern=%r\n"
"check_line=creation: %s, is_a: %s, type: %s, status: %s, "
"planned: %s, command: %s" % (
field, pattern, check.creation_time, check.is_a,
check.type, check.status, check.t_to_go, check.command)
)
return
self.assertTrue(assert_not, "No matching check found:\n"
"pattern = %r\n" "checks = %r" % (pattern, checks))
def assert_any_check_match(self, pattern, field):
"""
Assert if any check matches the pattern
@verified
:param pattern:
:param field: field (property) to search with the pattern:
:return:
"""
self._any_check_match(pattern, field, assert_not=False)
def assert_no_check_match(self, pattern, field):
"""
Assert if no check matches the pattern
@verified
:param pattern:
:param field: field (property) to search with the pattern:
:return:
"""
self._any_check_match(pattern, field, assert_not=True)
def _any_log_match(self, pattern, assert_not):
"""
Search if any log in the Arbiter logs matches the requested pattern
@verified
:param pattern:
:param assert_not:
:return:
"""
self.assertIsNotNone(pattern, "Searched pattern can not be None!")
logger_ = logging.getLogger(ALIGNAK_LOGGER_NAME)
for handler in logger_.handlers:
if not isinstance(handler, CollectorHandler):
continue
# print("-----\nParsing collector handler log events...")
# print("Searching for: %s (%s)" % (pattern, type(pattern)))
try:
regex = re.compile(pattern, re.ASCII)
except AttributeError:
regex = re.compile(pattern)
for log in handler.collector:
if re.search(regex, log):
# print("# found: %s" % (log))
self.assertTrue(
not assert_not,
"Found matching log line, pattern: %r\nlog: %r" % (pattern, log)
)
break
else:
# # Dump all known log events for analysis
# for log in handler.collector:
# print(". %s (%s)" % (repr(log), type(log)))
self.assertTrue(assert_not,
"No matching log line found, pattern: %r\n" % pattern)
break
else:
assert False, "Alignak test Logger is not initialized correctly!"
def assert_any_log_match(self, pattern):
"""Assert if any of the collected log matches the pattern
:param pattern:
:return:
"""
self._any_log_match(pattern, assert_not=False)
def assert_no_log_match(self, pattern):
"""Assert if no collected log matches the pattern
:param pattern:
:return:
"""
self._any_log_match(pattern, assert_not=True)
def _any_brok_match(self, pattern, level, assert_not):
"""
Search if any brok message in the Scheduler broks matches the requested pattern and
requested level
@verified
:param pattern:
:param assert_not:
:return:
"""
regex = re.compile(pattern)
my_broker = [b for b in list(self._scheduler.my_daemon.brokers.values())][0]
monitoring_logs = []
print("Broker broks: %s" % my_broker.broks)
for brok in my_broker.broks:
if brok.type == 'monitoring_log':
data = unserialize(brok.data)
monitoring_logs.append((data['level'], data['message']))
if re.search(regex, data['message']) and (level is None or data['level'] == level):
self.assertTrue(not assert_not, "Found matching brok:\n"
"pattern = %r\nbrok message = %r" % (pattern, data['message']))
return
self.assertTrue(assert_not, "No matching brok found:\n"
"pattern = %r\n" "monitring log = %r" % (pattern,
monitoring_logs))
def assert_any_brok_match(self, pattern, level=None):
"""
Search if any brok message in the Scheduler broks matches the requested pattern and
requested level
@verified
:param pattern:
:param level:
:return:
"""
self._any_brok_match(pattern, level, assert_not=False)
def assert_no_brok_match(self, pattern, level=None):
"""
Search if no brok message in the Scheduler broks matches the requested pattern and
requested level
@verified
:param pattern:
:param level:
:return:
"""
self._any_brok_match(pattern, level, assert_not=True)
def get_monitoring_events(self, daemon=None, no_date=False):
""" This function gets the monitoring events from the provided daemon
If no daemon is specified, it will get from the default Scheduler
the event Broks are sorted by ascending creation timestamp
If no_date is specified, then the events list will be filtered and the vents data will
not be returned. This makes it really easier for the unit tests that do not need to care
about the events timestamp to check if an event is raised or not!
:return:
"""
if daemon is None:
daemon = self._scheduler_daemon
monitoring_logs = []
for brok in sorted(daemon.events, key=lambda x: x.creation_time):
ts, level, message = brok.get_event()
print("Event: %s / %s / %s" % (ts, level, message))
if no_date:
monitoring_logs.append((level, message))
else:
monitoring_logs.append((ts, level, message))
return monitoring_logs
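# Example (hedged) of the returned structure with no_date=True; the
# message content is illustrative:
# [('info', 'HOST ALERT: test_host_0;DOWN;HARD;2;DOWN'), ('info', '...')]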
def check_monitoring_events_log(self, expected_logs, dump=True, assert_length=True):
"""
Get the monitoring_log broks and check that they match with the expected_logs provided
:param expected_logs: expected monitoring logs
:param dump: True to print out the monitoring logs
:param assert_length: True to compare list lengths
:return:
"""
# We got 'monitoring_log' broks for logging to the monitoring events..
# no_date to avoid comparing the events timestamp !
monitoring_events = self.get_monitoring_events(no_date=True)
if dump:
print("Monitoring events: ")
for level, message in monitoring_events:
print("- ('%s', '%s')" % (level, message))
for log_level, log_message in expected_logs:
try:
assert (log_level, log_message) in monitoring_events, "Not found :%s" % log_message
except UnicodeDecodeError:
assert (log_level.decode('utf8', 'ignore'), log_message.decode('utf8', 'ignore')) in monitoring_events, "Not found :%s" % log_message
if not assert_length:
return
assert len(expected_logs) == len(monitoring_events), "Lengths do not match: %d" \
% len(monitoring_events)
def _any_event_match(self, pattern, level, assert_not):
"""
Search if any event message in the Scheduler daemon events matches the requested
pattern and requested level
@verified
:param pattern:
:param assert_not:
:return:
"""
regex = re.compile(pattern)
my_broker = [b for b in list(self._scheduler.my_daemon.brokers.values())][0]
monitoring_logs = []
print("Broker broks: %s" % my_broker.broks)
for brok in my_broker.broks:
print("- %s" % brok)
monitoring_logs = []
print("Arbiter events: %s" % self._arbiter.events)
print("Scheduler events: %s" % self._scheduler_daemon.events)
print("Receiver events: %s" % self._receiver_daemon.events)
for event in self._scheduler_daemon.events:
data = unserialize(event.data)
monitoring_logs.append((data['level'], data['message']))
if re.search(regex, data['message']) and (level is None or data['level'] == level):
self.assertTrue(not assert_not,
"Found matching event:\npattern = %r\nevent message = %r"
% (pattern, data['message']))
return
self.assertTrue(assert_not,
"No matching event found:\npattern = %r\n" "event message = %r"
% (pattern, monitoring_logs))
def assert_any_event_match(self, pattern, level=None):
"""
Search if any event message in the Scheduler events matches the requested pattern and
requested level
@verified
:param pattern:
:param level:
:return:
"""
self._any_event_match(pattern, level, assert_not=False)
def assert_no_event_match(self, pattern, level=None):
"""
Search if no event message in the Scheduler events matches the requested pattern and
requested level
@verified
:param pattern:
:param level:
:return:
"""
self._any_event_match(pattern, level, assert_not=True)
def get_log_match(self, pattern):
"""Get the collected logs matching the provided pattern"""
self.assertIsNotNone(pattern, "Searched pattern can not be None!")
logger_ = logging.getLogger(ALIGNAK_LOGGER_NAME)
for handler in logger_.handlers:
if isinstance(handler, CollectorHandler):
regex = re.compile(pattern)
res = []
for log in handler.collector:
if re.search(regex, log):
res.append(log)
return res
else:
assert False, "Alignak test Logger is not initialized correctly!"
def show_configuration_logs(self):
"""
Prints the configuration logs
@verified
:return:
"""
print("Configuration warnings:")
for msg in self.configuration_warnings:
print(" - %s" % msg)
print("Configuration errors:")
for msg in self.configuration_errors:
print(" - %s" % msg)
def _any_cfg_log_match(self, pattern, assert_not):
"""
Search a pattern in configuration log (warning and error)
@verified
:param pattern:
:return:
"""
regex = re.compile(pattern)
cfg_logs = self.configuration_warnings + self.configuration_errors
for log in cfg_logs:
if re.search(regex, log):
self.assertTrue(not assert_not,
"Found matching log line:\n"
"pattern = %r\nlog = %r" % (pattern, log))
return
self.assertTrue(assert_not, "No matching log line found:\n"
"pattern = %r\n" "logs = %r" % (pattern, cfg_logs))
def assert_any_cfg_log_match(self, pattern):
"""
Assert if any configuration log matches the pattern
@verified
:param pattern:
:return:
"""
self._any_cfg_log_match(pattern, assert_not=False)
def assert_no_cfg_log_match(self, pattern):
"""
Assert if no configuration log matches the pattern
@verified
:param pattern:
:return:
"""
self._any_cfg_log_match(pattern, assert_not=True)
def guess_sys_stdout_encoding(self):
''' Return the best guessed encoding to be used for printing on sys.stdout. '''
return (
getattr(sys.stdout, 'encoding', None)
or getattr(sys.__stdout__, 'encoding', None)
or locale.getpreferredencoding()
or sys.getdefaultencoding()
or 'ascii'
)
def safe_print(self, *args, **kw):
"""" "print" args to sys.stdout,
If some of the args aren't unicode then convert them first to unicode,
using keyword argument 'in_encoding' if provided (else default to UTF8)
and replacing bad encoded bytes.
Write to stdout using 'out_encoding' if provided else best guessed encoding,
doing xmlcharrefreplace on errors.
"""
in_bytes_encoding = kw.pop('in_encoding', 'UTF-8')
out_encoding = kw.pop('out_encoding', self.guess_sys_stdout_encoding())
if kw:
raise ValueError('unhandled named/keyword argument(s): %r' % kw)
#
make_in_data_gen = lambda: ( a if isinstance(a, string_types) else str(a) for a in args )
possible_codings = ( out_encoding, )
if out_encoding != 'ascii':
possible_codings += ( 'ascii', )
for coding in possible_codings:
data = ' '.join(make_in_data_gen()).encode(coding, 'xmlcharrefreplace')
try:
sys.stdout.write(str(data))
break
except UnicodeError as err:
# there might still have some problem with the underlying sys.stdout.
# it might be a StringIO whose content could be decoded/encoded in this same process
# and have encode/decode errors because we could have guessed a bad encoding with it.
# in such case fallback on 'ascii'
if coding == 'ascii':
raise
sys.stderr.write('Error on write to sys.stdout with %s encoding: err=%s\nTrying with ascii' % (
coding, err))
sys.stdout.write('\n')
|
Alignak-monitoring-contrib/alignak-module-nsca
|
test/alignak_test.py
|
Python
|
agpl-3.0
| 77,995
|
"""Argument definitions for model training code in `trainer.model`."""
import argparse
from trainer import model
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--batch_size",
help="Batch size for training steps",
type=int,
default=32,
)
parser.add_argument(
"--eval_data_path",
help="GCS location pattern of eval files",
required=True,
)
parser.add_argument(
"--nnsize",
help="Hidden layer sizes (provide space-separated sizes)",
nargs="+",
type=int,
default=[32, 8],
)
parser.add_argument(
"--nbuckets",
help="Number of buckets to divide lat and lon with",
type=int,
default=10,
)
parser.add_argument(
"--lr", help="learning rate for optimizer", type=float, default=0.001
)
parser.add_argument(
"--num_evals",
help="Number of times to evaluate model on eval data training.",
type=int,
default=5,
)
parser.add_argument(
"--num_examples_to_train_on",
help="Number of examples to train on.",
type=int,
default=100,
)
parser.add_argument(
"--output_dir",
help="GCS location to write checkpoints and export models",
required=True,
)
parser.add_argument(
"--train_data_path",
help="GCS location pattern of train files containing eval URLs",
required=True,
)
parser.add_argument(
"--job-dir",
help="this model ignores this field, but it is required by gcloud",
default="junk",
)
args = parser.parse_args()
hparams = args.__dict__
hparams.pop("job-dir", None)
model.train_and_evaluate(hparams)
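# Example invocation (hedged; the bucket paths are placeholders):
# python3 -m trainer.task \
#     --train_data_path=gs://my-bucket/taxi-train*.csv \
#     --eval_data_path=gs://my-bucket/taxi-valid*.csv \
#     --output_dir=gs://my-bucket/taxi-model \
#     --num_evals=5 --batch_size=32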
|
GoogleCloudPlatform/asl-ml-immersion
|
notebooks/building_production_ml_systems/solutions/taxifare/trainer/task.py
|
Python
|
apache-2.0
| 1,800
|
from data import *
# Piece values: positive for White's pieces, negated for Black's
pvals = {
PAWN: 100,
BISHOP: 300,
KNIGHT: 300,
ROOK: 500,
QUEEN: 900,
-PAWN: -100,
-BISHOP: -300,
-KNIGHT: -300,
-ROOK: -500,
-QUEEN: -900,
KING: 10000,
-KING: -10000,
EMPTY: 0,
}
def value(state):
return state.som * sum(pvals[state.board[cord]] for cord in fcords)
def game_lost(state):
try:
state.board.index(KING*state.som)
return False
except ValueError:
return True
def game_drawn(state):
# Draw after 80 turns
return state.turn >= 80
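# Worked example (hedged): being up exactly one rook makes the material
# sum +/-500, and multiplying by state.som reports it from the side to
# move's perspective, so value(state) == 500 for the side that is ahead.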
|
edrex/minichess
|
minichess/eval.py
|
Python
|
gpl-2.0
| 605
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gevent import monkey
monkey.patch_all()
from gevent.pywsgi import WSGIServer
import re
from urllib import unquote
import logging
static_setting = {
'templates': r'templates'
}
from HTTPerror import HTTP404Error, HTTP403Error, HTTP502Error, HTTP302Error
class RouteError(Exception):
def __init__(self, info=None):
# Store the reason so __str__ can use it (the original never set self.info)
self.info = info
logging.debug("<%s>" % info)
def __str__(self):
if(self.info == 'too many re'):
return "<TOO MORE REGULAR SEARCH>"
if(self.info == 'route error'):
return '<WRONG ROUTE DESIGN>'
if(self.info == 'query already in request'):
return "<IT HAS ALREADY IN REQUEST VALUE>"
if(self.info == 'not tuple'):
return '<URL MUST BE TUPLE OF ROUTE AND HANDLER>'
if(self.info == 'not two items'):
return '<URL MUST BE A TUPLE OF ROUTE AND HANDLER, EXACTLY TWO ITEMS>'
class RequestError(Exception):
def __init__(self):
pass
class RequestValueError(RequestError):
def __str__(self):
return "<the value has already in request's data>"
class WebApp():
urls = []
_parsed_urls = []
global static_setting
templates = False
def __init__(self, environ=None, get_urls=True):
self.request = {}
if environ:
self._environ = environ
self._path = self._environ['PATH_INFO']
if self._path[-1] != '/':
self._path = self._path + '/'
try:
self.request['cookies'] = self._environ['HTTP_COOKIE']
except KeyError:
self.request['cookies'] = None
self.request['http_protocol'] = self._environ['SERVER_PROTOCOL']
self.request['user_agent'] = self._environ['HTTP_USER_AGENT']
try:
self.request['http_connect'] = self._environ['HTTP_CONNECTION']
except KeyError:
self.request['http_connect'] = None
self.request['http_port'] = self._environ['HTTP_HOST']
self.request['method'] = self._environ['REQUEST_METHOD']
try:
self.request['content_length'] = self._environ[
'CONTENT_LENGTH']
self.request['content_type'] = self._environ['CONTENT_TYPE']
self.request['http_accept_encoding'] = self._environ[
'HTTP_ACCEPT_ENCODING']
except KeyError:
self.request['content_length'] = None
self.request['content_type'] = None
self.request['http_accept_encoding'] = None
self.request['data'] = {}
self.request['query_string'] = {}
line = self._environ['QUERY_STRING']
if self.request['content_length']:
length = int(self.request['content_length'])
request_data = environ['wsgi.input'].read(length)
if request_data:
request_data = unquote(request_data)
for data_pair in request_data.split('&'):
key, value = data_pair.split('=')
self.request['data'][key] = value
query_string = self._environ['QUERY_STRING']
if query_string:
query_string = unquote(query_string)
for data_pair in query_string.split('&'):
try:
key, value = data_pair.split('=')
self.request['data'][key] = value
self.request['query_string'][key] = value
except ValueError:
pass
if not get_urls:
for url in self.urls:
if not isinstance(url, tuple):
raise RouteError('not tuple')
if len(url) != 2:
raise RouteError('not two items')
try:
res = self.url_parse(url[0])
except RouteError:
logging.debug("<the route design got some mistakes>")
raise HTTP404Error
if isinstance(res, tuple):
self._parsed_urls.append((res[0] + '$', url[1], res[1]))
else:
self._parsed_urls.append((res + '$', url[1]))
if self.templates:
static_setting['templates'] = self.templates
def __repr__(self):
return "Jolla.WebAppObject"
def __str__(self):
return "<class 'Jolla.WebAppObject'>"
def parse(self, urls):
for url_handler in urls:
if url_handler[0] == r'/':
if self._path != '/':
continue
else:
html_code = url_handler[1](self.request)
url_reg = re.compile(url_handler[0])
if url_reg.match(self._path):
if '?' in url_handler[0]:
re_query = re.findall(url_reg, self._path)
if re_query[0]:
for i in range(len(url_handler[2])):
if url_handler[2][i] in self.request:
raise RouteError("query already in request")
else:
self.request[url_handler[
2][i]] = re_query[0][i]
# html_code = url_handler[1](self.request)
# return html_code
try:
html_code = url_handler[1](self.request)
except TypeError:
html_code = url_handler[1]()
return html_code
raise HTTP404Error(
'REQUEST %s NOT FOUND IN ROUTE CONFIGURATION' % self._path)
def url_parse(self, path):
path = path.replace(' ', '')
path = path.replace("_", "-")
if path[-1] != '/':
path = path + '/'
if '<' in path and '>' in path:
if path.count("<") != path.count(">"):
raise RouteError("route error")
if path.count("<") > 5:
raise RouteError("too many re")
reg = re.compile(r'<([a-z0-9A-Z-_]+)>')
re_list = re.findall(reg, path)
the_url = path
for url_query in re_list:
the_url = the_url.replace(
'<' + url_query + '>', '(?P<' + url_query + '>[a-z0-9A-Z-]+)')
return (the_url, re_list)
return path
def get_parsed_urls(self):
return self._parsed_urls
class jolla_server(WSGIServer):
def __init__(self, app, listener=None, log=None):
if not listener:
self.listener = ("127.0.0.1", 8000)
self.host = "127.0.0.1"
self.port = 8000
else:
self.listener = listener
self.host = None
self.port = None
self.app = app
my_app = self.app(get_urls=False)
self.urls = my_app.get_parsed_urls()
if log:
logging.basicConfig(filename=log, level=logging.DEBUG,
format='%(asctime)s %(levelname)s:%(message)s', datefmt="[%m-%d-%Y %H:%M:%S]")
WSGIServer.__init__(self, listener=self.listener,
application=self.application, log=logging)
else:
WSGIServer.__init__(self, listener=self.listener,
application=self.application)
def __str__(self):
return "<class 'Jolla.jolla_serverObject'>"
def __repr__(self):
return 'Jolla.jolla_serverObject'
def application(self, environ, start_response):
try:
the_app = self.app(environ)
html_code = the_app.parse(self.urls)
if not isinstance(html_code, tuple):
html_code = (html_code, [('Content-Type', 'text/html')])
status = '200 OK'
except HTTP404Error as e:
status = e.error_header
html_code = ('404 NOT FOUND', [('Content-Type', 'text/html')])
except HTTP302Error as e:
status = e.error_header
html_code = ('', [('Location', e.target_url)])
header = [
('Server', 'Jolla/1.1')
]
for header_item in html_code[1]:
header.append(header_item)
start_response(status, header)
return html_code[0]
def run_server(self):
if self.host:
print "the jolla server is running on the {} in the port {}".format(self.host, self.port)
else:
if isinstance(self.listener, tuple):
print "the jolla server is running on the {} in the port {}".format(self.listener[0], self.listener[1])
self.serve_forever()
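# Example wiring (hedged sketch; handlers simply return HTML strings and
# route parameters such as <name> land in the request dict):
#
# class MyApp(WebApp):
#     urls = [
#         (r'/', lambda request: 'index'),
#         (r'/user/<name>', lambda request: 'hello ' + request['name']),
#     ]
#
# jolla_server(MyApp, listener=("127.0.0.1", 8000)).run_server()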
|
salamer/jolla
|
jolla/server.py
|
Python
|
apache-2.0
| 8,875
|
from django.test import TestCase
from django.utils import timezone
from django.http import HttpResponse
from django.core.urlresolvers import reverse
from principal.models import Proyecto
from principal.models import Rol
from principal.models import Permiso
from principal.models import Usuario
# Create your tests here.
def crear_proyecto(nombre, descripcion, fecha_creacion, complejidad_total, estado):
"""
Function: creates a project for the tests
"""
return Proyecto.objects.create(nombre=nombre, descripcion=descripcion,
fecha_creacion=fecha_creacion,
complejidad_total=complejidad_total,
estado=estado
)
def crear_rol(nombre, descripcion):
"""
Function: creates a role for the tests
"""
return Rol.objects.create(nombre=nombre, descripcion=descripcion)
def crear_permiso(nombre, valor):
"""
Function: creates a permission for the tests
"""
return Permiso.objects.create(nombre=nombre, valor=valor)
def crear_usuario(username, password):
"""
Function: creates a user for the tests
"""
return Usuario.objects.create(username=username, password=password, nombre='ust1',
apellido='ust1', telefono='0000000000',
ci=4100100, email='ts1@mail.com'
)
class ProyectoTest(TestCase):
def test_creacion_proyecto(self):
"""
Check that the project is created successfully
"""
p = crear_proyecto("proyectoTest","Prueba de test.py", timezone.now(), 0, "no iniciado")
tp = Proyecto.objects.get(nombre="proyectoTest")
self.assertEqual(tp.nombre, "proyectoTest")
def test_eliminacion_proyecto(self):
"""
Check that when the project is deleted, all the roles
associated with it are also deleted
"""
p = crear_proyecto("proyectoTest","Prueba de test.py", timezone.now(), 0, "no iniciado")
p.save()
r = crear_rol("Administrador proyectoTest", "rol de prueba")
r.proyecto = p
r.save()
pr = crear_permiso("crear",0)
r.permisos.add(pr)
pr = crear_permiso("modificar",0)
r.permisos.add(pr)
Proyecto.objects.get(nombre="proyectoTest").delete()
tp = Proyecto.objects.all()
r = Rol.objects.all()
self.assertEqual(len(tp), 0)
self.assertEqual(len(r), 0)
def test_conexion(self):
#client = Client()
usuario = crear_usuario('admin', 'admin')
usuario.save()
p = crear_proyecto("proyectoTest","Prueba de test.py", timezone.now(), 0, "no iniciado")
p.save()
res = self.client.post('/login/',{'username':'admin','password':'admin'})
tsession = self.client.session
print 'data'
print tsession
print tsession['usuario']
print res
self.assertEqual(tsession['usuario'], 1)
res = self.client.get('/proyectos/')
self.assertContains(res,"proyectoTest")
self.assertEqual(res.status_code, 200)
|
SantiagoValdez/hpm
|
src/hpm/app_proyecto/tests.py
|
Python
|
gpl-2.0
| 2,905
|
#
# Copyright (C) 2015 Prevas A/S
#
# This file is part of dctrl, an embedded device control framework
#
# dctrl is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# dctrl is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
# more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
"""This is like pexpect, but working on an internal (py)serial connection."""
import serial, pexpect
import time
class serspawn(serial.Serial, pexpect.spawn):
closed = True
def __init__ (self, port, baudrate=115200, bytesize=serial.EIGHTBITS,
parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE,
readTimeout=0.1, writeTimeout=None, xonxoff=False, rtscts=False,
timeout=30, maxread=2000, searchwindowsize=None,
logfile=None):
self.readTimeout = readTimeout
serial.Serial.__init__(self, port=port, baudrate=baudrate,
bytesize=bytesize, parity=parity,
stopbits=stopbits, timeout=readTimeout,
xonxoff=xonxoff, rtscts=rtscts,
writeTimeout=writeTimeout)
self.args = None
self.command = None
pexpect.spawn.__init__(self, None, None, timeout, maxread,
searchwindowsize, logfile)
self.child_fd = -1
self.own_fd = False
self.closed = False
self.name = '<serial port %s>' % port
self.default_timeout = timeout
return
def __del__ (self):
serspawn.close(self)
return
def close (self):
if self.closed:
return
serial.Serial.close(self)
self.closed = True
return
def flush (self):
serial.Serial.flush(self)
return
def isatty (self):
return False
def isalive (self):
return not self.closed
def terminate (self, force=False):
raise pexpect.ExceptionPexpect('This method is not valid for serial ports.')
def kill (self, sig):
return
def send(self, s, noSendLog=None):
# Note: the original file defined 'send' twice; the second definition
# shadowed the first, so the logging branch was never reached. Merged
# into a single method: write to the port, logging unless noSendLog is set.
time.sleep(self.delaybeforesend)
if noSendLog is None:
if self.logfile is not None:
self.logfile.write(s)
self.logfile.flush()
if self.logfile_send is not None:
self.logfile_send.write(s)
self.logfile_send.flush()
serial.Serial.write(self, s)
return s
def read_nonblocking (self, size=1, timeout=-1):
# FIXME: find out why timeout is broken in pyserial
#if timeout != -1:
# self.timeout = 1
self.timeout = self.readTimeout
b = serial.Serial.read(self, size)
self.timeout = self.default_timeout
#if timeout != -1:
# self.timeout = self.default_timeout
if self.logfile is not None:
self.logfile.write(b)
self.logfile.flush()
if self.logfile_read is not None:
self.logfile_read.write(b)
self.logfile_read.flush()
return str(b)
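# Example usage (hedged; the port name and prompt are illustrative):
#
# child = serspawn('/dev/ttyUSB0', baudrate=115200, timeout=10)
# child.sendline('uname -a')  # sendline is inherited from pexpect.spawn
# child.expect('# ')
# print(child.before)
# child.close()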
|
DeviceTestFramework/dctrl
|
dctrl/expect/serpexpect.py
|
Python
|
gpl-2.0
| 3,807
|
#!/usr/bin/python
# This script reads through a genotype likelihood file and the respective mean genotype likelihood file. It writes a nexus file for all individuals and the given genotypes, with '0' for ref homozygote, '1' for heterozygote, and '2' for alt homozygote.
# Usage: ~/vcf2nex012.py pubRetStriUG_unlnkd.gl pntest_pubRetStriUG_unlnkd.txt
from sys import argv
# read genotype likelihood file to get scaffold:bp (which is not in the same order as the vcf file, resulting from vcf2gl.py)
with open(argv[1], 'rb') as gl_file:
scafPos_gl = list()
for line in gl_file:
if line.split(' ')[0] == '65':
continue
elif line.split(' ')[0] == 'CR1043':
ind_id = line.split(' ')
ind_id[len(ind_id)-1] = ind_id[len(ind_id)-1].split('\n')[0]
else:
scafPos_gl.append(line.split(' ')[0])
# read the file with mean genotypes
with open(argv[2], 'rb') as mean_gt_file:
ind_dict = dict()
for line in mean_gt_file:
gt_line = line.split(' ')
for i, ind in enumerate(ind_id):
if ind not in ind_dict:
ind_dict[ind] = [float(gt_line[i])]
else:
ind_dict[ind].append(float(gt_line[i]))
# parse the mean genotypes and write the proper bases
for key, value in ind_dict.iteritems():
newline = list()
for i, pos in enumerate(scafPos_gl):
if round(float(value[i])) == 0:
newline.append(str(0))
elif round(float(value[i])) == 1:
newline.append(str(1))
elif round(float(value[i])) == 2:
newline.append(str(2))
else:
continue
print str(key + '\t' + ''.join(newline))
#print scafPos_gl
#for key, value in iter(refp_dict.iteritems()):
# print key, ''.join(value)
|
schimar/hts_tools
|
vcf2nex012.py
|
Python
|
gpl-2.0
| 1,837
|
# -*- coding: utf-8 -*-
# Copyright 2016 Open Net Sàrl
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from . import test_import
|
CompassionCH/l10n-switzerland
|
l10n_ch_import_cresus/tests/__init__.py
|
Python
|
agpl-3.0
| 147
|
from unittest import TestCase
import warnings
from pyml import *
class FullPageTests(TestCase):
def test_full_page(self):
"""Should be able to render a complex page."""
output = html(lang='en')(
head(
title('Test Page'),
),
body(
header(
nav(
ul(
li(a(href='/')('Home')),
li(a(href='/about')('About')),
),
),
),
h1('Test Page'),
section(id='main')(
h2('About'),
p('This is the about page.'),
),
footer('Copyright 2014'),
),
)
expected = ''.join([
'<html lang="en">',
'<head>',
'<title>Test Page</title>',
'</head>',
'<body>',
'<header>',
'<nav>',
'<ul>',
'<li><a href="/">Home</a></li>',
'<li><a href="/about">About</a></li>',
'</ul>',
'</nav>',
'</header>',
'<h1>Test Page</h1>',
'<section id="main">',
'<h2>About</h2>',
'<p>This is the about page.</p>',
'</section>',
'<footer>Copyright 2014</footer>',
'</body>',
'</html>',
])
self.assertEqual(output, expected)
class NonemptyElementTests(TestCase):
"""Tests for the non-empty elements."""
def test_no_contents(self):
"""Should be able to render an element without contents."""
self.assertEqual(
p(),
'<p></p>'
)
def test_contents(self):
"""Should be able to render with contents."""
self.assertEqual(
p('Hello world.'),
'<p>Hello world.</p>'
)
self.assertEqual(
div(p('Goodbye, ', em('cruel'), ' world!')),
'<div><p>Goodbye, <em>cruel</em> world!</p></div>'
)
def test_attributes(self):
"""Should be able to render with only attributes."""
self.assertEqual(
div(id='foo')(),
'<div id="foo"></div>'
)
def test_attributes_reserved_words(self):
"""Should be able to avoid reserved words with leading `_`."""
self.assertEqual(
div(_class='test')(),
'<div class="test"></div>'
)
def test_attributes_hyphenated(self):
"""Should be able to generate hyphenated attributes."""
self.assertEqual(
div(data_id="1")("Test"),
'<div data-id="1">Test</div>'
)
class EmptyElementTests(TestCase):
"""Tests for the empty elements."""
def test_no_attributes(self):
"""Should be able to render an element without attributes."""
self.assertEqual(
br(),
'<br>'
)
def test_attributes(self):
"""Empty elements should support attributes."""
self.assertEqual(
meta(charset='utf-8')(),
'<meta charset="utf-8">'
)
def test_warning_on_contents(self):
"""Empty elements can't have contents, so trying causes a warning."""
with warnings.catch_warnings(record=True) as w:
br('Hi!')
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[0].category, UserWarning))
def test_no_warning_if_no_contents(self):
"""Shouldn't issue a warning if there are no contents."""
with warnings.catch_warnings(record=True) as w:
br()
self.assertEqual(len(w), 0)
|
tsmall/pyml
|
pyml/test/test_elements.py
|
Python
|
gpl-3.0
| 3,842
|
from tornado.ioloop import IOLoop
from tornado.tcpserver import TCPServer
class StreamHandler:
def __init__(self, stream):
self._stream = stream
stream.set_nodelay(True)
self._stream.read_until(b'\n', self._handle_read)
def _handle_read(self, data):
self._stream.write(data)
self._stream.read_until(b'\n', self._handle_read)
class EchoServer(TCPServer):
def handle_stream(self, stream, address):
StreamHandler(stream)
if __name__ == '__main__':
server = EchoServer()
server.bind(25000)
server.start(1)
IOLoop.instance().start()
IOLoop.instance().close()
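# Quick manual test (hedged):
#   $ echo hello | nc 127.0.0.1 25000
# or from Python:
#   import socket
#   s = socket.create_connection(('127.0.0.1', 25000))
#   s.sendall(b'hello\n')
#   print(s.recv(1024))  # -> b'hello\n'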
|
MagicStack/vmbench
|
servers/torecho_readline.py
|
Python
|
mit
| 641
|
# -*- coding: utf-8 -*-
#
# Copyright © 2011 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
from katello.client.api.base import KatelloAPI
class EnvironmentAPI(KatelloAPI):
"""
Connection class to access environment calls
"""
def environments_by_org(self, orgId):
path = "/api/organizations/%s/environments" % orgId
envs = self.server.GET(path)[1]
return envs
def environment_by_org(self, orgId, envId):
path = "/api/organizations/%s/environments/%s" % (orgId, envId)
env = self.server.GET(path)[1]
return env
def environment_by_name(self, orgId, envName):
path = "/api/organizations/%s/environments/" % (orgId)
envs = self.server.GET(path, {"name": envName})[1]
if len(envs) > 0:
return envs[0]
else:
return None
def library_by_org(self, orgId):
path = "/api/organizations/%s/environments/" % (orgId)
envs = self.server.GET(path, {"library": "true"})[1]
if len(envs) > 0:
return envs[0]
else:
return None
def create(self, orgId, name, description, priorId):
envdata = {"name": name}
envdata = self.update_dict(envdata, "description", description)
envdata = self.update_dict(envdata, "prior", priorId)
path = "/api/organizations/%s/environments/" % orgId
return self.server.POST(path, {"environment": envdata})[1]
def update(self, orgId, envId, name, description, priorId):
envdata = {}
envdata = self.update_dict(envdata, "name", name)
envdata = self.update_dict(envdata, "description", description)
envdata = self.update_dict(envdata, "prior", priorId)
path = "/api/organizations/%s/environments/%s" % (orgId, envId)
return self.server.PUT(path, {"environment": envdata})[1]
def delete(self, orgId, envId):
path = "/api/organizations/%s/environments/%s" % (orgId, envId)
return self.server.DELETE(path)[1]
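# Example usage (hedged; assumes the KatelloAPI base class supplies an
# authenticated self.server connection, and the org name is illustrative):
#
# api = EnvironmentAPI()
# for env in api.environments_by_org('ACME_Corporation'):
#     print env['name']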
|
beav/katello
|
cli/src/katello/client/api/environment.py
|
Python
|
gpl-2.0
| 2,564
|
#!/usr/bin/env python
import argparse
import jinja2
import logging
import os
import sys
import traceback
LOG = logging.getLogger()
LOG.setLevel(logging.DEBUG)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
LOG.addHandler(ch)
def main(main_args):
"""
Read environment variables to envvars_dict
Open file with file_path
Read contents as template_contents
Render template_contents as rendered_file_contents with envvars_dict
Send rendered_file_contents to file or stdout
"""
envvars_dict = dict(os.environ)
template_contents = read_file_contents(main_args.template_file)
rendered_file_contents = render_template(template_contents, envvars_dict)
if main_args.output_file:
# Write to the requested file; reassigning sys.stdout and closing it
# (as the original did) would close the real stdout for good.
with open(main_args.output_file, "w") as output_file:
output_file.write(rendered_file_contents)
else:
sys.stdout.write(rendered_file_contents)
def parse_cli_args():
p = argparse.ArgumentParser(description="Template Renderer")
p.add_argument("template_file",
type=str,
help="Path to template file")
p.add_argument("-o", "--output",
dest="output_file",
type=str,
required=False,
help="Path to output file")
return p.parse_known_args()
def read_file_contents(file_path):
contents = None
if os.path.isfile(file_path):
with open(file_path, "r") as f:
contents = f.read()
return contents
def render_template(template_contents, parameters_dict):
template = jinja2.Template(template_contents)
rendered_contents = template.render(**parameters_dict)
return rendered_contents
if __name__ == "__main__":
try:
args, args_other = parse_cli_args()
main(args)
except Exception as main_ex:
LOG.error("An error occurred in running the application!")
LOG.error(main_ex)
LOG.error(traceback.print_tb(sys.exc_info()[2]))
finally:
sys.exit(0)
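# Example (hedged; file names are placeholders). Given a Jinja2 template
# that references environment variables:
#   $ cat nginx.conf.tmpl
#   server_name {{ HOSTNAME }};
#   $ python render_template.py nginx.conf.tmpl -o nginx.conf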
|
tendrilinc/marathon-autoscaler
|
scripts/render_template.py
|
Python
|
apache-2.0
| 2,090
|
import inspect
from contextlib import contextmanager
from itertools import chain
from django.conf import settings
from django.utils.decorators import method_decorator
from django.utils.termcolors import colorize
from sekizai.helpers import validate_template
from cms import constants
from cms.models import AliasPluginModel
from cms.utils.compat.dj import is_installed
from cms.utils.conf import get_cms_setting
SUCCESS = 1
WARNING = 2
ERROR = 3
SKIPPED = 4
CHECKERS = []
class FileOutputWrapper:
"""
Wraps two file-like objects (that support at the very least the 'write'
method) into an API to be used by the check function further down in
this module.
The following properties are public (and required) by alternative implementations:
errors: integer count of errors encountered
successes: integer count of successes encountered
warnings: integer count of warnings encountered
skips: integer count of skips encountered
successful: Whether the checks were successful (no errors)
They must also provide these methods:
write_line(message=''): writes a message to stdout
write_stderr_line(message=''): writes a message to stderr
success(message): reports and registers a successful check
error(message): reports and registers an error
warn(message): reports and registers a warning
skip(message): reports and registers a skipped check
section(title): A context manager that starts a new section. For the
Section API see FileSectionWrapper
"""
def __init__(self, stdout, stderr):
self.stdout = stdout
self.stderr = stderr
self.section_wrapper = FileSectionWrapper
self.errors = 0
self.successes = 0
self.warnings = 0
self.skips = 0
def colorize(self, msg, opts=(), **kwargs):
return colorize(msg, opts=opts, **kwargs)
def write_line(self, message=''):
self.write(u'%s\n' % message)
def write(self, message):
self.stdout.write(message)
def write_stderr_line(self, message=''):
self.write_stderr(u'%s\n' % message)
def write_stderr(self, message):
self.stderr.write(message)
def success(self, message):
self.successes += 1
self.write_line(u'%s %s' % (message, self.colorize('[OK]', fg='green', opts=['bold'])))
def error(self, message):
self.errors += 1
self.write_stderr_line(u'%s %s' % (message, self.colorize('[ERROR]', fg='red', opts=['bold'])))
def warn(self, message):
self.warnings += 1
self.write_stderr_line(u'%s %s' % (message, self.colorize('[WARNING]', fg='yellow', opts=['bold'])))
def skip(self, message):
self.skips += 1
self.write_line(u'%s %s' % (message, self.colorize('[SKIP]', fg='blue', opts=['bold'])))
@method_decorator(contextmanager)
def section(self, title):
self.write_line(self.colorize(title, opts=['bold']))
self.write_line(self.colorize('=' * len(title), opts=['bold']))
self.write_line()
wrapper = self.section_wrapper(self)
try:
yield wrapper
except: # noqa: E722
self.error('Checker failed, see traceback')
raise
self.errors += wrapper.errors
self.successes += wrapper.successes
self.warnings += wrapper.warnings
self.skips += wrapper.skips
self.write_line('')
@property
def successful(self):
return not self.errors
class FileSectionWrapper(FileOutputWrapper):
"""
Used from FileOutputWrapper to report checks in a section.
If you want to provide your own output class, you may want to subclass
this class for the section reporting too. If you want to use your own,
    you must define at least the same API as FileOutputWrapper, as well
as these four additional methods:
finish_success(message): End the section (successfully)
finish_error(message): End the section with errors
finish_warning(message): End this section with a warning
finish_skip(message): End this (skipped) section
"""
def __init__(self, wrapper):
super().__init__(wrapper.stdout, wrapper.stderr)
self.wrapper = wrapper
def write_line(self, message=''):
self.write(u' - %s\n' % message)
def write_stderr_line(self, message=''):
self.write_stderr(u' - %s\n' % message)
def finish_success(self, message):
self.wrapper.write_line()
self.wrapper.success(message)
def finish_error(self, message):
self.wrapper.write_line()
self.wrapper.error(message)
def finish_warning(self, message):
self.wrapper.write_line()
        self.wrapper.warn(message)
def finish_skip(self, message):
        self.wrapper.write_line()
self.wrapper.skip(message)
def define_check(func):
"""
Helper decorator to register a check function.
"""
CHECKERS.append(func)
return func
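# Editor's sketch (hypothetical check, not part of django CMS): any function
# decorated with @define_check is collected in CHECKERS and run by check()
# below; it receives the output wrapper and reports through a section.
#
#   @define_check
#   def check_debug(output):
#       with output.section("Debug mode") as section:
#           if getattr(settings, 'DEBUG', False):
#               section.warn("DEBUG is enabled; disable it in production")
#           else:
#               section.success("DEBUG is disabled")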
@define_check
def check_sekizai(output):
with output.section("Sekizai") as section:
sekizai_installed = is_installed('sekizai')
if sekizai_installed:
section.success("Sekizai is installed")
else:
section.error("Sekizai is not installed, could not find 'sekizai' in INSTALLED_APPS")
processors = list(
chain(*[template['OPTIONS'].get('context_processors', []) for template in settings.TEMPLATES]))
if 'sekizai.context_processors.sekizai' in processors:
section.success("Sekizai template context processor is installed")
else:
section.error("Sekizai template context processor is not installed, could not find "
"'sekizai.context_processors.sekizai' in TEMPLATES option context_processors")
if not sekizai_installed:
# sekizai is not installed.
            # we can't reliably check templates
# because template loading won't work
return
for template, _ in get_cms_setting('TEMPLATES'):
if template == constants.TEMPLATE_INHERITANCE_MAGIC:
continue
if validate_template(template, ['js', 'css']):
section.success("Sekizai namespaces 'js' and 'css' found in %r" % template)
else:
section.error("Sekizai namespaces 'js' and 'css' not found in %r" % template)
if section.successful:
section.finish_success("Sekizai configuration okay")
else:
section.finish_error("Sekizai configuration has errors")
@define_check
def check_i18n(output):
with output.section("Internationalization") as section:
if isinstance(getattr(settings, 'CMS_LANGUAGES', {}), dict):
section.success("New style CMS_LANGUAGES")
else:
section.warn("Old style (tuple based) CMS_LANGUAGES, please switch to the new (dictionary based) style")
if getattr(settings, 'LANGUAGE_CODE', '').find('_') > -1:
section.warn("LANGUAGE_CODE must contain a valid language code, not a locale (e.g.: 'en-us' instead of "
"'en_US'): '%s' provided" % getattr(settings, 'LANGUAGE_CODE', ''))
for lang in getattr(settings, 'LANGUAGES', ()):
if lang[0].find('_') > -1:
section.warn("LANGUAGES must contain valid language codes, not locales (e.g.: 'en-us' instead of "
"'en_US'): '%s' provided" % lang[0])
        # ints hash to themselves, so this check passes only when SITE_ID is an integer
        if settings.SITE_ID == hash(settings.SITE_ID):
            for site, items in get_cms_setting('LANGUAGES').items():
                if isinstance(site, int):
for lang in items:
if lang['code'].find('_') > -1:
section.warn("CMS_LANGUAGES entries must contain valid language codes, not locales (e.g.: "
"'en-us' instead of 'en_US'): '%s' provided" % lang['code'])
else:
section.error("SITE_ID must be an integer, not %r" % settings.SITE_ID)
@define_check
def check_middlewares(output):
with output.section("Middlewares") as section:
required_middlewares = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'cms.middleware.user.CurrentUserMiddleware',
'cms.middleware.page.CurrentPageMiddleware',
'cms.middleware.toolbar.ToolbarMiddleware',
'cms.middleware.language.LanguageCookieMiddleware',
)
middlewares = settings.MIDDLEWARE
for middleware in required_middlewares:
if middleware not in middlewares:
section.error("%s middleware must be in MIDDLEWARE" % middleware)
@define_check
def check_context_processors(output):
with output.section("Context processors") as section:
processors = list(
chain(*[template['OPTIONS'].get('context_processors', []) for template in settings.TEMPLATES]))
required_processors = (
'cms.context_processors.cms_settings',
)
for processor in required_processors:
if processor not in processors:
section.error("%s context processor must be in TEMPLATES option context_processors" % processor)
@define_check
def check_plugin_instances(output):
from cms.management.commands.subcommands.list import plugin_report
with output.section("Plugin instances") as section:
# get the report
report = plugin_report()
section.success("Plugin instances of %s types found in the database" % len(report))
# loop over plugin types in the report
for plugin_type in report:
# warn about those that are not installed
if not plugin_type["model"]:
section.error("%s has instances but is no longer installed" % plugin_type["type"] )
# warn about those that have unsaved instances
if plugin_type["unsaved_instances"]:
section.error(
"%s has %s unsaved instances" % (plugin_type["type"], len(plugin_type["unsaved_instances"])))
if section.successful:
section.finish_success("The plugins in your database are in good order")
else:
section.finish_error(
"There are potentially serious problems with the plugins in your database. \nEven if "
"your site works, you should run the 'manage.py cms list plugins' \ncommand and then "
"the 'manage.py cms delete-orphaned-plugins' command. \nThis will alter your "
"database; read the documentation before using it."
)
@define_check
def check_copy_relations(output):
from cms.extensions import extension_pool
from cms.extensions.models import BaseExtension
from cms.models.pluginmodel import CMSPlugin
from cms.plugin_pool import plugin_pool
def c_to_s(klass):
return '%s.%s' % (klass.__module__, klass.__name__)
def get_class(method_name, model):
for cls in inspect.getmro(model):
if method_name in cls.__dict__:
return cls
return None
with output.section('Presence of "copy_relations"') as section:
plugin_pool.discover_plugins()
for plugin in plugin_pool.plugins.values():
plugin_class = plugin.model
if get_class('copy_relations', plugin_class) is not CMSPlugin or plugin_class is CMSPlugin:
# this class defines a ``copy_relations`` method, nothing more
# to do
continue
for rel in plugin_class._meta.many_to_many:
section.warn('%s has a many-to-many relation to %s,\n but no "copy_relations" method defined.' % (
c_to_s(plugin_class),
c_to_s(rel.model),
))
for rel in plugin_class._get_related_objects():
if rel.model != CMSPlugin and not issubclass(rel.model, plugin.model) and rel.model != AliasPluginModel:
section.warn('%s has a foreign key from %s,\n but no "copy_relations" method defined.' % (
c_to_s(plugin_class),
c_to_s(rel.model),
))
for extension in chain(extension_pool.page_extensions, extension_pool.title_extensions):
if get_class('copy_relations', extension) is not BaseExtension:
# OK, looks like there is a 'copy_relations' defined in the
# extension... move along...
continue
for rel in extension._meta.many_to_many:
section.warn('%s has a many-to-many relation to %s,\n '
'but no "copy_relations" method defined.' % (
c_to_s(extension),
c_to_s(rel.remote_field.model),
))
for rel in extension._get_related_objects():
if rel.model != extension:
section.warn('%s has a foreign key from %s,\n but no "copy_relations" method defined.' % (
c_to_s(extension),
c_to_s(rel.model),
))
if not section.warnings:
section.finish_success('All plugins and page/title extensions have "copy_relations" method if needed.')
else:
            section.finish_warning('Some plugins or page/title extensions do not define a "copy_relations" method.\n'
'This might lead to data loss when publishing or copying plugins/extensions.\n'
'See https://django-cms.readthedocs.io/en/latest/extending_cms/custom_plugins.html#handling-relations or ' # noqa
'https://django-cms.readthedocs.io/en/latest/extending_cms/extending_page_title.html#handling-relations.') # noqa
@define_check
def check_placeholder_fields(output):
"""
ModelAdmin instances that are using PlaceholderField fields
should be also a subclass of PlaceholderAdminMixin
"""
from django.contrib.admin import site
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from cms.models.fields import PlaceholderField
with output.section("PlaceholderField") as section:
for model, model_admin in site._registry.items():
ph_fields = [field for field in model._meta.get_fields() if isinstance(field, PlaceholderField)]
            if not ph_fields:
continue
if not isinstance(model_admin, PlaceholderAdminMixin):
section.error(
"%s does not subclass of PlaceholderAdminMixin" % model_admin
)
if section.successful:
section.finish_success("PlaceholderField configuration okay")
def check(output):
"""
Checks the configuration/environment of this django CMS installation.
'output' should be an object that provides the same API as FileOutputWrapper.
    Returns whether the configuration/environment is okay (has no errors).
"""
title = "Checking django CMS installation"
border = '*' * len(title)
output.write_line(output.colorize(border, opts=['bold']))
output.write_line(output.colorize(title, opts=['bold']))
output.write_line(output.colorize(border, opts=['bold']))
output.write_line()
for checker in CHECKERS:
checker(output)
output.write_line()
with output.section("OVERALL RESULTS"):
if output.errors:
output.write_stderr_line(output.colorize("%s errors!" % output.errors, opts=['bold'], fg='red'))
if output.warnings:
output.write_stderr_line(output.colorize("%s warnings!" % output.warnings, opts=['bold'], fg='yellow'))
if output.skips:
output.write_line(output.colorize("%s checks skipped!" % output.skips, opts=['bold'], fg='blue'))
output.write_line(output.colorize("%s checks successful!" % output.successes, opts=['bold'], fg='green'))
output.write_line()
if output.errors:
output.write_stderr_line(output.colorize('Please check the errors above', opts=['bold'], fg='red'))
elif output.warnings:
output.write_stderr_line(output.colorize('Installation okay, but please check warnings above',
opts=['bold'], fg='yellow'))
else:
output.write_line(output.colorize('Installation okay', opts=['bold'], fg='green'))
return output.successful
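# Editor's usage sketch (assumes a configured Django project; not part of
# this module):
#
#   import sys
#   from cms.utils.check import FileOutputWrapper, check
#
#   ok = check(FileOutputWrapper(sys.stdout, sys.stderr))
#   sys.exit(0 if ok else 1)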
|
rsalmaso/django-cms
|
cms/utils/check.py
|
Python
|
bsd-3-clause
| 17,000
|
# Sets
# Set of natural numbers without zero
def naturais_sem_zero():
    n = 1
    print("Set N* (naturals without zero)")
    while n < 15:
        print(n)
        n = n + 1
#--------------------------------------------
# Set of natural numbers with zero
def naturais_com_zero():
    n = 0
    print("Set N (naturals with zero)")
    while n < 15:
        print(n)
        n = n + 1
print("1 for naturals without 0")
print("0 for naturals with 0")
# input() returns a string, so convert it before the integer comparisons below
option = int(input())
if option == 1:
    naturais_sem_zero()
elif option == 0:
    naturais_com_zero()
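# Editor's note: an equivalent, more idiomatic loop for either set is a
# sketch like `for n in range(1, 15): print(n)` (start the range at 0 to
# include zero).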
|
joaopaulojpsp/python_matematica
|
Conjuntos/naturaiz.py
|
Python
|
apache-2.0
| 528
|
"""
The string "PAYPALISHIRING" is written in a zigzag pattern on a given number of rows like this: (you may want to display this pattern in a fixed font for better legibility)
P A H N
A P L S I I G
Y I R
And then read line by line: "PAHNAPLSIIGYIR"
Write the code that will take a string and make this conversion given a number of rows:
string convert(string text, int nRows);
convert("PAYPALISHIRING", 3) should return "PAHNAPLSIIGYIR".
"""
class Solution(object):
def convert(self, s, numRows):
"""
:type s: str
:type numRows: int
:rtype: str
"""
        newS = ""
        L = len(s)
        if numRows == 1:
            return s
        cycle = 2*numRows - 2  # period of the zigzag: down numRows, up numRows-2
        # number of "down-stroke" columns needed to cover the whole string;
        # integer division (//) keeps max_cols an int so range() works on Python 3
        if (L-numRows) % cycle == 0:
            max_cols = (L-numRows) // cycle + 1
        else:
            max_cols = (L-numRows) // cycle + 2
        for row in range(numRows):
            for k in range(max_cols):
                pos = k*cycle + row  # character on the k-th down-stroke
                if pos < L:
                    newS += s[pos]
                if 0 < row < numRows-1:
                    pos2 = (k+1)*cycle - row  # matching character on the diagonal
                    if pos2 < L:
                        newS += s[pos2]
        return newS
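# Editor's self-check (not part of the original solution): verifies the
# example given in the problem statement above.
if __name__ == "__main__":
    assert Solution().convert("PAYPALISHIRING", 3) == "PAHNAPLSIIGYIR"
    print("zigzag conversion ok")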
|
yingcuhk/LeetCode
|
Algorithms/#6 ZigZag Conversion/PythonCode.py
|
Python
|
mit
| 1,350
|
# from scrapy.spider import Spider
# from scrapy.selector import Selector
# from scrapy_laserprinter_usage_page.items import ScrapyLaserprinterUsagePageItem
# # update time
# import datetime
# # operate Mysql
# import MySQLdb
# import MySQLdb.cursors
# # two tables
# # displayer_cartridgerecord
# # displayer_printerrecord
# class LaserPrinterSpider(Spider):
# name = "P4015_maintainkit"
# allowed_domains = []
# print "### %s" % datetime.datetime.now()
# # Connect to Mysql to get printer list
# db = MySQLdb.connect("10.8.144.247", "root", "long841205", "hp_laserprinter_monitor")
# cursor =db.cursor()
# cursor.execute("select deviceIp from displayer_printerrecord where productName='P4015'") # Got all printers' name
# data = cursor.fetchall()
# db.close()
# # generate the printer_list
# printer_list = []
# for i in data:
# printer_list.append(i[0])
# # generate the start_urls
# start_urls = []
# for printer in printer_list:
# start_urls.append("https://%s/hp/device/this.LCDispatcher?nav=hp.Supplies" % printer.strip(u"\n"))
# def parse(self, response):
# sel = Selector(response)
# deviceIp = sel.xpath('//div[@class="networkInfo"]/text()').extract()[0][12:]
# #<tr><td class="hpConsumableBlockHeaderImage"></td><td class="hpConsumableBlockHeaderContent"><span class="hpConsumableBlockHeaderText" id="msg-9698-1">Maintenance Kit</span><br></br><span class="hpConsumableBlockHeaderText" id="msg-9725-1">Order HP Part: 110V-CB388A, 220V-CB389A</span></td><td class="hpConsumableBlockHeaderPctRemaining"><span class="hpConsumableBlockHeaderText">0%</span></td></tr>
# tr_list = sel.xpath('//tr')
# for tr in tr_list:
# title = tr.xpath('td[@class="hpConsumableBlockHeaderContent"]/span[@class="hpConsumableBlockHeaderText"]/text()').extract()
# try:
# if title[0] == u'Maintenance Kit':
# print title[0]
# maintainkit = tr.xpath('td[@class="hpConsumableBlockHeaderPctRemaining"]/span[@class="hpConsumableBlockHeaderText"]/text()').extract()[0].strip("%")
# print maintainkit
# except IndexError:
# pass
# item = ScrapyLaserprinterUsagePageItem() # load database fields
# item['maintainkit'] = maintainkit
# item['deviceIp'] = deviceIp
# return item
|
voostar/hp_laserprinter_monitor
|
background/scrapy_laserprinter_usage_page/scrapy_laserprinter_usage_page/spiders/P4015_maintainkit.py
|
Python
|
unlicense
| 2,405
|