| code (string, 3–1.05M chars) | repo_name (string, 5–104 chars) | path (string, 4–251 chars) | language (1 class) | license (15 classes) | size (int64, 3–1.05M) |
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
"""CMS view for static pages"""
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from boski.mixins import LoginRequiredMixin
from boski.views.crud import ListView, CreateView
from boski.decorators import with_template
from module.static_page.models import Entry
from module.static_page.cms.forms import EntryForm
class List(LoginRequiredMixin, ListView):  # mixin first so its checks run, matching Create/Update below
breadcrumbs = (
{'name': _('Static pages'), 'url': 'cms:static_page:index'},
)
queryset = Entry.objects.non_deleted()
listingColumns = (
('id', '#'),
('title', _('Title')),
('created_at', _('Created')),
('action', _('Actions'))
)
filters = (
('created_at__gte', {
'label': _('Created from'),
'type': 'text',
'class': 'calendar',
}),
('created_at__lte', {
'label': _('To'),
'type': 'text',
'class': 'calendar',
})
)
mapColumns = {
'id': '_displayAsIs',
'title': '_displayEditLink',
'created_by': '_displayAsIs',
'created_at': '_displayDate',
'action': '_displayStaticActionLink',
}
orderingColumns = {'id', 'title', 'created_at'}
def get_fields_name(self):
fields_name = super(List, self).get_fields_name()
return fields_name + ['activated_at', 'slug']
class Create(LoginRequiredMixin, CreateView):
form_class = EntryForm
model = Entry
@property
def breadcrumbs(self):
return (
{'name': _('Static page'), 'url': 'cms:static_page:index'},
{
'name': self.name,
'url': 'cms:static_page:update',
'pk': self.get_object().pk
},
)
class Update(LoginRequiredMixin, CreateView):
form_class = EntryForm
model = Entry
@property
def breadcrumbs(self):
return (
{'name': _('Static page'), 'url': 'cms:static_page:index'},
{
'name': self.name,
'url': 'cms:static_page:update',
'pk': self.get_object().pk,
},
)
@login_required
@with_template('crud/create.html')
def create(request):
form = EntryForm(request.POST or None)
if form.is_valid():
entry = form.save(commit=False)
""":type : Entry """
entry.save()
messages.success(request, _('New static page has been created'))
return HttpResponseRedirect(reverse('cms:static_page:index'))
name = _('Create')
request.breadcrumbs = (
{'name': _('Static page'), 'url': 'cms:static_page:index'},
{'name': name, 'url': 'cms:static_page:create'},
)
actions = {
'create': 'create',
'update': 'update',
'delete': 'delete',
'index': 'index',
}
return locals()
@login_required
@with_template('crud/update.html')
def update(request, pk):
entry = Entry.objects.get(pk=pk)
form = EntryForm(request.POST or None, instance=entry)
if form.is_valid():
entry = form.save(commit=False)
""" :type : Entry """
entry.save()
messages.success(
request, _('Successfully updated static page "%s".') % entry)
if request.POST.get('next', None) == 'edit':
return HttpResponseRedirect(reverse(
'cms:static_page:update', args=[pk]
))
return HttpResponseRedirect(reverse('cms:static_page:index'))
name = _('Edit entry "%s"') % entry
request.breadcrumbs = (
{'name': _('Static page'), 'url': 'cms:static_page:index'},
{'name': name, 'url': 'cms:static_page:update', 'pk': entry.pk},
)
actions = {
'create': 'create',
'update': 'update',
'delete': 'delete',
'index': 'index',
}
return dict(locals().items() + {'object': entry}.items())
@login_required
@with_template('crud/delete.html')
def delete(request, pk):
entry = Entry.objects.get(pk=pk)
if request.POST:
entry.do_delete()
messages.success(
request, _('Static page "%s" has been deleted') % entry)
return HttpResponseRedirect(reverse('cms:static_page:index'))
name = _('Delete entry "%s"') % entry
request.breadcrumbs = (
{'name': _('Static page'), 'url': 'cms:static_page:index'},
{'name': name, 'url': 'cms:static_page:delete', 'pk': entry.pk},
)
actions = {
'create': 'create',
'update': 'update',
'delete': 'delete',
'index': 'index',
}
return dict(locals().items() + {'object': entry}.items())
@login_required
def activate(request, pk):
try:
entry = Entry.objects.get(pk=pk)
""" :type : Entry """
entry.do_activate()
messages.success(
request, _(u'Static page "%s" has been activated') % entry)
except Exception:
messages.error(request, _('Error occurred during saving'))
return HttpResponseRedirect(reverse('cms:static_page:index'))
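# A minimal sketch (assumed, not taken from the original repo) of the URLconf
# these views expect; it maps the 'cms:static_page:*' names reversed above to
# the views in this module:
#
#     from django.conf.urls import url
#     from module.static_page.cms import views
#
#     urlpatterns = [
#         url(r'^$', views.List.as_view(), name='index'),
#         url(r'^create/$', views.create, name='create'),
#         url(r'^update/(?P<pk>\d+)/$', views.update, name='update'),
#         url(r'^delete/(?P<pk>\d+)/$', views.delete, name='delete'),
#         url(r'^activate/(?P<pk>\d+)/$', views.activate, name='activate'),
#     ]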
| Alkemic/webpage | module/static_page/cms/views.py | Python | mit | 5,262 |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
import re
import shutil
import llnl.util.tty as tty
import llnl.util.lang
import spack.compiler
import spack.compilers.clang
import spack.paths  # needed below for spack.paths.build_env_path
import spack.stage  # needed below for spack.stage.get_stage_root()
import spack.util.executable
import spack.version
class AppleClang(spack.compilers.clang.Clang):
openmp_flag = "-Xpreprocessor -fopenmp"
@classmethod
@llnl.util.lang.memoized
def extract_version_from_output(cls, output):
ver = 'unknown'
match = re.search(
# Apple's LLVM compiler has its own versions, so suffix them.
r'^Apple (?:LLVM|clang) version ([^ )]+)',
output,
# Multi-line, since 'Apple clang' may not be on the first line
# in particular, when run as gcc, it seems to output
# "Configured with: --prefix=..." as the first line
re.M,
)
if match:
ver = match.group(match.lastindex)
return ver
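    # For example (hypothetical output), "Apple clang version 11.0.0
    # (clang-1100.0.33.17)" would yield ver == "11.0.0".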
@property
def cxx11_flag(self):
# Adapted from CMake's AppleClang-CXX rules
# Spack's AppleClang detection only valid from Xcode >= 4.6
if self.version < spack.version.ver('4.0.0'):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++11 standard", "cxx11_flag", "Xcode < 4.0.0"
)
return "-std=c++11"
@property
def cxx14_flag(self):
# Adapted from CMake's rules for AppleClang
if self.version < spack.version.ver('5.1.0'):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++14 standard", "cxx14_flag", "Xcode < 5.1.0"
)
elif self.version < spack.version.ver('6.1.0'):
return "-std=c++1y"
return "-std=c++14"
@property
def cxx17_flag(self):
# Adapted from CMake's rules for AppleClang
if self.version < spack.version.ver('6.1.0'):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++17 standard", "cxx17_flag", "Xcode < 6.1.0"
)
return "-std=c++1z"
def setup_custom_environment(self, pkg, env):
"""Set the DEVELOPER_DIR environment for the Xcode toolchain.
On macOS, not all buildsystems support querying CC and CXX for the
compilers to use and instead query the Xcode toolchain for what
compiler to run. This side-steps the spack wrappers. In order to inject
spack into this setup, we need to copy (a subset of) Xcode.app and
replace the compiler executables with symlinks to the spack wrapper.
Currently, the stage is used to store the Xcode.app copies. We then set
the 'DEVELOPER_DIR' environment variables to cause the xcrun and
related tools to use this Xcode.app.
"""
super(AppleClang, self).setup_custom_environment(pkg, env)
if not pkg.use_xcode:
            # if we do it for all packages, we get into big trouble with MPI:
# filter_compilers(self) will use mockup XCode compilers on macOS
# with Clang. Those point to Spack's compiler wrappers and
# consequently render MPI non-functional outside of Spack.
return
# Use special XCode versions of compiler wrappers when using XCode
# Overwrites build_environment's setting of SPACK_CC and SPACK_CXX
xcrun = spack.util.executable.Executable('xcrun')
xcode_clang = xcrun('-f', 'clang', output=str).strip()
xcode_clangpp = xcrun('-f', 'clang++', output=str).strip()
env.set('SPACK_CC', xcode_clang, force=True)
env.set('SPACK_CXX', xcode_clangpp, force=True)
xcode_select = spack.util.executable.Executable('xcode-select')
# Get the path of the active developer directory
real_root = xcode_select('--print-path', output=str).strip()
# The path name can be used to determine whether the full Xcode suite
# or just the command-line tools are installed
if real_root.endswith('Developer'):
# The full Xcode suite is installed
pass
else:
if real_root.endswith('CommandLineTools'):
# Only the command-line tools are installed
msg = 'It appears that you have the Xcode command-line tools '
msg += 'but not the full Xcode suite installed.\n'
else:
# Xcode is not installed
msg = 'It appears that you do not have Xcode installed.\n'
msg += 'In order to use Spack to build the requested application, '
msg += 'you need the full Xcode suite. It can be installed '
msg += 'through the App Store. Make sure you launch the '
msg += 'application and accept the license agreement.\n'
raise OSError(msg)
real_root = os.path.dirname(os.path.dirname(real_root))
developer_root = os.path.join(spack.stage.get_stage_root(),
'xcode-select',
self.name,
str(self.version))
xcode_link = os.path.join(developer_root, 'Xcode.app')
if not os.path.exists(developer_root):
tty.warn('Copying Xcode from %s to %s in order to add spack '
'wrappers to it. Please do not interrupt.'
% (real_root, developer_root))
# We need to make a new Xcode.app instance, but with symlinks to
# the spack wrappers for the compilers it ships. This is necessary
# because some projects insist on just asking xcrun and related
# tools where the compiler runs. These tools are very hard to trick
# as they do realpath and end up ignoring the symlinks in a
# "softer" tree of nothing but symlinks in the right places.
shutil.copytree(
real_root, developer_root, symlinks=True,
ignore=shutil.ignore_patterns(
'AppleTV*.platform', 'Watch*.platform', 'iPhone*.platform',
'Documentation', 'swift*'
))
real_dirs = [
'Toolchains/XcodeDefault.xctoolchain/usr/bin',
'usr/bin',
]
bins = ['c++', 'c89', 'c99', 'cc', 'clang', 'clang++', 'cpp']
for real_dir in real_dirs:
dev_dir = os.path.join(developer_root,
'Contents',
'Developer',
real_dir)
for fname in os.listdir(dev_dir):
if fname in bins:
os.unlink(os.path.join(dev_dir, fname))
os.symlink(
os.path.join(spack.paths.build_env_path, 'cc'),
os.path.join(dev_dir, fname))
os.symlink(developer_root, xcode_link)
env.set('DEVELOPER_DIR', xcode_link)
| rspavel/spack | lib/spack/spack/compilers/apple_clang.py | Python | lgpl-2.1 | 7,173 |
# -*- coding: UTF-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
__author__ = "d01"
__email__ = "jungflor@gmail.com"
__copyright__ = "Copyright (C) 2015-16, Florian JUNG"
__license__ = "MIT"
__version__ = "0.1.0"
__date__ = "2016-04-05"
# Created: 2015-07-17 18:50
""" measure request speed execute """
import time
from flotils.runable import StartStopable, SignalStopWrapper
import client
import server
import echo_server
import echo_client
class StartWrapper(StartStopable, SignalStopWrapper):
def __init__(self, settings=None):
if settings is None:
settings = {}
super(StartWrapper, self).__init__(settings)
self.modules = settings['modules_run']
""" Modules to start/stop with this one
:type : list[client.WrapperClient | server.WrapperServer] """
def start(self, blocking=False):
self.debug("()")
for module in self.modules:
try:
module.start(False)
            except Exception:
self.exception(u"Failed to start {}".format(module.name))
self.stop()
return
super(StartWrapper, self).start(blocking)
def stop(self):
self.debug("()")
if not self._is_running:
return
super(StartWrapper, self).stop()
for module in self.modules:
try:
module.stop()
            except Exception:
self.exception(u"Failed to stop {}".format(module.name))
if __name__ == '__main__':
import argparse
import logging
import logging.config
from flotils.logable import default_logging_config
logging.config.dictConfig(default_logging_config)
logging.getLogger().setLevel(logging.INFO)
parser = argparse.ArgumentParser()
parser.add_argument("--debug", action="store_true")
parser.add_argument("--client", action="store_true")
parser.add_argument("--echo", action="store_true")
parser.add_argument("--number", type=int, default=1)
parser.add_argument("--host", type=str, default="127.0.0.1")
parser.add_argument("--port", type=int, default=2346)
parser.add_argument("--people", type=int, default=3)
parser.add_argument("--throttle", type=int, default=0)
args = parser.parse_args()
if args.debug:
logging.getLogger().setLevel(logging.DEBUG)
modules = []
if args.echo:
if args.client:
modules, cmd_line = echo_client.create(
args.number, args.host, args.port, args.people, args.throttle
)
else:
modules, cmd_line = echo_server.create(args.host, args.port)
else:
if args.client:
modules, cmd_line = client.create(
args.number, args.host, args.port, args.people, args.throttle
)
else:
modules, cmd_line = server.create(args.host, args.port)
ctrl = StartWrapper({
'modules_run': modules
})
try:
ctrl.start(False)
while True:
line = raw_input("> ")
if callable(cmd_line):
line = cmd_line(line, ctrl)
if line == "quit":
break
except KeyboardInterrupt:
pass
finally:
ctrl.stop()
| the01/python-paps | examples/measure/main.py | Python | mit | 3,357 |
"""
session --- Reopen files when starting
======================================
"""
from PyQt5.QtCore import QTimer
import os.path
from enki.core.core import core
from enki.core.defines import CONFIG_DIR
import enki.core.json_wrapper
_AUTO_SAVE_INTERVAL_MS = 60 * 1000
def getSessionFilePath():
if core.commandLineArgs()['session_name']:
session_name = core.commandLineArgs()['session_name']
elif core.commandLineArgs()['auto-session-name']:
session_name = os.getcwd().split(os.path.sep)[-1]
elif 'ENKI_SESSION' in os.environ:
session_name = os.environ['ENKI_SESSION']
else:
session_name = ''
if session_name:
session_filename = 'session_{}.json'.format(session_name)
        for char in r'<>:"/\|?*' + ' ':  # characters reserved in Windows file names (per MSDN), plus space
session_filename = session_filename.replace(char, '_')
else:
session_filename = 'session.json'
return os.path.join(CONFIG_DIR, session_filename)
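# For example (hypothetical value), ENKI_SESSION="my project" makes
# getSessionFilePath() return CONFIG_DIR/session_my_project.json; reserved
# characters and spaces are replaced with '_' above.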
_SESSION_FILE_PATH = getSessionFilePath()
class Plugin:
"""Plugin interface
"""
def __init__(self):
core.restoreSession.connect(self._onRestoreSession)
core.aboutToTerminate.connect(self._saveSession)
self._timer = QTimer(core)
self._timer.timeout.connect(self._autoSaveSession)
self._timer.setInterval(_AUTO_SAVE_INTERVAL_MS)
self._timer.start()
def terminate(self):
"""Explicitly called destructor
"""
core.restoreSession.disconnect(self._onRestoreSession)
core.aboutToTerminate.disconnect(self._saveSession)
self._timer.stop()
def _onRestoreSession(self):
"""Enki initialisation finished.
Now restore session
"""
        # if documents other than the 'untitled' new doc are open, don't restore the session
if core.workspace().currentDocument() is not None:
return
if not os.path.exists(_SESSION_FILE_PATH):
return
session = enki.core.json_wrapper.load(_SESSION_FILE_PATH, 'session', None)
if session is not None:
for filePath in session['opened']:
if os.path.exists(filePath):
core.workspace().openFile(filePath)
if session['current'] is not None:
document = self._documentForPath(session['current'])
if document is not None: # document might be already deleted
core.workspace().setCurrentDocument(document)
if 'project' in session:
path = session['project']
if path is not None and os.path.isdir(path):
core.project().open(path)
def _documentForPath(self, filePath):
"""Find document by it's file path.
Raises ValueError, if document hasn't been found
"""
for document in core.workspace().documents():
if document.filePath() is not None and \
document.filePath() == filePath:
return document
return None
def _saveSession(self, showWarnings=True):
"""Enki is going to be terminated.
Save session
"""
fileList = [document.filePath()
for document in core.workspace().documents()
if document.filePath() is not None and
os.path.exists(document.filePath()) and
'/.git/' not in document.filePath() and
not (document.fileName().startswith('svn-commit') and
document.fileName().endswith('.tmp'))]
if not fileList:
return
currentPath = None
if core.workspace().currentDocument() is not None:
currentPath = core.workspace().currentDocument().filePath()
session = {'current': currentPath,
'opened': fileList,
'project': core.project().path()}
enki.core.json_wrapper.dump(_SESSION_FILE_PATH, 'session', session, showWarnings)
def _autoSaveSession(self):
self._saveSession(showWarnings=False)
| bjones1/enki | enki/plugins/session.py | Python | gpl-2.0 | 4,134 |
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import logging
LOG = logging.getLogger(__name__)
class CopyAnnotationsPlugin(QObject):
def __init__(self, labeltool, class_filter=None, frame_range=1, overlap_threshold=None, prefix=''):
QObject.__init__(self)
self._class_filter = class_filter
self._overlap_threshold = overlap_threshold
self._frame_range = frame_range
self._prefix = prefix
self._labeltool = labeltool
self._wnd = labeltool.mainWindow()
self._sc = QAction("Copy labels from previous image/frame", self._wnd)
self._sc.triggered.connect(self.copy)
def copy(self):
current = self._labeltool.currentImage()
prev = current.getPreviousSibling()
num_back = self._frame_range
while num_back > 0 and prev is not None:
for annotation in self.getAnnotationsFiltered(prev):
LOG.debug("num_back: %d, annotation: %s", num_back, str(annotation))
# check for overlap with annotations in current
if self._overlap_threshold is not None:
r1 = self.getRect(annotation)
if r1 is not None:
cont = False
for curr_ann in self.getAnnotationsFiltered(current):
r2 = self.getRect(curr_ann)
if r2 is not None:
o = self.overlap(r1, r2)
LOG.debug("overlap between %s and %s: %f", str(r1), str(r2), o)
if o > self._overlap_threshold:
cont = True
break
if cont:
continue # do not copy
# copy the annotation
current.addAnnotation(annotation)
prev = prev.getPreviousSibling()
num_back -= 1
def getAnnotationsFiltered(self, image_item):
annotations = []
for annotation in image_item.getAnnotations()['annotations']:
# check class filter
if self._class_filter is not None:
if annotation.get('class', None) not in self._class_filter:
continue # do not copy
annotations.append(annotation)
return annotations
def getRect(self, annotation):
keys = ['x', 'y', 'width', 'height']
for key in keys:
if not self._prefix + key in annotation:
return None
return [annotation[self._prefix + key] for key in keys]
    def overlap(self, r1, r2):
        # Intersection-over-union (Jaccard index) of two (x, y, w, h) rects.
        ia = float(self.area(self.intersect(r1, r2)))
        union = self.area(r1) + self.area(r2) - ia
        return ia / union if union > 0 else 0.0
def intersect(self, r1, r2):
x = max(r1[0], r2[0])
y = max(r1[1], r2[1])
w = max(0, min(r1[0] + r1[2], r2[0] + r2[2]) - x)
h = max(0, min(r1[1] + r1[3], r2[1] + r2[3]) - y)
return (x, y, w, h)
def area(self, r):
return r[2]*r[3]
def action(self):
return self._sc
| shiquanwang/sloth | sloth/plugins/__init__.py | Python | gpl-3.0 | 3,144 |
r"""
Cole-Cole
=========
There are various different definitions of a Cole-Cole model, see for instance
Tarasov and Titov (2013). We try a few different ones here, but you can supply
your preferred version.
The original Cole-Cole (1941) model was formulated for the complex dielectric
permittivity. It is reformulated to conductivity to use it for IP,
.. math::
\sigma(\omega) = \sigma_\infty + \frac{\sigma_0 - \sigma_\infty}{1 +
(i\omega\tau)^C}\ . \qquad\qquad\qquad (1)
Another, similar model is given by Pelton et al. (1978),
.. math::
\rho(\omega) = \rho_\infty + \frac{\rho_0 - \rho_\infty}{1 +
(i\omega\tau)^C}\ . \qquad\qquad\qquad (2)
Equation (2) is just like equation (1), but replaces :math:`\sigma` by
:math:`\rho`. However, mathematically they are not the same. Substituting
:math:`\rho = 1/\sigma` in the latter and resolving it for :math:`\sigma` will
not yield the former. Equation (2) is usually written in the following form,
using the chargeability :math:`m = (\rho_0-\rho_\infty)/\rho_0`,
.. math::
\rho(\omega) = \rho_0 \left[1 - m \left(1- \frac{1}{1 + (i\omega\tau)^C}
\right)\right]\ . \quad (3)
In all cases we add the part coming from the dielectric permittivity
(displacement currents), even though it usually doesn't matter in the frequency
range of IP.
**References**
- **Cole, K.S., and R.H. Cole, 1941**, Dispersion and adsorption in
dielectrics. I. Alternating current characteristics; *Journal of Chemical
Physics*, Volume 9, Pages 341-351, doi:
`10.1063/1.1750906 <https://doi.org/10.1063/1.1750906>`_.
- **Pelton, W.H., S.H. Ward, P.G. Hallof, W.R. Sill, and P.H. Nelson, 1978**,
Mineral discrimination and removal of inductive coupling with multifrequency
IP, *Geophysics*, Volume 43, Pages 588-609, doi:
`10.1190/1.1440839 <https://doi.org/10.1190/1.1440839>`_.
- **Tarasov, A., and K. Titov, 2013**, On the use of the Cole–Cole equations in
spectral induced polarization; *Geophysical Journal International*, Volume
195, Issue 1, Pages 352-356, doi:
`10.1093/gji/ggt251 <https://doi.org/10.1093/gji/ggt251>`_.
"""
import empymod
import numpy as np
import matplotlib.pyplot as plt
plt.style.use('ggplot')
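###############################################################################
# Quick numeric check of the docstring's claim (a minimal sketch added here,
# not part of the original example; all values are arbitrary test numbers):
# inverting the Pelton resistivity of equation (2) does not reproduce the
# Cole-Cole conductivity of equation (1).
sig0, sig8, tau0, c0 = 0.1, 0.2, 1.0, 0.5         # sigma_0, sigma_inf, tau, C
iwt = (2j*np.pi*1.0*tau0)**c0                     # (i*omega*tau)^C at f = 1 Hz
sigma_cc = sig8 + (sig0 - sig8)/(1 + iwt)                  # equation (1)
rho_pelton = 1/sig8 + (1/sig0 - 1/sig8)/(1 + iwt)          # equation (2)
print(np.isclose(sigma_cc, 1/rho_pelton))                  # => False: the models differ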
###############################################################################
# Use empymod with user-def. func. to adjust :math:`\eta` and :math:`\zeta`
# -------------------------------------------------------------------------
#
# In principal it is always best to write your own modelling routine if you
# want to adjust something. Just copy ``empymod.dipole`` or ``empymod.bipole``
# as a template, and modify it to your needs. Since ``empymod v1.7.4``,
# however, there is a hook which allows you to modify :math:`\eta_h, \eta_v,
# \zeta_h`, and :math:`\zeta_v` quite easily.
#
# The trick is to provide a dictionary (we name it ``inp`` here) instead of the
# resistivity vector in ``res``. This dictionary, ``inp``, has two mandatory
# entries plus optional ones:
#
# - ``res``: the resistivity vector you would have provided normally
#   (mandatory).
#
# - A function name, which has to be either or both of (mandatory):
#
# - ``func_eta``: To adjust ``etaH`` and ``etaV``, or
# - ``func_zeta``: to adjust ``zetaH`` and ``zetaV``.
#
# - In addition, you have to provide all parameters you use in
#   ``func_eta``/``func_zeta`` that are not already provided to ``empymod``.
# All additional parameters must have #layers elements.
#
# The functions ``func_eta`` and ``func_zeta`` must have the following
# characteristics:
#
# - The signature is ``func(inp, p_dict)``, where
#
# - ``inp`` is the dictionary you provide, and
# - ``p_dict`` is a dictionary that contains all parameters so far computed
# in empymod [``locals()``].
#
# - It must return ``etaH, etaV`` if ``func_eta``, or ``zetaH, zetaV`` if
# ``func_zeta``.
#
# Dummy example
# ~~~~~~~~~~~~~
#
# ::
#
# def my_new_eta(inp, p_dict):
# # Your computations, using the parameters you provided
# # in ``inp`` and the parameters from empymod in ``p_dict``.
# # In the example below, we provide, e.g., inp['tau']
# return etaH, etaV
#
# And then you call ``empymod`` with ``res = {'res': res-array, 'tau': tau,
# 'func_eta': my_new_eta}``.
#
# Define the Cole-Cole model
# --------------------------
#
# In this notebook we exploit this hook in empymod to compute :math:`\eta_h`
# and :math:`\eta_v` with the Cole-Cole model. By default, :math:`\eta_h` and
# :math:`\eta_v` are computed like this:
#
# .. math::
#
# \eta_h = \frac{1}{\rho} + j\omega \varepsilon_{r;h}\varepsilon_0 \ ,
# \qquad (4)\\ \eta_v = \frac{1}{\rho \lambda^2} +
# j\omega\varepsilon_{r;v}\varepsilon_0 \ . \qquad (5)
#
#
# With this function we recompute it. We replace the real part, the resistivity
# :math:`\rho`, in equations (4) and (5) by the complex, frequency-dependent
# Cole-Cole resistivity [:math:`\rho(\omega)`], as given, for instance, in
# equations (1)-(3). Then we add back the imaginary part coming from the
# dielectric permittivity (basically zero for low frequencies).
#
# Note that in this notebook we use this hook to model relaxation in the low
# frequency spectrum for IP measurements, replacing :math:`\rho` by a
# frequency-dependent model :math:`\rho(\omega)`. However, this could also be
# used to model dielectric phenomena in the high frequency spectrum, replacing
# :math:`\varepsilon_r` by a frequency-dependent formula
# :math:`\varepsilon_r(\omega)`.
def cole_cole(inp, p_dict):
"""Cole and Cole (1941)."""
# Compute complex conductivity from Cole-Cole
iotc = np.outer(2j*np.pi*p_dict['freq'], inp['tau'])**inp['c']
condH = inp['cond_8'] + (inp['cond_0']-inp['cond_8'])/(1+iotc)
condV = condH/p_dict['aniso']**2
# Add electric permittivity contribution
etaH = condH + 1j*p_dict['etaH'].imag
etaV = condV + 1j*p_dict['etaV'].imag
return etaH, etaV
def pelton_et_al(inp, p_dict):
""" Pelton et al. (1978)."""
# Compute complex resistivity from Pelton et al.
iotc = np.outer(2j*np.pi*p_dict['freq'], inp['tau'])**inp['c']
rhoH = inp['rho_0']*(1 - inp['m']*(1 - 1/(1 + iotc)))
rhoV = rhoH*p_dict['aniso']**2
# Add electric permittivity contribution
etaH = 1/rhoH + 1j*p_dict['etaH'].imag
etaV = 1/rhoV + 1j*p_dict['etaV'].imag
return etaH, etaV
###############################################################################
# Example
# -------
#
# Two half-space model, air above earth:
#
# - x-directed source at the surface
# - x-directed receiver, also at the surface, inline at an offset of 500 m.
# - Switch-on time-domain response
# - Isotropic
# - Model [air, subsurface]
#
# - :math:`\rho_\infty = 1/\sigma_\infty =` [2e14, 10]
# - :math:`\rho_0 = 1/\sigma_0 =` [2e14, 5]
# - :math:`\tau =` [0, 1]
# - :math:`c =` [0, 0.5]
# Times
times = np.logspace(-2, 2, 101)
# Model parameters that apply to all cases
model = {
'src': [0, 0, 1e-5, 0, 0],
'rec': [500, 0, 1e-5, 0, 0],
'depth': 0,
'freqtime': times,
'signal': 1,
'verb': 1
}
# Collect Cole-Cole models
res_0 = np.array([2e14, 10])
res_8 = np.array([2e14, 5])
tau = [0, 1]
c = [0, 0.5]
m = (res_0-res_8)/res_0
cole_model = {'res': res_0, 'cond_0': 1/res_0, 'cond_8': 1/res_8,
'tau': tau, 'c': c, 'func_eta': cole_cole}
pelton_model = {'res': res_0, 'rho_0': res_0, 'm': m,
'tau': tau, 'c': c, 'func_eta': pelton_et_al}
# Compute
out_bipole = empymod.bipole(res=res_0, **model)
out_cole = empymod.bipole(res=cole_model, **model)
out_pelton = empymod.bipole(res=pelton_model, **model)
# Plot
plt.figure()
plt.title('Switch-on')
plt.plot(times, out_bipole, label='Regular Bipole')
plt.plot(times, out_cole, '--', label='Cole and Cole (1941)')
plt.plot(times, out_pelton, '-.', label='Pelton et al. (1978)')
plt.legend()
plt.yscale('log')
plt.xscale('log')
plt.xlabel('time (s)')
plt.show()
###############################################################################
empymod.Report()
| prisae/empymod | examples/time_domain/cole_cole_ip.py | Python | apache-2.0 | 8,028 |
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
from mock import Mock
from apache.aurora.client.cli.context import AuroraCommandContext
from apache.aurora.client.hooks.hooked_api import HookedAuroraClientAPI
from apache.aurora.common.cluster import Cluster
from apache.aurora.common.clusters import Clusters
from gen.apache.aurora.api.ttypes import (
AssignedTask,
Response,
ResponseCode,
Result,
ScheduledTask,
ScheduleStatus,
ScheduleStatusResult,
TaskConfig,
TaskEvent
)
class FakeAuroraCommandContext(AuroraCommandContext):
def __init__(self, reveal=True):
super(FakeAuroraCommandContext, self).__init__()
self.options = None
self.status = []
self.fake_api = self.create_mock_api()
self.task_status = []
self.showed_urls = []
self.out = []
self.err = []
if reveal:
self.enable_reveal_errors()
def get_api(self, cluster):
return self.fake_api
@classmethod
def create_mock_api(cls):
"""Builds up a mock API object, with a mock SchedulerProxy.
Returns the API and the proxy"""
# This looks strange, but we set up the same object to use as both
# the SchedulerProxy and the SchedulerClient. These tests want to observe
# what API calls get made against the scheduler, and both of these objects
# delegate calls to the scheduler. It doesn't matter which one is used:
# what we care about is that the right API calls get made.
        mock_scheduler_proxy = Mock()
        mock_scheduler_proxy.url = "http://something_or_other"
        mock_scheduler_proxy.scheduler_client.return_value = mock_scheduler_proxy
        mock_api = Mock(spec=HookedAuroraClientAPI)
mock_api.scheduler_proxy = mock_scheduler_proxy
return mock_api
    def print_out(self, msg):
        self.out.append(msg)
    def print_err(self, msg):
        self.err.append(msg)
def get_out(self):
return self.out
def get_err(self):
return self.err
def open_page(self, url):
self.showed_urls.append(url)
def handle_open(self, api):
pass
def add_expected_status_query_result(self, expected_result):
self.task_status.append(expected_result)
# each call adds an expected query result, in order.
self.fake_api.scheduler_proxy.getTasksWithoutConfigs.side_effect = self.task_status
self.fake_api.check_status.side_effect = self.task_status
class AuroraClientCommandTest(unittest.TestCase):
FAKE_TIME = 42131
@classmethod
def create_blank_response(cls, code, msg):
response = Mock(spec=Response)
response.responseCode = code
mock_msg = Mock()
mock_msg.message = msg
response.details = [mock_msg]
response.messageDEPRECATED = msg
response.result = Mock(spec=Result)
return response
@classmethod
def create_simple_success_response(cls):
return cls.create_blank_response(ResponseCode.OK, 'OK')
@classmethod
def create_error_response(cls):
return cls.create_blank_response(ResponseCode.ERROR, 'Damn')
@classmethod
def create_mock_api(cls):
"""Builds up a mock API object, with a mock SchedulerProxy"""
        mock_scheduler = Mock()
        mock_scheduler.url = "http://something_or_other"
        mock_scheduler_client = Mock()
        mock_scheduler_client.scheduler.return_value = mock_scheduler
        mock_scheduler_client.url = "http://something_or_other"
        mock_api = Mock(spec=HookedAuroraClientAPI)
mock_api.scheduler_proxy = mock_scheduler_client
return (mock_api, mock_scheduler_client)
@classmethod
def create_mock_api_factory(cls):
"""Create a collection of mocks for a test that wants to mock out the client API
by patching the api factory."""
mock_api, mock_scheduler_client = cls.create_mock_api()
mock_api_factory = Mock()
mock_api_factory.return_value = mock_api
return mock_api_factory, mock_scheduler_client
@classmethod
def create_status_call_result(cls, mock_task=None):
status_response = cls.create_simple_success_response()
schedule_status = Mock(spec=ScheduleStatusResult)
status_response.result.scheduleStatusResult = schedule_status
# This should be a list of ScheduledTask's.
schedule_status.tasks = []
if mock_task is None:
for i in range(20):
schedule_status.tasks.append(cls.create_mock_task(i))
else:
schedule_status.tasks.append(mock_task)
return status_response
@classmethod
def create_mock_task(cls, instance_id, status=ScheduleStatus.RUNNING):
mock_task = Mock(spec=ScheduledTask)
mock_task.assignedTask = Mock(spec=AssignedTask)
mock_task.assignedTask.instanceId = instance_id
mock_task.assignedTask.taskId = "Task%s" % instance_id
mock_task.assignedTask.slaveId = "Slave%s" % instance_id
mock_task.assignedTask.task = Mock(spec=TaskConfig)
mock_task.slaveHost = "Slave%s" % instance_id
mock_task.status = status
mock_task_event = Mock(spec=TaskEvent)
mock_task_event.timestamp = 1000
mock_task.taskEvents = [mock_task_event]
return mock_task
@classmethod
def setup_get_tasks_status_calls(cls, scheduler):
status_response = cls.create_status_call_result()
scheduler.getTasksWithoutConfigs.return_value = status_response
@classmethod
def fake_time(cls, ignored):
"""Utility function used for faking time to speed up tests."""
cls.FAKE_TIME += 2
return cls.FAKE_TIME
CONFIG_BASE = """
HELLO_WORLD = Job(
name = '%(job)s',
role = '%(role)s',
cluster = '%(cluster)s',
environment = '%(env)s',
instances = 20,
%(inner)s
update_config = UpdateConfig(
batch_size = 1,
restart_threshold = 60,
watch_secs = 45,
max_per_shard_failures = 2,
),
task = Task(
name = 'test',
processes = [Process(name = 'hello_world', cmdline = 'echo {{thermos.ports[http]}}')],
resources = Resources(cpu = 0.1, ram = 64 * MB, disk = 64 * MB),
)
)
jobs = [HELLO_WORLD]
"""
TEST_ROLE = 'bozo'
TEST_ENV = 'test'
TEST_JOB = 'hello'
TEST_CLUSTER = 'west'
TEST_JOBSPEC = 'west/bozo/test/hello'
TEST_CLUSTERS = Clusters([Cluster(
name='west',
packer_copy_command='copying {{package}}',
zk='zookeeper.example.com',
scheduler_zk_path='/foo/bar',
auth_mechanism='UNAUTHENTICATED')])
@classmethod
def get_test_config(cls, cluster, role, env, job, filler=''):
"""Create a config from the template"""
return cls.CONFIG_BASE % {'job': job, 'role': role, 'env': env, 'cluster': cluster,
'inner': filler}
@classmethod
def get_valid_config(cls):
return cls.get_test_config(cls.TEST_CLUSTER, cls.TEST_ROLE, cls.TEST_ENV, cls.TEST_JOB)
@classmethod
def get_invalid_config(cls, bad_clause):
return cls.get_test_config(cls.TEST_CLUSTER, cls.TEST_ROLE, cls.TEST_ENV, cls.TEST_JOB,
bad_clause)
| mkhutornenko/incubator-aurora | src/test/python/apache/aurora/client/cli/util.py | Python | apache-2.0 | 7,372 |
#!/usr/bin/python3
# Copyright (C) 2019 CZ.NIC, z.s.p.o. <knot-dns@labs.nic.cz>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# requirements:
# sudo apt install libmysqlclient-dev
# pip3 install sqlobject configparser mysqlclient argparse
from sqlobject import *
import configparser
import argparse
import os
import string
import sys
import time
import re
from subprocess import DEVNULL, PIPE, Popen
# globals
connection = None
soa_serial = int(time.time())
config = configparser.ConfigParser()
fix_absolute = False
storage = os.getcwd()
knotc_binary = "knotc"
knotc_socket = None
slave_mode = False
conf_txn_open = False
knotc_zone_reload = []
knotc_timeout = 10
class Domains(SQLObject):
# id = IntCol() # implicitly there
name = StringCol()
master = StringCol()
last_check = IntCol()
type = StringCol()
notified_serial = IntCol()
account = StringCol()
class Records(SQLObject):
# id = IntCol() # implicitly there
domain = ForeignKey('Domains')
name = StringCol()
type = StringCol()
content = StringCol()
ttl = IntCol()
prio = IntCol()
change_date = IntCol()
ordername = StringCol()
auth = IntCol()
class Changes(SQLObject):
# id = IntCol() # implicitly there
domain = ForeignKey('Domains')
type = IntCol() # -1 .. zone removed; 0 .. zone modified; 1 .. zone added
def remove_dot(s):
return s[:-1] if s[-1] == '.' else s
def fix_abs(name):
return remove_dot(name) + '.' if fix_absolute else name
def domain_get_records(domain, txn):
if str(domain).isdigit():
return Records.select(Records.q.domain == domain, connection=txn)
else:
dn = remove_dot(domain)
return Records.select(AND(Domains.q.id == Records.q.domain, Domains.q.name == dn), connection=txn)
def domain_id2name(domain, txn):
return Domains.select(Domains.q.id == domain, connection=txn)[0].name
def get_config(key, default_val):
global config
return int(config['DEFAULT'][key]) if key in config['DEFAULT'] and config['DEFAULT'][key] is not None else default_val
def get_soa_params():
refresh = get_config("soa-refresh-default", 10800)
retry = get_config("soa-retry-default", 3600)
expire = get_config("soa-expire-default", 604800)
minttl = get_config("soa-minimum-ttl", 3600)
return (refresh, retry, expire, minttl)
def soa_content(db_content):
global soa_serial
(nameserver, contact, fake_serial) = db_content.split()
ns = fix_abs(nameserver)
co = fix_abs(contact.replace("@", "."))
(refresh, retry, expire, minttl) = get_soa_params()
return ("%s %s %d %d %d %d %d" % (ns, co, soa_serial, refresh, retry, expire, minttl))
def zone_storage(zone):
global storage
return os.path.join(storage, "%s.zone" % remove_dot(zone))
def knotc_single(*args):
global knotc_binary
global knotc_socket
global knotc_timeout
cmd = [ knotc_binary, "-s", knotc_socket, "-t", str(knotc_timeout) ] + list(args)
p = Popen(cmd, stdout=PIPE, stderr=PIPE, universal_newlines=True)
(stdout, stderr) = p.communicate()
if p.returncode != 0:
raise Exception("error: knotc %s failed: '%s'" % (str(args), stderr))
def zone_template(zone):
# this function is intended to be patched by user's business logic
return None
# param type:
# -1 ... remove this zone
# 0 ... mark this zone as modified (fallback to 2 if not exists)
# 1 ... add zone (fallback to 0 if exists already)
# 2 ... add zone no fallback (ignore if exists already)
def knotc_send(type, zone):
global slave_mode
global conf_txn_open
global knotc_zone_reload
if type == 0:
try:
# ensure by zone-status that the zone exists
knotc_single("zone-status", zone)
knotc_zone_reload.append(zone)
except:
knotc_send(2, zone)
else:
try:
if not conf_txn_open:
knotc_single("conf-begin")
conf_txn_open = True
if type > 0:
try:
knotc_single("conf-set", "zone[%s]" % zone)
knotc_single("conf-set", "zone[%s].file" % zone, zone_storage(zone))
template = zone_template(remove_dot(zone))
if template is not None:
knotc_single("conf-set", "zone[%s].template" % zone, template)
except:
if type > 1:
pass
else:
knotc_send(0, zone)
else:
knotc_single("conf-unset", "zone[%s]" % zone)
try:
knotc_zone_reload.remove(zone)
except:
pass
except:
knotc_single("conf-abort")
conf_txn_open = False
raise
def print_record(record, outfile):
t = record.type.upper()
if t == 'SOA':
content = soa_content(record.content)
elif t == 'MX' or t == 'SRV':
content = "%d %s" % (record.prio, record.content)
elif t == 'TXT' or t == 'SPF':
content = "\"%s\"" % record.content
else:
content = record.content
if t in ('NS', 'MX', 'CNAME', 'DNAME', 'SRV', 'PTR'):
content = fix_abs(content)
record = ("%s. %d %s %s\n" % (record.name, record.ttl, t, content))
outfile.write(record)
def print_domain(domain, change_type = 0, txn = None):
global knotc_socket
global slave_mode
dn = domain_id2name(domain, txn) if str(domain).isdigit() else domain
if not slave_mode:
file_name = zone_storage(dn)
tmp_name = "%s.tmp" % file_name
f = open(tmp_name, "w")
for r in domain_get_records(domain, txn):
print_record(r, f)
f.close()
os.rename(tmp_name, file_name)
if knotc_socket is not None:
knotc_send(change_type, dn)
print("Updated zone %s" % dn)
def domain_from_change(change, txn):
global knotc_socket
if change.type >= 0:
print_domain(change.domain.name, change.type, txn)
else:
dn = change.domain.name
try:
os.remove(zone_storage(dn))
except:
print("Warning: failed to delete zonefile for %s" % dn, file=sys.stderr)
if knotc_socket is not None:
knotc_send(change.type, dn)
else:
print("Warning: removed zone '%s', but unspecified knotc socket." % dn, file=sys.stderr)
def process_changes(startwith):
global connection
processed = []
try:
txn = connection.transaction()
for ch in Changes.select(Changes.q.id > startwith, connection=txn):
domain_from_change(ch, txn)
processed.append(ch.id)
txn.commit()
finally:
if len(processed) > 0:
print("Processed up to change_id %d" % processed[-1])
def process_all():
global connection
txn = connection.transaction()
for d in Domains.select(connection=txn):
print_domain(d.id, txn = txn)
txn.commit()
def read_config_file(filename):
global config
with open(filename, 'r') as f:
fcontent = '[DEFAULT]\n' + f.read()
config.read_string(fcontent)
def main():
global storage
global knotc_socket
global soa_serial
global fix_absolute
global connection
global slave_mode
global conf_txn_open
global knotc_zone_reload
global knotc_timeout
    argp = argparse.ArgumentParser(prog='dns_sql2zf', description="Export DNS records from a MySQL or PostgreSQL DB into zonefiles.", epilog="(C) CZ.NIC, GPLv3")
argp.add_argument(dest='domains', metavar='zone', nargs='*', help='Zone to be exported.')
argp.add_argument('--db', dest='dburi', metavar='DB_URI', nargs=1, required=True, help='URI of database to export from (example: mysql://user:password@127.0.0.1/powerdns_db).')
argp.add_argument('--storage', dest='storage', metavar='path', nargs=1, help='Storage for the generated zonefile (otherwise current dir).')
argp.add_argument('--all', dest='all', action='store_true', help="Export all zones.")
argp.add_argument('--confile', dest='confile', metavar='file', nargs=1, help='PowerDNS configfile to obtain SOA parameters (otherwise defaults).')
argp.add_argument('--serial', dest='soa_serial', type=int, metavar='uint32', nargs=1, help='SOA serial number (otherwise UNIX timestamp).')
argp.add_argument('--absolute-names', dest='fix_absolute', action='store_true', help="Interpret names in records' contents (e.g. CNAME, NS...) as absolute even if w/o trailing dot.")
argp.add_argument('--from-changes', dest='from_changes', metavar="from_id", nargs='?', const=0, help="Export zones listed in extra 'changes' table.")
argp.add_argument('--knotc', dest='knotc_socket', metavar='knot_socket', nargs=1, help="Notify Knot DNS about changes (requires: $PATH/knotc).")
argp.add_argument('--knotc-timeout', dest='knotc_timeout', type=int, metavar='uint32', nargs=1, help='Timeout for knotc commands (default 10).')
argp.add_argument('--slave', dest='slave', action='store_true', help="Don't generate zonefiles, use 'knotc zone-refresh' instead of zone-reload.")
argp.add_argument('--version', action='version', version='dns_sql2zf 0.1')
args = argp.parse_args()
if args.soa_serial is not None:
soa_serial = args.soa_serial[0]
if args.confile is not None:
read_config_file(args.confile[0])
if args.storage is not None:
storage = args.storage[0]
if args.fix_absolute:
fix_absolute = True
if args.knotc_socket is not None:
knotc_socket = args.knotc_socket[0]
if args.knotc_timeout is not None:
knotc_timeout = args.knotc_timeout[0]
if args.slave:
slave_mode = True
connection = connectionForURI(args.dburi[0])
sqlhub.processConnection = connection
for domain in args.domains:
print_domain(domain)
if args.all:
process_all()
if args.from_changes is not None:
process_changes(args.from_changes)
if conf_txn_open:
knotc_single("conf-commit")
for zone in knotc_zone_reload:
knotc_single("zone-reload" if not slave_mode else "zone-refresh", zone)
if __name__ == "__main__":
main()
| CZ-NIC/knot | scripts/dns_sql2zf.py | Python | gpl-3.0 | 10,871 |
'''
--------------------------------------------------------------------------------------
tasks.py
--------------------------------------------------------------------------------------
A set of tasks to manage your AWS Django deployment.
author : Ashok Fernandez (github.com/ashokfernandez/)
credit : Derived from files in https://github.com/gcollazo/Fabulous
date : 11 / 3 / 2014
Tasks include:
- configure_instance : Configures a new EC2 instance (as defined in project_conf.py) and returns its public DNS.
  This takes around 8 minutes to complete.
- update_packages : Updates the python packages on the server to match those found in requirements/common.txt and
  requirements/prod.txt
- deploy : Pulls the latest commit from the master branch on the server, collects the static files, syncs the db and
  restarts the server
- reload_gunicorn : Pushes the gunicorn startup script to the servers and restarts the gunicorn process; use this if you
  have made changes to templates/start_gunicorn.bash
- reload_nginx : Pushes the nginx config files to the servers and restarts nginx; use this if you
  have made changes to templates/nginx-app-proxy or templates/nginx.conf
- reload_supervisor : Pushes the supervisor config files to the servers and restarts supervisor; use this if you
  have made changes to templates/supervisord-init or templates/supervisord.conf
'''
# Spawns a new EC2 instance (as defined in djangofab_conf.py) and returns its public DNS.
# This takes around 8 minutes to complete.
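# Each step below is a dict interpreted by the project's (assumed) fabfile
# runner; the runner itself isn't shown here, so treat this as a reading guide:
#   {"action": "run" | "sudo" | "apt" | "pip" | "virtualenv" | "put" | "put_template",
#    "params": <shell command string, list of package names, or {"file"/"template", "destination"} mapping>,
#    "message": <optional progress message printed before the step>}
# %(NAME)s placeholders are filled in from the project configuration before execution.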
configure_instance = [
# First command as regular user
{"action":"run", "params":"whoami"},
# sudo apt-get update
{"action":"sudo", "params":"apt-get update -qq",
"message":"Updating apt-get"},
# List of APT packages to install
{"action":"apt",
"params":["libpq-dev", "nginx", "memcached", "git",
"python-setuptools", "python-dev", "build-essential", "python-pip"],
"message":"Installing apt-get packages"},
# List of pypi packages to install
{"action":"pip", "params":["virtualenv", "virtualenvwrapper","supervisor"],
"message":"Installing pip packages"},
# Add AWS credentials to a config file so that boto can access S3
{"action":"put_template", "params":{"template":"%(FAB_CONFIG_PATH)s/templates/boto.cfg",
"destination":"/home/%(SERVER_USERNAME)s/boto.cfg"}},
{"action":"sudo", "params":"mv /home/%(SERVER_USERNAME)s/boto.cfg /etc/boto.cfg"},
# virtualenvwrapper
{"action":"sudo", "params":"mkdir %(VIRTUALENV_DIR)s", "message":"Configuring virtualenvwrapper"},
{"action":"sudo", "params":"chown -R %(SERVER_USERNAME)s: %(VIRTUALENV_DIR)s"},
{"action":"run", "params":"echo 'export WORKON_HOME=%(VIRTUALENV_DIR)s' >> /home/%(SERVER_USERNAME)s/.profile"},
{"action":"run", "params":"echo 'source /usr/local/bin/virtualenvwrapper.sh' >> /home/%(SERVER_USERNAME)s/.profile"},
{"action":"run", "params":"source /home/%(SERVER_USERNAME)s/.profile"},
# webapps alias
{"action":"run", "params":"""echo "alias webapps='cd %(APPS_DIR)s'" >> /home/%(SERVER_USERNAME)s/.profile""",
"message":"Creating webapps alias"},
# webapps dir
{"action":"sudo", "params":"mkdir %(APPS_DIR)s", "message":"Creating webapps directory"},
{"action":"sudo", "params":"chown -R %(SERVER_USERNAME)s: %(APPS_DIR)s"},
# git setup
{"action":"run", "params":"git config --global user.name '%(GIT_USERNAME)s'",
"message":"Configuring git"},
{"action":"run", "params":"git config --global user.email '%(ADMIN_EMAIL)s'"},
{"action":"put", "params":{"file":"%(BITBUCKET_DEPLOY_KEY_PATH)s",
"destination":"/home/%(SERVER_USERNAME)s/.ssh/%(BITBUCKET_DEPLOY_KEY_NAME)s"}},
{"action":"run", "params":"chmod 600 /home/%(SERVER_USERNAME)s/.ssh/%(BITBUCKET_DEPLOY_KEY_NAME)s"},
{"action":"run", "params":"""echo 'IdentityFile /home/%(SERVER_USERNAME)s/.ssh/%(BITBUCKET_DEPLOY_KEY_NAME)s' >> /home/%(SERVER_USERNAME)s/.ssh/config"""},
{"action":"run", "params":"ssh-keyscan bitbucket.org >> /home/%(SERVER_USERNAME)s/.ssh/known_hosts"},
# Create virtualenv
{"action":"run", "params":"mkvirtualenv --no-site-packages %(PROJECT_NAME)s",
"message":"Creating virtualenv"},
# install django in virtual env
{"action":"virtualenv", "params":"pip install Django",
"message":"Installing django"},
# install psycopg2 drivers for Postgres
{"action":"virtualenv", "params":"pip install psycopg2",
"message":"Installing psycopg2"},
# install gunicorn in virtual env
{"action":"virtualenv", "params":"pip install gunicorn",
"message":"Installing gunicorn"},
# Clone the git repo
{"action":"run", "params":"git clone %(BITBUCKET_REPO)s %(PROJECT_PATH)s"},
{"action":"put", "params":{"file":"%(FAB_CONFIG_PATH)s/templates/gunicorn.conf.py",
"destination":"%(PROJECT_PATH)s/gunicorn.conf.py"}},
# Create run and log dirs for the gunicorn socket and logs
{"action":"run", "params":"mkdir %(PROJECT_PATH)s/logs"},
# Add gunicorn startup script to project folder
{"action":"put_template", "params":{"template":"%(FAB_CONFIG_PATH)s/templates/start_gunicorn.bash",
"destination":"%(PROJECT_PATH)s/start_gunicorn.bash"}},
{"action":"sudo", "params":"chmod +x %(PROJECT_PATH)s/start_gunicorn.bash"},
# Install the requirements from the pip requirements files
{"action":"virtualenv", "params":"pip install -r %(PROJECT_PATH)s/requirements.txt --upgrade"},
# nginx
{"action":"put", "params":{"file":"%(FAB_CONFIG_PATH)s/templates/nginx.conf",
"destination":"/home/%(SERVER_USERNAME)s/nginx.conf"},
"message":"Configuring nginx"},
{"action":"sudo", "params":"mv /etc/nginx/nginx.conf /etc/nginx/nginx.conf.old"},
{"action":"sudo", "params":"mv /home/%(SERVER_USERNAME)s/nginx.conf /etc/nginx/nginx.conf"},
{"action":"sudo", "params":"chown root:root /etc/nginx/nginx.conf"},
{"action":"put_template", "params":{"template":"%(FAB_CONFIG_PATH)s/templates/nginx-app-proxy",
"destination":"/home/%(SERVER_USERNAME)s/%(PROJECT_NAME)s"}},
{"action":"sudo", "params":"rm -rf /etc/nginx/sites-enabled/default"},
{"action":"sudo", "params":"mv /home/%(SERVER_USERNAME)s/%(PROJECT_NAME)s /etc/nginx/sites-available/%(PROJECT_NAME)s"},
{"action":"sudo", "params":"ln -s /etc/nginx/sites-available/%(PROJECT_NAME)s /etc/nginx/sites-enabled/%(PROJECT_NAME)s"},
{"action":"sudo", "params":"chown root:root /etc/nginx/sites-available/%(PROJECT_NAME)s"},
{"action":"sudo", "params":"/etc/init.d/nginx restart", "message":"Restarting nginx"},
# Run collectstatic and syncdb
{"action":"virtualenv", "params":"python %(PROJECT_PATH)s/manage.py collectstatic -v 0 --noinput"},
{"action":"virtualenv", "params":"python %(PROJECT_PATH)s/manage.py makemigrations"},
{"action":"virtualenv", "params":"python %(PROJECT_PATH)s/manage.py migrate"},
{"action":"virtualenv", "params":"python %(PROJECT_PATH)s/manage.py syncdb"},
# Setup supervisor
{"action":"run", "params":"echo_supervisord_conf > /home/%(SERVER_USERNAME)s/supervisord.conf",
"message":"Configuring supervisor"},
{"action":"put_template", "params":{"template":"%(FAB_CONFIG_PATH)s/templates/supervisord.conf",
"destination":"/home/%(SERVER_USERNAME)s/my.supervisord.conf"}},
{"action":"run", "params":"cat /home/%(SERVER_USERNAME)s/my.supervisord.conf >> /home/%(SERVER_USERNAME)s/supervisord.conf"},
{"action":"run", "params":"rm /home/%(SERVER_USERNAME)s/my.supervisord.conf"},
{"action":"sudo", "params":"mv /home/%(SERVER_USERNAME)s/supervisord.conf /etc/supervisord.conf"},
{"action":"sudo", "params":"supervisord"},
{"action":"put", "params":{"file":"%(FAB_CONFIG_PATH)s/templates/supervisord-init",
"destination":"/home/%(SERVER_USERNAME)s/supervisord-init"}},
{"action":"sudo", "params":"mv /home/%(SERVER_USERNAME)s/supervisord-init /etc/init.d/supervisord"},
{"action":"sudo", "params":"chmod +x /etc/init.d/supervisord"},
{"action":"sudo", "params":"update-rc.d supervisord defaults"}
]
# Updates the python packages on the server to match those found in requirements/common.txt and
# requirements/prod.txt
update_packages = [
# Updates the python packages
{"action":"virtualenv", "params":"pip install -r %(PROJECT_PATH)s/requirements.txt --upgrade"},
]
# Pulls the latest commit from the master branch on the server, collects the static files, syncs
# the db and restarts the server
deploy = [
# Pull the latest version from the bitbucket repo
{"action":"run", "params":"cd %(PROJECT_PATH)s && git pull"},
# Update the database
{"action":"virtualenv", "params":"python %(PROJECT_PATH)s/manage.py collectstatic -v 0 --noinput"},
{"action":"virtualenv", "params":"python %(PROJECT_PATH)s/manage.py makemigrations"},
{"action":"virtualenv", "params":"python %(PROJECT_PATH)s/manage.py migrate"},
{"action":"virtualenv", "params":"python %(PROJECT_PATH)s/manage.py syncdb"},
# Restart gunicorn to update the site
{"action":"sudo", "params": "supervisorctl restart %(PROJECT_NAME)s"}
]
# Pushes the gunicorn startup script to the servers and restarts the gunicorn process, use this
# if you have made changes to templates/start_gunicorn.bash
reload_gunicorn = [
# Push the gunicorn startup script to server
{"action":"put_template", "params":{"template":"%(FAB_CONFIG_PATH)s/templates/start_gunicorn.bash",
"destination":"%(PROJECT_PATH)s/start_gunicorn.bash"}},
{"action":"sudo", "params":"chmod +x %(PROJECT_PATH)s/start_gunicorn.bash"},
# Restart gunicorn to update the site
{"action":"sudo", "params": "supervisorctl restart %(PROJECT_NAME)s"}
]
# Pushes the nginx config files to the servers and restarts the nginx, use this if you
# have made changes to templates/nginx-app-proxy or templates/nginx.conf
reload_nginx = [
# stop old nginx process
{"action":"sudo", "params":"service nginx stop"},
# Load the nginx config files
{"action":"put", "params":{"file":"%(FAB_CONFIG_PATH)s/templates/nginx.conf",
"destination":"/home/%(SERVER_USERNAME)s/nginx.conf"},
"message":"Configuring nginx"},
{"action":"sudo", "params":"mv /etc/nginx/nginx.conf /etc/nginx/nginx.conf.old"},
{"action":"sudo", "params":"mv /home/%(SERVER_USERNAME)s/nginx.conf /etc/nginx/nginx.conf"},
{"action":"sudo", "params":"chown root:root /etc/nginx/nginx.conf"},
{"action":"put_template", "params":{"template":"%(FAB_CONFIG_PATH)s/templates/nginx-app-proxy",
"destination":"/home/%(SERVER_USERNAME)s/%(PROJECT_NAME)s"}},
{"action":"sudo", "params":"rm -rf /etc/nginx/sites-enabled/default"},
{"action":"sudo", "params":"mv /home/%(SERVER_USERNAME)s/%(PROJECT_NAME)s /etc/nginx/sites-available/%(PROJECT_NAME)s"},
# {"action":"sudo", "params":"ln -s /etc/nginx/sites-available/%(PROJECT_NAME)s /etc/nginx/sites-enabled/%(PROJECT_NAME)s"},
{"action":"sudo", "params":"chown root:root /etc/nginx/sites-available/%(PROJECT_NAME)s"},
{"action":"sudo", "params":"/etc/init.d/nginx restart", "message":"Restarting nginx"},
]
# Pushes the supervisor config files to the servers and restarts the supervisor, use this if you
# have made changes to templates/supervisord-init or templates/supervisord.conf
reload_supervisor = [
# stop old supervisor process
{"action":"sudo", "params":"supervisorctl stop all"},
{"action":"sudo", "params":"killall supervisord"},
# Setup supervisor
{"action":"run", "params":"echo_supervisord_conf > /home/%(SERVER_USERNAME)s/supervisord.conf",
"message":"Configuring supervisor"},
{"action":"put_template", "params":{"template":"%(FAB_CONFIG_PATH)s/templates/supervisord.conf",
"destination":"/home/%(SERVER_USERNAME)s/my.supervisord.conf"}},
{"action":"run", "params":"cat /home/%(SERVER_USERNAME)s/my.supervisord.conf >> /home/%(SERVER_USERNAME)s/supervisord.conf"},
{"action":"run", "params":"rm /home/%(SERVER_USERNAME)s/my.supervisord.conf"},
{"action":"sudo", "params":"mv /home/%(SERVER_USERNAME)s/supervisord.conf /etc/supervisord.conf"},
{"action":"sudo", "params":"supervisord"},
{"action":"put", "params":{"file":"%(FAB_CONFIG_PATH)s/templates/supervisord-init",
"destination":"/home/%(SERVER_USERNAME)s/supervisord-init"}},
{"action":"sudo", "params":"mv /home/%(SERVER_USERNAME)s/supervisord-init /etc/init.d/supervisord"},
{"action":"sudo", "params":"chmod +x /etc/init.d/supervisord"},
{"action":"sudo", "params":"update-rc.d supervisord defaults"},
# Restart supervisor
{"action":"sudo", "params":"supervisorctl start all"}
]
| aqui/website | fabfile/tasks.py | Python | gpl-3.0 | 13,021 |
import requests
from django.core.management.base import BaseCommand
from popolo.models import Membership
from speeches.models import Section, Speech
class Command(BaseCommand):
help = 'Validates speeches, sections and memberships'
def handle(self, *args, **options):
# All speeches should belong to a section.
speeches_without_sections = Speech.objects.filter(section=None)
if speeches_without_sections:
print('%d speeches without sections' % speeches_without_sections.count())
# All sections should have speeches or sections.
sections_without_speeches = Section.objects.filter(speech__section_id=None).filter(children__parent_id=None).order_by('start_date')
if sections_without_speeches:
print('%d sections without speeches' % sections_without_speeches.count())
for section in sections_without_speeches:
print(section.get_ancestors[0].start_date, section.title)
# All boundaries should match a membership label.
url = 'https://represent.opennorth.ca/boundaries/nova-scotia-electoral-districts/?limit=0'
for boundary in requests.get(url).json()['objects']:
try:
_ = Membership.objects.get(label='MLA for %s' % boundary['name'].replace('—', '-')) # m-dash
except Membership.DoesNotExist:
print('%s matches no membership' % boundary['name'])
| SpringtideCollectiveOrg/openhousens.ca | legislature/management/commands/validate.py | Python | mit | 1,437 |
# pip install matplotlib
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import datetime
import time
x = [0, 1]
y = [0, 1.5]
#y = []
fig = plt.figure("盈利分析", (18, 5))
# plt.plot(x,y)
# plt.plot_date(x,y)
# x = range(3)
# y1 = [elem*2 for elem in x]
# plt.plot(x, y1)
# y2 = [elem**2 for elem in x]
# plt.plot(x, y2, 'r')
date1 = ["2017-01-01", "2017-01-02", "2017-01-03"]
date_times = [datetime.datetime.strptime(d, '%Y-%m-%d') for d in date1]  # 'd' avoids shadowing x above
# print(date_times[0])
y3 = [100, 200, 300]
dates = matplotlib.dates.date2num(date_times)
# Set the figure title
fig.suptitle('diff', fontsize=14, fontweight='bold')
ax = fig.add_subplot(1, 1, 1)
ax.plot(dates, y3)
y4 = [200, 400, 600]
ax.plot(dates, y4, 'r')
# plt.plot_date(dates,y3)
# Rotation angle for the x-axis tick labels
plt.xticks(rotation=30)
# for label in ax.xaxis.get_ticklabels():
# label.set_rotation(45)
ax.set_xlabel("x label")
ax.set_ylabel("y label")
ax.xaxis.set_major_locator(mdates.DayLocator(
bymonthday=range(1, 31), interval=1))
ax.xaxis.set_major_formatter(mdates.DateFormatter("%Y-%m-%d"))
# plt.show()
# Draw a second figure
plt.figure("b")
plt.plot(x,y)
plt.savefig("easyplot.jpg")
plt.show()
|
GuLiPing-Hz/PL
|
py/pytest/test_draw.py
|
Python
|
gpl-3.0
| 1,222
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import os
import sys
# Application
APP_NAME = "BigBashView"
APP_VERSION = "3.0.0"
# Directories
PROGDIR = os.path.dirname(os.path.abspath(sys.argv[0]))
DATA_DIR = os.path.expanduser("~/.bigbashview3")
# Server
ADDRESS = "127.0.0.1"
PORT = 6543
COMPAT = True
# Window
WIDTH = -1
HEIGHT = -1
WINDOW_STATE = "normal"
URL = ""
# Extras
ICON = os.sep.join((PROGDIR, "..", "icons", "bbv.png"))
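# Illustrative only: composing the local server root from the constants above.
# BASE_URL = "http://%s:%d" % (ADDRESS, PORT)  # -> http://127.0.0.1:6543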
|
kaiana/bigbashview
|
usr/share/bigbashview3/globals.py
|
Python
|
gpl-2.0
| 436
|
"""
sentry.testutils
~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import base64
import time
from sentry.conf import settings
from sentry.utils import json
from sentry.utils.auth import get_signature, get_auth_header
from sentry.utils.compat import pickle
from django.conf import settings as django_settings
from django.core.cache import cache
from django.core.management import call_command
from django.core.urlresolvers import reverse
from django.db import connections, DEFAULT_DB_ALIAS
from django.test import TestCase, TransactionTestCase
from django.test.client import Client
from sentry.models import ProjectOption, Option
from sentry.utils import cached_property
fixture = cached_property
class Settings(object):
"""
Allows you to define settings that are required for this function to work.
>>> with Settings(SENTRY_LOGIN_URL='foo'): #doctest: +SKIP
>>> print settings.SENTRY_LOGIN_URL #doctest: +SKIP
"""
NotDefined = object()
def __init__(self, **overrides):
self.overrides = overrides
self._orig = {}
self._orig_sentry = {}
def __enter__(self):
for k, v in self.overrides.iteritems():
self._orig[k] = getattr(django_settings, k, self.NotDefined)
setattr(django_settings, k, v)
if k.startswith('SENTRY_'):
nk = k.split('SENTRY_', 1)[1]
self._orig_sentry[nk] = getattr(settings, nk, self.NotDefined)
setattr(settings, nk, v)
def __exit__(self, exc_type, exc_value, traceback):
for k, v in self._orig.iteritems():
if v is self.NotDefined:
delattr(django_settings, k)
else:
setattr(django_settings, k, v)
for k, v in self._orig_sentry.iteritems():
if v is self.NotDefined:
delattr(settings, k)
else:
setattr(settings, k, v)
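# Sketch of intended usage (the key is illustrative): overriding SENTRY_KEY
# patches both django.conf.settings.SENTRY_KEY and sentry.conf.settings.KEY
# for the duration of the block, then restores the originals on exit.
#
# with Settings(SENTRY_KEY='abc'):
#     assert settings.KEY == 'abc'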
class BaseTestCase(object):
urls = 'tests.sentry.web.urls'
Settings = Settings
def _pre_setup(self):
cache.clear()
ProjectOption.objects.clear_cache()
Option.objects.clear_cache()
super(BaseTestCase, self)._pre_setup()
def _postWithKey(self, data, key=None):
resp = self.client.post(reverse('sentry-api-store'), {
'data': base64.b64encode(pickle.dumps(data)),
'key': settings.KEY,
})
return resp
def _makeMessage(self, data, key=None):
ts = time.time()
message = base64.b64encode(json.dumps(data))
sig = get_signature(message, ts, key)
return ts, message, sig
def _postWithSignature(self, data, key=None):
ts, message, sig = self._makeMessage(data, key)
resp = self.client.post(reverse('sentry-api-store'), message,
content_type='application/octet-stream',
HTTP_AUTHORIZATION=get_auth_header(sig, ts, '_postWithSignature', key),
)
return resp
def _postWithNewSignature(self, data, key=None):
ts, message, sig = self._makeMessage(data, key)
resp = self.client.post(reverse('sentry-api-store'), message,
content_type='application/octet-stream',
HTTP_X_SENTRY_AUTH=get_auth_header(sig, ts, '_postWithSignature', key),
)
return resp
class TestCase(BaseTestCase, TestCase):
pass
class TransactionTestCase(BaseTestCase, TransactionTestCase):
"""
Subclass of ``django.test.TransactionTestCase`` that quickly tears down
fixtures and doesn't `flush` on setup. This enables tests to be run in
any order.
"""
urls = 'tests.urls'
def __call__(self, result=None):
"""
Wrapper around default __call__ method to perform common Django test
set up. This means that user-defined Test Cases aren't required to
include a call to super().setUp().
"""
self.client = getattr(self, 'client_class', Client)()
try:
self._pre_setup()
except (KeyboardInterrupt, SystemExit):
raise
except Exception:
import sys
result.addError(self, sys.exc_info())
return
try:
super(TransactionTestCase, self).__call__(result)
finally:
try:
self._post_teardown()
except (KeyboardInterrupt, SystemExit):
raise
except Exception:
import sys
result.addError(self, sys.exc_info())
def _get_databases(self):
if getattr(self, 'multi_db', False):
return connections
return [DEFAULT_DB_ALIAS]
def _fixture_setup(self):
for db in self._get_databases():
if hasattr(self, 'fixtures') and self.fixtures:
# We have to use this slightly awkward syntax due to the fact
# that we're using *args and **kwargs together.
call_command('loaddata', *self.fixtures, **{'verbosity': 0, 'database': db})
def _fixture_teardown(self):
for db in self._get_databases():
call_command('flush', verbosity=0, interactive=False, database=db)
|
simmetria/sentry
|
src/sentry/testutils.py
|
Python
|
bsd-3-clause
| 5,319
|
from flask import Flask, jsonify, request
from flask.ext.cors import CORS
import re
availability = {}
app = Flask(__name__)
CORS(app)
@app.route("/")
def hello_world():
return jsonify({"message": "Hello World!"})
@app.route("/availability")
def get_availability():
return jsonify(availability)
@app.route("/availability", methods=["POST"])
def record_availability():
global availability
json = request.get_json()
if not json:
return jsonify({"error": "not JSON"})
if len(json) != 1:
return jsonify({"error": "wrong len"})
safe = {}
for key in json:
value = json[key]
print(value, key)
key = re.sub('[<>*&#@$;:]', '', key)
value = re.sub('[<>*&#@;:]', '', value)
print(value, key)
        if value in ("Free", "Busy", "Putting out a fire", "Running really fast"):
safe[key] = value
availability.update(safe)
return jsonify(availability)
if __name__ == "__main__":
app.run(debug=True)
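# Example session (assumes the Flask dev server default port 5000):
#   curl -X POST -H 'Content-Type: application/json' \
#        -d '{"alice": "Free"}' http://127.0.0.1:5000/availability
#   curl http://127.0.0.1:5000/availability   # -> {"alice": "Free"}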
|
hacksu/Availability
|
AvailabilityBackend.py
|
Python
|
unlicense
| 1,039
|
"""Offer numeric state listening automation rules."""
import logging
import voluptuous as vol
from homeassistant import exceptions
from homeassistant.const import (
CONF_ABOVE,
CONF_ATTRIBUTE,
CONF_BELOW,
CONF_ENTITY_ID,
CONF_FOR,
CONF_PLATFORM,
CONF_VALUE_TEMPLATE,
)
from homeassistant.core import CALLBACK_TYPE, HassJob, callback
from homeassistant.helpers import condition, config_validation as cv, template
from homeassistant.helpers.event import (
async_track_same_state,
async_track_state_change_event,
)
# mypy: allow-incomplete-defs, allow-untyped-calls, allow-untyped-defs
# mypy: no-check-untyped-defs
def validate_above_below(value):
"""Validate that above and below can co-exist."""
above = value.get(CONF_ABOVE)
below = value.get(CONF_BELOW)
if above is None or below is None:
return value
if isinstance(above, str) or isinstance(below, str):
return value
if above > below:
raise vol.Invalid(
f"A value can never be above {above} and below {below} at the same time. You probably want two different triggers.",
)
return value
TRIGGER_SCHEMA = vol.All(
vol.Schema(
{
vol.Required(CONF_PLATFORM): "numeric_state",
vol.Required(CONF_ENTITY_ID): cv.entity_ids,
vol.Optional(CONF_BELOW): cv.NUMERIC_STATE_THRESHOLD_SCHEMA,
vol.Optional(CONF_ABOVE): cv.NUMERIC_STATE_THRESHOLD_SCHEMA,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_FOR): cv.positive_time_period_template,
vol.Optional(CONF_ATTRIBUTE): cv.match_all,
}
),
cv.has_at_least_one_key(CONF_BELOW, CONF_ABOVE),
validate_above_below,
)
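# Minimal sketch of a config that should validate against TRIGGER_SCHEMA;
# the entity id and thresholds are illustrative, not from the source.
#
# TRIGGER_SCHEMA({
#     "platform": "numeric_state",
#     "entity_id": "sensor.temperature",
#     "above": 17,
#     "below": 25,
# })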
_LOGGER = logging.getLogger(__name__)
async def async_attach_trigger(
hass, config, action, automation_info, *, platform_type="numeric_state"
) -> CALLBACK_TYPE:
"""Listen for state changes based on configuration."""
entity_ids = config.get(CONF_ENTITY_ID)
below = config.get(CONF_BELOW)
above = config.get(CONF_ABOVE)
time_delta = config.get(CONF_FOR)
template.attach(hass, time_delta)
value_template = config.get(CONF_VALUE_TEMPLATE)
unsub_track_same = {}
armed_entities = set()
period: dict = {}
attribute = config.get(CONF_ATTRIBUTE)
job = HassJob(action)
_variables = {}
if automation_info:
_variables = automation_info.get("variables") or {}
if value_template is not None:
value_template.hass = hass
def variables(entity_id):
"""Return a dict with trigger variables."""
trigger_info = {
"trigger": {
"platform": "numeric_state",
"entity_id": entity_id,
"below": below,
"above": above,
"attribute": attribute,
}
}
return {**_variables, **trigger_info}
@callback
def check_numeric_state(entity_id, from_s, to_s):
"""Return whether the criteria are met, raise ConditionError if unknown."""
return condition.async_numeric_state(
hass, to_s, below, above, value_template, variables(entity_id), attribute
)
# Each entity that starts outside the range is already armed (ready to fire).
for entity_id in entity_ids:
try:
if not check_numeric_state(entity_id, None, entity_id):
armed_entities.add(entity_id)
except exceptions.ConditionError as ex:
_LOGGER.warning(
"Error initializing '%s' trigger: %s",
automation_info["name"],
ex,
)
@callback
def state_automation_listener(event):
"""Listen for state changes and calls action."""
entity_id = event.data.get("entity_id")
from_s = event.data.get("old_state")
to_s = event.data.get("new_state")
@callback
def call_action():
"""Call action with right context."""
hass.async_run_hass_job(
job,
{
"trigger": {
"platform": platform_type,
"entity_id": entity_id,
"below": below,
"above": above,
"from_state": from_s,
"to_state": to_s,
"for": time_delta if not time_delta else period[entity_id],
"description": f"numeric state of {entity_id}",
}
},
to_s.context,
)
@callback
def check_numeric_state_no_raise(entity_id, from_s, to_s):
"""Return True if the criteria are now met, False otherwise."""
try:
return check_numeric_state(entity_id, from_s, to_s)
except exceptions.ConditionError:
# This is an internal same-state listener so we just drop the
# error. The same error will be reached and logged by the
# primary async_track_state_change_event() listener.
return False
try:
matching = check_numeric_state(entity_id, from_s, to_s)
except exceptions.ConditionError as ex:
_LOGGER.warning("Error in '%s' trigger: %s", automation_info["name"], ex)
return
if not matching:
armed_entities.add(entity_id)
elif entity_id in armed_entities:
armed_entities.discard(entity_id)
if time_delta:
try:
period[entity_id] = cv.positive_time_period(
template.render_complex(time_delta, variables(entity_id))
)
except (exceptions.TemplateError, vol.Invalid) as ex:
_LOGGER.error(
"Error rendering '%s' for template: %s",
automation_info["name"],
ex,
)
return
unsub_track_same[entity_id] = async_track_same_state(
hass,
period[entity_id],
call_action,
entity_ids=entity_id,
async_check_same_func=check_numeric_state_no_raise,
)
else:
call_action()
unsub = async_track_state_change_event(hass, entity_ids, state_automation_listener)
@callback
def async_remove():
"""Remove state listeners async."""
unsub()
for async_remove in unsub_track_same.values():
async_remove()
unsub_track_same.clear()
return async_remove
|
partofthething/home-assistant
|
homeassistant/components/homeassistant/triggers/numeric_state.py
|
Python
|
apache-2.0
| 6,771
|
# ENH some closed forms
import pathlib
from sympy import Rational as frac
from sympy import sqrt
from ...helpers import article
from .._helpers import C2Scheme, _read, register
from .._tyler import tyler_2 as dunavant_03
source = article(
authors=["D.A. Dunavant"],
title="Economical symmetrical quadrature rules for complete polynomials over a square domain",
journal="Numerical Methods in Engineering",
volume="21",
number="10",
month="oct",
year="1985",
pages="1777–1784",
url="https://doi.org/10.1002/nme.1620211004",
)
this_dir = pathlib.Path(__file__).resolve().parent
def dunavant_00():
d = {"zero2": [[1]]}
return C2Scheme("Dunavant 0", d, 1, source, 1.0e-100)
def dunavant_01():
d = {"d4_aa": [[frac(1, 4)], [sqrt(frac(1, 3))]]}
return C2Scheme("Dunavant 1", d, 3, source, 4.441e-16)
def dunavant_02():
d = {
"d4_a0": [[frac(10, 49)], [sqrt(frac(7, 15))]],
"d4_aa": [[frac(9, 196)], [sqrt(frac(7, 9))]],
}
return C2Scheme("Dunavant 2", d, 5, source, 1.305e-15)
def dunavant_04():
return _read(this_dir / "dunavant_04.json", source)
def dunavant_05():
return _read(this_dir / "dunavant_05.json", source)
def dunavant_06():
return _read(this_dir / "dunavant_06.json", source)
def dunavant_07():
return _read(this_dir / "dunavant_07.json", source)
def dunavant_08():
return _read(this_dir / "dunavant_08.json", source)
def dunavant_09():
return _read(this_dir / "dunavant_09.json", source)
def dunavant_10():
return _read(this_dir / "dunavant_10.json", source)
register(
[
dunavant_00,
dunavant_01,
dunavant_02,
dunavant_03,
dunavant_04,
dunavant_05,
dunavant_06,
dunavant_07,
dunavant_08,
dunavant_09,
dunavant_10,
]
)
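# Sketch (attribute names assumed): the factories above can be called directly.
# scheme = dunavant_02()
# print(scheme.name, scheme.degree)  # -> "Dunavant 2" 5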
|
nschloe/quadpy
|
src/quadpy/c2/_dunavant/__init__.py
|
Python
|
mit
| 1,858
|
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from robot.errors import (DataError, ExecutionFailed, ExecutionPassed,
PassExecution, ReturnFromKeyword,
UserKeywordExecutionFailed)
from robot.variables import is_list_var
from robot.output import LOGGER
from robot import utils
from .arguments import (ArgumentMapper, ArgumentResolver,
EmbeddedArguments, UserKeywordArgumentParser)
from .handlerstore import HandlerStore
from .keywordrunner import KeywordRunner
from .timeouts import KeywordTimeout
from .usererrorhandler import UserErrorHandler
class UserLibrary(object):
def __init__(self, user_keywords, path=None):
basename = os.path.basename(path) if path else None
self.name = os.path.splitext(basename)[0] if path else None
self.handlers = HandlerStore(basename)
for kw in user_keywords:
try:
handler, embedded = self._create_handler(kw)
self._validate_not_duplicate(handler)
except DataError as err:
LOGGER.error("Creating user keyword '%s' failed: %s"
% (kw.name, unicode(err)))
handler = UserErrorHandler(kw.name, unicode(err))
embedded = False
self.handlers.add(handler, embedded)
def _create_handler(self, kw):
embedded = EmbeddedArguments(kw.name)
if embedded:
if kw.args:
raise DataError('Keyword cannot have both normal and embedded '
'arguments.')
return EmbeddedArgsTemplate(kw, self.name, embedded), True
return UserKeywordHandler(kw, self.name), False
def _validate_not_duplicate(self, handler):
if handler.name in self.handlers:
self.handlers.remove(handler.name)
raise DataError('Keyword with same name defined multiple times.')
class UserKeywordHandler(object):
type = 'user'
def __init__(self, keyword, libname):
self.name = keyword.name
self.keywords = keyword.keywords.normal
self.return_value = tuple(keyword.return_)
self.teardown = keyword.keywords.teardown
self.libname = libname
self.doc = self._doc = unicode(keyword.doc)
self.tags = self._tags = keyword.tags
self.arguments = UserKeywordArgumentParser().parse(tuple(keyword.args),
self.longname)
self._timeout = keyword.timeout
@property
def longname(self):
return '%s.%s' % (self.libname, self.name) if self.libname else self.name
@property
def shortdoc(self):
return self.doc.splitlines()[0] if self.doc else ''
def init_keyword(self, variables):
# TODO: Should use runner and not change internal state like this.
# Timeouts should also be cleaned up in general.
doc = variables.replace_string(self._doc, ignore_errors=True)
doc, tags = utils.split_tags_from_doc(doc)
self.doc = doc
self.tags = [variables.replace_string(tag, ignore_errors=True)
for tag in self._tags] + tags
if self._timeout:
self.timeout = KeywordTimeout(self._timeout.value,
self._timeout.message,
variables)
else:
self.timeout = KeywordTimeout()
def run(self, context, arguments):
arguments = self._resolve_arguments(context, arguments)
with context.user_keyword(self):
args, kwargs = self._map_arguments(context, arguments)
if context.dry_run:
return self._dry_run(context, args, kwargs)
return self._normal_run(context, args, kwargs)
def _resolve_arguments(self, context, arguments):
variables = context.variables if not context.dry_run else None
resolver = ArgumentResolver(self.arguments)
return resolver.resolve(arguments, variables)
def _map_arguments(self, context, arguments):
positional, named = arguments
variables = context.variables if not context.dry_run else None
mapper = ArgumentMapper(self.arguments)
return mapper.map(positional, named, variables)
def _dry_run(self, context, args, kwargs):
error, return_ = self._execute(context, args, kwargs)
if error:
raise error
return None
def _normal_run(self, context, args, kwargs):
error, return_ = self._execute(context, args, kwargs)
if error and not error.can_continue(context.in_teardown):
raise error
return_value = self._get_return_value(context.variables, return_)
if error:
error.return_value = return_value
raise error
return return_value
def _execute(self, context, positional, kwargs):
self._set_variables(positional, kwargs, context.variables)
context.output.trace(lambda: self._log_args(context.variables))
self._verify_keyword_is_valid()
self.timeout.start()
error = return_ = pass_ = None
runner = KeywordRunner(context)
try:
runner.run_keywords(self.keywords)
except ReturnFromKeyword as exception:
return_ = exception
error = exception.earlier_failures
except ExecutionPassed as exception:
pass_ = exception
error = exception.earlier_failures
except ExecutionFailed as exception:
error = exception
with context.keyword_teardown(error):
td_error = self._run_teardown(context)
if error or td_error:
error = UserKeywordExecutionFailed(error, td_error)
return error or pass_, return_
def _set_variables(self, positional, kwargs, variables):
before_varargs, varargs = self._split_args_and_varargs(positional)
for name, value in zip(self.arguments.positional, before_varargs):
variables['${%s}' % name] = value
if self.arguments.varargs:
variables['@{%s}' % self.arguments.varargs] = varargs
if self.arguments.kwargs:
variables['&{%s}' % self.arguments.kwargs] = kwargs
def _split_args_and_varargs(self, args):
if not self.arguments.varargs:
return args, []
positional = len(self.arguments.positional)
return args[:positional], args[positional:]
def _log_args(self, variables):
args = ['${%s}' % arg for arg in self.arguments.positional]
if self.arguments.varargs:
args.append('@{%s}' % self.arguments.varargs)
if self.arguments.kwargs:
args.append('&{%s}' % self.arguments.kwargs)
args = ['%s=%s' % (name, utils.prepr(variables[name]))
for name in args]
return 'Arguments: [ %s ]' % ' | '.join(args)
def _run_teardown(self, context):
if not self.teardown:
return None
try:
name = context.variables.replace_string(self.teardown.name)
except DataError as err:
return ExecutionFailed(unicode(err), syntax=True)
if name.upper() in ('', 'NONE'):
return None
runner = KeywordRunner(context)
try:
runner.run_keyword(self.teardown, name)
except PassExecution:
return None
except ExecutionFailed as err:
return err
return None
def _verify_keyword_is_valid(self):
if not (self.keywords or self.return_value):
raise DataError("User keyword '%s' contains no keywords."
% self.name)
def _get_return_value(self, variables, return_):
ret = self.return_value if not return_ else return_.return_value
if not ret:
return None
contains_list_var = any(is_list_var(item) for item in ret)
try:
ret = variables.replace_list(ret)
except DataError as err:
raise DataError('Replacing variables from keyword return value '
'failed: %s' % unicode(err))
if len(ret) != 1 or contains_list_var:
return ret
return ret[0]
class EmbeddedArgsTemplate(UserKeywordHandler):
def __init__(self, keyword, libname, embedded):
UserKeywordHandler.__init__(self, keyword, libname)
self.keyword = keyword
self.embedded_name = embedded.name
self.embedded_args = embedded.args
def matches(self, name):
return self.embedded_name.match(name) is not None
def create(self, name):
return EmbeddedArgs(name, self)
class EmbeddedArgs(UserKeywordHandler):
def __init__(self, name, template):
match = template.embedded_name.match(name)
if not match:
raise ValueError('Does not match given name')
UserKeywordHandler.__init__(self, template.keyword, template.libname)
self.embedded_args = zip(template.embedded_args, match.groups())
self.name = name
self.orig_name = template.name
def _resolve_arguments(self, context, arguments):
variables = context.variables if not context.dry_run else None
# Validates that no arguments given.
ArgumentResolver(self.arguments).resolve(arguments, variables)
if not variables:
return self.embedded_args
return [(n, variables.replace_scalar(v)) for n, v in self.embedded_args]
def _map_arguments(self, context, arguments):
if not context.dry_run:
for name, value in arguments:
context.variables['${%s}' % name] = value
return [], {}
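# Illustration (hypothetical keyword): a user keyword named
# "Select ${item} From List" yields an EmbeddedArgsTemplate; calling
# template.create('Select Foo From List') returns an EmbeddedArgs handler
# whose embedded_args pair each embedded variable with its matched text,
# roughly [('item', 'Foo')] (the exact name format depends on EmbeddedArguments).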
|
caio2k/RIDE
|
src/robotide/lib/robot/running/userkeyword.py
|
Python
|
apache-2.0
| 10,334
|
#!/usr/bin/python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests that mock environment has same shape outputs as true environment."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from pysc2.env import mock_sc2_env
from pysc2.env import sc2_env
class TestCompareEnvironments(absltest.TestCase):
@classmethod
def setUpClass(cls):
super(TestCompareEnvironments, cls).setUpClass()
players = [
sc2_env.Agent(race=sc2_env.Race.terran),
sc2_env.Agent(race=sc2_env.Race.protoss),
]
kwargs = {
'map_name': 'Flat64',
'players': players,
'agent_interface_format': [
sc2_env.AgentInterfaceFormat(
feature_dimensions=sc2_env.Dimensions(
screen=(32, 64),
minimap=(8, 16)
),
rgb_dimensions=sc2_env.Dimensions(
screen=(31, 63),
minimap=(7, 15)
),
action_space=sc2_env.ActionSpace.FEATURES
),
sc2_env.AgentInterfaceFormat(
rgb_dimensions=sc2_env.Dimensions(screen=64, minimap=32)
)
]
}
cls._env = sc2_env.SC2Env(**kwargs)
cls._mock_env = mock_sc2_env.SC2TestEnv(**kwargs)
@classmethod
def tearDownClass(cls):
super(TestCompareEnvironments, cls).tearDownClass()
cls._env.close()
cls._mock_env.close()
def test_observation_spec(self):
self.assertEqual(self._env.observation_spec(),
self._mock_env.observation_spec())
def test_action_spec(self):
self.assertEqual(self._env.action_spec(), self._mock_env.action_spec())
if __name__ == '__main__':
absltest.main()
|
deepmind/pysc2
|
pysc2/env/mock_sc2_env_comparison_test.py
|
Python
|
apache-2.0
| 2,364
|
#!/usr/bin/env python
import re
import glob
import sys
if len(sys.argv) != 3:
print('usage: python post.py directory versionNumber')
sys.exit(1)
path = str(sys.argv[1])
insertVersion = re.compile(r'VERSION_VERSION_VERSION')
insertSTLLink = re.compile(r'WWW_STL_DOCU')
# tested with doxygen 1.7.5.1
hasAnchorDetails = re.compile(r'<a (class="anchor" |)name="_?details"( id="details"|)>')
detailsHeading1 = re.compile(r'''<a name="_details"></a><h2>Detailed Description</h2>
<h3>''')
detailsHeading2 = re.compile(r'<a name="_details"></a><h2>Detailed Description</h2>')
# tested with doxygen 1.7.5.1 and 1.8.2
detailsHeading3 = re.compile(r'<a name="details" id="details"></a><h2( class="groupheader"|)>Detailed Description</h2>')
mainHeading1 = re.compile(r'''(<!-- Generated by Doxygen \d+\.\d+\.\d+ -->)
( <div class="navpath">.*
</div>
|)<div class="contents">
<h1>(.*)( Class Template Reference| Struct Template Reference| Class Reference| Struct Reference)(<br>
<small>
.*</small>
|)</h1>''')
mainHeading2 = re.compile(r'''(<!-- Generated by Doxygen \d+\.\d+\.\d+ -->)
( <div class="navpath">.*
</div>
<div class="contents">
|<div class="contents">
|)<h1>(.*)()(<br>
<small>
.*</small>
|)</h1>''')
# tested with doxygen 1.5.6
mainHeading3 = re.compile(r'''(<!-- Generated by Doxygen \d+\.\d+\.\d+ -->)
(<div class="header">
<div class="headertitle">
)<h1>(.*)</h1>(.*)()
</div>
<div class="contents">''')
# tested with doxygen 1.7.5.1 and 1.7.6.1
mainHeading4 = re.compile(r'''(<!-- Generated by Doxygen .+ -->
</div>)
(<div class="header">
<div class="headertitle">
)<div class="title">(.*)</div> </div>(.*)()
</div>(?:<!--header-->)?
<div class="contents">''')
# tested with doxygen 1.8.2
mainHeading5 = re.compile(r'''(<!-- Generated by Doxygen .+ -->
</div><!-- top -->)
(<div class="header">
<div class="headertitle">
)<div class="title">(.*)</div> </div>(.*)()
</div>(?:<!--header-->)?
<div class="contents">''')
mainHeadingReplacement = '''\\1
<div class="contents">
<table class="main_heading">
<tr>
%s<td width="100%%">\\3\\5
</td>
<td align=right><a href="http://hci.iwr.uni-heidelberg.de/vigra/"><IMG border=0 ALT="VIGRA" SRC="documents/vigra.gif" title="VIGRA Homepage"></a></td></tr>
</table><p>
'''
# tested with doxygen 1.7.5.1
headingSummary = re.compile(r'''(<!-- Generated by Doxygen .+ -->
</div>
<div class="header">)
<div class="summary">
(?s).*?</div>''')
# tested with doxygen 1.8.2
headingSummary2 = re.compile(r'''(<!-- Generated by Doxygen .+ -->
</div><!-- top -->
<div class="header">)
<div class="summary">
(?s).*?</div>''')
# tested with doxygen 1.7.5.1
headingNavpath = re.compile(r'''(<!-- Generated by Doxygen .+ -->)
<div id="nav-path" class="navpath">(?s).*?</div>''')
# tested with doxygen 1.8.2
headingNavpath2 = re.compile(r'''(<!-- Generated by Doxygen .+ -->)
<div id="nav-path" class="navpath">
<ul>
.* </ul>
</div>''')
detailsLink = '''<td align=left>
<A HREF ="#_details" ><IMG BORDER=0 ALT="details" title="Detailed Description" SRC="documents/pfeilGross.gif"></A>
</td>
'''
indexPageHeading = re.compile(r'''((?:<p>)?<a class="anchor" (?:name|id)="_details"></a> (?:</p>\n)?<center> </center>)<h2>(<a class="anchor" (?:name|id)="Main">(?:</a>)?)
(.*)
<center> Version''')
indexPageHeadingReplacement = '''\\1 <h2 class="details_section">\\2
\\3
<center> Version'''
templateDeclaration = re.compile('''<tr><td class="memTemplParams" nowrap colspan="2">([^<]*)</td></tr>\s*
<tr><td class="memTemplItemLeft" nowrap align="right" valign="top">[^<]*</td><td class="memTemplItemRight" valign="bottom"><a class="el" href=".*#([^"]+)">''')
templateDocumentation = '''(<a class="anchor" name="%s"></a>.*
<div class="memitem">
<div class="memproto">\s*
<table class="memname">
<tr>)'''
templateDocumentationReplacement = '''\\1
<td colspan="4" class="memtemplate">%s</td></tr><tr>'''
def convertHeadings(text):
if hasAnchorDetails.search(text):
text = detailsHeading1.sub('<a name="_details"></a><h2 class="details_section">Detailed Description</h2>\n<h3 class="details_section">', \
text, 1)
text = detailsHeading2.sub(r'<a name="_details"></a><h2 class="details_section">Detailed Description</h2>', text, 1)
text = detailsHeading3.sub(r'<a name="_details" id="details"></a><h2 class="details_section">Detailed Description</h2>', text, 1)
mhr = mainHeadingReplacement % detailsLink
else:
mhr = mainHeadingReplacement % ''
text = headingNavpath.sub("\\1", text, 1)
text = headingNavpath2.sub("\\1", text, 1)
text = headingSummary.sub("\\1", text, 1)
text = headingSummary2.sub("\\1", text, 1)
text = mainHeading1.sub(mhr, text, 1)
text = mainHeading2.sub(mhr, text, 1)
text = mainHeading3.sub(mhr, text, 1)
text = mainHeading4.sub(mhr, text, 1)
text = mainHeading5.sub(mhr, text, 1)
return text
def insertMissingTemplateDeclarations(text):
matches = templateDeclaration.findall(text)
for k in matches:
text = re.sub(templateDocumentation % k[1], templateDocumentationReplacement % k[0], text)
return text
def processFile(fileName):
print(fileName) # log message
if sys.version_info[0] < 3:
f = open(fileName)
else:
f = open(fileName,encoding = "ISO-8859-1")
text = f.read()
f.close()
text = insertVersion.sub(sys.argv[2], text)
text = insertSTLLink.sub(r'http://www.sgi.com/tech/stl/', text)
    if re.search('.*/index.html', fileName) or re.search(r'.*\\index.html', fileName):
text = re.sub(r'<h3 (align="center"|class="version")>\d+\.\d+\.\d+ </h3>', '', text)
text = indexPageHeading.sub(indexPageHeadingReplacement, text)
text = convertHeadings(text)
text = insertMissingTemplateDeclarations(text)
f = open(fileName, 'w+')
f.write(text)
f.close()
files = glob.glob(path + '/*.html') # use given path to files
#files = glob.glob(path + '/index.html')
for file in files:
processFile(file)
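# Usage, per the argument check at the top of this script:
#   python post.py <html-doc-directory> <versionNumber>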
|
dstoe/vigra
|
docsrc/post.py
|
Python
|
mit
| 6,047
|
# Lint as: python2, python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=line-too-long
"""Script for updating tensorflow/tools/compatibility/renames_v2.py.
To update renames_v2.py, run:
bazel build tensorflow/tools/compatibility/update:generate_v2_renames_map
bazel-bin/tensorflow/tools/compatibility/update/generate_v2_renames_map
pyformat --in_place third_party/tensorflow/tools/compatibility/renames_v2.py
"""
# pylint: enable=line-too-long
import sys
import six
import tensorflow as tf
# This import is needed so that TensorFlow python modules are in sys.modules.
from tensorflow import python as tf_python  # pylint: disable=unused-import
from tensorflow.python.lib.io import file_io
from tensorflow.python.platform import app
from tensorflow.python.util import tf_decorator
from tensorflow.python.util import tf_export
from tensorflow.tools.common import public_api
from tensorflow.tools.common import traverse
from tensorflow.tools.compatibility import all_renames_v2
_OUTPUT_FILE_PATH = 'third_party/tensorflow/tools/compatibility/renames_v2.py'
_FILE_HEADER = """# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=line-too-long
\"\"\"List of renames to apply when converting from TF 1.0 to TF 2.0.
THIS FILE IS AUTOGENERATED: To update, please run:
bazel build tensorflow/tools/compatibility/update:generate_v2_renames_map
bazel-bin/tensorflow/tools/compatibility/update/generate_v2_renames_map
pyformat --in_place third_party/tensorflow/tools/compatibility/renames_v2.py
This file should be updated whenever endpoints are deprecated.
\"\"\"
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
"""
def get_canonical_name(v2_names, v1_name):
if v2_names:
return v2_names[0]
return 'compat.v1.%s' % v1_name
def get_all_v2_names():
"""Get a set of function/class names available in TensorFlow 2.0."""
v2_names = set() # All op names in TensorFlow 2.0
def visit(unused_path, unused_parent, children):
"""Visitor that collects TF 2.0 names."""
for child in children:
_, attr = tf_decorator.unwrap(child[1])
api_names_v2 = tf_export.get_v2_names(attr)
for name in api_names_v2:
v2_names.add(name)
visitor = public_api.PublicAPIVisitor(visit)
visitor.do_not_descend_map['tf'].append('contrib')
visitor.do_not_descend_map['tf.compat'] = ['v1']
traverse.traverse(tf.compat.v2, visitor)
return v2_names
def collect_constant_renames():
"""Looks for constants that need to be renamed in TF 2.0.
Returns:
Set of tuples of the form (current name, new name).
"""
renames = set()
for module in sys.modules.values():
constants_v1_list = tf_export.get_v1_constants(module)
constants_v2_list = tf_export.get_v2_constants(module)
# _tf_api_constants attribute contains a list of tuples:
# (api_names_list, constant_name)
# We want to find API names that are in V1 but not in V2 for the same
# constant_names.
# First, we convert constants_v1_list and constants_v2_list to
# dictionaries for easier lookup.
constants_v1 = {constant_name: api_names
for api_names, constant_name in constants_v1_list}
constants_v2 = {constant_name: api_names
for api_names, constant_name in constants_v2_list}
# Second, we look for names that are in V1 but not in V2.
for constant_name, api_names_v1 in constants_v1.items():
api_names_v2 = constants_v2[constant_name]
for name in api_names_v1:
if name not in api_names_v2:
renames.add((name, get_canonical_name(api_names_v2, name)))
return renames
def collect_function_renames():
"""Looks for functions/classes that need to be renamed in TF 2.0.
Returns:
Set of tuples of the form (current name, new name).
"""
# Set of rename lines to write to output file in the form:
# 'tf.deprecated_name': 'tf.canonical_name'
renames = set()
def visit(unused_path, unused_parent, children):
"""Visitor that collects rename strings to add to rename_line_set."""
for child in children:
_, attr = tf_decorator.unwrap(child[1])
api_names_v1 = tf_export.get_v1_names(attr)
api_names_v2 = tf_export.get_v2_names(attr)
deprecated_api_names = set(api_names_v1) - set(api_names_v2)
for name in deprecated_api_names:
renames.add((name, get_canonical_name(api_names_v2, name)))
visitor = public_api.PublicAPIVisitor(visit)
visitor.do_not_descend_map['tf'].append('contrib')
visitor.do_not_descend_map['tf.compat'] = ['v1', 'v2']
traverse.traverse(tf, visitor)
# It is possible that a different function is exported with the
# same name. For e.g. when creating a different function to
# rename arguments. Exclude it from renames in this case.
v2_names = get_all_v2_names()
renames = set((name, new_name) for name, new_name in renames
if name not in v2_names)
return renames
def get_rename_line(name, canonical_name):
return ' \'tf.%s\': \'tf.%s\'' % (name, canonical_name)
def update_renames_v2(output_file_path):
"""Writes a Python dictionary mapping deprecated to canonical API names.
Args:
output_file_path: File path to write output to. Any existing contents
would be replaced.
"""
function_renames = collect_function_renames()
constant_renames = collect_constant_renames()
all_renames = function_renames.union(constant_renames)
manual_renames = set(
all_renames_v2.manual_symbol_renames.keys())
# List of rename lines to write to output file in the form:
# 'tf.deprecated_name': 'tf.canonical_name'
rename_lines = [
get_rename_line(name, canonical_name)
for name, canonical_name in all_renames
if 'tf.' + six.ensure_str(name) not in manual_renames
]
renames_file_text = '%srenames = {\n%s\n}\n' % (
_FILE_HEADER, ',\n'.join(sorted(rename_lines)))
file_io.write_string_to_file(output_file_path, renames_file_text)
def main(unused_argv):
update_renames_v2(_OUTPUT_FILE_PATH)
if __name__ == '__main__':
app.run(main=main)
|
karllessard/tensorflow
|
tensorflow/tools/compatibility/update/generate_v2_renames_map.py
|
Python
|
apache-2.0
| 7,410
|
from gi.repository import GObject
from gi.repository import GLib
from gi.repository import Replay
import json
class CausewayFileLoader(Replay.FileLoader):
__gtype_name__ = 'CausewayFileLoader'
def __init__(self):
Replay.FileLoader.__init__(self, name='Causeway Traces', pattern='*.log')
def validate_event(self, event):
for (field, typ) in { 'class': list,
'anchor': dict,
'timestamp': int }.iteritems():
if not field in event:
raise Exception('Invalid Causeway log - missing required field "%s"' % field)
if type(event[field]) != typ:
raise Exception('Invalid Causeway log - required field "%s" must be of type "%s"' % (field, typ))
return True
def process_events(self, events):
# order events based on their timestamp
        vats = {}
        sends = []  # 'Sent' events, collected in the first pass below
        recvs = []  # 'Got' events, collected in the first pass below
        t = None
if type(events) != list:
raise Exception('Invalid Causeway log - expected a list of events')
# check each event contains the required fields with the
# required types for each field
for event in events:
self.validate_event(event)
# now process each event
        for event in sorted(events, key=lambda event: event['timestamp']):
# get 0 timestamp
if not t:
t = event['timestamp']
if ('org.ref_send.log.Sent' in event['class'] or
'org.ref_send.log.Got' in event['class']):
                # the vat which sent this message - strip everything
                # preceding the /-/ and strip any trailing /
loop = event['anchor']['turn']['loop']
name = loop.split('/-/')[1].strip('/')
if not name in vats:
vats[name] = { 'name': name, 'objects': {} }
vat = vats[name]
# the object sending this message is the first calls
# within trace - strip the method name though
name = event['trace']['calls'][0]['name']
name = '.'.join(name.split('.')[:-1])
source = event['trace']['calls'][0]['source']
if not name in vat['objects']:
vat['objects'][name] = { 'name': name, 'source': source }
if 'org.ref_send.log.Sent' in event['class']:
sends.append(event)
else:
recvs.append(event)
elif 'org.ref_send.log.Resolved' in event['class']:
# process promise resolution
pass
elif 'org.ref_send.log.Comment' in event['class']:
# process comment
pass
else:
raise Exception('Invalid Causeway log - Invalid event class "%s" - must be one of Sent, Got or Comment' % str(event['class']))
# create all objects in all vats
for (vat_name, vat) in vats.iteritems():
# TODO: color vats differently
props = GLib.Variant('a{sv}', {'color': GLib.Variant('s', '#999'),
'vat': GLib.Variant('s', vat_name)})
for (obj_name, obj) in vat['objects'].iteritems():
self.emit_event(Replay.NodeCreateEvent.new(timestamp=t, source=obj['source'], id=obj_name,
props=props))
# send and recv all messages
        for event in sorted(events, key=lambda event: event['timestamp']):
if 'org.ref_send.log.Sent' in event['class']:
# message send event
pass
elif 'org.ref_send.log.Got' in event['class']:
# message recv event
pass
elif 'org.ref_send.log.Comment' in event['class']:
# message send event
pass
def do_load_file(self, file):
# just use standard python file loading as is easier
try:
path = file.get_path()
f = open(path, 'r')
data = f.read()
try:
events = json.loads(data)
try:
self.process_events(events)
# say we've finished successfully
self.emit_progress(1.0)
except Exception as e:
print e
self.emit_error(str(e))
except Exception as e:
print e
self.emit_error('Invalid JSON format: %s' % str(e))
except Exception as e:
self.emit_error('Error reading Causeway log %s: %s' % (path, str(e)))
class CausewayPlugin(GObject.Object, Replay.WindowActivatable):
__gtype_name__ = 'CausewayPlugin'
window = GObject.property(type=Replay.Window)
def do_activate(self):
self._file_loader = CausewayFileLoader()
self.window.add_file_loader(self._file_loader)
def do_deactivate(self):
self.window.remove_file_loader(self._file_loader)
self._file_loader = None
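# Sketch: the smallest event dict that passes validate_event() above
# (values are illustrative; real Causeway logs carry a full trace):
# event = {'class': ['org.ref_send.log.Comment'],
#          'anchor': {'turn': {'loop': 'http://example/-/vatA/'}},
#          'timestamp': 0}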
|
alexmurray/replay
|
plugins/causeway/causeway.py
|
Python
|
gpl-3.0
| 5,091
|
# -*- coding: utf-8 -*-
"""
A Trait Documenter
(Subclassed from the autodoc ClassLevelDocumenter)
:copyright: Copyright 2012 by Enthought, Inc
"""
import traceback
import sys
import inspect
import tokenize
import token
import StringIO
from sphinx.ext.autodoc import ClassLevelDocumenter
from ..trait_handlers import TraitType
from ..has_traits import MetaHasTraits
def _is_class_trait(name, cls):
""" Check if the name is in the list of class defined traits of ``cls``.
"""
return isinstance(cls, MetaHasTraits) and name in cls.__class_traits__
class TraitDocumenter(ClassLevelDocumenter):
""" Specialized Documenter subclass for trait attributes.
The class defines a new documenter that recovers the trait definition
signature of module level and class level traits.
To use the documenter, append the module path in the extension
attribute of the `conf.py`.
.. warning::
Using the TraitDocumenter in conjunction with TraitsDoc is not
advised.
"""
### ClassLevelDocumenter interface #####################################
objtype = 'traitattribute'
directivetype = 'attribute'
member_order = 60
# must be higher than other attribute documenters
priority = 12
@classmethod
def can_document_member(cls, member, membername, isattr, parent):
""" Check that the documented member is a trait instance.
"""
check = (isattr and issubclass(type(member), TraitType) or
_is_class_trait(membername, parent.object))
return check
def document_members(self, all_members=False):
""" Trait attributes have no members """
pass
def add_content(self, more_content, no_docstring=False):
""" Never try to get a docstring from the trait."""
ClassLevelDocumenter.add_content(self, more_content,
no_docstring=True)
def import_object(self):
""" Get the Trait object.
Notes
-----
Code adapted from autodoc.Documenter.import_object.
"""
try:
__import__(self.modname)
current = self.module = sys.modules[self.modname]
for part in self.objpath[:-1]:
current = self.get_attr(current, part)
name = self.objpath[-1]
self.object_name = name
self.object = None
self.parent = current
return True
# this used to only catch SyntaxError, ImportError and
# AttributeError, but importing modules with side effects can raise
# all kinds of errors.
except Exception, err:
if self.env.app and not self.env.app.quiet:
self.env.app.info(traceback.format_exc().rstrip())
            msg = ('autodoc can\'t import/find {0} {1!r}, it reported error: '
                   '"{2}", please check your spelling and sys.path')
self.directive.warn(msg.format(self.objtype, str(self.fullname),
err))
self.env.note_reread()
return False
def add_directive_header(self, sig):
""" Add the directive header 'attribute' with the annotation
option set to the trait definition.
"""
ClassLevelDocumenter.add_directive_header(self, sig)
definition = self._get_trait_definition()
self.add_line(u' :annotation: = {0}'.format(definition),
'<autodoc>')
### Private Interface #####################################################
def _get_trait_definition(self):
""" Retrieve the Trait attribute definition
"""
# Get the class source and tokenize it.
source = inspect.getsource(self.parent)
string_io = StringIO.StringIO(source)
tokens = tokenize.generate_tokens(string_io.readline)
# find the trait definition start
trait_found = False
name_found = False
while not trait_found:
item = tokens.next()
if name_found and item[:2] == (token.OP, '='):
trait_found = True
continue
if item[:2] == (token.NAME, self.object_name):
name_found = True
# Retrieve the trait definition.
definition_tokens = []
for type, name, start, stop, line in tokens:
if type == token.NEWLINE:
break
item = (type, name, (0, start[1]), (0, stop[1]), line)
definition_tokens.append(item)
return tokenize.untokenize(definition_tokens).strip()
def setup(app):
""" Add the TraitDocumenter in the current sphinx autodoc instance. """
app.add_autodocumenter(TraitDocumenter)
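# Sketch of the conf.py hookup the class docstring describes; the module path
# matches this file's location and is otherwise an assumption.
# extensions = ['sphinx.ext.autodoc', 'traits.util.trait_documenter']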
|
HyperloopTeam/FullOpenMDAO
|
lib/python2.7/site-packages/traits-4.3.0-py2.7-macosx-10.10-x86_64.egg/traits/util/trait_documenter.py
|
Python
|
gpl-2.0
| 4,801
|
#!/usr/bin/env python
from SettingsWidgets import *
from gi.repository import Gio
class Module:
def __init__(self, content_box):
keywords = _("desktop, home, button, trash")
advanced = False
sidePage = SidePage(_("Desktop"), "desktop.svg", keywords, advanced, content_box)
self.sidePage = sidePage
self.name = "desktop"
self.category = "prefs"
def _loadCheck(self):
if 'org.nemo' in Gio.Settings.list_schemas():
nemo_desktop_schema = Gio.Settings.new("org.nemo.desktop")
nemo_desktop_keys = nemo_desktop_schema.list_keys()
if "computer-icon-visible" in nemo_desktop_keys:
self.sidePage.add_widget(GSettingsCheckButton(_("Show a computer icon"), "org.nemo.desktop", "computer-icon-visible", None))
if "home-icon-visible" in nemo_desktop_keys:
self.sidePage.add_widget(GSettingsCheckButton(_("Show a home icon"), "org.nemo.desktop", "home-icon-visible", None))
if "trash-icon-visible" in nemo_desktop_keys:
self.sidePage.add_widget(GSettingsCheckButton(_("Show the trash"), "org.nemo.desktop", "trash-icon-visible", None))
if "volumes-visible" in nemo_desktop_keys:
self.sidePage.add_widget(GSettingsCheckButton(_("Show mounted volumes"), "org.nemo.desktop", "volumes-visible", None))
if "network-icon-visible" in nemo_desktop_keys:
self.sidePage.add_widget(GSettingsCheckButton(_("Show network servers"), "org.nemo.desktop", "network-icon-visible", None))
return True
else:
return False
|
chitwanix/Sagarmatha
|
files/usr/lib/sagarmatha-settings/modules/cs_desktop.py
|
Python
|
gpl-2.0
| 1,650
|
from oauth2app.models import *
def clientAuthorizations(client, user):
authorizations = AccessToken.objects.filter(client=client, user=user).all()
ass = []
for a in authorizations:
ass += [k.key for k in a.scope.all()]
return ass
def getClients(type):
try: clients = Client.objects.filter(type=ClientType.objects.get(type=type))
except ClientType.DoesNotExist: return []
return clients
def getTokenForUser(client, user, scope):
token = AccessToken.objects.filter(client=client, user=user, scope=AccessRange.objects.get(key=scope)).order_by('-issue')
try: t = token[0].token
except IndexError: t = {'error':'does not exist'}
except AccessToken.DoesNotExist: t = {'error':'does not exist'}
except Scope.DoesNotExist: t = {'error':'does not exist'}
return t
def getEndpoint(resource, client):
return client.api_uri+resource+'/'
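# Sketch of intended use (names illustrative): fetch the newest token a user
# has granted a client for one scope key; an error dict is returned if absent.
# token_or_error = getTokenForUser(client, user, 'read')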
|
RaduGatej/SensibleData-Platform
|
sensible_data_platform/oauth2_authorization_server/manager.py
|
Python
|
mit
| 847
|
import os
import sys
import time
import atexit
import datetime
import argparse
CUR_DIR = os.path.dirname(os.path.realpath(__file__))
# If you have more than 8 stations, change this value.
NUMBER_OF_STATIONS = 8
# The longest a station can run for by default is 30 minutes. This allows
# the operating system a way to look for stations that have been running too
# long because of a software problem. Change this as needed.
MAX_MINUTES_PER_STATION = 30
try:
import RPi.GPIO as GPIO
except ImportError:
# If you aren't running this on a Pi, you won't have
    # the GPIO available, so there is a file in utilities that
# stubs out the necessary values.
import utilities.gpio_dev as GPIO
class OpenSprinkler():
### Low-Level Hardware Stuff. Don't mess with these. ###
def _enable_shift_register_output(self):
"""
Low-level function to enable shift register output. Don't call this
yourself unless you know why you are doing it.
"""
GPIO.output(self.PIN_SR_NOE, False)
def _disable_shift_register_output(self):
"""
Low-level function to disable shift register output. Don't call this
yourself unless you know why you are doing it.
"""
GPIO.output(self.PIN_SR_NOE, True)
def _set_shift_registers(self, new_values):
"""
This is the low-level function that is called to set the shift registers.
        I don't pretend to understand the inner workings here, but it works. Don't
        use this to turn on/off stations; use operate_station() as the
        higher-level interface.
"""
GPIO.output(self.PIN_SR_CLK, False)
GPIO.output(self.PIN_SR_LAT, False)
for s in range(0, self.number_of_stations):
GPIO.output(self.PIN_SR_CLK, False)
GPIO.output(self.PIN_SR_DAT, new_values[self.number_of_stations-1-s])
GPIO.output(self.PIN_SR_CLK, True)
GPIO.output(self.PIN_SR_LAT, True)
def _initialize_hardware(self):
"""
This contains the low-level stuff required to make the GPIO operations work. Someone
smarter than me wrote this stuff, I just smile and nod.
"""
self.PIN_SR_CLK = 4
self.PIN_SR_NOE = 17
self.PIN_SR_LAT = 22
self.PIN_SR_DAT = 21
# The 2nd revision of the RPI has a different pin value
if GPIO.RPI_REVISION == 2:
self.PIN_SR_DAT = 27
# Not sure why this is called, but it was in the original script.
GPIO.cleanup()
# setup GPIO pins to interface with shift register. Don't muck with this
# stuff unless you know why you are doing it.
GPIO.setmode(GPIO.BCM)
GPIO.setup(self.PIN_SR_CLK, GPIO.OUT)
GPIO.setup(self.PIN_SR_NOE, GPIO.OUT)
self._disable_shift_register_output()
GPIO.setup(self.PIN_SR_DAT, GPIO.OUT)
GPIO.setup(self.PIN_SR_LAT, GPIO.OUT)
self._set_shift_registers(self.station_values)
self._enable_shift_register_output()
def cleanup(self):
"""
This runs at the termination of the file, turning off all stations, making
sure that any PID files are removed, and running GPIO cleanup.
"""
self.log("Running Cleanup.")
self.reset_all_stations()
self.remove_status_file()
GPIO.cleanup()
### Convenience methods for filesystem operations. You don't need to call these
### manually, they are handled by the higher-level operations.
def create_status_file(self, station_number):
"""
Writes a PID file to the directory to indicate what the PID of the
current program is and what zone is being operated.
"""
file_path = os.path.join(CUR_DIR, '%s.pid' % self.pid)
f = open(file_path, 'w')
f.write("%d" % station_number)
f.close()
def remove_status_file(self):
"""
Handles removal of the PID file.
"""
file_path = os.path.join(CUR_DIR, '%s.pid' % self.pid)
if os.path.exists(file_path):
os.remove(file_path)
def check_for_delay(self):
if os.path.exists(os.path.join(CUR_DIR, 'DELAY')):
self.log("Found DELAY file.")
return True
else:
return False
### Logging functionality ###
def log(self, message):
"""
A convenience method for writing operations to a log file. If debugging
is enabled, the message is output to the console.
"""
file_path = os.path.join(CUR_DIR, 'log.txt')
f = open(file_path, 'a')
now_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
msg = '%s\t%s\t%s\n' % (now_time, self.pid, message)
f.write(msg)
if self.debug:
print msg
### Higher-Level Interface. These are the functions you want to call
def operate_station(self, station_number, minutes, queue=None, callback_function=None):
"""
This is the method that operates a station. Running it causes any
currently-running stations to turn off, then a pid file is created that
lets the system know that there is a process running. When it completes,
ALL stations are turned off and the file is cleaned up.
"""
self.log("Operating station %d for %d minutes." % (station_number, minutes))
print "DEBUG: %s" % queue
# First, set all stations to zero
station_values = [0] * self.number_of_stations
# Next, enable just the station to run (adjusting for 0-based index)
station_values[station_number-1] = 1
# Send the command
self._set_shift_registers(station_values)
# Create a filesystem flag to indicate that the system is running
self.create_status_file(station_number)
# After the number of minutes have passed, turn it off
time_to_stop = datetime.datetime.now() + datetime.timedelta(minutes=minutes)
while True:
# If the queue is not empty, it's because a message was passed from the
# parent thread.
if queue and not queue.empty():
self.remove_status_file()
self.reset_all_stations()
break
if datetime.datetime.now() < time_to_stop:
pass
else:
self.log("Finished operating station.")
# We don't know if a new job started while we were snoozing.
# If one did, we don't want to close all valves anymore.
# We need a way to check and see if this process is the most
# recent one.
self.remove_status_file()
self.reset_all_stations()
break
# If a callback function was passed, we call it now.
if callback_function:
callback_function(station_number)
def reset_all_stations(self):
"""
A convenience method for turning everything off.
"""
self.log("Turning Off All Stations.")
off_values = [0] * self.number_of_stations
self._set_shift_registers(off_values)
def __init__(self, debug=False, number_of_stations=8):
self.number_of_stations = number_of_stations
# If debug is true, we print log messages to console
self.debug = debug
# We need to save the PID of the current process.
self.pid = os.getpid()
# Initial values are zero (off) for all stations.
self.station_values = [0] * number_of_stations
# Get the hardware ready for operations
self._initialize_hardware()
if __name__ == "__main__":
# Parse command-line arguments
parser = argparse.ArgumentParser()
parser.add_argument('--station', type=int, help='Station to run [1-8]', required=True)
parser.add_argument('--minutes', type=int, help='Number of minutes to run station.', required=True)
parser.add_argument('--debug', help='Output debugging information', required=False, default=False, action="store_true")
args = vars(parser.parse_args())
station_number = args['station']
number_minutes = args['minutes']
debug = args['debug']
# Make sure the station is within bounds
if station_number not in range(0, NUMBER_OF_STATIONS+1):
sys.exit("Station Number Must Be 1-8, use 0 if you want to turn everything off.")
# Make sure they aren't trying to run a station longer than what is allowed
if number_minutes > MAX_MINUTES_PER_STATION:
sys.exit("Maximum Minutes Allowed is %d." % MAX_MINUTES_PER_STATION)
sprinkler = OpenSprinkler(debug=debug, number_of_stations=NUMBER_OF_STATIONS)
# We register the cleanup method to make sure everything is
# properly closed out, even if an error occurs.
atexit.register(sprinkler.cleanup)
# If they pass station zero, we assume that to be an "all off" command.
if station_number > 0:
# Make sure there is no delay in effect
if not sprinkler.check_for_delay():
sprinkler.operate_station(station_number, number_minutes)
else:
# We don't actually do anything here, since the stations will automatically
# be reset when the file exits.
sprinkler.log('Received all off command.')
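# Example invocations, per the argparse definitions above:
#   python opensprinkler.py --station 3 --minutes 10
#   python opensprinkler.py --station 0 --minutes 0 --debug  # all off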
|
greencoder/ospi-cli
|
opensprinkler.py
|
Python
|
mit
| 9,457
|
import numpy as np
import copy
class Complex:
def __init__(self, dim, func, func_args=(), symmetry=False, bounds=None,
g_cons=None, g_args=()):
self.dim = dim
self.bounds = bounds
self.symmetry = symmetry # TODO: Define the functions to be used
# here in init to avoid if checks
self.gen = 0
self.perm_cycle = 0
        # Every cell is stored in a list of its generation,
        # e.g. the initial cell is stored in self.H[0],
        # first-generation subcells are stored in self.H[1], etc.
        # When a cell is sub-generated it is removed from this list.
self.H = [] # Storage structure of cells
# Cache of all vertices
self.V = VertexCache(func, func_args, bounds, g_cons, g_args)
# Generate n-cube here:
self.n_cube(dim, symmetry=symmetry)
        # TODO: Assign functions to the complex instead
if symmetry:
self.generation_cycle = 1
# self.centroid = self.C0()[-1].x
# self.C0.centroid = self.centroid
else:
self.add_centroid()
self.H.append([])
self.H[0].append(self.C0)
self.hgr = self.C0.homology_group_rank()
self.hgrd = 0 # Complex group rank differential
# self.hgr = self.C0.hg_n
# Build initial graph
self.graph_map()
self.performance = []
self.performance.append(0)
self.performance.append(0)
def __call__(self):
return self.H
def n_cube(self, dim, symmetry=False, printout=False):
"""
Generate the simplicial triangulation of the N-D hypercube
containing 2**n vertices
"""
origin = list(np.zeros(dim, dtype=int))
self.origin = origin
supremum = list(np.ones(dim, dtype=int))
self.supremum = supremum
# tuple versions for indexing
origintuple = tuple(origin)
supremumtuple = tuple(supremum)
x_parents = [origintuple]
if symmetry:
self.C0 = Simplex(0, 0, 0, self.dim) # Initial cell object
self.C0.add_vertex(self.V[origintuple])
i_s = 0
self.perm_symmetry(i_s, x_parents, origin)
self.C0.add_vertex(self.V[supremumtuple])
else:
self.C0 = Cell(0, 0, origin, supremum) # Initial cell object
self.C0.add_vertex(self.V[origintuple])
self.C0.add_vertex(self.V[supremumtuple])
i_parents = []
self.perm(i_parents, x_parents, origin)
if printout:
print("Initial hyper cube:")
for v in self.C0():
v.print_out()
def perm(self, i_parents, x_parents, xi):
# TODO: Cut out of for if outside linear constraint cutting planes
xi_t = tuple(xi)
# Construct required iterator
iter_range = [x for x in range(self.dim) if x not in i_parents]
for i in iter_range:
i2_parents = copy.copy(i_parents)
i2_parents.append(i)
xi2 = copy.copy(xi)
xi2[i] = 1
# Make new vertex list a hashable tuple
xi2_t = tuple(xi2)
# Append to cell
self.C0.add_vertex(self.V[xi2_t])
# Connect neighbors and vice versa
# Parent point
self.V[xi2_t].connect(self.V[xi_t])
# Connect all family of simplices in parent containers
for x_ip in x_parents:
self.V[xi2_t].connect(self.V[x_ip])
x_parents2 = copy.copy(x_parents)
x_parents2.append(xi_t)
# Permutate
self.perm(i2_parents, x_parents2, xi2)
def perm_symmetry(self, i_s, x_parents, xi):
# TODO: Cut out of for if outside linear constraint cutting planes
xi_t = tuple(xi)
xi2 = copy.copy(xi)
xi2[i_s] = 1
# Make new vertex list a hashable tuple
xi2_t = tuple(xi2)
# Append to cell
self.C0.add_vertex(self.V[xi2_t])
# Connect neighbors and vice versa
# Parent point
self.V[xi2_t].connect(self.V[xi_t])
# Connect all family of simplices in parent containers
for x_ip in x_parents:
self.V[xi2_t].connect(self.V[x_ip])
x_parents2 = copy.copy(x_parents)
x_parents2.append(xi_t)
i_s += 1
if i_s == self.dim:
return
# Permutate
self.perm_symmetry(i_s, x_parents2, xi2)
def add_centroid(self):
"""Split the central edge between the origin and supremum of
a cell and add the new vertex to the complex"""
self.centroid = list(
(np.array(self.origin) + np.array(self.supremum)) / 2.0)
self.C0.add_vertex(self.V[tuple(self.centroid)])
self.C0.centroid = self.centroid
# Disconnect origin and supremum
self.V[tuple(self.origin)].disconnect(self.V[tuple(self.supremum)])
# Connect centroid to all other vertices
for v in self.C0():
self.V[tuple(self.centroid)].connect(self.V[tuple(v.x)])
self.centroid_added = True
return
# Construct incidence array:
def incidence(self):
if self.centroid_added:
self.structure = np.zeros([2 ** self.dim + 1, 2 ** self.dim + 1],
dtype=int)
else:
self.structure = np.zeros([2 ** self.dim, 2 ** self.dim],
dtype=int)
        for v in self.C0():
for v2 in v.nn:
self.structure[v.index, v2.index] = 1
return
# A more sparse incidence generator:
def graph_map(self):
""" Make a list of size 2**n + 1 where an entry is a vertex
incidence, each list element contains a list of indexes
corresponding to that entries neighbors"""
self.graph = [[v2.index for v2 in v.nn] for v in self.C0()]
# Graph structure method:
# 0. Capture the indices of the initial cell.
# 1. Generate new origin and supremum scalars based on current generation
# 2. Generate a new set of vertices corresponding to a new
# "origin" and "supremum"
# 3. Connected based on the indices of the previous graph structure
# 4. Disconnect the edges in the original cell
def sub_generate_cell(self, C_i, gen):
"""Subgenerate a cell `C_i` of generation `gen` and
homology group rank `hgr`."""
origin_new = tuple(C_i.centroid)
centroid_index = len(C_i()) - 1
        # Append a new generation list to self.H if it does not exist yet
try:
self.H[gen]
except IndexError:
self.H.append([])
# Generate subcubes using every extreme vertex in C_i as a supremum
# and the centroid of C_i as the origin
H_new = [] # list storing all the new cubes split from C_i
for i, v in enumerate(C_i()[:-1]):
supremum = tuple(v.x)
H_new.append(
self.construct_hypercube(origin_new, supremum, gen, C_i.hg_n))
for i, connections in enumerate(self.graph):
# Present vertex V_new[i]; connect to all connections:
if i == centroid_index: # Break out of centroid
break
for j in connections:
C_i()[i].disconnect(C_i()[j])
# Destroy the old cell
if C_i is not self.C0: # Garbage collector does this anyway; not needed
del C_i
# TODO: Recalculate all the homology group ranks of each cell
return H_new
def split_generation(self):
"""
        Run sub_generate_cell() for every cell in the current generation self.gen
"""
no_splits = False # USED IN SHGO
try:
for c in self.H[self.gen]:
if self.symmetry:
# self.sub_generate_cell_symmetry(c, self.gen + 1)
self.split_simplex_symmetry(c, self.gen + 1)
else:
self.sub_generate_cell(c, self.gen + 1)
except IndexError:
no_splits = True # USED IN SHGO
self.gen += 1
return no_splits # USED IN SHGO
def construct_hypercube(self, origin, supremum, gen, hgr,
printout=False):
"""
Build a hypercube with triangulations symmetric to C0.
Parameters
----------
origin : vec
supremum : vec (tuple)
gen : generation
hgr : parent homology group rank
"""
# Initiate new cell
v_o = np.array(origin)
v_s = np.array(supremum)
C_new = Cell(gen, hgr, origin, supremum)
C_new.centroid = tuple((v_o + v_s) * .5)
# Build new indexed vertex list
V_new = []
for i, v in enumerate(self.C0()[:-1]):
v_x = np.array(v.x)
sub_cell_t1 = v_o - v_o * v_x
sub_cell_t2 = v_s * v_x
vec = sub_cell_t1 + sub_cell_t2
vec = tuple(vec)
C_new.add_vertex(self.V[vec])
V_new.append(vec)
# Add new centroid
C_new.add_vertex(self.V[C_new.centroid])
V_new.append(C_new.centroid)
# Connect new vertices #TODO: Thread into other loop; no need for V_new
for i, connections in enumerate(self.graph):
# Present vertex V_new[i]; connect to all connections:
for j in connections:
self.V[V_new[i]].connect(self.V[V_new[j]])
if printout:
print("A sub hyper cube with:")
print("origin: {}".format(origin))
print("supremum: {}".format(supremum))
for v in C_new():
v.print_out()
        # Append the new cell to the complex
self.H[gen].append(C_new)
return C_new
def split_simplex_symmetry(self, S, gen):
"""
Split a hypersimplex S into two sub simplices by building a hyperplane
which connects to a new vertex on an edge (the longest edge in
dim = {2, 3}) and every other vertex in the simplex that is not
connected to the edge being split.
        This function utilizes the knowledge that the problem is specified
        with symmetric constraints.
        The longest edge is tracked by an ordering of the
        vertices in every simplex; the edge between the first and second
        vertex is the longest edge to be split in the next iteration.
"""
        # Append a new generation list to self.H if it does not exist yet
try:
self.H[gen]
except IndexError:
self.H.append([])
# Find new vertex.
# V_new_x = tuple((np.array(C()[0].x) + np.array(C()[1].x)) / 2.0)
s = S()
firstx = s[0].x
lastx = s[-1].x
V_new = self.V[tuple((np.array(firstx) + np.array(lastx)) / 2.0)]
# Disconnect old longest edge
self.V[firstx].disconnect(self.V[lastx])
# Connect new vertices to all other vertices
for v in s[:]:
v.connect(self.V[V_new.x])
# New "lower" simplex
S_new_l = Simplex(gen, S.hg_n, self.generation_cycle,
self.dim)
S_new_l.add_vertex(s[0])
S_new_l.add_vertex(V_new) # Add new vertex
for v in s[1:-1]: # Add all other vertices
S_new_l.add_vertex(v)
# New "upper" simplex
S_new_u = Simplex(gen, S.hg_n, S.generation_cycle, self.dim)
# First vertex on new long edge
S_new_u.add_vertex(s[S_new_u.generation_cycle + 1])
        for k, v in enumerate(s[1:-1]):  # iterate through inner vertices
            if k == S.generation_cycle:
                S_new_u.add_vertex(V_new)  # substitute in the split vertex
            else:
                S_new_u.add_vertex(v)
S_new_u.add_vertex(s[-1]) # Second vertex on new long edge
self.H[gen].append(S_new_l)
self.H[gen].append(S_new_u)
return
# Plots
def plot_complex(self):
"""
        Here, C is the LIST of simplexes S in the
        2- or 3-D complex.
        To plot a single simplex S in a set C, use e.g. [C[0]]
"""
from matplotlib import pyplot
if self.dim == 2:
pyplot.figure()
for C in self.H:
for c in C:
for v in c():
if self.bounds is None:
x_a = np.array(v.x, dtype=float)
else:
x_a = np.array(v.x, dtype=float)
for i in range(len(self.bounds)):
x_a[i] = (x_a[i] * (self.bounds[i][1]
- self.bounds[i][0])
+ self.bounds[i][0])
# logging.info('v.x_a = {}'.format(x_a))
pyplot.plot([x_a[0]], [x_a[1]], 'o')
xlines = []
ylines = []
for vn in v.nn:
if self.bounds is None:
xn_a = np.array(vn.x, dtype=float)
else:
xn_a = np.array(vn.x, dtype=float)
for i in range(len(self.bounds)):
xn_a[i] = (xn_a[i] * (self.bounds[i][1]
- self.bounds[i][0])
+ self.bounds[i][0])
# logging.info('vn.x = {}'.format(vn.x))
xlines.append(xn_a[0])
ylines.append(xn_a[1])
xlines.append(x_a[0])
ylines.append(x_a[1])
pyplot.plot(xlines, ylines)
if self.bounds is None:
pyplot.ylim([-1e-2, 1 + 1e-2])
pyplot.xlim([-1e-2, 1 + 1e-2])
else:
pyplot.ylim(
[self.bounds[1][0] - 1e-2, self.bounds[1][1] + 1e-2])
pyplot.xlim(
[self.bounds[0][0] - 1e-2, self.bounds[0][1] + 1e-2])
pyplot.show()
elif self.dim == 3:
fig = pyplot.figure()
ax = fig.add_subplot(111, projection='3d')
for C in self.H:
for c in C:
for v in c():
x = []
y = []
z = []
# logging.info('v.x = {}'.format(v.x))
x.append(v.x[0])
y.append(v.x[1])
z.append(v.x[2])
for vn in v.nn:
x.append(vn.x[0])
y.append(vn.x[1])
z.append(vn.x[2])
x.append(v.x[0])
y.append(v.x[1])
z.append(v.x[2])
# logging.info('vn.x = {}'.format(vn.x))
ax.plot(x, y, z, label='simplex')
pyplot.show()
else:
print("dimension higher than 3 or wrong complex format")
return
class VertexGroup:
def __init__(self, p_gen, p_hgr):
self.p_gen = p_gen # parent generation
self.p_hgr = p_hgr # parent homology group rank
self.hg_n = None
self.hg_d = None
# Maybe add parent homology group rank total history
        # This is the sum of all previously split cells
# cumulatively throughout its entire history
self.C = []
def __call__(self):
return self.C
def add_vertex(self, V):
if V not in self.C:
self.C.append(V)
def homology_group_rank(self):
"""
Returns the homology group order of the current cell
"""
if self.hg_n is None:
self.hg_n = sum(1 for v in self.C if v.minimiser())
return self.hg_n
def homology_group_differential(self):
"""
Returns the difference between the current homology group of the
cell and its parent group
"""
        if self.hg_d is None:
            self.hg_d = self.hg_n - self.p_hgr
        return self.hg_d
def polytopial_sperner_lemma(self):
"""
        Returns the number of stationary points theoretically contained in the
        cell based on information currently known about the cell
"""
pass
def print_out(self):
"""
Print the current cell to console
"""
for v in self():
v.print_out()
class Cell(VertexGroup):
"""
Contains a cell that is symmetric to the initial hypercube triangulation
"""
def __init__(self, p_gen, p_hgr, origin, supremum):
super().__init__(p_gen, p_hgr)
self.origin = origin
self.supremum = supremum
self.centroid = None # (Not always used)
# TODO: self.bounds
class Simplex(VertexGroup):
"""
Contains a simplex that is symmetric to the initial symmetry constrained
hypersimplex triangulation
"""
def __init__(self, p_gen, p_hgr, generation_cycle, dim):
super().__init__(p_gen, p_hgr)
self.generation_cycle = (generation_cycle + 1) % (dim - 1)
class Vertex:
def __init__(self, x, bounds=None, func=None, func_args=(), g_cons=None,
g_cons_args=(), nn=None, index=None):
self.x = x
self.order = sum(x)
x_a = np.array(x, dtype=float)
if bounds is not None:
for i, (lb, ub) in enumerate(bounds):
x_a[i] = x_a[i] * (ub - lb) + lb
# TODO: Make saving the array structure optional
self.x_a = x_a
# Note Vertex is only initiated once for all x so only
# evaluated once
if func is not None:
self.feasible = True
if g_cons is not None:
for g, args in zip(g_cons, g_cons_args):
if g(self.x_a, *args) < 0.0:
self.f = np.inf
self.feasible = False
break
if self.feasible:
self.f = func(x_a, *func_args)
if nn is not None:
self.nn = nn
else:
self.nn = set()
self.fval = None
self.check_min = True
# Index:
if index is not None:
self.index = index
def __hash__(self):
return hash(self.x)
def connect(self, v):
if v is not self and v not in self.nn:
self.nn.add(v)
v.nn.add(self)
if self.minimiser():
v._min = False
v.check_min = False
# TEMPORARY
self.check_min = True
v.check_min = True
def disconnect(self, v):
if v in self.nn:
self.nn.remove(v)
v.nn.remove(self)
self.check_min = True
v.check_min = True
def minimiser(self):
"""Check whether this vertex is strictly less than all its neighbors"""
if self.check_min:
self._min = all(self.f < v.f for v in self.nn)
self.check_min = False
return self._min
def print_out(self):
print("Vertex: {}".format(self.x))
constr = 'Connections: '
for vc in self.nn:
constr += '{} '.format(vc.x)
print(constr)
print('Order = {}'.format(self.order))
class VertexCache:
def __init__(self, func, func_args=(), bounds=None, g_cons=None,
g_cons_args=(), indexed=True):
self.cache = {}
self.func = func
self.g_cons = g_cons
self.g_cons_args = g_cons_args
self.func_args = func_args
self.bounds = bounds
self.nfev = 0
self.size = 0
if indexed:
self.index = -1
def __getitem__(self, x, indexed=True):
try:
return self.cache[x]
except KeyError:
if indexed:
self.index += 1
xval = Vertex(x, bounds=self.bounds,
func=self.func, func_args=self.func_args,
g_cons=self.g_cons,
g_cons_args=self.g_cons_args,
index=self.index)
else:
xval = Vertex(x, bounds=self.bounds,
func=self.func, func_args=self.func_args,
g_cons=self.g_cons,
g_cons_args=self.g_cons_args)
# logging.info("New generated vertex at x = {}".format(x))
# NOTE: Surprisingly high performance increase if logging is commented out
self.cache[x] = xval
# TODO: Check
if self.func is not None:
if self.g_cons is not None:
if xval.feasible:
self.nfev += 1
self.size += 1
else:
self.size += 1
else:
self.nfev += 1
self.size += 1
return self.cache[x]
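
# --- Editor's note: a hedged usage sketch, not part of the original
# module. It drives the Complex class above on a hypothetical objective
# (`sphere`); the VertexCache evaluates the function once per unique
# vertex. Requires only numpy, like the module itself.
if __name__ == '__main__':
    def sphere(x):
        return sum(xi ** 2 for xi in x)

    HC = Complex(2, sphere, bounds=[(-1.0, 1.0), (-1.0, 1.0)])
    HC.split_generation()  # refine every cell of the current generation
    print("function evaluations: {}".format(HC.V.nfev))
    print("cells per generation: {}".format([len(g) for g in HC.H]))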
|
scipy/scipy
|
scipy/optimize/_shgo_lib/triangulation.py
|
Python
|
bsd-3-clause
| 21,439
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
mindcube/mindcube-django-cookiecutter
|
{{cookiecutter.repo_name}}/manage.py
|
Python
|
mit
| 256
|
from myhdl import *
from vga_intf import System
from vga_intf import VideoMemory
# r, g, b
cbars = [
[1, 1, 1,], # white
[1, 1, 0,], # yellow
[0, 1, 1,], # cyan
[0, 1, 0,], # green
[1, 0, 1,], # magenta
[1, 0, 0,], # red
[0, 0, 1,], # blue
[0, 0, 0,], # black
]
def _update_cbars_with_max(P, width):
global cbars
for cc in range(len(cbars)):
for ii in range(3):
if cbars[cc][ii] == 1:
cbars[cc][ii] = P
# create a single value out of pixel tuple
#print("c", cbars[cc])
val = (cbars[cc][0] << 2*width) + \
(cbars[cc][1] << width) + \
cbars[cc][2]
#print("v", val)
cbars[cc] = val
cbars = tuple(cbars)
for ii in range(len(cbars)):
print("%3d: %08X" % (ii, cbars[ii]))
def m_color_bars(dsys, vmem, resolution=(640,440), width=10):
""" generate a color bar pattern
"""
global cbars
NUM_COLORS, PMAX, res = len(cbars), (2**width)-1, resolution
    # since a design has only one VGA driver, it is OK to
    # update cbars globally!
_update_cbars_with_max(PMAX, width)
    # the width of each color band
pw = res[0] / NUM_COLORS
clock,reset = dsys.clock,dsys.reset
pval = Signal(intbv(0)[3*width:])
# DEBUG
ssel = Signal(intbv(0)[32:0])
@always_comb
def rtl_pval():
sel = 0
for ii in range(NUM_COLORS):
if vmem.hpxl > (ii*pw):
sel = ii
ssel.next = sel
pval.next = cbars[sel]
W2,W,MASK = 2*width, width, PMAX
@always_seq(clock.posedge, reset=reset)
def rtl_rgb():
# unpack the RGB value
vmem.red.next = (pval >> W2) & MASK
vmem.green.next = (pval >> W) & MASK
vmem.blue.next = pval & MASK
return rtl_pval, rtl_rgb
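
# --- Editor's note: a hedged plain-Python illustration, not part of the
# original module, of the channel packing done in _update_cbars_with_max
# and unpacked in rtl_rgb. The width value is an arbitrary example.
#
#   width = 10                        # bits per color channel
#   PMAX = (2 ** width) - 1           # full-scale channel value (1023)
#   r, g, b = PMAX, 0, PMAX           # magenta
#   packed = (r << 2 * width) + (g << width) + b
#   assert (packed >> 2 * width) & PMAX == r
#   assert (packed >> width) & PMAX == g
#   assert packed & PMAX == b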
|
chiggs/alt.hdl
|
examples/ex6_vgasys/myhdl/vga_color_bars.py
|
Python
|
mit
| 1,886
|
from cms.constants import PUBLISHER_STATE_DIRTY
from django.db import models
from cms.models import Page, Title
class BaseExtension(models.Model):
public_extension = models.OneToOneField('self', null=True, editable=False, related_name='draft_extension')
extended_object = None
class Meta:
abstract = True
def get_page(self): # pragma: no cover
raise NotImplementedError('Function must be overwritten in subclasses and return the extended page object.')
def copy_relations(self, oldinstance, language):
"""
Copy relations like many to many or foreign key relations to the public version.
Similar to the same named cms plugin function.
:param oldinstance: the draft version of the extension
"""
pass
def copy_to_public(self, public_object, language):
this = self.__class__.objects.get(pk=self.pk) # get a copy of this instance
public_extension = self.public_extension # get the public version of this instance if any
this.extended_object = public_object # set the new public object
if public_extension:
this.pk = public_extension.pk # overwrite current public extension
this.public_extension = None # remove public extension or it will point to itself and raise duplicate entry
else:
this.pk = None # create new public extension
this.save(mark_page=False)
self.public_extension = this
self.save(mark_page=False)
this.copy_relations(self, language)
this.save(force_update=True, mark_page=False)
return this
class PageExtension(BaseExtension):
extended_object = models.OneToOneField(Page, editable=False)
class Meta:
abstract = True
def get_page(self):
return self.extended_object
def save(self, *args, **kwargs):
if kwargs.pop('mark_page', True):
self.get_page().title_set.update(publisher_state=PUBLISHER_STATE_DIRTY) # mark page dirty
return super(BaseExtension, self).save(*args, **kwargs)
def delete(self, *args, **kwargs):
if kwargs.pop('mark_page', True):
self.get_page().title_set.update(publisher_state=PUBLISHER_STATE_DIRTY) # mark page dirty
return super(BaseExtension, self).delete(*args, **kwargs)
class TitleExtension(BaseExtension):
extended_object = models.OneToOneField(Title, editable=False)
class Meta:
abstract = True
def get_page(self):
return self.extended_object.page
def save(self, *args, **kwargs):
if kwargs.pop('mark_page', True):
Title.objects.filter(pk=self.extended_object.pk).update(
publisher_state=PUBLISHER_STATE_DIRTY) # mark title dirty
return super(BaseExtension, self).save(*args, **kwargs)
def delete(self, *args, **kwargs):
if kwargs.pop('mark_page', True):
Title.objects.filter(pk=self.extended_object.pk).update(
publisher_state=PUBLISHER_STATE_DIRTY) # mark title dirty
return super(BaseExtension, self).delete(*args, **kwargs)
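
# --- Editor's note: a hedged usage sketch, not part of this module. A
# concrete extension subclasses PageExtension and is registered with the
# extension pool; this assumes a configured Django project with django CMS
# installed, and `IconExtension`/`image` are illustrative names only.
#
# from django.db import models
# from cms.extensions import PageExtension
# from cms.extensions.extension_pool import extension_pool
#
# class IconExtension(PageExtension):
#     image = models.ImageField(upload_to='icons')
#
# extension_pool.register(IconExtension)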
|
sephii/django-cms
|
cms/extensions/models.py
|
Python
|
bsd-3-clause
| 3,140
|
# -*- coding: utf-8 -*-
import os.path
import sys
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from functools import partial
import asyncio
import argparse
from aiogear import Worker, Client
def parse_args():
args = sys.argv[1:]
parser = argparse.ArgumentParser()
parser.add_argument('-a', '--addr', default='127.0.0.1', help='Gearman host address.')
parser.add_argument('-p', '--port', default=4730, type=int, help='Gearman port number.')
return parser.parse_args(args)
async def sleep(job_info):
sleep_time = int(job_info.workload)
print(job_info)
print('Sleeping for ', sleep_time, ' seconds')
await asyncio.sleep(sleep_time)
print('Sleep is done')
async def register_worker(loop, addr, port):
factory = lambda: Worker(sleep, loop=loop)
_, worker = await loop.create_connection(factory, addr, port)
return worker
async def connect(loop, addr, port):
client = Client()
await loop.create_connection(lambda: client, addr, port)
return client
def job_is_complete(job_created, f):
print('Created Job ', job_created, ' is finished')
async def main(loop, addr, port):
    # Connect the worker and register its function
worker = await register_worker(loop, addr, port)
# Connect as client
async with await connect(loop, addr, port) as client:
        # Run the sleep task and wait for it
job_created = await client.submit_job('sleep', '5')
f = client.wait_job(job_created.handle)
f.add_done_callback(partial(job_is_complete, job_created))
await f
await worker.shutdown()
loop.stop()
if __name__ == '__main__':
args = parse_args()
loop = asyncio.get_event_loop()
loop.run_until_complete(main(loop, args.addr, args.port))
try:
loop.run_forever()
except KeyboardInterrupt:
loop.close()
|
sardok/aiogear
|
examples/client.py
|
Python
|
mit
| 1,875
|
from .banyan_base_multi import BanyanBaseMulti
|
MrYsLab/python_banyan
|
python_banyan/banyan_base_multi/__init__.py
|
Python
|
agpl-3.0
| 47
|
##############################################################################
#
# Copyright (c) 2014, 2degrees Limited.
# All Rights Reserved.
#
# This file is part of hubspot-contacts
# <https://github.com/2degrees/hubspot-contacts>, which is subject to the
# provisions of the BSD at
# <http://dev.2degreesnetwork.com/p/2degrees-license.html>. A copy of the
# license should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS"
# AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT
# NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST
# INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
#
##############################################################################
from voluptuous import All
from voluptuous import Any
from voluptuous import Length
from voluptuous import Schema
from hubspot.contacts._schemas._validators import AnyListItemValidates
from hubspot.contacts._schemas._validators import Constant
from hubspot.contacts._schemas._validators import DynamicDictionary
from hubspot.contacts._schemas._validators import GetDictValue
_CANONICAL_IDENTITY_PROFILE_SCHEMA = All(
[],
AnyListItemValidates(
Schema(
{'type': Constant(u'EMAIL'), 'value': unicode},
required=True,
extra=True,
),
),
)
_IS_PROPERTY_VALUE = Schema({'value': unicode}, required=True, extra=True)
_IDENTITY_PROFILE_SCHEMA = Schema(
{'vid': int, 'identities': Any([], _CANONICAL_IDENTITY_PROFILE_SCHEMA)},
extra=True,
required=True,
)
CONTACT_SCHEMA = Schema(
{
'vid': int,
'properties': DynamicDictionary(
unicode,
All(_IS_PROPERTY_VALUE, GetDictValue('value')),
),
'identity-profiles': All([_IDENTITY_PROFILE_SCHEMA], Length(min=1)),
},
required=True,
extra=True,
)
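
# --- Editor's note: a hedged usage sketch, not part of this module. It
# validates a made-up contact payload against CONTACT_SCHEMA; like the
# module itself it assumes Python 2 (``unicode`` string type).
#
# from voluptuous import MultipleInvalid
#
# contact_data = {
#     'vid': 123,
#     'properties': {u'firstname': {'value': u'Ada'}},
#     'identity-profiles': [
#         {'vid': 123,
#          'identities': [{'type': u'EMAIL', 'value': u'ada@example.com'}]},
#     ],
# }
# try:
#     contact = CONTACT_SCHEMA(contact_data)
# except MultipleInvalid as exc:
#     print 'invalid contact: %s' % exc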
|
2degrees/hubspot-contacts
|
hubspot/contacts/_schemas/contacts.py
|
Python
|
bsd-3-clause
| 1,895
|
# source: http://stackoverflow.com/questions/2758159/how-to-embed-a-python-interpreter-in-a-pyqt-widget
import sys
import os
import re
import traceback
from PyQt5 import QtCore
from PyQt5 import QtGui
from PyQt5 import QtWidgets
from electrum_ltc import util
from electrum_ltc.i18n import _
from .util import MONOSPACE_FONT
class OverlayLabel(QtWidgets.QLabel):
STYLESHEET = '''
QLabel, QLabel link {
color: rgb(0, 0, 0);
background-color: rgb(248, 240, 200);
border: 1px solid;
border-color: rgb(255, 114, 47);
padding: 2px;
}
'''
def __init__(self, text, parent):
super().__init__(text, parent)
self.setMinimumHeight(150)
self.setGeometry(0, 0, self.width(), self.height())
self.setStyleSheet(self.STYLESHEET)
self.setMargin(0)
parent.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.setWordWrap(True)
def mousePressEvent(self, e):
self.hide()
def on_resize(self, w):
padding = 2 # px, from the stylesheet above
self.setFixedWidth(w - padding)
class Console(QtWidgets.QPlainTextEdit):
def __init__(self, prompt='>> ', startup_message='', parent=None):
QtWidgets.QPlainTextEdit.__init__(self, parent)
self.prompt = prompt
self.history = []
self.namespace = {}
self.construct = []
self.setGeometry(50, 75, 600, 400)
self.setWordWrapMode(QtGui.QTextOption.WrapAnywhere)
self.setUndoRedoEnabled(False)
self.document().setDefaultFont(QtGui.QFont(MONOSPACE_FONT, 10, QtGui.QFont.Normal))
self.showMessage(startup_message)
self.updateNamespace({'run':self.run_script})
self.set_json(False)
warning_text = "<h1>{}</h1><br>{}<br><br>{}".format(
_("Warning!"),
_("Do not paste code here that you don't understand. Executing the wrong code could lead "
"to your coins being irreversibly lost."),
_("Click here to hide this message.")
)
self.messageOverlay = OverlayLabel(warning_text, self)
def resizeEvent(self, e):
super().resizeEvent(e)
vertical_scrollbar_width = self.verticalScrollBar().width() * self.verticalScrollBar().isVisible()
self.messageOverlay.on_resize(self.width() - vertical_scrollbar_width)
def set_json(self, b):
self.is_json = b
def run_script(self, filename):
with open(filename) as f:
script = f.read()
# eval is generally considered bad practice. use it wisely!
result = eval(script, self.namespace, self.namespace)
def updateNamespace(self, namespace):
self.namespace.update(namespace)
def showMessage(self, message):
self.appendPlainText(message)
self.newPrompt()
def clear(self):
self.setPlainText('')
self.newPrompt()
def newPrompt(self):
if self.construct:
prompt = '.' * len(self.prompt)
else:
prompt = self.prompt
self.completions_pos = self.textCursor().position()
self.completions_visible = False
self.appendPlainText(prompt)
self.moveCursor(QtGui.QTextCursor.End)
def getCommand(self):
doc = self.document()
curr_line = doc.findBlockByLineNumber(doc.lineCount() - 1).text()
curr_line = curr_line.rstrip()
curr_line = curr_line[len(self.prompt):]
return curr_line
def setCommand(self, command):
if self.getCommand() == command:
return
doc = self.document()
curr_line = doc.findBlockByLineNumber(doc.lineCount() - 1).text()
self.moveCursor(QtGui.QTextCursor.End)
for i in range(len(curr_line) - len(self.prompt)):
self.moveCursor(QtGui.QTextCursor.Left, QtGui.QTextCursor.KeepAnchor)
self.textCursor().removeSelectedText()
self.textCursor().insertText(command)
self.moveCursor(QtGui.QTextCursor.End)
def show_completions(self, completions):
if self.completions_visible:
self.hide_completions()
c = self.textCursor()
c.setPosition(self.completions_pos)
completions = map(lambda x: x.split('.')[-1], completions)
t = '\n' + ' '.join(completions)
if len(t) > 500:
t = t[:500] + '...'
c.insertText(t)
self.completions_end = c.position()
self.moveCursor(QtGui.QTextCursor.End)
self.completions_visible = True
def hide_completions(self):
if not self.completions_visible:
return
c = self.textCursor()
c.setPosition(self.completions_pos)
l = self.completions_end - self.completions_pos
for x in range(l): c.deleteChar()
self.moveCursor(QtGui.QTextCursor.End)
self.completions_visible = False
def getConstruct(self, command):
if self.construct:
prev_command = self.construct[-1]
self.construct.append(command)
if not prev_command and not command:
ret_val = '\n'.join(self.construct)
self.construct = []
return ret_val
else:
return ''
else:
if command and command[-1] == (':'):
self.construct.append(command)
return ''
else:
return command
def getHistory(self):
return self.history
    def setHistory(self, history):
self.history = history
def addToHistory(self, command):
if command[0:1] == ' ':
return
if command and (not self.history or self.history[-1] != command):
self.history.append(command)
self.history_index = len(self.history)
def getPrevHistoryEntry(self):
if self.history:
self.history_index = max(0, self.history_index - 1)
return self.history[self.history_index]
return ''
def getNextHistoryEntry(self):
if self.history:
hist_len = len(self.history)
self.history_index = min(hist_len, self.history_index + 1)
if self.history_index < hist_len:
return self.history[self.history_index]
return ''
def getCursorPosition(self):
c = self.textCursor()
return c.position() - c.block().position() - len(self.prompt)
def setCursorPosition(self, position):
self.moveCursor(QtGui.QTextCursor.StartOfLine)
for i in range(len(self.prompt) + position):
self.moveCursor(QtGui.QTextCursor.Right)
def register_command(self, c, func):
methods = { c: func}
self.updateNamespace(methods)
def runCommand(self):
command = self.getCommand()
self.addToHistory(command)
command = self.getConstruct(command)
if command:
tmp_stdout = sys.stdout
class stdoutProxy():
def __init__(self, write_func):
self.write_func = write_func
self.skip = False
def flush(self):
pass
def write(self, text):
if not self.skip:
stripped_text = text.rstrip('\n')
self.write_func(stripped_text)
QtCore.QCoreApplication.processEvents()
self.skip = not self.skip
if type(self.namespace.get(command)) == type(lambda:None):
self.appendPlainText("'{}' is a function. Type '{}()' to use it in the Python console."
.format(command, command))
self.newPrompt()
return
sys.stdout = stdoutProxy(self.appendPlainText)
try:
try:
# eval is generally considered bad practice. use it wisely!
result = eval(command, self.namespace, self.namespace)
if result is not None:
if self.is_json:
util.print_msg(util.json_encode(result))
else:
self.appendPlainText(repr(result))
except SyntaxError:
# exec is generally considered bad practice. use it wisely!
exec(command, self.namespace, self.namespace)
except SystemExit:
self.close()
except BaseException:
traceback_lines = traceback.format_exc().split('\n')
# Remove traceback mentioning this file, and a linebreak
for i in (3,2,1,-1):
traceback_lines.pop(i)
self.appendPlainText('\n'.join(traceback_lines))
sys.stdout = tmp_stdout
self.newPrompt()
self.set_json(False)
def keyPressEvent(self, event):
if event.key() == QtCore.Qt.Key_Tab:
self.completions()
return
self.hide_completions()
if event.key() in (QtCore.Qt.Key_Enter, QtCore.Qt.Key_Return):
self.runCommand()
return
if event.key() == QtCore.Qt.Key_Home:
self.setCursorPosition(0)
return
if event.key() == QtCore.Qt.Key_PageUp:
return
elif event.key() in (QtCore.Qt.Key_Left, QtCore.Qt.Key_Backspace):
if self.getCursorPosition() == 0:
return
elif event.key() == QtCore.Qt.Key_Up:
self.setCommand(self.getPrevHistoryEntry())
return
elif event.key() == QtCore.Qt.Key_Down:
self.setCommand(self.getNextHistoryEntry())
return
elif event.key() == QtCore.Qt.Key_L and event.modifiers() == QtCore.Qt.ControlModifier:
self.clear()
super(Console, self).keyPressEvent(event)
def completions(self):
cmd = self.getCommand()
# note for regex: new words start after ' ' or '(' or ')'
lastword = re.split(r'[ ()]', cmd)[-1]
beginning = cmd[0:-len(lastword)]
path = lastword.split('.')
prefix = '.'.join(path[:-1])
prefix = (prefix + '.') if prefix else prefix
ns = self.namespace.keys()
if len(path) == 1:
ns = ns
else:
assert len(path) > 1
obj = self.namespace.get(path[0])
try:
for attr in path[1:-1]:
obj = getattr(obj, attr)
except AttributeError:
ns = []
else:
ns = dir(obj)
completions = []
for name in ns:
if name[0] == '_':continue
if name.startswith(path[-1]):
completions.append(prefix+name)
completions.sort()
if not completions:
self.hide_completions()
elif len(completions) == 1:
self.hide_completions()
self.setCommand(beginning + completions[0])
else:
# find common prefix
p = os.path.commonprefix(completions)
if len(p)>len(lastword):
self.hide_completions()
self.setCommand(beginning + p)
else:
self.show_completions(completions)
welcome_message = '''
---------------------------------------------------------------
Welcome to a primitive Python interpreter.
---------------------------------------------------------------
'''
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
console = Console(startup_message=welcome_message)
console.updateNamespace({'myVar1' : app, 'myVar2' : 1234})
console.show()
sys.exit(app.exec_())
|
vialectrum/vialectrum
|
electrum_ltc/gui/qt/console.py
|
Python
|
mit
| 11,822
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
TRANSACTION_STATUS = (
('P', _('pending')),
('F', _('failed')),
('C', _('complete')),
)
class Transaction(models.Model):
user = models.ForeignKey(User, blank = True, null = True, default = None,
verbose_name = _("user"), help_text = _("user who started transaction"))
description = models.CharField(_("reference description"), max_length = 255, help_text = _("reference description"))
amount = models.FloatField(_("amount"))
currency = models.CharField(_("currency"), max_length = 3)
details = models.CharField(_("details"), max_length = 255, help_text = _("payment details"))
created = models.DateTimeField(auto_now_add = True)
last_modified = models.DateTimeField(auto_now = True)
status = models.CharField(_("status"), max_length = 1, default = 'P')
redirect_after_success = models.CharField(max_length = 255, editable = False)
redirect_on_failure = models.CharField(max_length = 255, editable = False)
    def __unicode__(self):
        return _("transaction %s") % self.pk
class Meta:
verbose_name = _("transaction")
ordering = ['-last_modified']
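
# --- Editor's note: a hedged usage sketch, not part of this module,
# assuming a configured Django project with this app installed. All field
# values are illustrative.
#
# transaction = Transaction.objects.create(
#     description='Order #42',
#     amount=10.0,
#     currency='EUR',
#     details='Payment for order #42',
#     redirect_after_success='/thanks/',
#     redirect_on_failure='/failed/',
# )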
|
truevision/django_banklink
|
django_banklink/models.py
|
Python
|
bsd-3-clause
| 1,291
|
name = input("What is your name? ")
number = input("How old are you? ")
age = int(number)
print("Your name backwards is ", name[::-1].lower())
print("Your age is ten years will be ", age + 10)
|
SaileshPatel/BBC-Brum-Challenges
|
name.py
|
Python
|
mit
| 193
|
import os
import logging
import numpy as np
from utils import CONFIG
from pandas import read_csv
from pprint import pformat
from collections import Counter
from nltk.tokenize import word_tokenize
try:
import cPickle as pickle
except ImportError:
import pickle
config = CONFIG.CapData
logger = logging.getLogger('CapData')
logger.setLevel(config.log.level)
__all__ = ['flickr8k_raw_data']
def _parse_caption(txt):
txt = txt.strip().lower().replace(",", "").replace("'", "")
txt = txt.replace(".", "").replace("\"", "").replace("!", "")
txt = txt.replace("?", "").replace("-", "").replace(')', '')
txt = txt.replace("(", "").replace("&", "and")
txt = " ".join(txt.split())
return txt
def _read_words(data_path):
# read the captions and build a test language model on this data
df = read_csv(data_path, delimiter='\t', names=['name', 'text'],
header=None)
df.text = map(_parse_caption, df.text)
df['image_id'] = map(lambda k: k.split('#')[0], df.name)
# return list of names and list of captions
tokens = [['<ST>'] + word_tokenize(line) + ['.', '<ET>']
for line in df.text.tolist()]
return (df.name.tolist(), tokens)
def _build_vocab(data_path):
(_, cap_toks) = _read_words(data_path)
words = [it for l in cap_toks for it in l]
unq_words = map(lambda i: i[0], Counter(words).most_common()[::-1])
vocab = dict(zip(unq_words, range(1, len(unq_words) + 1)))
return vocab
def _file_to_ids(file_path, word_to_id):
(names, cap_toks) = _read_words(file_path)
return (names, [[word_to_id[word] for word in toks if word in word_to_id]
for toks in cap_toks])
def flickr8k_raw_data(data_path):
train_path = os.path.join(data_path, 'Flickr8k.token.trainImgs.txt')
test_path = os.path.join(data_path, 'Flickr8k.token.testImgs.txt')
dev_path = os.path.join(data_path, 'Flickr8k.token.devImgs.txt')
# vocab - dict, word -> int
vocab = _build_vocab(train_path)
print 'Vocabulary built.'
pkl_vocab_path = os.path.join(data_path, 'vocab.pkl')
with open(pkl_vocab_path, 'wb') as fp:
pickle.dump(vocab, fp)
print 'Vocabulary saved to', pkl_vocab_path
# names - list, image name, word_to_ids - list, ints corresponding to word
(tr_names, tr_word_to_ids) = _file_to_ids(train_path, vocab)
(te_names, te_word_to_ids) = _file_to_ids(test_path, vocab)
(de_names, de_word_to_ids) = _file_to_ids(dev_path, vocab)
pkl_spl_path = os.path.join(data_path, 'split_caps.pkl')
with open(pkl_spl_path, 'wb') as fp:
pickle.dump((tr_names, tr_word_to_ids), fp)
pickle.dump((te_names, te_word_to_ids), fp)
pickle.dump((de_names, de_word_to_ids), fp)
print 'Captions tokenized and encoded; saved to - %s.' % pkl_spl_path
return ({'names': tr_names, 'word_to_ids': tr_word_to_ids},
{'names': te_names, 'word_to_ids': te_word_to_ids},
{'names': de_names, 'word_to_ids': de_word_to_ids},
vocab)
# len(vocab.keys()))
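
# --- Editor's note: a hedged usage sketch, not part of the original
# script. It assumes Python 2 (as the prints above do) and a directory
# holding the three Flickr8k.token.*Imgs.txt caption files.
#
# (train, test, dev, vocab) = flickr8k_raw_data('/path/to/flickr8k')
# print 'train captions: %d, vocabulary size: %d' % (
#     len(train['names']), len(vocab))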
|
chintak/image-captioning
|
scripts/reader.py
|
Python
|
mit
| 3,050
|
"""class:`trols_stats.Reporter`
Statistics reporting module.
"""
import re
import collections
import logging
import trols_stats
class Reporter:
def __init__(self, db):
self.__db = db()
@property
def db(self):
return self.__db
@db.setter
def db(self, value):
self.__db = value
def get_players(self,
names=None,
competition=None,
competition_type=None,
team=None,
section=None):
"""Get all players from cache.
**Kwargs:**
*competition*: the competition identifier. For example::
'nejta_saturday_am_spring_2015'
*competition_type*: 'girls' or 'boys'. (``None`` includes
both)
*section*: section level. (``None`` includes all sections)
**Returns:**
        list of simplified player token IDs, in the form returned by
        :meth:`player_ids_dict`
"""
def cmp_name(name, token):
return name.lower() in token.split('~')[0].lower()
def cmp_team(team, token):
return token.split('~')[1] == team
def cmp_section(section, token):
return token.split('~')[2] == str(section)
def cmp_comp_type(competition_type, token):
return token.split('~')[3] == competition_type
def cmp_comp(competition, token):
return token.split('~')[4] == str(competition)
matched = self.db.keys()
if names is not None:
matched = [x for x in matched for n in names if cmp_name(n, x)]
seen = set()
seen_add = seen.add
matched = [x for x in matched if not (x in seen or seen_add(x))]
if team is not None:
matched = [x for x in matched if cmp_team(team, x)]
if section is not None:
matched = [x for x in matched if cmp_section(section, x)]
if competition_type is not None:
matched = [x for x in matched if cmp_comp_type(competition_type, x)]
if competition is not None:
matched = [x for x in matched if cmp_comp(competition, x)]
return self.player_ids_dict(matched)
@staticmethod
def get_competition_details(competition):
"""Ugly, hardwired event/event type lookup based on
*competition*.
**Args:**
*competition*: competition token to use as the mapper trigger
**Returns:**
dictionary structure representing the event/event type::
{
'event': ['doubles'],
'event_type': ['mens'],
}
"""
comp_details = None
if re.match('dvta_(tuesday|thursday)_night', competition):
comp_details = {
'event': ['doubles'],
'event_type': ['mens'],
}
elif re.match('dvta_thursday_am', competition):
comp_details = {
'event': ['doubles'],
'event_type': ['womens'],
}
elif re.match('dvta_friday_night', competition):
comp_details = {
'event': ['singles'],
'event_type': ['mixed'],
}
elif re.match('nejta', competition):
comp_details = {
'event': ['singles', 'doubles'],
'event_type': ['girls', 'boys'],
}
return comp_details
def get_competitions(self):
"""Return a list of all competitions represented in the current
data set.
**Returns:**
        list of competitions. For example::
[
'nejta_saturday_am_autumn_2014',
'nejta_saturday_am_autumn_2015',
'nejta_saturday_am_spring_2014',
'nejta_saturday_am_spring_2015',
...
]
"""
competitions = set(x.split('~')[4] for x in self.db.keys())
return sorted(competitions)
def get_teams(self,
competition='nejta_saturday_am_spring_2015',
competition_type=None,
section=None):
"""Filter teams based on *competition*, *competition_type*
and *section*.
**Kwargs:**
        *competition*: the TROLS Stats competition key of the form::
<association>_<day>_<time_slot>_<season>_<year>
For example::
'nejta_saturday_am_spring_2015'
*competition_type*: one of ``girls``, ``boys`` or ``None``
for any
*section*: numeric representation of the team section number
**Returns:**
sorted list of team names of the form::
[
'Bundoora',
'Eaglemont',
'Mill Park',
'Rosanna',
...
]
"""
kwargs = {
'competition': competition,
'competition_type': competition_type,
'section': section
}
tokens = self.get_players(**kwargs)
teams = set(x.get('token').split('~')[1] for x in tokens)
return sorted(teams)
def get_sections(self,
competition='nejta_saturday_am_spring_2015',
competition_type=None):
"""Filter sections based on *competition* and *competition_type*.
**Kwargs:**
*competition*: the competition code (default
``nejta_saturday_am_spring_2015``)
*competition_type*: either ``boys``, ``girls`` or ``None``
**Returns:**
sorted list of integer section numbers
"""
kwargs = {
'competition': competition,
'competition_type': competition_type
}
tokens = self.get_players(**kwargs)
sections = set(x.get('token').split('~')[2] for x in tokens)
return sorted([int(x) for x in sections])
def get_player_fixtures(self, player_token):
"""Search for all fixtures where player *name* participated.
*Args:*
*player_token*: player token ID to filter DB against. For
example::
Joel Markovski~Watsonia~20~boys~saturday_am_autumn_2015
*Returns*: list of all :class:`trols_stats.model.aggregate.Game`
objects that *name* was involved in
"""
match_aggregates = self.db.get(player_token)
if match_aggregates is None:
match_aggregates = []
else:
match_aggregates = sorted(match_aggregates,
key=lambda x: x.fixture.match_round_numeric)
return match_aggregates
@staticmethod
def last_fixture_played(games):
"""Sort through the list of *games* and identify the last
fixture played.
*Args:*
*games*: list of :class:`trols_stata.model.aggregates.Games`
model instances
*Returns:*
list of games that were played last
"""
rounds = [x.fixture.match_round for x in games]
last_fixture = []
if rounds:
if 'Grand Final' in rounds:
last_round = 'Grand Final'
elif 'Prelim Final' in rounds:
last_round = 'Prelim Final'
elif 'Semi Final' in rounds:
last_round = 'Semi Final'
else:
last_round = rounds[-1]
last_fixture = [x for x in games if x.fixture.match_round == last_round]
return last_fixture
def get_player_singles(self, name):
"""Return list of singles games from all fixtures where player
*name* participated.
*Args:*
*name*: name to filter DB against
*Returns*: dict of all singles
:class:`trols_stats.model.aggregate.Game`
objects that *name* was involved in. Key is the
:meth:`trols_stats.model.aggregate.Game.get_player_id` `token`.
"""
logging.info('Extracting singles games for player "%s"', name)
fixtures = self.get_player_fixtures(name)
singles_games = [x for x in fixtures if x.is_singles()]
logging.info('Total singles games found with player "%s": %d', name, len(singles_games))
return singles_games
def get_player_doubles(self, name):
"""Return list of doubles games from all fixtures where player
*name* participated.
*Args:*
*name*: name to filter DB against
*Returns*: list of all doubles
:class:`trols_stats.model.aggregate.Game`
objects that *name* was involved in
"""
logging.info('Extracting doubles games for player "%s"', name)
fixtures = self.get_player_fixtures(name)
doubles_games = [x for x in fixtures if x.is_doubles()]
logging.info('Total doubles games found with player "%s": %d', name, len(doubles_games))
return doubles_games
def get_player_stats(self,
player_tokens=None,
last_fixture=False,
event=None):
"""Calculates and returns match stats from all fixtures for all
or nominated players.
*Args:*
        *player_tokens*: list of player token IDs to filter the DB against.
For example::
Joel Markovski~Watsonia~20~boys~saturday_am_autumn_2015
*Kwargs:*
        *last_fixture*: boolean flag to indicate if the last fixture
        played with the associated player_token should be included
*Returns*:
dictionary of player statistics where the key is the
player token ID and the values take the form::
{
'name': name,
'team': team,
'section': section,
'comp_type': comp_type,
'comp': comp,
'singles': singles_stats(),
'doubles': doubles_stats(),
'last_fixture': last_fixture_played(),
}
"""
if player_tokens is None:
player_tokens = self.db.keys()
stats = {}
for player_token in player_tokens:
singles_stats = trols_stats.Statistics()
doubles_stats = trols_stats.Statistics()
game_aggregates = self.get_player_fixtures(player_token)
for game in game_aggregates:
if game.is_singles():
singles_stats.aggregate(game)
elif game.is_doubles():
doubles_stats.aggregate(game)
stats[player_token] = {
'singles': singles_stats(),
'doubles': doubles_stats(),
}
player_details = self.player_ids_dict([player_token])
stats[player_token].update(player_details[0])
if last_fixture:
event_aggregates = list(game_aggregates)
if event is not None and event == 'singles':
event_aggregates = self.get_player_singles(player_token)
elif event is not None and event == 'doubles':
event_aggregates = self.get_player_doubles(player_token)
fixture = self.last_fixture_played(event_aggregates)
if fixture:
fixture = [x() for x in fixture]
stats[player_token]['last_fixture'] = fixture
return stats
@staticmethod
def sort_stats(statistics,
event='singles',
key='score_for',
reverse=False,
limit=None):
"""Sort the given dictionary of :class:`trols_stats.Statistics`
based on order criteria denoted by *event*, *key* and whether
the order is *reverse*.
**Args:**
*statistics*: as per :meth:`get_player_stats` return value
**Kwargs:**
*event*: since the :class:`trols_stats.Statistics` item
contains both *singles* and *doubles* scores we need to denote
which event to sort by. Default is *singles*
*key*: :class:`trols_stats.Statistics` attribute
to sort by. Possible values include *games_won*,
*games_played*, *percentage*, *score_against*, *games_lost*
or *score_for*. Default, *score_for*
*reverse*: if ``True``, will reverse the sort order from lowest
to highest
*limit*: limit the number of :class:`trols_stats.Statistics`
items to return after sorting. Setting a *limit* will also
trigger the qualified metric that will further filter the
        results based on whether the athlete has played more than 3
matches.
**Returns:**
Same as :meth:`get_player_stats`
"""
def qualified(statistic):
is_qualified = False
games_played = statistic[1].get(event).get('games_played')
if games_played is not None and games_played > 3:
is_qualified = True
return is_qualified
stats = sorted(statistics.items(),
key=lambda x: x[1][event][key],
reverse=reverse)
if limit is not None:
stats = [x for x in stats if qualified(x)][:limit]
return stats
@staticmethod
def rank_stats(statistics, event='singles', key='score_for'):
"""Rank the given *statistics*.
Adds another key, ``rank`` to the *statistics* structure that
represents the athlete's rank in the list.
**Args:**
*statistics*: as per :meth:`get_player_stats` return value
*event*: since the :class:`trols_stats.Statistics` item
contains both *singles* and *doubles* scores we need to denote
which event to sort by. Default is *singles*
*key*: :class:`trols_stats.Statistics` attribute
to sort by. Possible values include *games_won*,
*games_played*, *percentage*, *score_against*, *games_lost*
or *score_for*. Default, *score_for*
"""
last_rank = 1
last_value = None
for index, stat in enumerate(statistics, start=1):
if last_value is None:
logging.debug('This is the first value')
last_value = stat[1][event][key]
if last_value == stat[1][event][key]:
stat[1]['rank'] = last_rank
else:
stat[1]['rank'] = index
last_rank = index
last_value = stat[1][event][key]
return statistics
def get_player_results_compact(self, player_tokens):
"""Get all singles and doubles results associated with
*player_token*.
.. note::
Fixtures are returned in order of match rounds, "Semi Final", "Prelim Final"
and then "Grand Final" with singles before doubles events.
*Args:*
        *player_tokens*: list of player token IDs to filter the DB against.
For example::
Joel Markovski~Watsonia~20~boys~saturday_am_autumn_2015
*Returns*: dict of all singles in a compact format for
presentation in web templates. For example::
{
'Isabella Markovski~Watsonia~14~girls~saturday_am_autumn_2015': [
{
'match_type': 'doubles',
'match_round': 5,
'date_played':
datetime.datetime(2015, 2, 28, 0, 0),
'home_team': 'Watsonia Red',
'away_team': 'St Marys',
'player': 'Madeline Doyle',
'opposition': ['Lauren Amsing', 'Mia Bovalino'],
'score_for': 3,
'score_against': 6,
'team_mate': 'Tara Watson',
'player_won': False,
},
],
}
"""
results = {}
for player_token in player_tokens:
player_matches = self.get_player_fixtures(player_token)
stash = results[player_token] = {}
stash['rounds'] = collections.OrderedDict()
events = ['is_singles', 'is_doubles']
for event in events:
for m in [x for x in player_matches if getattr(x, event)()]:
if stash['rounds'].get(m.fixture_round) is None:
stash['rounds'][m.fixture_round] = []
stash['rounds'][m.fixture_round].append(m.compact_match())
return results
@staticmethod
def player_ids_dict(player_ids):
"""Helper method that splits the components of the token index
from *player_ids* list into separate parts. For example::
>>> from trols_munder_ui.utils import player_ids_dict
>>> token = ('Isabella Markovski~Watsonia~12~girls~saturday_am_spring_2015')
>>> player_ids_dict([token])
[{'name': 'Isabella Markovski', 'comp_type': 'girls', 'section': '12',
'team': 'Watsonia', 'token': 'Isabella Markovski~Watsonia~12~girls~sa
turday_am_spring_2015', 'comp': 'saturday_am_spring_2015'}]
"""
def player_id_struct(player_id):
(name, team, section, comp_type, comp) = player_id.split('~')
comp_parts = comp.split('_')
comp_string = '{} {} {} {} {}'.format(comp_parts[0].upper(),
comp_parts[1].title(),
comp_parts[2].upper(),
comp_parts[3].title(),
comp_parts[4])
return {
'name': name,
'team': team,
'section': section,
'comp_type': comp_type,
'comp': comp,
'comp_string': comp_string,
'token': player_id,
}
return [player_id_struct(x) for x in player_ids]
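
# --- Editor's note: a hedged usage sketch, not part of the module. It
# assumes the trols_stats package is importable (see the import above).
# Reporter expects a zero-argument callable returning a dict-like store
# keyed by player tokens of the form 'name~team~section~comp_type~comp';
# the token below is illustrative only.
if __name__ == '__main__':
    token = 'Isabella Markovski~Watsonia~12~girls~nejta_saturday_am_spring_2015'
    reporter = Reporter(db=lambda: {token: []})
    print(reporter.get_competitions())   # ['nejta_saturday_am_spring_2015']
    print(reporter.get_teams(competition='nejta_saturday_am_spring_2015'))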
|
loum/trols-stats
|
trols_stats/interface/reporter.py
|
Python
|
gpl-2.0
| 18,397
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import os
import pytest
from django.conf import settings
from django.http import HttpResponseRedirect
from django.test.utils import override_settings
from django.utils.timezone import now
from itertools import chain
from shuup.admin import ShuupAdminAppConfig
from shuup.admin.base import AdminModule
from shuup.admin.dashboard import DashboardContentBlock, get_activity
from shuup.admin.menu import get_menu_entry_categories
from shuup.admin.module_registry import get_module_urls, get_modules, replace_modules
from shuup.admin.utils.permissions import set_permissions_for_group
from shuup.admin.views.dashboard import DashboardView
from shuup.admin.views.search import get_search_results
from shuup.testing.factories import get_default_shop, get_default_staff_user
from shuup.testing.utils import apply_request_middleware
from shuup.utils.excs import Problem
from shuup_tests.admin.fixtures.test_module import ARestrictedTestModule, ATestModule
from shuup_tests.utils import empty_iterable
from shuup_tests.utils.faux_users import AnonymousUser, AuthenticatedUser, StaffUser, SuperUser
from shuup_tests.utils.templates import get_templates_setting_for_specific_directories
TEMPLATES_DIR = os.path.realpath(os.path.join(os.path.dirname(__file__), "templates"))
def test_admin_module_base(rf, admin_user):
request = apply_request_middleware(rf.get("/"), user=admin_user)
am = AdminModule()
assert empty_iterable(am.get_urls())
assert empty_iterable(am.get_menu_entries(request))
assert empty_iterable(am.get_search_results(request, ""))
assert empty_iterable(am.get_dashboard_blocks(request))
assert empty_iterable(am.get_notifications(request))
assert empty_iterable(am.get_activity(request, now()))
def test_module_loading_and_urls():
with replace_modules([ATestModule, "shuup_tests.admin.fixtures.test_module:ATestModule"]):
assert all(u.name.startswith("test") for u in get_module_urls())
def test_modules_in_core_admin_work(rf, admin_user):
get_default_shop()
request = rf.get("/")
apply_request_middleware(request, user=admin_user)
request = apply_request_middleware(rf.get("/"), user=admin_user)
with replace_modules(ShuupAdminAppConfig.provides["admin_module"]):
assert all(get_module_urls())
assert get_menu_entry_categories(request)
def test_search(rf, admin_user):
request = apply_request_middleware(rf.get("/"), user=admin_user)
with replace_modules([ATestModule]):
assert any(sr.to_json()["text"] == "yes" for sr in get_search_results(request, "yes"))
assert any(sr.url == "/OK" for sr in get_search_results(request, "spooky")) # Test aliases
assert any(sr.target == "_blank" for sr in get_search_results(request, "yes"))
def test_notifications(rf):
request = rf.get("/")
with replace_modules([ATestModule]):
assert any(n.text == "OK" for n in chain(*(m.get_notifications(request) for m in get_modules())))
def test_dashboard_blocks(rf):
request = rf.get("/")
with replace_modules([ATestModule]):
block_ids = set()
for block in chain(*(m.get_dashboard_blocks(request) for m in get_modules())):
block_ids.add(block.id)
assert block_ids >= set(["test-0", "test-1", "test-2", "test-3", "test-4"])
@pytest.mark.django_db
def test_dashboard_blocks_permissions(rf, client):
with replace_modules([ARestrictedTestModule]):
request = rf.get("/")
request.user = get_default_staff_user(get_default_shop()) # Dashboard permission is added by default
request.session = client.session
view = DashboardView(request=request)
assert not view.get_context_data()["blocks"]
        # By default there is only the dashboard permission, so to be
        # able to see any blocks, permission to an admin module
        # providing dashboard blocks is needed.
        set_permissions_for_group(
            request.user.groups.first(),
            {"dashboard"} | set(ARestrictedTestModule().get_required_permissions()),
        )
view = DashboardView(request=request)
assert view.get_context_data()["blocks"]
def test_menu_entries(rf, admin_user):
request = rf.get("/")
request.user = admin_user
with replace_modules([ATestModule]):
categories = get_menu_entry_categories(request)
assert categories
test_category_menu_entries = [cat for cat in categories if cat.name == "Test"][0]
assert any(me.text == "OK" for me in test_category_menu_entries)
def test_content_block_template(rf):
TEMPLATES = get_templates_setting_for_specific_directories(settings.TEMPLATES, [TEMPLATES_DIR])
with override_settings(TEMPLATES=TEMPLATES):
request = rf.get("/")
dcb = DashboardContentBlock.by_rendering_template("foo", request, "module_template.jinja", {"name": "world"})
assert dcb.content == "Hello world"
def test_activity(rf):
with replace_modules([ATestModule]):
request = rf.get("/")
texts = [a.text for a in get_activity(request, 10)]
# Check that activity is returned in newest-first order.
assert texts == ["Latest", "Later", "Earlier", "Earliest"]
def test_url_auth(rf):
def did_disallow(view, request):
try:
return isinstance(view(request), HttpResponseRedirect)
        except Problem:
            return True  # Problems are fine here
with replace_modules([ATestModule]):
urls = dict((u.name, u) for u in get_module_urls())
request = rf.get("/")
request.user = AnonymousUser()
assert did_disallow(urls["test-auth"].callback, request)
assert did_disallow(urls["test-perm"].callback, request)
assert not did_disallow(urls["test-unauth"].callback, request)
request.user = AuthenticatedUser()
assert did_disallow(urls["test-auth"].callback, request)
assert did_disallow(urls["test-perm"].callback, request)
assert not did_disallow(urls["test-unauth"].callback, request)
request.user = StaffUser()
assert not did_disallow(urls["test-auth"].callback, request)
assert did_disallow(urls["test-perm"].callback, request)
assert not did_disallow(urls["test-unauth"].callback, request)
request.user = SuperUser() # Can access all
assert not did_disallow(urls["test-auth"].callback, request)
assert not did_disallow(urls["test-perm"].callback, request)
assert not did_disallow(urls["test-unauth"].callback, request)
|
shoopio/shoop
|
shuup_tests/admin/test_modules.py
|
Python
|
agpl-3.0
| 6,785
|
#!/usr/bin/env python
#
import ACNodeBase as ACNodeBase
import SharedSecret as SharedSecret
import TrustOnFirstContact as TrustOnFirstContact
# Protocol 0.0
# class ACNode(ACNodeBase.ACNodeBase):
# pass
# Protocol SIG/1
# class ACNode(SharedSecret.SharedSecret):
# pass
# Protocol SIG/1 and /2
class ACNode(TrustOnFirstContact.TrustOnFirstContact, SharedSecret.SharedSecret):
pass
|
dirkx/AccesSystem
|
lib-python/ACNode.py
|
Python
|
apache-2.0
| 395
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from wechatpy.client.api.base import BaseWeChatAPI
class WeChatPoi(BaseWeChatAPI):
def add(self, poi_data):
"""
        Create a POI (store).
        For details, see
        http://mp.weixin.qq.com/wiki/16/8f182af4d8dcea02c56506306bdb2f4c.html
        :param poi_data: dict of POI information
        :return: the returned JSON data
"""
return self._post('poi/addpoi', data=poi_data)
def get(self, poi_id):
"""
        Query POI information.
        For details, see
        http://mp.weixin.qq.com/wiki/16/8f182af4d8dcea02c56506306bdb2f4c.html
        :param poi_id: POI ID
        :return: the returned JSON data
"""
return self._post('poi/getpoi', data={'poi_id': poi_id})
def list(self, begin=0, limit=20):
"""
        Query the POI list.
        For details, see
        http://mp.weixin.qq.com/wiki/16/8f182af4d8dcea02c56506306bdb2f4c.html
        :param begin: start offset; 0 queries from the first record
        :param limit: number of records to return; maximum 50, default 20
        :return: the returned JSON data
"""
return self._post(
'poi/getpoilist',
data={
'begin': begin,
'limit': limit,
}
)
def update(self, poi_data):
"""
        Update a POI.
        For details, see
        http://mp.weixin.qq.com/wiki/16/8f182af4d8dcea02c56506306bdb2f4c.html
        :param poi_data: dict of POI information
        :return: the returned JSON data
"""
return self._post('poi/updatepoi', data=poi_data)
def delete(self, poi_id):
"""
        Delete a POI.
        For details, see
        http://mp.weixin.qq.com/wiki/16/8f182af4d8dcea02c56506306bdb2f4c.html
        :param poi_id: POI ID
        :return: the returned JSON data
"""
return self._post('poi/delpoi', data={'poi_id': poi_id})
def get_categories(self):
"""
        Get the WeChat POI category list.
        For details, see
        http://mp.weixin.qq.com/wiki/16/8f182af4d8dcea02c56506306bdb2f4c.html
        :return: the POI category list
"""
res = self._get('api_getwxcategory')
return res['category_list']
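
# --- Editor's note: a hedged usage sketch, not part of this module. The
# POI API is normally reached through a wechatpy client instance; the
# appid/secret values are placeholders.
#
# from wechatpy import WeChatClient
#
# client = WeChatClient('your-appid', 'your-secret')
# pois = client.poi.list(begin=0, limit=20)
# categories = client.poi.get_categories()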
|
chenjiancan/wechatpy
|
wechatpy/client/api/poi.py
|
Python
|
mit
| 2,307
|
__author__ = 'Mirko Rossini'
from django.core.management.base import BaseCommand
class Command(BaseCommand):
    help = 'Command that works'
def add_arguments(self, parser):
parser.add_argument('argument', nargs='+', type=str)
def handle(self, *args, **options):
print("AAaaaaaaaa", args, options)
print("Command called with argument: {}".format(options['argument'][0]))
|
MirkoRossini/pybuilder_django_enhanced_plugin
|
src/integrationtest/resources/testproject/testapp/management/commands/working_command.py
|
Python
|
bsd-3-clause
| 407
|
#!/usr/bin/env python3
## get_protein_www.py --
## get a protein sequence from the Uniprot or NCBI/Refseq web sites using the accession
##
import sys
import re
import textwrap
import time
import requests
ncbi_url = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?"
uniprot_url = "https://www.uniprot.org/uniprot/"
for acc in sys.argv[1:]:
    sub_range = ''  # reset per accession so a range does not leak to the next one
    if (re.search(r':',acc)):
        (acc, sub_range) = acc.split(':')
if (re.match(r'^(sp|tr|iso|ref)\|',acc)):
acc=acc.split('|')[1]
if (re.match(r'[A-Z]P_\d+',acc)): # get refseq
db_type="protein"
seq_args = "db=%s&id=" % (db_type) + acc + "&rettype=fasta"
url_string = ncbi_url + seq_args
else: # get uniprot
acc_fields = acc.split('|')
if (len(acc_fields)==1):
url_string = uniprot_url + acc + ".fasta"
else:
url_string = uniprot_url + acc_fields[0] + ".fasta"
try:
req = requests.get(url_string)
    except requests.exceptions.RequestException as e:
        seq_html = ''
        sys.stderr.write(str(e)+'\n')
        continue
else:
seq_html=req.text
if (re.search(r'Error',seq_html)):
sys.stderr.write("*** %s returned Error\n"%(acc))
continue
time.sleep(0.3)
if (not sub_range):
print(seq_html)
else:
(start, stop) = sub_range.split('-')
(start, stop) = (int(start), int(stop))
lines = seq_html.split('\n')
header=lines[0]
seq = ''.join(lines[1:])
if (start > 0):
start -= 1
new_seq = seq[start:stop]
## print the header
if (start > 0):
print("%s @C:%d" %(header, start+1))
else:
print(header)
print('\n'.join(textwrap.wrap(new_seq)))
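## Example invocations (an illustrative addition; accessions are placeholders):
##   get_protein_www.py P09488          # UniProt entry, full sequence
##   get_protein_www.py NP_002037       # RefSeq accession via the NCBI eutils
##   get_protein_www.py P09488:10-120   # print only residues 10-120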
|
uwbmrb/BMRB-API
|
server/wsgi/bmrbapi/submodules/fasta36/scripts/get_protein.py
|
Python
|
gpl-3.0
| 1,680
|
import datetime
from mock import patch, MagicMock, PropertyMock
from cloudify_rest_client import deployments, executions, blueprints
from cloudify_rest_client.exceptions import CloudifyClientError, \
MissingRequiredDeploymentInputError, UnknownDeploymentInputError
from ... import exceptions
from .mocks import MockListResponse
from .test_base import CliCommandTest
from .constants import BLUEPRINTS_DIR, SAMPLE_BLUEPRINT_PATH, \
SAMPLE_ARCHIVE_PATH, SAMPLE_INPUTS_PATH
class DeploymentUpdatesTest(CliCommandTest):
def _mock_wait_for_executions(self, value):
patcher = patch(
'cloudify_cli.execution_events_fetcher.wait_for_execution',
MagicMock(return_value=PropertyMock(error=value))
)
self.addCleanup(patcher.stop)
patcher.start()
def setUp(self):
super(DeploymentUpdatesTest, self).setUp()
self.use_manager()
self.client.deployment_updates.update = MagicMock()
self.client.executions = MagicMock()
self._mock_wait_for_executions(False)
patcher = patch('cloudify_cli.inputs.inputs_to_dict', MagicMock())
self.addCleanup(patcher.stop)
patcher.start()
def test_deployment_update_successful(self):
outcome = self.invoke(
'cfy deployments update -p {0} '
'my_deployment'.format(SAMPLE_BLUEPRINT_PATH))
self.assertIn('Updating deployment my_deployment', outcome.logs)
self.assertIn('Finished executing workflow', outcome.logs)
self.assertIn(
'Successfully updated deployment my_deployment', outcome.logs)
def test_deployment_update_failure(self):
self._mock_wait_for_executions(True)
outcome = self.invoke(
'cfy deployments update -p {0} my_deployment'
.format(SAMPLE_BLUEPRINT_PATH),
err_str_segment='',
exception=exceptions.SuppressedCloudifyCliError)
logs = outcome.logs.split('\n')
self.assertIn('Updating deployment my_deployment', logs[-3])
self.assertIn('Execution of workflow', logs[-2])
self.assertIn('failed', logs[-2])
self.assertIn(
'Failed updating deployment my_deployment', logs[-1])
def test_deployment_update_json_parameter(self):
self.invoke(
'cfy deployments update -p '
'{0} my_deployment --json-output'
.format(SAMPLE_BLUEPRINT_PATH))
def test_deployment_update_include_logs_parameter(self):
self.invoke(
'cfy deployments update -p '
'{0} my_deployment --include-logs'
.format(SAMPLE_BLUEPRINT_PATH))
def test_deployment_update_skip_install_flag(self):
self.invoke(
'cfy deployments update -p '
'{0} my_deployment --skip-install'
.format(SAMPLE_BLUEPRINT_PATH))
def test_deployment_update_skip_uninstall_flag(self):
self.invoke(
'cfy deployments update -p '
'{0} my_deployment --skip-uninstall'
.format(SAMPLE_BLUEPRINT_PATH))
def test_deployment_update_force_flag(self):
self.invoke(
'cfy deployments update -p '
'{0} my_deployment --force'
.format(SAMPLE_BLUEPRINT_PATH))
def test_deployment_update_override_workflow_parameter(self):
self.invoke(
'cfy deployments update -p '
'{0} my_deployment -w override-wf'
.format(SAMPLE_BLUEPRINT_PATH))
def test_deployment_update_archive_location_parameter(self):
self.invoke(
'cfy deployments update -p {0} my_deployment'
.format(SAMPLE_ARCHIVE_PATH))
def test_dep_update_archive_loc_and_bp_path_parameters_exclusion(self):
self.invoke(
'cfy deployments update -p '
'{0} -n {1}/helloworld/'
'blueprint2.yaml my_deployment'
.format(SAMPLE_BLUEPRINT_PATH, BLUEPRINTS_DIR),
err_str_segment='param should be passed only when updating'
' from an archive'
)
def test_deployment_update_blueprint_filename_parameter(self):
self.invoke(
'cfy deployments update -p '
'{0} -n my-blueprint.yaml my_deployment'
.format(SAMPLE_ARCHIVE_PATH))
def test_deployment_update_inputs_parameter(self):
self.invoke(
'cfy deployments update -p '
'{0} -i {1} my_deployment'
.format(SAMPLE_ARCHIVE_PATH, SAMPLE_INPUTS_PATH))
def test_deployment_update_multiple_inputs_parameter(self):
self.invoke(
'cfy deployments update -p '
'{0} -i {1} -i {1} my_deployment'
.format(SAMPLE_ARCHIVE_PATH, SAMPLE_INPUTS_PATH))
def test_deployment_update_no_deployment_id_parameter(self):
outcome = self.invoke(
'cfy deployments update -p '
'{0}'.format(SAMPLE_ARCHIVE_PATH),
err_str_segment='2', # Exit code
exception=SystemExit)
self.assertIn('Missing argument "deployment-id"', outcome.output)
def test_deployment_update_no_bp_path_nor_archive_loc_parameters(self):
outcome = self.invoke(
            'cfy deployments update my_deployment',
err_str_segment='2', # Exit code
exception=SystemExit)
self.assertIn(
'Missing option "-p" / "--blueprint-path"',
outcome.output
)
class DeploymentsTest(CliCommandTest):
def setUp(self):
super(DeploymentsTest, self).setUp()
self.use_manager()
def test_deployment_create(self):
deployment = deployments.Deployment({
'deployment_id': 'deployment_id'
})
self.client.deployments.create = MagicMock(return_value=deployment)
self.invoke(
'cfy deployments create deployment -b a-blueprint-id')
def test_deployments_delete(self):
self.client.deployments.delete = MagicMock()
self.invoke('cfy deployments delete my-dep')
def test_deployments_execute(self):
execute_response = executions.Execution({'status': 'started'})
get_execution_response = executions.Execution({
'status': 'terminated',
'workflow_id': 'mock_wf',
'deployment_id': 'deployment-id',
'blueprint_id': 'blueprint-id',
'error': '',
'id': 'id',
'created_at': datetime.datetime.now(),
'parameters': {}
})
success_event = {
'event_type': 'workflow_succeeded',
'type': 'foo',
'timestamp': '12345678',
'message': {
'text': 'workflow execution succeeded'
},
'context': {
'deployment_id': 'deployment-id'
}
}
get_events_response = MockListResponse([success_event], 1)
self.client.executions.start = MagicMock(
return_value=execute_response)
self.client.executions.get = MagicMock(
return_value=get_execution_response)
self.client.events.list = MagicMock(return_value=get_events_response)
self.invoke('cfy executions start install -d a-deployment-id')
def test_deployments_list_all(self):
self.client.deployments.list = MagicMock(return_value=[])
self.invoke('cfy deployments list')
self.invoke('cfy deployments list -t dummy_tenant')
self.invoke('cfy deployments list -a')
def test_deployments_list_of_blueprint(self):
deps = [
{
'blueprint_id': 'b1_blueprint',
'created_at': 'now',
'updated_at': 'now',
'id': 'id',
'permission': 'creator',
'tenant_name': 'default_tenant'
},
{
'blueprint_id': 'b1_blueprint',
'created_at': 'now',
'updated_at': 'now',
'id': 'id',
'permission': 'creator',
'tenant_name': 'default_tenant'
},
{
'blueprint_id': 'b2_blueprint',
'created_at': 'now',
'updated_at': 'now',
'id': 'id',
'permission': 'creator',
'tenant_name': 'default_tenant'
}
]
self.client.deployments.list = MagicMock(return_value=deps)
outcome = self.invoke('cfy deployments list -b b1_blueprint -v')
self.assertNotIn('b2_blueprint', outcome.logs)
self.assertIn('b1_blueprint', outcome.logs)
def test_deployments_execute_nonexistent_operation(self):
# Verifying that the CLI allows for arbitrary operation names,
# while also ensuring correct error-handling of nonexistent
# operations
expected_error = "operation nonexistent-operation doesn't exist"
self.client.executions.start = MagicMock(
side_effect=CloudifyClientError(expected_error))
command = \
'cfy executions start nonexistent-operation -d a-deployment-id'
self.invoke(
command,
err_str_segment=expected_error,
exception=CloudifyClientError)
def test_deployments_outputs(self):
outputs = deployments.DeploymentOutputs({
'deployment_id': 'dep1',
'outputs': {
'port': 8080
}
})
deployment = deployments.Deployment({
'outputs': {
'port': {
'description': 'Webserver port.',
'value': '...'
}
}
})
self.client.deployments.get = MagicMock(return_value=deployment)
self.client.deployments.outputs.get = MagicMock(return_value=outputs)
self.invoke('cfy deployments outputs dep1')
def test_deployments_inputs(self):
deployment = deployments.Deployment({
'deployment_id': 'deployment_id',
'inputs': {'key1': 'val1', 'key2': 'val2'}
})
expected_outputs = [
'Retrieving inputs for deployment deployment_id...',
'- "key1":',
'Value: val1',
'- "key2":',
'Value: val2',
]
self.client.deployments.get = MagicMock(return_value=deployment)
outcome = self.invoke('cfy deployments inputs deployment_id')
outcome = [o.strip() for o in outcome.logs.split('\n')]
for output in expected_outputs:
self.assertIn(output, outcome)
def test_missing_required_inputs(self):
self._test_deployment_inputs(
MissingRequiredDeploymentInputError,
'Unable to create deployment. Not all '
'required inputs have been specified...'
)
def test_invalid_input(self):
self._test_deployment_inputs(
UnknownDeploymentInputError,
'Unable to create deployment, an unknown input was specified...'
)
def _test_deployment_inputs(self, exception_type, error_msg):
def raise_error(*args, **kwargs):
raise exception_type('no inputs')
blueprint = blueprints.Blueprint({
'plan': {
'inputs': {
'input1': {'description': 'val1'},
'input2': {'description': 'val2'}
}
}
})
self.client.blueprints.get = MagicMock(return_value=blueprint)
self.client.deployments.create = raise_error
outcome = self.invoke(
'cfy deployments create deployment -b a-blueprint-id',
err_str_segment='no inputs'
)
outcome = [o.strip() for o in outcome.logs.split('\n')]
expected_outputs = [
'Deployment inputs:',
'input1:',
'description: val1',
'input2:',
'description: val2',
]
for output in expected_outputs:
self.assertIn(output, outcome)
self.assertIn(error_msg, outcome)
|
isaac-s/cloudify-cli
|
cloudify_cli/tests/commands/test_deployments.py
|
Python
|
apache-2.0
| 12,173
|
import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.thermal_zones_and_surfaces import ShadingOverhang
log = logging.getLogger(__name__)
class TestShadingOverhang(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_shadingoverhang(self):
pyidf.validation_level = ValidationLevel.error
obj = ShadingOverhang()
# alpha
var_name = "Name"
obj.name = var_name
# object-list
var_window_or_door_name = "object-list|Window or Door Name"
obj.window_or_door_name = var_window_or_door_name
# real
var_height_above_window_or_door = 3.3
obj.height_above_window_or_door = var_height_above_window_or_door
# real
var_tilt_angle_from_window_or_door = 90.0
obj.tilt_angle_from_window_or_door = var_tilt_angle_from_window_or_door
# real
var_left_extension_from_window_or_door_width = 5.5
obj.left_extension_from_window_or_door_width = var_left_extension_from_window_or_door_width
# real
var_right_extension_from_window_or_door_width = 6.6
obj.right_extension_from_window_or_door_width = var_right_extension_from_window_or_door_width
# real
var_depth = 0.0
obj.depth = var_depth
idf = IDF()
idf.add(obj)
idf.save(self.path, check=False)
with open(self.path, mode='r') as f:
for line in f:
log.debug(line.strip())
idf2 = IDF(self.path)
self.assertEqual(idf2.shadingoverhangs[0].name, var_name)
self.assertEqual(idf2.shadingoverhangs[0].window_or_door_name, var_window_or_door_name)
self.assertAlmostEqual(idf2.shadingoverhangs[0].height_above_window_or_door, var_height_above_window_or_door)
self.assertAlmostEqual(idf2.shadingoverhangs[0].tilt_angle_from_window_or_door, var_tilt_angle_from_window_or_door)
self.assertAlmostEqual(idf2.shadingoverhangs[0].left_extension_from_window_or_door_width, var_left_extension_from_window_or_door_width)
self.assertAlmostEqual(idf2.shadingoverhangs[0].right_extension_from_window_or_door_width, var_right_extension_from_window_or_door_width)
self.assertAlmostEqual(idf2.shadingoverhangs[0].depth, var_depth)
|
rbuffat/pyidf
|
tests/test_shadingoverhang.py
|
Python
|
apache-2.0
| 2,442
|
import chipDB
class IspBase:
    def programChip(self, flashData):
        self.curExtAddr = -1
        self.chip = chipDB.getChipFromDB(self.getSignature())
        if self.chip is False:
            raise IspError("Chip with signature: " + str(self.getSignature()) + " not found")
self.chipErase()
print("Flashing %i bytes" % len(flashData))
self.writeFlash(flashData)
print("Verifying %i bytes" % len(flashData))
self.verifyFlash(flashData)
#low level ISP commands
def getSignature(self):
sig = []
sig.append(self.sendISP([0x30, 0x00, 0x00, 0x00])[3])
sig.append(self.sendISP([0x30, 0x00, 0x01, 0x00])[3])
sig.append(self.sendISP([0x30, 0x00, 0x02, 0x00])[3])
return sig
def chipErase(self):
self.sendISP([0xAC, 0x80, 0x00, 0x00])
class IspError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
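# A minimal subclass sketch (an illustrative addition; the transport is
# hypothetical). Concrete implementations are expected to provide sendISP(),
# writeFlash() and verifyFlash() on top of this base class.
class EchoIsp(IspBase):
    """In-memory stub that answers every ISP command with zero bytes."""
    def sendISP(self, data):
        # A real implementation would exchange `data` with the target MCU
        # over serial or SPI; here we return a well-formed 4-byte response.
        return [0x00, 0x00, 0x00, 0x00]
    def writeFlash(self, flashData):
        pass
    def verifyFlash(self, flashData):
        pass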
|
fastbot3d/Octoprint
|
src/octoprint/util/avr_isp/ispBase.py
|
Python
|
agpl-3.0
| 851
|
# coding: utf-8
"""
Server API
Reference for Server API (REST/Json)
OpenAPI spec version: 2.0.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class Subtitle(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, id=None, src=None, srclang=None, name=None, font=None, size=None, color=None, text_shadow=None, background=None, opacity=None, italic_color=None):
"""
Subtitle - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'id': 'int',
'src': 'str',
'srclang': 'str',
'name': 'str',
'font': 'str',
'size': 'float',
'color': 'str',
'text_shadow': 'str',
'background': 'str',
'opacity': 'int',
'italic_color': 'bool'
}
self.attribute_map = {
'id': 'id',
'src': 'src',
'srclang': 'srclang',
'name': 'name',
'font': 'font',
'size': 'size',
'color': 'color',
'text_shadow': 'text_shadow',
'background': 'background',
'opacity': 'opacity',
'italic_color': 'italic_color'
}
self._id = id
self._src = src
self._srclang = srclang
self._name = name
self._font = font
self._size = size
self._color = color
self._text_shadow = text_shadow
self._background = background
self._opacity = opacity
self._italic_color = italic_color
@property
def id(self):
"""
Gets the id of this Subtitle.
:return: The id of this Subtitle.
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""
Sets the id of this Subtitle.
:param id: The id of this Subtitle.
:type: int
"""
self._id = id
@property
def src(self):
"""
Gets the src of this Subtitle.
:return: The src of this Subtitle.
:rtype: str
"""
return self._src
@src.setter
def src(self, src):
"""
Sets the src of this Subtitle.
:param src: The src of this Subtitle.
:type: str
"""
self._src = src
@property
def srclang(self):
"""
Gets the srclang of this Subtitle.
:return: The srclang of this Subtitle.
:rtype: str
"""
return self._srclang
@srclang.setter
def srclang(self, srclang):
"""
Sets the srclang of this Subtitle.
:param srclang: The srclang of this Subtitle.
:type: str
"""
self._srclang = srclang
@property
def name(self):
"""
Gets the name of this Subtitle.
:return: The name of this Subtitle.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this Subtitle.
:param name: The name of this Subtitle.
:type: str
"""
self._name = name
@property
def font(self):
"""
Gets the font of this Subtitle.
:return: The font of this Subtitle.
:rtype: str
"""
return self._font
@font.setter
def font(self, font):
"""
Sets the font of this Subtitle.
:param font: The font of this Subtitle.
:type: str
"""
self._font = font
@property
def size(self):
"""
Gets the size of this Subtitle.
:return: The size of this Subtitle.
:rtype: float
"""
return self._size
@size.setter
def size(self, size):
"""
Sets the size of this Subtitle.
:param size: The size of this Subtitle.
:type: float
"""
self._size = size
@property
def color(self):
"""
Gets the color of this Subtitle.
:return: The color of this Subtitle.
:rtype: str
"""
return self._color
@color.setter
def color(self, color):
"""
Sets the color of this Subtitle.
:param color: The color of this Subtitle.
:type: str
"""
self._color = color
@property
def text_shadow(self):
"""
Gets the text_shadow of this Subtitle.
:return: The text_shadow of this Subtitle.
:rtype: str
"""
return self._text_shadow
@text_shadow.setter
def text_shadow(self, text_shadow):
"""
Sets the text_shadow of this Subtitle.
:param text_shadow: The text_shadow of this Subtitle.
:type: str
"""
self._text_shadow = text_shadow
@property
def background(self):
"""
Gets the background of this Subtitle.
:return: The background of this Subtitle.
:rtype: str
"""
return self._background
@background.setter
def background(self, background):
"""
Sets the background of this Subtitle.
:param background: The background of this Subtitle.
:type: str
"""
self._background = background
@property
def opacity(self):
"""
Gets the opacity of this Subtitle.
:return: The opacity of this Subtitle.
:rtype: int
"""
return self._opacity
@opacity.setter
def opacity(self, opacity):
"""
Sets the opacity of this Subtitle.
:param opacity: The opacity of this Subtitle.
:type: int
"""
self._opacity = opacity
@property
def italic_color(self):
"""
Gets the italic_color of this Subtitle.
:return: The italic_color of this Subtitle.
:rtype: bool
"""
return self._italic_color
@italic_color.setter
def italic_color(self, italic_color):
"""
Sets the italic_color of this Subtitle.
:param italic_color: The italic_color of this Subtitle.
:type: bool
"""
self._italic_color = italic_color
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
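# A short usage sketch (an illustrative addition; the values are made up).
# The generated model is a plain value object: construct it, read and write
# through the properties above, and serialise with to_dict() / to_str().
if __name__ == '__main__':
    subtitle = Subtitle(id=1, src='/subs/en.vtt', srclang='en', name='English')
    subtitle.size = 16.0        # setters mirror the swagger_types mapping
    print(subtitle.to_dict())   # {'id': 1, 'src': '/subs/en.vtt', ...}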
|
kinow-io/kinow-python-sdk
|
kinow_client/models/subtitle.py
|
Python
|
apache-2.0
| 8,021
|
"""Provide access to Python's configuration information.
"""
import sys
import os
from os.path import pardir, realpath
_INSTALL_SCHEMES = {
'posix_prefix': {
'stdlib': '{base}/lib/python{py_version_short}',
'platstdlib': '{platbase}/lib/python{py_version_short}',
'purelib': '{base}/lib/python{py_version_short}/site-packages',
'platlib': '{platbase}/lib/python{py_version_short}/site-packages',
'include': '{base}/include/python{py_version_short}',
'platinclude': '{platbase}/include/python{py_version_short}',
'scripts': '{base}/bin',
'data': '{base}',
},
'posix_home': {
'stdlib': '{base}/lib/python',
'platstdlib': '{base}/lib/python',
'purelib': '{base}/lib/python',
'platlib': '{base}/lib/python',
'include': '{base}/include/python',
'platinclude': '{base}/include/python',
'scripts': '{base}/bin',
'data' : '{base}',
},
'nt': {
'stdlib': '{base}/lib/python{py_version_short}',
'platstdlib': '{base}/lib/python{py_version_short}',
'purelib': '{base}/lib/python{py_version_short}',
'platlib': '{base}/lib/python{py_version_short}',
'include': '{base}/include/python{py_version_short}',
'platinclude': '{base}/include/python{py_version_short}',
'scripts': '{base}/bin',
'data' : '{base}',
},
'os2': {
'stdlib': '{base}/Lib',
'platstdlib': '{base}/Lib',
'purelib': '{base}/Lib/site-packages',
'platlib': '{base}/Lib/site-packages',
'include': '{base}/Include',
'platinclude': '{base}/Include',
'scripts': '{base}/Scripts',
'data' : '{base}',
},
'os2_home': {
'stdlib': '{userbase}/lib/python{py_version_short}',
'platstdlib': '{userbase}/lib/python{py_version_short}',
'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
'include': '{userbase}/include/python{py_version_short}',
'scripts': '{userbase}/bin',
'data' : '{userbase}',
},
'nt_user': {
'stdlib': '{userbase}/lib/python{py_version_short}',
'platstdlib': '{userbase}/lib/python{py_version_short}',
'purelib': '{userbase}/lib/python{py_version_short}',
'platlib': '{userbase}/lib/python{py_version_short}',
'include': '{userbase}/include/python{py_version_short}',
'scripts': '{userbase}/bin',
'data' : '{userbase}',
},
'posix_user': {
'stdlib': '{userbase}/lib/python{py_version_short}',
'platstdlib': '{userbase}/lib/python{py_version_short}',
'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
'include': '{userbase}/include/python{py_version_short}',
'scripts': '{userbase}/bin',
'data' : '{userbase}',
},
'osx_framework_user': {
'stdlib': '{userbase}/lib/python',
'platstdlib': '{userbase}/lib/python',
'purelib': '{userbase}/lib/python/site-packages',
'platlib': '{userbase}/lib/python/site-packages',
'include': '{userbase}/include',
'scripts': '{userbase}/bin',
'data' : '{userbase}',
},
}
_SCHEME_KEYS = ('stdlib', 'platstdlib', 'purelib', 'platlib', 'include',
'scripts', 'data')
_PY_VERSION = sys.version.split()[0]
_PY_VERSION_SHORT = sys.version[:3]
_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2]
_PREFIX = os.path.normpath(sys.prefix)
_EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
_CONFIG_VARS = None
_USER_BASE = None
def _safe_realpath(path):
try:
return realpath(path)
except OSError:
return path
if sys.executable:
_PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable))
else:
# sys.executable can be empty if argv[0] has been changed and Python is
# unable to retrieve the real program name
_PROJECT_BASE = _safe_realpath(os.getcwd())
if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower():
_PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir))
# PC/VS7.1
if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower():
_PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
# PC/AMD64
if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
_PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
# set for cross builds
if "_PYTHON_PROJECT_BASE" in os.environ:
# the build directory for posix builds
_PROJECT_BASE = os.path.normpath(os.path.abspath("."))
def is_python_build():
for fn in ("Setup.dist", "Setup.local"):
if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)):
return True
return False
_PYTHON_BUILD = is_python_build()
if _PYTHON_BUILD:
for scheme in ('posix_prefix', 'posix_home'):
_INSTALL_SCHEMES[scheme]['include'] = '{srcdir}/Include'
_INSTALL_SCHEMES[scheme]['platinclude'] = '{projectbase}'
    # GCC (mingw) uses the posix build system
if os.name == "nt" and sys.version.find('GCC') >= 0:
_INSTALL_SCHEMES['nt']['include'] = '{srcdir}/Include'
_INSTALL_SCHEMES['nt']['platinclude'] = '{projectbase}'
def _subst_vars(s, local_vars):
try:
return s.format(**local_vars)
except KeyError:
try:
return s.format(**os.environ)
except KeyError, var:
raise AttributeError('{%s}' % var)
def _extend_dict(target_dict, other_dict):
target_keys = target_dict.keys()
for key, value in other_dict.items():
if key in target_keys:
continue
target_dict[key] = value
def _expand_vars(scheme, vars):
res = {}
if vars is None:
vars = {}
_extend_dict(vars, get_config_vars())
for key, value in _INSTALL_SCHEMES[scheme].items():
if os.name in ('posix', 'nt'):
value = os.path.expanduser(value)
res[key] = os.path.normpath(_subst_vars(value, vars))
return res
def _get_default_scheme():
if os.name == 'posix':
# the default scheme for posix is posix_prefix
return 'posix_prefix'
return os.name
def _getuserbase():
env_base = os.environ.get("PYTHONUSERBASE", None)
def joinuser(*args):
return os.path.expanduser(os.path.join(*args))
# what about 'os2emx', 'riscos' ?
if os.name == "nt":
base = os.environ.get("APPDATA") or "~"
return env_base if env_base else joinuser(base, "Python")
if sys.platform == "darwin":
framework = get_config_var("PYTHONFRAMEWORK")
if framework:
return env_base if env_base else \
joinuser("~", "Library", framework, "%d.%d"
% (sys.version_info[:2]))
return env_base if env_base else joinuser("~", ".local")
def _parse_makefile(filename, vars=None):
"""Parse a Makefile-style file.
A dictionary containing name/value pairs is returned. If an
optional dictionary is passed in as the second argument, it is
used instead of a new dictionary.
"""
import re
# Regexes needed for parsing Makefile (and similar syntaxes,
# like old-style Setup files).
    _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
if vars is None:
vars = {}
done = {}
notdone = {}
with open(filename) as f:
lines = f.readlines()
for line in lines:
if line.startswith('#') or line.strip() == '':
continue
m = _variable_rx.match(line)
if m:
n, v = m.group(1, 2)
v = v.strip()
# `$$' is a literal `$' in make
tmpv = v.replace('$$', '')
if "$" in tmpv:
notdone[n] = v
else:
try:
v = int(v)
except ValueError:
# insert literal `$'
done[n] = v.replace('$$', '$')
else:
done[n] = v
# do variable interpolation here
while notdone:
for name in notdone.keys():
value = notdone[name]
m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
if m:
n = m.group(1)
found = True
if n in done:
item = str(done[n])
elif n in notdone:
# get it on a subsequent round
found = False
elif n in os.environ:
# do it like make: fall back to environment
item = os.environ[n]
else:
done[n] = item = ""
if found:
after = value[m.end():]
value = value[:m.start()] + item + after
if "$" in after:
notdone[name] = value
else:
try: value = int(value)
except ValueError:
done[name] = value.strip()
else:
done[name] = value
del notdone[name]
else:
# bogus variable reference; just drop it since we can't deal
del notdone[name]
# strip spurious spaces
for k, v in done.items():
if isinstance(v, str):
done[k] = v.strip()
# save the results in the global dictionary
vars.update(done)
return vars
def _get_makefile_filename():
if _PYTHON_BUILD:
return os.path.join(_PROJECT_BASE, "Makefile")
return os.path.join(get_path('platstdlib'), "config", "Makefile")
def _generate_posix_vars():
"""Generate the Python module containing build-time variables."""
import pprint
vars = {}
# load the installed Makefile:
makefile = _get_makefile_filename()
try:
_parse_makefile(makefile, vars)
except IOError, e:
msg = "invalid Python installation: unable to open %s" % makefile
if hasattr(e, "strerror"):
msg = msg + " (%s)" % e.strerror
raise IOError(msg)
# load the installed pyconfig.h:
config_h = get_config_h_filename()
try:
with open(config_h) as f:
parse_config_h(f, vars)
except IOError, e:
msg = "invalid Python installation: unable to open %s" % config_h
if hasattr(e, "strerror"):
msg = msg + " (%s)" % e.strerror
raise IOError(msg)
# On AIX, there are wrong paths to the linker scripts in the Makefile
# -- these paths are relative to the Python source, but when installed
# the scripts are in another directory.
if _PYTHON_BUILD:
vars['LDSHARED'] = vars['BLDSHARED']
# There's a chicken-and-egg situation on OS X with regards to the
# _sysconfigdata module after the changes introduced by #15298:
# get_config_vars() is called by get_platform() as part of the
# `make pybuilddir.txt` target -- which is a precursor to the
# _sysconfigdata.py module being constructed. Unfortunately,
# get_config_vars() eventually calls _init_posix(), which attempts
# to import _sysconfigdata, which we won't have built yet. In order
# for _init_posix() to work, if we're on Darwin, just mock up the
# _sysconfigdata module manually and populate it with the build vars.
# This is more than sufficient for ensuring the subsequent call to
# get_platform() succeeds.
name = '_sysconfigdata'
if 'darwin' in sys.platform:
import imp
module = imp.new_module(name)
module.build_time_vars = vars
sys.modules[name] = module
pybuilddir = 'build/lib.%s-%s' % (get_platform(), sys.version[:3])
if hasattr(sys, "gettotalrefcount"):
pybuilddir += '-pydebug'
try:
os.makedirs(pybuilddir)
except OSError:
pass
destfile = os.path.join(pybuilddir, name + '.py')
with open(destfile, 'wb') as f:
f.write('# system configuration generated and used by'
' the sysconfig module\n')
f.write('build_time_vars = ')
pprint.pprint(vars, stream=f)
# Create file used for sys.path fixup -- see Modules/getpath.c
with open('pybuilddir.txt', 'w') as f:
f.write(pybuilddir)
def _init_posix(vars):
"""Initialize the module as appropriate for POSIX systems."""
# _sysconfigdata is generated at build time, see _generate_posix_vars()
from _sysconfigdata import build_time_vars
vars.update(build_time_vars)
def _init_non_posix(vars):
"""Initialize the module as appropriate for NT"""
# set basic install directories
vars['LIBDEST'] = get_path('stdlib')
vars['BINLIBDEST'] = get_path('platstdlib')
vars['INCLUDEPY'] = get_path('include')
vars['SO'] = '.pyd'
vars['EXE'] = '.exe'
vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT
vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable))
#
# public APIs
#
def parse_config_h(fp, vars=None):
"""Parse a config.h-style file.
A dictionary containing name/value pairs is returned. If an
optional dictionary is passed in as the second argument, it is
used instead of a new dictionary.
"""
import re
if vars is None:
vars = {}
define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")
while True:
line = fp.readline()
if not line:
break
m = define_rx.match(line)
if m:
n, v = m.group(1, 2)
try: v = int(v)
except ValueError: pass
vars[n] = v
else:
m = undef_rx.match(line)
if m:
vars[m.group(1)] = 0
return vars
def get_config_h_filename():
"""Returns the path of pyconfig.h."""
if _PYTHON_BUILD:
# GCC(mingw): os.name is "nt" but build system is posix
if os.name == "nt" and sys.version.find('GCC') < 0:
inc_dir = os.path.join(_PROJECT_BASE, "PC")
else:
inc_dir = _PROJECT_BASE
else:
inc_dir = get_path('platinclude')
return os.path.join(inc_dir, 'pyconfig.h')
def get_scheme_names():
"""Returns a tuple containing the schemes names."""
schemes = _INSTALL_SCHEMES.keys()
schemes.sort()
return tuple(schemes)
def get_path_names():
"""Returns a tuple containing the paths names."""
return _SCHEME_KEYS
def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
"""Returns a mapping containing an install scheme.
``scheme`` is the install scheme name. If not provided, it will
return the default scheme for the current platform.
"""
if expand:
return _expand_vars(scheme, vars)
else:
return _INSTALL_SCHEMES[scheme]
def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
"""Returns a path corresponding to the scheme.
``scheme`` is the install scheme name.
"""
return get_paths(scheme, vars, expand)[name]
def get_config_vars(*args):
"""With no arguments, return a dictionary of all configuration
variables relevant for the current platform.
On Unix, this means every variable defined in Python's installed Makefile;
On Windows and Mac OS it's a much smaller set.
With arguments, return a list of values that result from looking up
each argument in the configuration variable dictionary.
"""
import re
global _CONFIG_VARS
if _CONFIG_VARS is None:
_CONFIG_VARS = {}
# Normalized versions of prefix and exec_prefix are handy to have;
# in fact, these are the standard versions used most places in the
# Distutils.
_CONFIG_VARS['prefix'] = _PREFIX
_CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
_CONFIG_VARS['py_version'] = _PY_VERSION
_CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT
_CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2]
_CONFIG_VARS['base'] = _PREFIX
_CONFIG_VARS['platbase'] = _EXEC_PREFIX
_CONFIG_VARS['projectbase'] = _PROJECT_BASE
        # GCC (mingw) uses the posix build system
posix_build = None
if os.name == 'posix':
posix_build = True
else:
if os.name in ('nt', 'os2'):
if sys.version.find('GCC') >= 0:
posix_build = True
else:
posix_build = False
if posix_build == False:
_init_non_posix(_CONFIG_VARS)
if posix_build == True:
_init_posix(_CONFIG_VARS)
# Setting 'userbase' is done below the call to the
# init function to enable using 'get_config_var' in
# the init-function.
_CONFIG_VARS['userbase'] = _getuserbase()
if 'srcdir' not in _CONFIG_VARS:
_CONFIG_VARS['srcdir'] = _PROJECT_BASE
# Convert srcdir into an absolute path if it appears necessary.
# Normally it is relative to the build directory. However, during
# testing, for example, we might be running a non-installed python
# from a different directory.
if _PYTHON_BUILD and posix_build == True:
base = _PROJECT_BASE
try:
cwd = os.getcwd()
except OSError:
cwd = None
if (not os.path.isabs(_CONFIG_VARS['srcdir']) and
base != cwd):
# srcdir is relative and we are not in the same directory
# as the executable. Assume executable is in the build
# directory and make srcdir absolute.
srcdir = os.path.join(base, _CONFIG_VARS['srcdir'])
_CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)
# OS X platforms require special customization to handle
# multi-architecture, multi-os-version installers
if sys.platform == 'darwin':
import _osx_support
_osx_support.customize_config_vars(_CONFIG_VARS)
if args:
vals = []
for name in args:
vals.append(_CONFIG_VARS.get(name))
return vals
else:
return _CONFIG_VARS
def get_config_var(name):
"""Return the value of a single variable using the dictionary returned by
'get_config_vars()'.
Equivalent to get_config_vars().get(name)
"""
return get_config_vars().get(name)
def get_platform():
"""Return a string that identifies the current platform.
This is used mainly to distinguish platform-specific build directories and
platform-specific built distributions. Typically includes the OS name
and version and the architecture (as supplied by 'os.uname()'),
although the exact information included depends on the OS; eg. for IRIX
the architecture isn't particularly important (IRIX only runs on SGI
hardware), but for Linux the kernel version isn't particularly
important.
Examples of returned values:
linux-i586
linux-alpha (?)
solaris-2.6-sun4u
irix-5.3
irix64-6.2
Windows will return one of:
win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
win-ia64 (64bit Windows on Itanium)
win32 (all others - specifically, sys.platform is returned)
For other non-POSIX platforms, currently just returns 'sys.platform'.
"""
import re
if os.name == 'nt':
# sniff sys.version for architecture.
prefix = " bit ("
i = sys.version.find(prefix)
if i == -1:
return sys.platform
j = sys.version.find(")", i)
look = sys.version[i+len(prefix):j].lower()
if look == 'amd64':
return 'win-amd64'
if look == 'itanium':
return 'win-ia64'
return sys.platform
# Set for cross builds explicitly
if "_PYTHON_HOST_PLATFORM" in os.environ:
return os.environ["_PYTHON_HOST_PLATFORM"]
if os.name != "posix" or not hasattr(os, 'uname'):
# XXX what about the architecture? NT is Intel or Alpha,
# Mac OS is M68k or PPC, etc.
return sys.platform
# Try to distinguish various flavours of Unix
osname, host, release, version, machine = os.uname()
# Convert the OS name to lowercase, remove '/' characters
# (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
osname = osname.lower().replace('/', '')
machine = machine.replace(' ', '_')
machine = machine.replace('/', '-')
if osname[:5] == "linux":
# At least on Linux/Intel, 'machine' is the processor --
# i386, etc.
# XXX what about Alpha, SPARC, etc?
return "%s-%s" % (osname, machine)
elif osname[:5] == "sunos":
if release[0] >= "5": # SunOS 5 == Solaris 2
osname = "solaris"
release = "%d.%s" % (int(release[0]) - 3, release[2:])
        # We can't use "platform.architecture()[0]" because of a
        # bootstrap problem. We use a dict to get an error
        # if something suspicious happens.
bitness = {2147483647:"32bit", 9223372036854775807:"64bit"}
machine += ".%s" % bitness[sys.maxint]
# fall through to standard osname-release-machine representation
elif osname[:4] == "irix": # could be "irix64"!
return "%s-%s" % (osname, release)
elif osname[:3] == "aix":
return "%s-%s.%s" % (osname, version, release)
elif osname[:6] == "cygwin":
osname = "cygwin"
rel_re = re.compile (r'[\d.]+')
m = rel_re.match(release)
if m:
release = m.group()
elif osname[:6] == "darwin":
import _osx_support
osname, release, machine = _osx_support.get_platform_osx(
get_config_vars(),
osname, release, machine)
return "%s-%s-%s" % (osname, release, machine)
def get_python_version():
return _PY_VERSION_SHORT
def _print_dict(title, data):
for index, (key, value) in enumerate(sorted(data.items())):
if index == 0:
print '%s: ' % (title)
print '\t%s = "%s"' % (key, value)
def _main():
"""Display all information sysconfig detains."""
if '--generate-posix-vars' in sys.argv:
_generate_posix_vars()
return
print 'Platform: "%s"' % get_platform()
print 'Python version: "%s"' % get_python_version()
print 'Current installation scheme: "%s"' % _get_default_scheme()
print
_print_dict('Paths', get_paths())
print
_print_dict('Variables', get_config_vars())
if __name__ == '__main__':
_main()
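# Typical read-only use of this module (an illustrative addition, kept as
# comments so the module body is unchanged at import time):
#
#     import sysconfig
#     print sysconfig.get_config_var('LIBDEST')      # Python 2 syntax
#     print sysconfig.get_paths()['purelib']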
|
qenter/vlc-android
|
toolchains/arm/lib/python2.7/sysconfig.py
|
Python
|
gpl-2.0
| 23,143
|
"""The setup for our data_structures project."""
from setuptools import setup
setup(
name="Data structures",
description="Implementations of various data structures in Python",
    version="0.1",
author="Maelle Vance, Sera Smith, Ben Shields, Joey DeRosa",
author_email="maellevance@gmail.com, seras37@gmail.com",
license="MIT",
py_modules=[
'linked_list',
'stack',
'dll',
'queue',
'deque',
'priority_queue',
'binheap',
'simple_graph',
'trie_tree',
'binary_search_tree',
'hash_table',
'merge_sort',
'insertion_sort',
'quick_sort'
],
package_dir={'': 'src'},
install_requires=[],
extras_require={
"test": ["tox", "pytest", "pytest-cov"]
},
)
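# Development install (an illustrative note): the "test" extra defined above
# pulls in the test toolchain, e.g. `pip install -e ".[test]"` from the
# repository root.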
|
ellezv/data_structures
|
setup.py
|
Python
|
mit
| 826
|
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/svn/docs/wafbook/single.html#_obtaining_the_waf_file
from waflib import TaskGen,Task,Utils
from waflib.Tools import c_preproc
from waflib.Tools.ccroot import link_task,stlink_task
def c_hook(self,node):
return self.create_compiled_task('c',node)
class c(Task.Task):
run_str='${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
vars=['CCDEPS']
ext_in=['.h']
scan=c_preproc.scan
Task.classes['cc']=cc=c
class cprogram(link_task):
run_str='${LINK_CC} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LINKFLAGS}'
ext_out=['.bin']
inst_to='${BINDIR}'
chmod=Utils.O755
class cshlib(cprogram):
inst_to='${LIBDIR}'
class cstlib(stlink_task):
pass
TaskGen.extension('.c')(c_hook)
|
drayside/kodkod
|
libs/.waf-1.6.6-c57dd0fa119e23d36c23d598487c6880/waflib/Tools/c.py
|
Python
|
mit
| 1,096
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plataforma_fadcanic.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
CARocha/plataforma_fadcanic
|
manage.py
|
Python
|
mit
| 262
|
from student.roles import CourseStaffRole, CourseInstructorRole, CourseCreatorRole
from student.models import CourseAccessRole
# The role names for OrgStaffRole and OrgInstructorRole are hardcoded in their __init__ methods ("staff" and "instructor").
LIBRARY_CREATE_ROLES = [
'staff',
'instructor',
CourseInstructorRole.ROLE,
CourseStaffRole.ROLE,
CourseCreatorRole.ROLE
]
def can_create_library(user):
qs = CourseAccessRole.objects.filter(user_id=user.id, role__in=LIBRARY_CREATE_ROLES)
is_library_creator = qs.exists()
return is_library_creator
|
miptliot/edx-platform
|
openedx/eduscaled/cms/utils.py
|
Python
|
agpl-3.0
| 566
|
# -*- coding: utf-8 -*-
"""The FVDE file entry implementation."""
from dfvfs.lib import definitions
from dfvfs.lib import errors
from dfvfs.vfs import root_only_file_entry
from dfvfs.vfs import vfs_stat
class FVDEFileEntry(root_only_file_entry.RootOnlyFileEntry):
"""Class that implements a file entry object using FVDE."""
TYPE_INDICATOR = definitions.TYPE_INDICATOR_FVDE
def _GetStat(self):
"""Retrieves the stat object.
Returns:
VFSStat: stat object.
Raises:
BackEndError: when the FVDE file is missing.
"""
fvde_volume = self._file_system.GetFVDEVolume()
if fvde_volume is None:
raise errors.BackEndError(u'Missing FVDE volume.')
stat_object = vfs_stat.VFSStat()
# File data stat information.
stat_object.size = fvde_volume.get_size()
# Date and time stat information.
# Ownership and permissions stat information.
# File entry type stat information.
stat_object.type = stat_object.TYPE_FILE
# Other stat information.
return stat_object
|
dc3-plaso/dfvfs
|
dfvfs/vfs/fvde_file_entry.py
|
Python
|
apache-2.0
| 1,039
|
"""User containers module."""
from dependency_injector import containers, providers
from . import entities, repositories
class UserContainer(containers.DeclarativeContainer):
database = providers.Dependency()
user = providers.Factory(entities.User)
user_repository = providers.Singleton(
repositories.UserRepository,
entity_factory=user.provider,
db=database,
)
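# A wiring sketch (an illustrative addition; sqlite3 stands in for the real
# database, which this container deliberately leaves abstract). The `database`
# Dependency provider must be supplied by the composition root before use.
if __name__ == '__main__':
    import sqlite3

    container = UserContainer()
    container.database.override(providers.Singleton(sqlite3.connect, ':memory:'))
    repository = container.user_repository()  # constructed once, then reused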
|
ets-labs/python-dependency-injector
|
examples/miniapps/decoupled-packages/example/user/containers.py
|
Python
|
bsd-3-clause
| 409
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
import models
import forms
import nested_admin
# Register your models here.
class CiudadAdmin(admin.ModelAdmin):
list_display = ['nombre']
form = forms.CiudadForm
#end class
class EmpresaAdmin(admin.ModelAdmin):
list_display = ['nit','first_name','direccion','ciudad']
form = forms.EmpresaForm
def get_form(self, request, obj=None, *args, **kwargs):
if obj:
kwargs['form'] = forms.EmpresaEditForm
# end if
return super(EmpresaAdmin, self).get_form(request, obj, *args, **kwargs)
# end def
#end class
class EmpresaInline(nested_admin.NestedStackedInline):
model = models.Empresa
form = forms.EmpresaForm
def get_form(self, request, obj=None, *args, **kwargs):
if obj:
kwargs['form'] = forms.EmpresaEditForm
# end if
return super(EmpresaInline, self).get_form(request, obj, *args, **kwargs)
# end def
class SupervisorAdmin(admin.ModelAdmin):
list_display = ['identificacion','first_name','last_name','direccion','ciudad']
form = forms.SupervisorForm
filter_horizontal =['empresas']
#inlines = [EmpresaInline]
def get_form(self, request, obj=None, *args, **kwargs):
if obj:
kwargs['form'] = forms.SupervisorEditForm
# end if
return super(SupervisorAdmin, self).get_form(request, obj, *args, **kwargs)
# end def
#end class
class TiendaAdmin(admin.ModelAdmin):
list_display = ['nombre', 'direccion','referencia','empresa','ciudad']
form = forms.TiendaForm
#end class
admin.site.register(models.Ciudad, CiudadAdmin)
admin.site.register(models.Empresa, EmpresaAdmin)
admin.site.register(models.Tienda, TiendaAdmin)
admin.site.register(models.Supervisor, SupervisorAdmin)
|
darkdrei/GestionRegistro
|
empresa/admin.py
|
Python
|
mit
| 1,855
|
import concurrent.futures
import contextlib
import json
import logging
import random
import threading
import time
from collections import deque
from subprocess import check_output
import pytest
import requests
import retrying
from test_helpers import expanded_config
from test_util.marathon import get_test_app, get_test_app_in_docker, get_test_app_in_ucr
log = logging.getLogger(__name__)
timeout = 500
maxthreads = 16
backend_port_st = 8000
def lb_enabled():
return expanded_config['enable_lb'] == 'true'
@retrying.retry(wait_fixed=2000,
stop_max_delay=timeout * 1000,
retry_on_result=lambda ret: ret is False,
retry_on_exception=lambda x: True)
def ensure_routable(cmd, host, port):
proxy_uri = 'http://{}:{}/run_cmd'.format(host, port)
log.info('Sending {} data: {}'.format(proxy_uri, cmd))
r = requests.post(proxy_uri, data=cmd)
log.info('Requests Response: %s', repr(r.json()))
assert r.json()['status'] == 0
return json.loads(r.json()['output'])
class VipTest:
def __init__(self, num, container, vip, vipaddr, samehost, vipnet, proxynet):
self.num = num
self.vip = vip.format(num, 7000 + num)
self.vipaddr = vipaddr.format(num, 7000 + num)
self.container = container
self.samehost = samehost
self.vipnet = vipnet
self.proxynet = proxynet
self.notes = ""
def __str__(self):
return ('VipTest(container={}, vip={},vipaddr={},samehost={},'
'vipnet={},proxynet={}, notes={})').format(self.container, self.vip, self.vipaddr, self.samehost,
self.vipnet, self.proxynet, self.notes)
def log(self, s, lvl=logging.DEBUG):
m = 'VIP_TEST {} {}'.format(s, self)
log.log(lvl, m)
def docker_vip_app(network, host, vip):
# docker app definition defines its own healthchecks
app, uuid = get_test_app_in_docker()
app['id'] = '/viptest/' + app['id']
app['container']['docker']['network'] = network
app['mem'] = 16
app['cpu'] = 0.01
if network == 'HOST':
app['cmd'] = '/opt/mesosphere/bin/dcos-shell python '\
'/opt/mesosphere/active/dcos-integration-test/util/python_test_server.py $PORT0'
del app['container']['docker']['portMappings']
if vip is not None:
app['portDefinitions'] = [{'labels': {'VIP_0': vip}}]
else:
app['cmd'] = '/opt/mesosphere/bin/dcos-shell python '\
'/opt/mesosphere/active/dcos-integration-test/util/python_test_server.py 9080'
app['container']['docker']['portMappings'] = [{
'hostPort': 0,
'containerPort': 9080,
'protocol': 'tcp',
'name': 'test',
'labels': {}
}]
if vip is not None:
app['container']['docker']['portMappings'][0]['labels'] = {'VIP_0': vip}
if network == 'USER':
app['ipAddress'] = {'networkName': 'dcos'}
app['constraints'] = [['hostname', 'CLUSTER', host]]
return app, uuid
def mesos_vip_app(num, network, host, vip, ucr=False):
app = None
uuid = None
port = backend_port_st + num
if ucr is False:
app, uuid = get_test_app()
else:
app, uuid = get_test_app_in_ucr()
app['id'] = '/viptest/' + app['id']
app['mem'] = 16
app['cpu'] = 0.01
# define a health check that works with all the network options
app['healthChecks'] = [{
'protocol': 'MESOS_HTTP',
'path': '/ping',
'gracePeriodSeconds': 5,
'intervalSeconds': 10,
'timeoutSeconds': 10,
'maxConsecutiveFailures': 3,
}]
assert network != 'BRIDGE'
if network == 'HOST':
app['cmd'] = '/opt/mesosphere/bin/dcos-shell python '\
'/opt/mesosphere/active/dcos-integration-test/util/python_test_server.py $PORT0'
app['portDefinitions'] = [{
'protocol': 'tcp',
'port': 0
}]
if vip is not None:
app['portDefinitions'][0]['labels'] = {'VIP_0': vip}
app['healthChecks'][0]['portIndex'] = 0
if network == 'USER':
app['ipAddress'] = {
'discovery': {
'ports': [{
'protocol': 'tcp',
'name': 'test',
'number': port,
}]
}
}
app['cmd'] = '/opt/mesosphere/bin/dcos-shell python '\
'/opt/mesosphere/active/dcos-integration-test/util/python_test_server.py {}'.format(port)
app['ipAddress']['networkName'] = 'dcos'
if vip is not None:
app['ipAddress']['discovery']['ports'][0]['labels'] = {'VIP_0': vip}
app['healthChecks'][0]['port'] = port
app['portDefinitions'] = []
app['constraints'] = [['hostname', 'CLUSTER', host]]
log.info('app: {}'.format(json.dumps(app)))
return app, uuid
def vip_app(num, container, network, host, vip):
if container == 'UCR':
return mesos_vip_app(num, network, host, vip, ucr=True)
if container == 'DOCKER':
return docker_vip_app(network, host, vip)
    assert container == 'NONE', 'unknown container option {}'.format(container)
return mesos_vip_app(num, network, host, vip, ucr=False)
def vip_test(dcos_api_session, r):
r.log('START')
agents = list(dcos_api_session.all_slaves)
# make sure we can reproduce
random.seed(r.vip)
random.shuffle(agents)
host1 = agents[0]
host2 = agents[0]
if not r.samehost:
host2 = agents[1]
    log.debug('host1 is: {}'.format(host1))
    log.debug('host2 is: {}'.format(host2))
origin_app, app_uuid = vip_app(r.num, r.container, r.vipnet, host1, r.vip)
# allow to run on both public and private agents
origin_app['acceptedResourceRoles'] = ["*", "slave_public"]
proxy_app, _ = vip_app(r.num, r.container, r.proxynet, host2, None)
# allow to run on both public and private agents
proxy_app['acceptedResourceRoles'] = ["*", "slave_public"]
returned_uuid = None
with contextlib.ExitStack() as stack:
# try to avoid thundering herd
time.sleep(random.randint(0, 30))
stack.enter_context(dcos_api_session.marathon.deploy_and_cleanup(origin_app, timeout=timeout))
sp = stack.enter_context(dcos_api_session.marathon.deploy_and_cleanup(proxy_app, timeout=timeout))
proxy_host = sp[0].host
proxy_port = sp[0].port
if proxy_port == 0 and sp[0].ip is not None:
proxy_port = backend_port_st + r.num
proxy_host = sp[0].ip
log.info("proxy endpoints are {}".format(sp))
cmd = '/opt/mesosphere/bin/curl -s -f -m 5 http://{}/test_uuid'.format(r.vipaddr)
returned_uuid = ensure_routable(cmd, proxy_host, proxy_port)
log.debug('returned_uuid is: {}'.format(returned_uuid))
assert returned_uuid is not None
assert returned_uuid['test_uuid'] == app_uuid
r.log('PASSED')
@pytest.fixture
def reduce_logging():
start_log_level = logging.getLogger('test_util.marathon').getEffectiveLevel()
    # raise the level to WARNING to mute it, as it's currently at INFO
logging.getLogger('test_util.marathon').setLevel(logging.WARNING)
yield
logging.getLogger('test_util.marathon').setLevel(start_log_level)
@pytest.mark.skipif(not lb_enabled(), reason='Load Balancer disabled')
def test_vip(dcos_api_session, reduce_logging):
'''Test VIPs between the following source and destination configurations:
* containers: DOCKER, UCR and NONE
* networks: USER, BRIDGE (docker only), HOST
    * agents: source and destinations on same agent or different agents
* vips: named and unnamed vip
'''
addrs = [['1.1.1.{}:{}', '1.1.1.{}:{}'],
['/namedvip{}:{}', 'namedvip{}.marathon.l4lb.thisdcos.directory:{}']]
# tests
# UCR doesn't support BRIDGE mode
permutations = [[c, vi, va, sh, vn, pn]
for c in ['NONE', 'UCR', 'DOCKER']
for [vi, va] in addrs
for sh in [True, False]
for vn in ['USER', 'BRIDGE', 'HOST']
for pn in ['USER', 'BRIDGE', 'HOST']]
tests = [VipTest(i, c, vi, va, sh, vn, pn) for i, [c, vi, va, sh, vn, pn] in enumerate(permutations)]
executor = concurrent.futures.ThreadPoolExecutor(max_workers=maxthreads)
# deque is thread safe
failed_tests = deque(tests)
passed_tests = deque()
skipped_tests = deque()
# skip certain tests
for r in tests:
if r.container == 'UCR' or r.container == 'NONE':
if r.vipnet == 'BRIDGE' or r.proxynet == 'BRIDGE':
r.notes = "bridge networks are not supported by mesos runtime"
failed_tests.remove(r)
skipped_tests.append(r)
continue
if not r.samehost and len(dcos_api_session.all_slaves) == 1:
r.notes = "needs more then 1 agent to run"
failed_tests.remove(r)
skipped_tests.append(r)
def run(test):
vip_test(dcos_api_session, test)
failed_tests.remove(test)
passed_tests.append(test)
tasks = [executor.submit(run, t) for t in failed_tests]
for t in concurrent.futures.as_completed(tasks):
try:
t.result()
except Exception as exc:
            # just log the exception; each failed test is recorded in the `failed_tests` deque
log.info('vip_test generated an exception: {}'.format(exc))
[r.log('PASSED', lvl=logging.INFO) for r in passed_tests]
[r.log('SKIPPED', lvl=logging.INFO) for r in skipped_tests]
[r.log('FAILED', lvl=logging.INFO) for r in failed_tests]
log.info('VIP_TEST num agents: {}'.format(len(dcos_api_session.all_slaves)))
assert len(failed_tests) == 0
@retrying.retry(wait_fixed=2000,
stop_max_delay=timeout * 1000,
retry_on_exception=lambda x: True)
def test_if_overlay_ok(dcos_api_session):
def _check_overlay(hostname, port):
overlays = dcos_api_session.get('overlay-agent/overlay', host=hostname, port=port).json()['overlays']
assert len(overlays) > 0
for overlay in overlays:
assert overlay['state']['status'] == 'STATUS_OK'
for master in dcos_api_session.masters:
_check_overlay(master, 5050)
for slave in dcos_api_session.all_slaves:
_check_overlay(slave, 5051)
@pytest.mark.skipif(lb_enabled(), reason='Load Balancer enabled')
def test_if_minuteman_disabled(dcos_api_session):
'''Test to make sure minuteman is disabled'''
data = check_output(['/usr/bin/env', 'ip', 'rule'])
# Minuteman creates this ip rule: `9999: from 9.0.0.0/8 lookup 42`
# We check it doesn't exist
assert str(data).find('9999') == -1
def test_ip_per_container(dcos_api_session):
'''Test if we are able to connect to a task with ip-per-container mode
'''
# Launch the test_server in ip-per-container mode
app_definition, test_uuid = get_test_app_in_docker(ip_per_container=True)
assert len(dcos_api_session.slaves) >= 2, 'IP Per Container tests require 2 private agents to work'
app_definition['instances'] = 2
app_definition['constraints'] = [['hostname', 'UNIQUE']]
with dcos_api_session.marathon.deploy_and_cleanup(app_definition, check_health=True) as service_points:
app_port = app_definition['container']['docker']['portMappings'][0]['containerPort']
cmd = '/opt/mesosphere/bin/curl -s -f -m 5 http://{}:{}/ping'.format(service_points[1].ip, app_port)
ensure_routable(cmd, service_points[0].host, service_points[0].port)
@retrying.retry(wait_fixed=2000,
stop_max_delay=100 * 2000,
retry_on_exception=lambda x: True)
def geturl(url):
rs = requests.get(url)
assert rs.status_code == 200
r = rs.json()
log.info('geturl {} -> {}'.format(url, r))
return r
@pytest.mark.skipif(not lb_enabled(), reason='Load Balancer disabled')
def test_l4lb(dcos_api_session):
'''Test l4lb is load balancing between all the backends
* create 5 apps using the same VIP
* get uuid from the VIP in parallel from many threads
* verify that 5 uuids have been returned
* only testing if all 5 are hit at least once
'''
numapps = 5
numthreads = numapps * 4
apps = []
rvs = deque()
with contextlib.ExitStack() as stack:
for _ in range(numapps):
origin_app, origin_uuid = get_test_app()
# same vip for all the apps
origin_app['portDefinitions'][0]['labels'] = {'VIP_0': '/l4lbtest:5000'}
apps.append(origin_app)
sp = stack.enter_context(dcos_api_session.marathon.deploy_and_cleanup(origin_app))
# make sure that the service point responds
geturl('http://{}:{}/ping'.format(sp[0].host, sp[0].port))
# make sure that the VIP is responding too
geturl('http://l4lbtest.marathon.l4lb.thisdcos.directory:5000/ping')
# do many requests in parallel.
def thread_request():
# deque is thread safe
rvs.append(geturl('http://l4lbtest.marathon.l4lb.thisdcos.directory:5000/test_uuid'))
threads = [threading.Thread(target=thread_request) for i in range(0, numthreads)]
for t in threads:
t.start()
for t in threads:
t.join()
expected_uuids = [a['id'].split('-')[2] for a in apps]
received_uuids = [r['test_uuid'] for r in rvs if r is not None]
assert len(set(expected_uuids)) == numapps
assert len(set(received_uuids)) == numapps
assert set(expected_uuids) == set(received_uuids)
|
darkonie/dcos
|
packages/dcos-integration-test/extra/test_networking.py
|
Python
|
apache-2.0
| 13,788
|
#!/usr/bin/env python3
import os
import glob
from shutil import copytree, copy2, move
from xml.etree import ElementTree
from pathlib import Path
from scripts.build_env import BuildEnv, Platform
from scripts.platform_builder import PlatformBuilder
class zeromqWindowsBuilder(PlatformBuilder):
def __init__(self,
config_package: dict=None,
config_platform: dict=None):
super().__init__(config_package, config_platform)
def pre(self):
super().pre()
subpkg_url = 'https://github.com/zeromq/cppzmq/archive/v4.3.0.tar.gz'
subpkg_name = 'cppzmq'
subpkg_archive = 'cppzmq-4.3.0.tar.gz'
self.tag_log("[CPPZMQ] Preparing sub package")
self.env.download_file(subpkg_url, subpkg_archive)
self.env.extract_tarball(subpkg_archive, subpkg_name)
def build(self):
super().build()
self.build_libzmq()
def post(self):
super().post()
if self.env.BUILD_TYPE == 'Debug':
self.tag_log("Renaming built libraries ..")
if os.path.exists(Path(f'{self.env.install_lib_path}\\libzmq-v141-mt-sgd-4_3_1.lib')):
move(f'{self.env.install_lib_path}\\libzmq-v141-mt-sgd-4_3_1.lib',
f'{self.env.install_lib_path}\\libzmq.lib')
def build_libzmq(self):
# Build zeromq
# build_path = Path('{}/{}/builds/msvc/vs2015'.format(
build_path = Path('{}/{}/build'.format(
self.env.source_path,
self.config['name']
))
_check = self.env.install_lib_path / self.config.get("checker")
if os.path.exists(_check):
self.tag_log("Already built.")
return
self.tag_log("Start building ..")
self.env.mkdir_p(build_path)
os.chdir(build_path)
# CMake build
cmd = '''cmake .. -A x64 \
-D POLLER="" \
-D WITH_LIBSODIUM=OFF \
-DWITH_PERF_TOOL=OFF \
-DZMQ_BUILD_TESTS=OFF \
-DCMAKE_INSTALL_PREFIX={} \
'''.format(
self.env.install_path
)
self.log('\n '.join(f' [CMD]:: {cmd}'.split()))
self.env.run_command(cmd, module_name='cppzmq')
BuildEnv.patch_static_MSVC(Path(f'{build_path}/libzmq-static.vcxproj'), self.env.BUILD_TYPE)
cmd = '''msbuild ZeroMQ.sln \
/maxcpucount:{} \
/t:libzmq-static \
/p:Option-sodium=false \
/p:PlatformToolSet={} \
/p:Configuration={} \
/p:Platform=x64 \
/p:OutDir={}\\ \
'''.format(self.env.NJOBS,
self.env.compiler_version, self.env.BUILD_TYPE,
self.env.install_lib_path)
# TODO: Apply CMake installation
# cmd = '''cmake --build . \
# -j {} \
# --config {} \
# --target install \
# '''.format(
# self.env.NJOBS,
# self.env.BUILD_TYPE
# )
self.log('\n '.join(f' [CMD]:: {cmd}'.split()))
self.env.run_command(cmd, module_name=self.config['name'])
# Rename to 'libzmq.lib'
os.chdir(self.env.install_lib_path)
for proj in glob.glob(r'libzmq*.lib'):
self.tag_log(f' Patching [{proj}]')
os.renames(proj, 'libzmq.lib')
def patch_libzmq_prop(self, path):
msvc_ns_prefix = "{http://schemas.microsoft.com/developer/msbuild/2003}"
ElementTree.register_namespace('', "http://schemas.microsoft.com/developer/msbuild/2003")
tree = ElementTree.parse(path)
root = tree.getroot()
        prop_groups = root.findall(msvc_ns_prefix + "PropertyGroup")
        for child in prop_groups:
            try:
                item = child.find(msvc_ns_prefix + "Linkage-libsodium")
                item.text = ""
            except AttributeError:
                # find() returned None: this group has no libsodium linkage.
                pass
            self.tag_log("Patched")
tree.write(path, encoding="utf-8", xml_declaration=True)
|
lovewinds/story-project
|
external/scripts/packages/zeromq/windows.py
|
Python
|
gpl-2.0
| 4,234
|
#!/usr/bin/python3
## system-config-printer
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2014, 2015 Red Hat, Inc.
## Copyright (C) 2006 Florian Festi <ffesti@redhat.com>
## Copyright (C) 2006, 2007, 2008, 2009 Tim Waugh <twaugh@redhat.com>
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import cups
from .cupshelpers import parseDeviceID
from . import xmldriverprefs
import itertools
import string
import time
import locale
import os.path
import functools
import re
from . import _debugprint, set_debugprint_fn
from functools import reduce
__all__ = ['ppdMakeModelSplit',
'PPDs']
_MFR_BY_RANGE = [
# Fill in missing manufacturer names based on model name
("HP", re.compile("deskjet"
"|dj[ 0-9]?"
"|laserjet"
"|lj"
"|color laserjet"
"|color lj"
"|designjet"
"|officejet"
"|oj"
"|photosmart"
"|ps "
"|psc"
"|edgeline")),
("Epson", re.compile("stylus|aculaser")),
("Apple", re.compile("stylewriter"
"|imagewriter"
"|deskwriter"
"|laserwriter")),
("Canon", re.compile("pixus"
"|pixma"
"|selphy"
"|imagerunner"
"|bj"
"|lbp")),
("Brother", re.compile("hl|dcp|mfc")),
("Xerox", re.compile("docuprint"
"|docupage"
"|phaser"
"|workcentre"
"|homecentre")),
("Lexmark", re.compile("optra|(:color )?jetprinter")),
("KONICA MINOLTA", re.compile("magicolor"
"|pageworks"
"|pagepro")),
("Kyocera", re.compile("fs-"
"|km-"
"|taskalfa")),
("Ricoh", re.compile("aficio")),
("Oce", re.compile("varioprint")),
("Oki", re.compile("okipage|microline"))
]
_MFR_NAMES_BY_LOWER = {}
for mfr, regexp in _MFR_BY_RANGE:
_MFR_NAMES_BY_LOWER[mfr.lower ()] = mfr
_HP_MODEL_BY_NAME = {
"dj": "DeskJet",
"lj": "LaserJet",
"oj": "OfficeJet",
"color lj": "Color LaserJet",
"ps ": "PhotoSmart",
"hp ": ""
}
_RE_turboprint = re.compile ("turboprint")
_RE_version_numbers = re.compile (r" v(?:er\.)?\d(?:\d*\.\d+)?(?: |$)")
_RE_ignore_suffix = re.compile (","
"| hpijs"
"| foomatic/"
"| - "
"| w/"
"| \\("
"| postscript"
"| ps"
"| pdf"
"| pxl"
"| zjs" # hpcups
"| zxs" # hpcups
"| pcl3" # hpcups
"| printer" # hpcups
"|_bt"
"| pcl" # Canon CQue
"| ufr ii" # Canon UFR II
"| br-script" # Brother PPDs
)
_RE_ignore_series = re.compile (" series| all-in-one", re.I)
def ppdMakeModelSplit (ppd_make_and_model):
"""
Split a ppd-make-and-model string into a canonical make and model pair.
@type ppd_make_and_model: string
@param ppd_make_and_model: IPP ppd-make-and-model attribute
@return: a string pair representing the make and the model
"""
# If the string starts with a known model name (like "LaserJet") assume
# that the manufacturer name is missing and add the manufacturer name
# corresponding to the model name
    ppd_make_and_model = ppd_make_and_model.strip ()
make = None
cleanup_make = False
l = ppd_make_and_model.lower ()
for mfr, regexp in _MFR_BY_RANGE:
if regexp.match (l):
make = mfr
model = ppd_make_and_model
break
# Handle PPDs provided by Turboprint
if make is None and _RE_turboprint.search (l):
t = ppd_make_and_model.find (" TurboPrint")
if t != -1:
t2 = ppd_make_and_model.rfind (" TurboPrint")
if t != t2:
ppd_make_and_model = ppd_make_and_model[t + 12:t2]
else:
ppd_make_and_model = ppd_make_and_model[:t]
        try:
            make, model = ppd_make_and_model.split("_", 1)
        except ValueError:
            make = ppd_make_and_model
            model = ''
make = re.sub (r"(?<=[a-z])(?=[0-9])", " ", make)
make = re.sub (r"(?<=[a-z])(?=[A-Z])", " ", make)
model = re.sub (r"(?<=[a-z])(?=[0-9])", " ", model)
model = re.sub (r"(?<=[a-z])(?=[A-Z])", " ", model)
model = re.sub (r" Jet", "Jet", model)
model = re.sub (r"Photo Smart", "PhotoSmart", model)
cleanup_make = True
# Special handling for two-word manufacturers
elif l.startswith ("konica minolta "):
make = "KONICA MINOLTA"
model = ppd_make_and_model[15:]
elif l.startswith ("lexmark international "):
make = "Lexmark"
model = ppd_make_and_model[22:]
elif l.startswith ("kyocera mita "):
make = "Kyocera"
model = ppd_make_and_model[13:]
elif l.startswith ("kyocera "):
make = "Kyocera"
model = ppd_make_and_model[8:]
elif l.startswith ("fuji xerox "):
make = "Fuji Xerox"
model = ppd_make_and_model[11:]
# Finally, take the first word as the name of the manufacturer.
else:
cleanup_make = True
        try:
            make, model = ppd_make_and_model.split(" ", 1)
        except ValueError:
            make = ppd_make_and_model
            model = ''
# Standardised names for manufacturers.
makel = make.lower ()
if cleanup_make:
if (makel.startswith ("hewlett") and
makel.endswith ("packard")):
make = "HP"
makel = "hp"
elif (makel.startswith ("konica") and
makel.endswith ("minolta")):
make = "KONICA MINOLTA"
makel = "konica minolta"
else:
# Fix case errors.
mfr = _MFR_NAMES_BY_LOWER.get (makel)
if mfr:
make = mfr
# HP and Canon PostScript PPDs give NickNames like:
# *NickName: "HP LaserJet 4 Plus v2013.111 Postscript (recommended)"
# *NickName: "Canon MG4100 series Ver.3.90"
# Find the version number and truncate at that point. But beware,
# other model names can legitimately look like version numbers,
# e.g. Epson PX V500.
# Truncate only if the version number has only one digit, or a dot
# with digits before and after.
modell = model.lower ()
v = modell.find (" v")
if v != -1:
# Look for " v" or " ver." followed by a digit, optionally
# followed by more digits, a dot, and more digits; and
# terminated by a space of the end of the line.
vmatch = _RE_version_numbers.search (modell)
if vmatch:
# Found it -- truncate at that point.
vstart = vmatch.start ()
modell = modell[:vstart]
model = model[:vstart]
suffix = _RE_ignore_suffix.search (modell)
if suffix:
suffixstart = suffix.start ()
modell = modell[:suffixstart]
model = model[:suffixstart]
# Remove the word "Series" if present. Some models are referred
# to as e.g. HP OfficeJet Series 300 (from hpcups, and in the
# Device IDs of such models), and other groups of models are
# referred to in drivers as e.g. Epson Stylus Color Series (CUPS).
(model, n) = _RE_ignore_series.subn ("", model, count=1)
if n:
modell = model.lower ()
if makel == "hp":
for name, fullname in _HP_MODEL_BY_NAME.items ():
if modell.startswith (name):
model = fullname + model[len (name):]
modell = model.lower ()
break
model = model.strip ()
return (make, model)
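# Illustrative examples (assumed behaviour, not executable doctests):
#   ppdMakeModelSplit ("HP LaserJet 4 Plus v2013.111 Postscript (recommended)")
#     -> ("HP", "LaserJet 4 Plus")
#   ppdMakeModelSplit ("deskjet 990c")
#     -> ("HP", "deskjet 990c")   # manufacturer filled in from the model name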
def normalize (strin):
"""
This function normalizes manufacturer and model names for comparing.
The string is turned to lower case and leading and trailing white
space is removed. After that each sequence of non-alphanumeric
characters (including white space) is replaced by a single space and
also at each change between letters and numbers a single space is added.
This makes the comparison only done by alphanumeric characters and the
words formed from them. So mostly two strings which sound the same when
you pronounce them are considered equal. Printer manufacturers do not
market two models whose names sound the same but differ only by
upper/lower case, spaces, dashes, ..., but in printer drivers names can
be easily supplied with these details of the name written in the wrong
way, especially if the IEEE-1284 device ID of the printer is not known.
This way we get a very reliable matching of printer model names.
Examples:
- Epson PM-A820 -> epson pm a 820
- Epson PM A820 -> epson pm a 820
- HP PhotoSmart C 8100 -> hp photosmart c 8100
- hp Photosmart C8100 -> hp photosmart c 8100
@type strin: string that can be the make or the model
@return: a normalized lowercase string in which punctuations have been replaced with spaces.
"""
lstrin = strin.strip ().lower ()
normalized = ""
BLANK=0
ALPHA=1
DIGIT=2
lastchar = BLANK
alnumfound = False
for i in range (len (lstrin)):
if lstrin[i].isalpha ():
if lastchar != ALPHA and alnumfound:
normalized += " ";
lastchar = ALPHA
elif lstrin[i].isdigit ():
if lastchar != DIGIT and alnumfound:
normalized += " ";
lastchar = DIGIT
else:
lastchar = BLANK
if lstrin[i].isalnum ():
normalized += lstrin[i]
alnumfound = True
return normalized
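# A compact regex-based sketch that is (approximately) equivalent to
# normalize () above -- an illustration only, not used by this module:
# collapse runs of non-alphanumerics to single spaces, then split at
# every letter/digit boundary.
def _normalize_re (strin):
    s = re.sub (r"[^a-z0-9]+", " ", strin.strip ().lower ())
    s = re.sub (r"(?<=[a-z])(?=[0-9])|(?<=[0-9])(?=[a-z])", " ", s)
    return s.strip ()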
def _singleton (x):
"""If we don't know whether getPPDs() or getPPDs2() was used, this
function can unwrap an item from a list in either case."""
if isinstance (x, list):
return x[0]
return x
class PPDs:
"""
This class is for handling the list of PPDs returned by CUPS. It
indexes by PPD name and device ID, filters by natural language so
that foreign-language PPDs are not included, and sorts by driver
type. If an exactly-matching PPD is not available, it can
substitute with a PPD for a similar model or for a generic driver.
"""
# Status of match.
STATUS_SUCCESS = 0
STATUS_MODEL_MISMATCH = 1
STATUS_GENERIC_DRIVER = 2
STATUS_NO_DRIVER = 3
FIT_EXACT_CMD = xmldriverprefs.DriverType.FIT_EXACT_CMD
FIT_EXACT = xmldriverprefs.DriverType.FIT_EXACT
FIT_CLOSE = xmldriverprefs.DriverType.FIT_CLOSE
FIT_GENERIC = xmldriverprefs.DriverType.FIT_GENERIC
FIT_NONE = xmldriverprefs.DriverType.FIT_NONE
_fit_to_status = { FIT_EXACT_CMD: STATUS_SUCCESS,
FIT_EXACT: STATUS_SUCCESS,
FIT_CLOSE: STATUS_MODEL_MISMATCH,
FIT_GENERIC: STATUS_GENERIC_DRIVER,
FIT_NONE: STATUS_NO_DRIVER }
def __init__ (self, ppds, language=None, xml_dir=None):
"""
@type ppds: dict
@param ppds: dict of PPDs as returned by cups.Connection.getPPDs()
or cups.Connection.getPPDs2()
@type language: string
@param language: language name, as given by the first element
of the pair returned by locale.getlocale()
"""
self.ppds = ppds.copy ()
self.makes = None
self.ids = None
self.drivertypes = xmldriverprefs.DriverTypes ()
self.preforder = xmldriverprefs.PreferenceOrder ()
if xml_dir is None:
xml_dir = os.environ.get ("CUPSHELPERS_XMLDIR")
if xml_dir is None:
from . import config
xml_dir = os.path.join (config.sysconfdir, "cupshelpers")
try:
xmlfile = os.path.join (xml_dir, "preferreddrivers.xml")
(drivertypes, preferenceorder) = \
xmldriverprefs.PreferredDrivers (xmlfile)
self.drivertypes.load (drivertypes)
self.preforder.load (preferenceorder)
except Exception as e:
print("Error loading %s: %s" % (xmlfile, e))
self.drivertypes = None
self.preforder = None
if (language is None or
language == "C" or
language == "POSIX"):
language = "en_US"
u = language.find ("_")
if u != -1:
short_language = language[:u]
else:
short_language = language
to_remove = []
for ppdname, ppddict in self.ppds.items ():
try:
natural_language = _singleton (ppddict['ppd-natural-language'])
except KeyError:
continue
if natural_language == "en":
                # Some manufacturers' PPDs are only available in this
                # language, so always let them through.
continue
if natural_language == language:
continue
if natural_language == short_language:
continue
to_remove.append (ppdname)
for ppdname in to_remove:
del self.ppds[ppdname]
# CUPS sets the 'raw' model's ppd-make-and-model to 'Raw Queue'
# which unfortunately then appears as manufacturer Raw and
# model Queue. Use 'Generic' for this model.
if 'raw' in self.ppds:
makemodel = _singleton (self.ppds['raw']['ppd-make-and-model'])
if not makemodel.startswith ("Generic "):
self.ppds['raw']['ppd-make-and-model'] = "Generic " + makemodel
def getMakes (self):
"""
@returns: a list of strings representing makes, sorted according
to the current locale
"""
self._init_makes ()
makes_list = list(self.makes.keys ())
makes_list.sort (key=locale.strxfrm)
try:
# "Generic" should be listed first.
makes_list.remove ("Generic")
makes_list.insert (0, "Generic")
except ValueError:
pass
return makes_list
def getModels (self, make):
"""
@returns: a list of strings representing models, sorted using
cups.modelSort()
"""
self._init_makes ()
try:
models_list = list(self.makes[make].keys ())
except KeyError:
return []
def compare_models (a,b):
first = normalize (a)
second = normalize (b)
return cups.modelSort(first, second)
models_list.sort(key=functools.cmp_to_key(compare_models))
return models_list
def getInfoFromModel (self, make, model):
"""
Obtain a list of PPDs that are suitable for use with a
particular printer model, given its make and model name.
@returns: a dict, indexed by ppd-name, of dicts representing
PPDs (as given by cups.Connection.getPPDs)
"""
self._init_makes ()
try:
return self.makes[make][model]
except KeyError:
return {}
def getInfoFromPPDName (self, ppdname):
"""
@returns: a dict representing a PPD, as given by
cups.Connection.getPPDs
"""
return self.ppds[ppdname]
def getStatusFromFit (self, fit):
        return self._fit_to_status.get (fit, self.STATUS_NO_DRIVER)
def orderPPDNamesByPreference (self, ppdnamelist=None,
downloadedfiles=None,
make_and_model=None,
devid=None, fit=None):
"""
Sort a list of PPD names by preferred driver type.
@param ppdnamelist: PPD names
@type ppdnamelist: string list
@param downloadedfiles: Filenames from packages downloaded
@type downloadedfiles: string list
@param make_and_model: device-make-and-model name
@type make_and_model: string
@param devid: Device ID dict
@type devid: dict indexed by Device ID field name, of strings;
except for CMD field which must be a string list
@param fit: Driver fit string for each PPD name
@type fit: dict of PPD name:fit
@returns: string list
"""
if ppdnamelist is None:
ppdnamelist = []
if downloadedfiles is None:
downloadedfiles = []
if fit is None:
fit = {}
if self.drivertypes and self.preforder:
ppds = {}
for ppdname in ppdnamelist:
ppds[ppdname] = self.ppds[ppdname]
orderedtypes = self.preforder.get_ordered_types (self.drivertypes,
make_and_model,
devid)
_debugprint("Valid driver types for this printer in priority order: %s" % repr(orderedtypes))
orderedppds = self.drivertypes.get_ordered_ppdnames (orderedtypes,
ppds, fit)
_debugprint("PPDs with assigned driver types in priority order: %s" % repr(orderedppds))
ppdnamelist = [typ_name[1] for typ_name in orderedppds]
_debugprint("Resulting PPD list in priority order: %s" % repr(ppdnamelist))
# Special handling for files we've downloaded. First collect
# their basenames.
downloadedfnames = set()
for downloadedfile in downloadedfiles:
(path, slash, fname) = downloadedfile.rpartition ("/")
downloadedfnames.add (fname)
if downloadedfnames:
# Next compare the basenames of each ppdname
downloadedppdnames = []
for ppdname in ppdnamelist:
(path, slash, ppdfname) = ppdname.rpartition ("/")
if ppdfname in downloadedfnames:
downloadedppdnames.append (ppdname)
# Finally, promote the matching ones to the head of the list.
if downloadedppdnames:
for ppdname in ppdnamelist:
if ppdname not in downloadedppdnames:
downloadedppdnames.append (ppdname)
ppdnamelist = downloadedppdnames
return ppdnamelist
def getPPDNamesFromDeviceID (self, mfg, mdl, description="",
commandsets=None, uri=None,
make_and_model=None):
"""
Obtain a best-effort PPD match for an IEEE 1284 Device ID.
@param mfg: MFG or MANUFACTURER field
@type mfg: string
@param mdl: MDL or MODEL field
@type mdl: string
@param description: DES or DESCRIPTION field, optional
@type description: string
@param commandsets: CMD or COMMANDSET field, optional
@type commandsets: string
@param uri: device URI, optional (only needed for debugging)
@type uri: string
@param make_and_model: device-make-and-model string
@type make_and_model: string
@returns: a dict of fit (string) indexed by PPD name
"""
_debugprint ("\n%s %s" % (mfg, mdl))
orig_mfg = mfg
orig_mdl = mdl
self._init_ids ()
if commandsets is None:
commandsets = []
# Start with an empty result list and build it up using
# several search methods, in increasing order of fuzziness.
fit = {}
# First, try looking up the device using the manufacturer and
# model fields from the Device ID exactly as they appear (but
# case-insensitively).
mfgl = mfg.lower ()
mdll = mdl.lower ()
id_matched = False
try:
for each in self.ids[mfgl][mdll]:
fit[each] = self.FIT_EXACT
id_matched = True
except KeyError:
pass
# The HP PPDs say "HP" not "Hewlett-Packard", so try that.
if mfgl == "hewlett-packard":
try:
for each in self.ids["hp"][mdll]:
fit[each] = self.FIT_EXACT
_debugprint ("**** Incorrect IEEE 1284 Device ID: %s" %
self.ids["hp"][mdll])
_debugprint ("**** Actual ID is MFG:%s;MDL:%s;" % (mfg, mdl))
_debugprint ("**** Please report a bug against the HPLIP component")
id_matched = True
except KeyError:
pass
# Now try looking up the device by ppd-make-and-model.
_debugprint ("Trying make/model names")
mdls = None
self._init_makes ()
make = None
if mfgl == "":
(mfg, mdl) = ppdMakeModelSplit (mdl)
mfgl = normalize (mfg)
mdll = normalize (mdl)
_debugprint ("mfgl: %s" % mfgl)
_debugprint ("mdll: %s" % mdll)
mfgrepl = {"hewlett-packard": "hp",
"lexmark international": "lexmark",
"kyocera": "kyocera mita"}
if mfgl in self.lmakes:
# Found manufacturer.
make = self.lmakes[mfgl]
elif mfgl in mfgrepl:
rmfg = mfgrepl[mfgl]
if rmfg in self.lmakes:
mfg = rmfg
mfgl = mfg
# Found manufacturer (after mapping to canonical name)
_debugprint ("remapped mfgl: %s" % mfgl)
make = self.lmakes[mfgl]
_debugprint ("make: %s" % make)
if make is not None:
mdls = self.makes[make]
mdlsl = self.lmodels[normalize(make)]
# Remove manufacturer name from model field
for prefix in [mfgl, 'hewlett-packard', 'hp']:
if mdll.startswith (prefix + ' '):
mdl = mdl[len (prefix) + 1:]
mdll = normalize (mdl)
_debugprint ("unprefixed mdll: %s" % mdll)
if mdll in self.lmodels[mfgl]:
model = mdlsl[mdll]
for each in mdls[model].keys ():
fit[each] = self.FIT_EXACT
_debugprint ("%s: %s" % (fit[each], each))
else:
# Make use of the model name clean-up in the
# ppdMakeModelSplit () function
(mfg2, mdl2) = ppdMakeModelSplit (mfg + " " + mdl)
mdl2l = normalize (mdl2)
_debugprint ("re-split mdll: %s" % mdl2l)
if mdl2l in self.lmodels[mfgl]:
model = mdlsl[mdl2l]
for each in list(mdls[model].keys ()):
fit[each] = self.FIT_EXACT
_debugprint ("%s: %s" % (fit[each], each))
if not fit and mdls:
(s, ppds) = self._findBestMatchPPDs (mdls, mdl)
if s != self.FIT_NONE:
for each in ppds:
fit[each] = s
_debugprint ("%s: %s" % (fit[each], each))
if commandsets:
if type (commandsets) != list:
commandsets = commandsets.split (',')
_debugprint ("Checking CMD field")
generic = self._getPPDNameFromCommandSet (commandsets)
if generic:
for driver in generic:
fit[driver] = self.FIT_GENERIC
_debugprint ("%s: %s" % (fit[driver], driver))
# Check by the URI whether our printer is connected via IPP
# and if not, remove the PPD entries for driverless printing
# (ppdname = "driverless:..." from the list)
if (not uri or
(not uri.startswith("ipp:") and
not uri.startswith("ipps:") and
(not uri.startswith("dnssd") or
not "._ipp" in uri))):
failed = set()
for ppdname in fit.keys ():
if (ppdname.startswith("driverless:")):
failed.add (ppdname)
if (len(failed) > 0):
_debugprint ("Removed %s due to non-IPP connection" % failed)
for each in failed:
del fit[each]
failed = set()
# What about the CMD field of the Device ID? Some devices
# have optional units for page description languages, such as
# PostScript, and they will report different CMD strings
# accordingly.
#
# By convention, if a PPD contains a Device ID with a CMD
# field, that PPD can only be used whenever any of the
# comma-separated words in the CMD field appear in the
# device's ID.
# (See Red Hat bug #630058).
#
# We'll do that check now, and any PPDs that fail
# (e.g. PostScript PPD for non-PostScript printer) can be
# eliminated from the list.
#
# The reason we don't do this check any earlier is that we
# don't want to eliminate PPDs only to have the fuzzy matcher
# add them back in afterwards.
#
# While doing this, any drivers that we can positively confirm
# as using a command set understood by the printer will be
# converted from FIT_EXACT to FIT_EXACT_CMD.
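        # For example (illustrative): a PostScript PPD whose Device ID
        # declares CMD:POSTSCRIPT; would be dropped for a printer whose
        # own ID reports only CMD:PCL,PJL; (unless, as the code below
        # checks, removing it would leave no non-generic candidates).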
if id_matched and len (commandsets) > 0:
failed = set()
exact_cmd = set()
for ppdname in fit.keys ():
ppd_cmd_field = None
ppd = self.ppds[ppdname]
ppd_device_id = _singleton (ppd.get ('ppd-device-id'))
if ppd_device_id:
ppd_device_id_dict = parseDeviceID (ppd_device_id)
ppd_cmd_field = ppd_device_id_dict["CMD"]
if (not ppd_cmd_field and
# ppd-type is not reliable for driver-generated
# PPDs (see CUPS STR #3720). Neither gutenprint
# nor foomatic specify ppd-type in their CUPS
# drivers.
ppdname.find (":") == -1):
# If this is a PostScript PPD we know which
# command set it will use.
ppd_type = _singleton (ppd.get ('ppd-type'))
if ppd_type == "postscript":
ppd_cmd_field = ["POSTSCRIPT"]
if not ppd_cmd_field:
# We can't be sure which command set this driver
# uses.
continue
usable = False
for pdl in ppd_cmd_field:
if pdl in commandsets:
usable = True
break
if usable:
exact_cmd.add (ppdname)
else:
failed.add (ppdname)
# Assign the more specific fit "exact-cmd" to those that
# positively matched the CMD field.
for each in exact_cmd:
if fit[each] == self.FIT_EXACT:
fit[each] = self.FIT_EXACT_CMD
_debugprint (self.FIT_EXACT_CMD + ": %s" % each)
if len (failed) < len ([d for (d, m) in fit.items ()
if m != 'generic']):
_debugprint ("Removed %s due to CMD mis-match" % failed)
for each in failed:
del fit[each]
else:
_debugprint ("Not removing %s " % failed +
"due to CMD mis-match as it would "
"leave nothing good")
if not fit:
fallbacks = ["textonly.ppd", "postscript.ppd"]
found = False
for fallback in fallbacks:
_debugprint ("'%s' fallback" % fallback)
fallbackgz = fallback + ".gz"
for ppdpath in self.ppds.keys ():
if (ppdpath.endswith (fallback) or
ppdpath.endswith (fallbackgz)):
fit[ppdpath] = self.FIT_NONE
found = True
break
if found:
break
_debugprint ("Fallback '%s' not available" % fallback)
if not found:
_debugprint ("No fallback available; choosing any")
fit[list(self.ppds.keys ())[0]] = self.FIT_NONE
if not id_matched:
sanitised_uri = re.sub (pattern="//[^@]*@/?", repl="//",
string=str (uri))
try:
cmd = reduce (lambda x, y: x + ","+ y, commandsets)
except TypeError:
cmd = ""
id = "MFG:%s;MDL:%s;" % (orig_mfg, orig_mdl)
if cmd:
id += "CMD:%s;" % cmd
if description:
id += "DES:%s;" % description
_debugprint ("No ID match for device %s:" % sanitised_uri)
_debugprint (id)
return fit
def getPPDNameFromDeviceID (self, mfg, mdl, description="",
commandsets=None, uri=None,
downloadedfiles=None,
make_and_model=None):
"""
Obtain a best-effort PPD match for an IEEE 1284 Device ID.
The status is one of:
- L{STATUS_SUCCESS}: the match was successful, and an exact
match was found
- L{STATUS_MODEL_MISMATCH}: a similar match was found, but
the model name does not exactly match
- L{STATUS_GENERIC_DRIVER}: no match was found, but a
generic driver is available that can drive this device
according to its command set list
- L{STATUS_NO_DRIVER}: no match was found at all, and the
returned PPD name is a last resort
@param mfg: MFG or MANUFACTURER field
@type mfg: string
@param mdl: MDL or MODEL field
@type mdl: string
@param description: DES or DESCRIPTION field, optional
@type description: string
@param commandsets: CMD or COMMANDSET field, optional
@type commandsets: string
@param uri: device URI, optional (only needed for debugging)
@type uri: string
@param downloadedfiles: filenames from downloaded packages
@type downloadedfiles: string list
@param make_and_model: device-make-and-model string
@type make_and_model: string
@returns: an integer,string pair of (status,ppd-name)
"""
if commandsets is None:
commandsets = []
if downloadedfiles is None:
downloadedfiles = []
fit = self.getPPDNamesFromDeviceID (mfg, mdl, description,
commandsets, uri,
make_and_model)
# We've got a set of PPDs, any of which will drive the device.
# Now we have to choose the "best" one. This is quite tricky
# to decide, so let's sort them in order of preference and
# take the first.
devid = { "MFG": mfg, "MDL": mdl,
"DES": description,
"CMD": commandsets }
ppdnamelist = self.orderPPDNamesByPreference (list(fit.keys ()),
downloadedfiles,
make_and_model,
devid, fit)
_debugprint ("Found PPDs: %s" % str (ppdnamelist))
status = self.getStatusFromFit (fit[ppdnamelist[0]])
_debugprint ("Using %s (status: %d)" % (ppdnamelist[0], status))
return (status, ppdnamelist[0])
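    # Illustrative (assumed) usage with pycups:
    #   conn = cups.Connection ()
    #   ppds = PPDs (conn.getPPDs (), language=locale.getlocale ()[0])
    #   (status, ppdname) = ppds.getPPDNameFromDeviceID ("HP", "DESKJET 990C",
    #                                                    commandsets=["PCL"])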
def _findBestMatchPPDs (self, mdls, mdl):
"""
Find the best-matching PPDs based on the MDL Device ID.
This function could be made a lot smarter.
"""
_debugprint ("Trying best match")
mdll = mdl.lower ()
if mdll.endswith (" series"):
# Strip " series" from the end of the MDL field.
mdll = mdll[:-7]
mdl = mdl[:-7]
best_mdl = None
best_matchlen = 0
mdlnames = list(mdls.keys ())
# Perform a case-insensitive model sort on the names.
mdlnamesl = [(x, x.lower()) for x in mdlnames]
mdlnamesl.append ((mdl, mdll))
mdlnamesl.sort (key=functools.cmp_to_key(lambda x, y: cups.modelSort(x[1], y[1])))
i = mdlnamesl.index ((mdl, mdll))
candidates = [mdlnamesl[i - 1]]
if i + 1 < len (mdlnamesl):
candidates.append (mdlnamesl[i + 1])
_debugprint (candidates[0][0] + " <= " + mdl + " <= " +
candidates[1][0])
else:
_debugprint (candidates[0][0] + " <= " + mdl)
# Look at the models immediately before and after ours in the
# sorted list, and pick the one with the longest initial match.
for (candidate, candidatel) in candidates:
prefix = os.path.commonprefix ([candidatel, mdll])
if len (prefix) > best_matchlen:
best_mdl = list(mdls[candidate].keys ())
best_matchlen = len (prefix)
_debugprint ("%s: match length %d" % (candidate, best_matchlen))
# Did we match more than half of the model name?
if best_mdl and best_matchlen > (len (mdll) / 2):
ppdnamelist = best_mdl
if best_matchlen == len (mdll):
fit = self.FIT_EXACT
else:
fit = self.FIT_CLOSE
else:
fit = self.FIT_NONE
ppdnamelist = None
# Last resort. Find the "most important" word in the MDL
# field and look for a match based solely on that. If
# there are digits, try lowering the number of
# significant figures.
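            # For example (illustrative): an MDL of "4105" can still match
            # a "4100" model once its last digit is ignored, because
            # (4105 // 10) * 10 == 4100.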
mdlnames.sort (key=functools.cmp_to_key(cups.modelSort))
mdlitems = [(x.lower (), mdls[x]) for x in mdlnames]
modelid = None
for word in mdll.split (' '):
if modelid is None:
modelid = word
have_digits = False
for i in range (len (word)):
if word[i].isdigit ():
have_digits = True
break
if have_digits:
modelid = word
break
digits = 0
digits_start = -1
digits_end = -1
for i in range (len (modelid)):
if modelid[i].isdigit ():
if digits_start == -1:
digits_start = i
digits_end = i
digits += 1
elif digits_start != -1:
break
digits_end += 1
modelnumber = 0
if digits > 0:
modelnumber = int (modelid[digits_start:digits_end])
modelpattern = (modelid[:digits_start] + "%d" +
modelid[digits_end:])
_debugprint ("Searching for model ID '%s', '%s' %% %d" %
(modelid, modelpattern, modelnumber))
ignore_digits = 0
best_mdl = None
found = False
while ignore_digits < digits:
div = pow (10, ignore_digits)
                modelid = modelpattern % ((modelnumber // div) * div)
_debugprint ("Ignoring %d of %d digits, trying %s" %
(ignore_digits, digits, modelid))
for (name, ppds) in mdlitems:
for word in name.split (' '):
if word.lower () == modelid:
found = True
break
if found:
best_mdl = list(ppds.keys ())
break
if found:
break
ignore_digits += 1
if digits < 2:
break
if found:
ppdnamelist = best_mdl
fit = self.FIT_CLOSE
return (fit, ppdnamelist)
def _getPPDNameFromCommandSet (self, commandsets=None):
"""Return ppd-name list or None, given a list of strings representing
the command sets supported."""
if commandsets is None:
commandsets = []
try:
self._init_makes ()
models = self.makes["Generic"]
except KeyError:
return None
def get (*candidates):
for model in candidates:
(s, ppds) = self._findBestMatchPPDs (models, model)
if s == self.FIT_EXACT:
return ppds
return None
cmdsets = [x.lower () for x in commandsets]
if (("postscript" in cmdsets) or ("postscript2" in cmdsets) or
("postscript level 2 emulation" in cmdsets)):
return get ("PostScript")
elif (("pclxl" in cmdsets) or ("pcl-xl" in cmdsets) or
("pcl6" in cmdsets) or ("pcl 6 emulation" in cmdsets)):
return get ("PCL 6/PCL XL", "PCL Laser")
elif "pcl5e" in cmdsets:
return get ("PCL 5e", "PCL Laser")
elif "pcl5c" in cmdsets:
return get ("PCL 5c", "PCL Laser")
elif ("pcl5" in cmdsets) or ("pcl 5 emulation" in cmdsets):
return get ("PCL 5", "PCL Laser")
elif "pcl" in cmdsets:
return get ("PCL 3", "PCL Laser")
elif (("escpl2" in cmdsets) or ("esc/p2" in cmdsets) or
("escp2e" in cmdsets)):
return get ("ESC/P Dot Matrix")
return None
def _init_makes (self):
if self.makes:
return
tstart = time.time ()
makes = {}
lmakes = {}
lmodels = {}
aliases = {} # Generic model name: set(specific model names)
for ppdname, ppddict in self.ppds.items ():
# One entry for ppd-make-and-model
ppd_make_and_model = _singleton (ppddict['ppd-make-and-model'])
ppd_mm_split = ppdMakeModelSplit (ppd_make_and_model)
ppd_makes_and_models = set([ppd_mm_split])
# The ppd-product IPP attribute contains values from each
# Product PPD attribute as well as the value from the
# ModelName attribute if present. The Product attribute
# values are surrounded by parentheses; the ModelName
# attribute value is not.
# Add another entry for each ppd-product that came from a
# Product attribute in the PPD file.
ppd_products = ppddict.get ('ppd-product', [])
if not isinstance (ppd_products, list):
ppd_products = [ppd_products]
ppd_products = set ([x for x in ppd_products if x.startswith ("(")])
if ppd_products:
# If there is only one ppd-product value it is
# unlikely to be useful.
if len (ppd_products) == 1:
ppd_products = set()
make = _singleton (ppddict.get ('ppd-make', '')).rstrip ()
if make:
make += ' '
lmake = normalize (make)
for ppd_product in ppd_products:
# *Product: attribute is "(text)"
if (ppd_product.startswith ("(") and
ppd_product.endswith (")")):
ppd_product = ppd_product[1:len (ppd_product) - 1]
if not ppd_product:
continue
# If manufacturer name missing, take it from ppd-make
lprod = normalize (ppd_product)
if not lprod.startswith (lmake):
ppd_product = make + ppd_product
ppd_makes_and_models.add (ppdMakeModelSplit (ppd_product))
# Add the entries to our dictionary
for make, model in ppd_makes_and_models:
lmake = normalize (make)
lmodel = normalize (model)
if lmake not in lmakes:
lmakes[lmake] = make
lmodels[lmake] = {}
makes[make] = {}
else:
make = lmakes[lmake]
if lmodel not in lmodels[lmake]:
lmodels[lmake][lmodel] = model
makes[make][model] = {}
else:
model = lmodels[lmake][lmodel]
makes[make][model][ppdname] = ppddict
# Build list of model aliases
if ppd_mm_split in ppd_makes_and_models:
ppd_makes_and_models.remove (ppd_mm_split)
if ppd_makes_and_models:
(make, model) = ppd_mm_split
if make in aliases:
models = aliases[make].get (model, set())
else:
aliases[make] = {}
models = set()
models = models.union ([x[1] for x in ppd_makes_and_models])
aliases[make][model] = models
# Now, for each set of model aliases, add all drivers from the
# "main" (generic) model name to each of the specific models.
for make, models in aliases.items ():
lmake = normalize (make)
main_make = lmakes[lmake]
for model, modelnames in models.items ():
main_model = lmodels[lmake].get (normalize (model))
if not main_model:
continue
main_ppds = makes[main_make][main_model]
for eachmodel in modelnames:
this_model = lmodels[lmake].get (normalize (eachmodel))
ppds = makes[main_make][this_model]
ppds.update (main_ppds)
self.makes = makes
self.lmakes = lmakes
self.lmodels = lmodels
_debugprint ("init_makes: %.3fs" % (time.time () - tstart))
def _init_ids (self):
if self.ids:
return
ids = {}
for ppdname, ppddict in self.ppds.items ():
id = _singleton (ppddict.get ('ppd-device-id'))
if not id:
continue
id_dict = parseDeviceID (id)
lmfg = id_dict['MFG'].lower ()
lmdl = id_dict['MDL'].lower ()
bad = False
if len (lmfg) == 0:
bad = True
if len (lmdl) == 0:
bad = True
if bad:
continue
if lmfg not in ids:
ids[lmfg] = {}
if lmdl not in ids[lmfg]:
ids[lmfg][lmdl] = []
ids[lmfg][lmdl].append (ppdname)
self.ids = ids
def _show_help():
print ("usage: ppds.py [--deviceid] [--list-models] [--list-ids] [--debug]")
|
zdohnal/system-config-printer
|
cupshelpers/ppds.py
|
Python
|
gpl-2.0
| 44,162
|
# vim:set et sts=4 sw=4:
#
# ibus - The Input Bus
#
# Copyright(c) 2007-2009 Peng Huang <shawn.p.huang@gmail.com>
# Copyright(c) 2007-2009 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or(at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307 USA
import gtk
import gtk.gdk as gdk
import gobject
import ibus
import icon
from handle import Handle
from menu import menu_position
from engineabout import EngineAbout
from toolitem import ToolButton,\
ToggleToolButton, \
SeparatorToolItem, \
MenuToolButton
from gettext import dgettext
_ = lambda a : dgettext("ibus", a)
N_ = lambda a : a
ICON_SIZE = gtk.ICON_SIZE_MENU
class LanguageBar(gtk.Toolbar):
__gtype_name__ = "IBusLanguagePanel"
__gsignals__ = {
"property-activate" : (
gobject.SIGNAL_RUN_FIRST,
gobject.TYPE_NONE,
(gobject.TYPE_STRING, gobject.TYPE_INT)),
"get-im-menu" : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_PYOBJECT,
()),
"show-engine-about" : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_PYOBJECT,
()),
"position-changed" : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(gobject.TYPE_INT, gobject.TYPE_INT)),
}
def __init__ (self):
super(LanguageBar, self).__init__()
self.__show = 1
self.__enabled = False
self.__has_focus = False
self.__show_im_name = False
self.__im_name = None
self.set_style(gtk.TOOLBAR_BOTH_HORIZ)
self.set_show_arrow(False)
self.set_property("icon-size", ICON_SIZE)
self.__create_ui()
self.__properties = []
self.__toplevel = gtk.Window(gtk.WINDOW_POPUP)
self.__toplevel.connect("size-allocate", self.__toplevel_size_allocate_cb)
self.__toplevel.add(self)
self.__screen = gdk.screen_get_default()
self.__screen.connect("size-changed", self.__screen_size_changed_cb)
self.set_position(-1, -1)
def __create_ui(self):
# create move handle
self.__handle = gtk.ToolItem()
handle = Handle()
self.__handle.add(handle)
self.insert(self.__handle, -1)
handle.connect("move-end", self.__handle_move_end_cb)
# create input methods menu
# prop = ibus.Property(key = "", type = ibus.PROP_TYPE_TOGGLE, icon = "ibus", tooltip = _("Switch input method"))
self.__im_menu = gtk.ToggleToolButton()
self.__im_menu.set_homogeneous(False)
self.__im_menu.connect("toggled", self.__im_menu_toggled_cb)
self.insert(self.__im_menu, -1)
self.__about_button = gtk.ToolButton(gtk.STOCK_ABOUT)
self.__about_button.set_no_show_all(True)
self.__about_button.set_tooltip_text(_("About the Input Method"))
self.__about_button.connect("clicked", self.__about_button_clicked_cb)
self.insert(self.__about_button, -1)
def __screen_size_changed_cb(self, screen):
self.set_position(*self.__position)
def __im_menu_toggled_cb(self, widget):
if self.__im_menu.get_active():
menu = self.emit("get-im-menu")
menu.connect("deactivate", self.__im_menu_deactivate_cb)
menu.popup(None, None,
menu_position,
0,
gtk.get_current_event_time(),
widget)
def __about_button_clicked_cb(self, widget):
if self.__enabled:
self.emit("show-engine-about")
def __im_menu_deactivate_cb(self, menu):
self.__im_menu.set_active(False)
def __handle_move_end_cb(self, handle):
x, y = self.__toplevel.get_position()
w, h = self.__toplevel.get_size()
self.__position = x + w, y + h
self.emit("position-changed", *self.__position)
    def __toplevel_size_allocate_cb(self, toplevel, allocation):
        x, y = self.__position
        # Anchor the toolbar's bottom-right corner at the stored position.
        self.__toplevel.move(x - allocation.width, y - allocation.height)
def __remove_properties(self):
# reset all properties
map(lambda i: i.destroy(), self.__properties)
self.__properties = []
def __set_opacity(self, opacity):
        if self.__toplevel.window is None:
self.__toplevel.realize()
self.__toplevel.window.set_opacity(opacity)
def do_show(self):
gtk.Toolbar.do_show(self)
def do_size_request(self, requisition):
gtk.Toolbar.do_size_request(self, requisition)
self.__toplevel.resize(1, 1)
def set_im_icon(self, icon_name):
widget = icon.IconWidget(icon_name, 18)
self.__im_menu.set_icon_widget(widget)
def set_show_im_name(self, show):
self.__show_im_name = show
self.set_im_name(self.__im_name)
self.__im_menu.set_is_important(show)
def set_im_name(self, text):
self.__im_name = text
if text:
self.__im_menu.set_tooltip_text(text)
self.__im_menu.set_label(text)
else:
self.__im_menu.set_tooltip_text(_("Switch input method"))
self.__im_menu.set_label("")
def reset(self):
self.__remove_properties()
def set_enabled(self, enabled):
self.__enabled = enabled
if self.__enabled:
self.__about_button.show()
self.__set_opacity(1.0)
if self.__has_focus:
if self.__show in (1, 2):
self.show_all()
else:
self.__about_button.hide()
self.__set_opacity(0.5)
if self.__show in (1, 0):
self.hide_all()
def is_enabled(self):
return self.__enabled
def set_show(self, show):
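        # Visibility modes (inferred from focus_in/focus_out below):
        # 0 = never show the bar, 1 = show it only while an input
        # context has focus, 2 = always show it.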
if show not in (0, 1, 2):
show = 1
self.__show = show
if self.__has_focus:
self.focus_in()
else:
self.focus_out()
def set_position(self, x, y):
w, h = self.__screen.get_width(), self.__screen.get_height()
if x < 0 or y < 0:
x = w - 20
y = h - 40
if x > w:
x = w - 20
if y > h:
y = h - 40
self.__position = x, y
w, h = self.__toplevel.get_size()
self.__toplevel.move(self.__position[0] - w, self.__position[1] - h)
def get_show(self):
return self.__show
def register_properties(self, props):
self.__remove_properties()
# create new properties
for i, prop in enumerate(props):
if prop.type == ibus.PROP_TYPE_NORMAL:
item = ToolButton(prop = prop)
elif prop.type == ibus.PROP_TYPE_TOGGLE:
item = ToggleToolButton(prop = prop)
elif prop.type == ibus.PROP_TYPE_MENU:
item = MenuToolButton(prop = prop)
            elif prop.type == ibus.PROP_TYPE_SEPARATOR:
                item = SeparatorToolItem()
            else:
                raise ibus.IBusException("Unknown property type = %d" % prop.type)
item.connect("property-activate",
lambda w, n, s: self.emit("property-activate", n, s))
item.set_sensitive(prop.sensitive)
item.set_no_show_all(True)
if prop.visible:
item.show()
else:
item.hide()
self.__properties.append(item)
self.insert(item, i + 2)
def update_property(self, prop):
map(lambda x: x.update_property(prop), self.__properties)
def show_all(self):
self.__toplevel.show_all()
self.__toplevel.window.raise_()
gtk.Toolbar.show_all(self)
def hide_all(self):
x, y = self.__toplevel.get_position()
self.__toplevel.hide_all()
gtk.Toolbar.hide_all(self)
# save bar position
self.__toplevel.move(x, y)
def focus_in(self):
self.__has_focus = True
self.__im_menu.set_sensitive(True)
if self.__enabled:
if self.__show in (1, 2):
self.show_all()
else:
self.hide_all()
def focus_out(self):
self.__has_focus = False
self.__im_menu.set_sensitive(False)
if self.__show in (0, 1):
self.hide_all()
else:
self.show_all()
|
sun-im/ibus
|
ui/gtk/languagebar.py
|
Python
|
lgpl-2.1
| 9,031
|
"""
Escape the `body` part of .chm source file to 7-bit ASCII, to fix visual
effect on some MBCS Windows systems.
https://bugs.python.org/issue32174
"""
import re
from html.entities import codepoint2name
from sphinx.util.logging import getLogger
# escape every character whose codepoint is greater than 0x7F
def _process(string):
def escape(matchobj):
codepoint = ord(matchobj.group(0))
name = codepoint2name.get(codepoint)
if name is None:
return '&#%d;' % codepoint
else:
return '&%s;' % name
return re.sub(r'[^\x00-\x7F]', escape, string)
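# For example (illustrative): _process('Zürich') == 'Z&uuml;rich', while a
# character with no named entity falls back to the numeric form, e.g.
# _process('あ') == '&#12354;'.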
def escape_for_chm(app, pagename, templatename, context, doctree):
# only works for .chm output
if getattr(app.builder, 'name', '') != 'htmlhelp':
return
# escape the `body` part to 7-bit ASCII
body = context.get('body')
if body is not None:
context['body'] = _process(body)
def fixup_keywords(app, exception):
# only works for .chm output
if getattr(app.builder, 'name', '') != 'htmlhelp' or exception:
return
getLogger(__name__).info('fixing HTML escapes in keywords file...')
outdir = app.builder.outdir
outname = app.builder.config.htmlhelp_basename
with app.builder.open_file(outdir, outname + '.hhk', 'r') as f:
index = f.read()
with app.builder.open_file(outdir, outname + '.hhk', 'w') as f:
        # replace the hex apostrophe entity with its decimal form, which
        # the CHM index tooling handles correctly (see bpo-32174)
        f.write(index.replace('&#x27;', '&#39;'))
def setup(app):
# `html-page-context` event emitted when the HTML builder has
# created a context dictionary to render a template with.
app.connect('html-page-context', escape_for_chm)
# `build-finished` event emitted when all the files have been
# output.
app.connect('build-finished', fixup_keywords)
return {'version': '1.0', 'parallel_read_safe': True}
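# Typical wiring (an assumption; depends on the docs' conf.py): add this
# file's directory to sys.path and list 'escape4chm' in `extensions`.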
|
FFMG/myoddweb.piger
|
monitor/api/python/Python-3.7.2/Doc/tools/extensions/escape4chm.py
|
Python
|
gpl-2.0
| 1,817
|
# -*- coding: utf8 -*-
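# Autocomplete query for archival records ("опись" means "inventory"):
# matches the search term against the fund code (FKOD), the inventory code
# (OKOD) and the title (ONAME).  Note that %(q)s and %(limit)s are filled
# in with plain %-formatting rather than bound parameters, so the caller
# must sanitise the query string itself.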
SQL = """SELECT ONAME, concat(FKOD," опись:",OKOD) FROM `afweb_opis` WHERE FKOD LIKE '%%%(q)s%%' or OKOD LIKE '%%%(q)s%%' or ONAME LIKE '%%%(q)s%%' limit %(limit)s ;"""
|
ffsdmad/af-web
|
cgi-bin/plugins/autocomplit_mod/opis_search.py
|
Python
|
gpl-3.0
| 199
|
"""
[2017-02-08] Challenge #302 [Intermediate] ASCII Histogram Maker: Part 1 - The Simple Bar Chart
https://www.reddit.com/r/dailyprogrammer/comments/5st2so/20170208_challenge_302_intermediate_ascii/
# Description
Any Excel user is probably familiar with the bar chart - a simple plot showing vertical bars to represent the frequency
of something you counted. For today's challenge you'll be producing bar charts in ASCII.
(Part 2 will have you assemble a proper histogram from a collection of data.)
# Input Description
You'll be given four numbers on the first line telling you the start and end of the horizontal (X) axis and the
vertical (Y) axis, respectively. Then you'll have a number on a single line telling you how many records to read. Then
you'll be given the data as three numbers: the first two represent the interval as a start (inclusive) and end
(exclusive), the third number is the frequency of that variable. Example:
140 190 1 8
5
140 150 1
150 160 0
160 170 7
170 180 6
180 190 2
# Output Description
Your program should emit an ASCII bar chart showing the frequencies of the buckets. Your program may use any character
to represent the data point, I show an asterisk below. From the above example:
8
7 *
6 * *
5 * *
4 * *
3 * *
2 * * *
1 * * * *
140 150 160 170 180 190
# Challenge Input
0 50 1 10
5
0 10 1
10 20 3
20 30 6
30 40 4
40 50 2
"""
def main():
    import sys
    print(render_bar_chart(sys.stdin.read()))
if __name__ == "__main__":
main()
|
DayGitH/Python-Challenges
|
DailyProgrammer/DP20170208B.py
|
Python
|
mit
| 1,545
|
##########################################################################
#
# Copyright (c) 2012, John Haddon. All rights reserved.
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import imath
import IECore
import Gaffer
import GafferScene
import GafferSceneTest
class ScenePlugTest( GafferSceneTest.SceneTestCase ) :
def testRunTimeTyped( self ) :
p = GafferScene.ScenePlug()
self.assertTrue( p.isInstanceOf( Gaffer.ValuePlug.staticTypeId() ) )
self.assertEqual( IECore.RunTimeTyped.baseTypeId( p.typeId() ), Gaffer.ValuePlug.staticTypeId() )
def testDynamicSerialisation( self ) :
s = Gaffer.ScriptNode()
s["n"] = Gaffer.Node()
s["n"]["p"] = GafferScene.ScenePlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
ss = s.serialise()
s = Gaffer.ScriptNode()
s.execute( ss )
def testFullTransform( self ) :
translation = imath.M44f().translate( imath.V3f( 1 ) )
scaling = imath.M44f().scale( imath.V3f( 10 ) )
n = GafferSceneTest.CompoundObjectSource()
n["in"].setValue(
IECore.CompoundObject( {
"children" : {
"group" : {
"transform" : IECore.M44fData( translation ),
"children" : {
"ball" : {
"transform" : IECore.M44fData( scaling ),
}
}
},
},
} )
)
self.assertEqual( n["out"].transform( "/" ), imath.M44f() )
self.assertEqual( n["out"].transform( "/group" ), translation )
self.assertEqual( n["out"].transform( "/group/ball" ), scaling )
self.assertEqual( n["out"].fullTransform( "/" ), imath.M44f() )
self.assertEqual( n["out"].fullTransform( "/group" ), translation )
m = n["out"].fullTransform( "/group/ball" )
self.assertEqual( m.translation(), imath.V3f( 1 ) )
extractedScaling = imath.V3f()
m.extractScaling( extractedScaling )
self.assertEqual( extractedScaling, imath.V3f( 10 ) )
self.assertEqual( m, scaling * translation )
def testFullAttributes( self ) :
n = GafferSceneTest.CompoundObjectSource()
n["in"].setValue(
IECore.CompoundObject( {
"children" : {
"group" : {
"attributes" : {
"a" : IECore.StringData( "a" ),
"b" : IECore.StringData( "b" ),
},
"children" : {
"ball" : {
"attributes" : {
"b" : IECore.StringData( "bOverride" ),
"c" : IECore.StringData( "c" ),
},
}
}
},
},
} )
)
self.assertEqual(
n["out"].fullAttributes( "/group" ),
IECore.CompoundObject( {
"a" : IECore.StringData( "a" ),
"b" : IECore.StringData( "b" ),
} )
)
self.assertEqual(
n["out"].fullAttributes( "/group/ball" ),
IECore.CompoundObject( {
"a" : IECore.StringData( "a" ),
"b" : IECore.StringData( "bOverride" ),
"c" : IECore.StringData( "c" ),
} )
)
def testCreateCounterpart( self ) :
s1 = GafferScene.ScenePlug( "a", Gaffer.Plug.Direction.Out )
s2 = s1.createCounterpart( "b", Gaffer.Plug.Direction.In )
self.assertEqual( s2.getName(), "b" )
self.assertEqual( s2.getFlags(), s1.getFlags() )
self.assertEqual( s2.direction(), Gaffer.Plug.Direction.In )
def testAccessorOverloads( self ) :
p = GafferScene.Plane()
self.assertEqual( p["out"].attributes( "/plane" ), p["out"].attributes( IECore.InternedStringVectorData( [ "plane" ] ) ) )
self.assertEqual( p["out"].transform( "/plane" ), p["out"].transform( IECore.InternedStringVectorData( [ "plane" ] ) ) )
self.assertEqual( p["out"].object( "/plane" ), p["out"].object( IECore.InternedStringVectorData( [ "plane" ] ) ) )
self.assertEqual( p["out"].bound( "/plane" ), p["out"].bound( IECore.InternedStringVectorData( [ "plane" ] ) ) )
self.assertEqual( p["out"].childNames( "/plane" ), p["out"].childNames( IECore.InternedStringVectorData( [ "plane" ] ) ) )
self.assertEqual( p["out"].attributesHash( "/plane" ), p["out"].attributesHash( IECore.InternedStringVectorData( [ "plane" ] ) ) )
self.assertEqual( p["out"].transformHash( "/plane" ), p["out"].transformHash( IECore.InternedStringVectorData( [ "plane" ] ) ) )
self.assertEqual( p["out"].objectHash( "/plane" ), p["out"].objectHash( IECore.InternedStringVectorData( [ "plane" ] ) ) )
self.assertEqual( p["out"].boundHash( "/plane" ), p["out"].boundHash( IECore.InternedStringVectorData( [ "plane" ] ) ) )
self.assertEqual( p["out"].childNamesHash( "/plane" ), p["out"].childNamesHash( IECore.InternedStringVectorData( [ "plane" ] ) ) )
self.assertRaises( TypeError, p["out"].boundHash, 10 )
def testBoxPromotion( self ) :
b = Gaffer.Box()
b["n"] = GafferScene.StandardAttributes()
self.assertTrue( Gaffer.PlugAlgo.canPromote( b["n"]["in"] ) )
self.assertTrue( Gaffer.PlugAlgo.canPromote( b["n"]["out"] ) )
i = Gaffer.PlugAlgo.promote( b["n"]["in"] )
o = Gaffer.PlugAlgo.promote( b["n"]["out"] )
self.assertEqual( b["n"]["in"].getInput(), i )
self.assertEqual( o.getInput(), b["n"]["out"] )
self.assertTrue( Gaffer.PlugAlgo.isPromoted( b["n"]["in"] ) )
self.assertTrue( Gaffer.PlugAlgo.isPromoted( b["n"]["out"] ) )
def testNoneAsPath( self ) :
p = GafferScene.Plane()
self.assertRaises( Exception, p["out"].transform, None )
def testStringToPath( self ) :
self.assertEqual( GafferScene.ScenePlug.stringToPath( "" ), IECore.InternedStringVectorData() )
self.assertEqual( GafferScene.ScenePlug.stringToPath( "/" ), IECore.InternedStringVectorData() )
self.assertEqual( GafferScene.ScenePlug.stringToPath( "/a" ), IECore.InternedStringVectorData( [ "a" ] ) )
self.assertEqual( GafferScene.ScenePlug.stringToPath( "a" ), IECore.InternedStringVectorData( [ "a" ] ) )
self.assertEqual( GafferScene.ScenePlug.stringToPath( "/a/b" ), IECore.InternedStringVectorData( [ "a", "b" ] ) )
self.assertEqual( GafferScene.ScenePlug.stringToPath( "/a/b/" ), IECore.InternedStringVectorData( [ "a", "b" ] ) )
self.assertEqual( GafferScene.ScenePlug.stringToPath( "//a//b//" ), IECore.InternedStringVectorData( [ "a", "b" ] ) )
self.assertEqual( GafferScene.ScenePlug.stringToPath( "/foo/bar/" ), IECore.InternedStringVectorData( [ "foo", "bar" ] ) )
self.assertEqual( GafferScene.ScenePlug.stringToPath( "foo/bar/" ), IECore.InternedStringVectorData( [ "foo", "bar" ] ) )
self.assertEqual( GafferScene.ScenePlug.stringToPath( "foo/bar/.." ), IECore.InternedStringVectorData( [ "foo" ] ) )
self.assertEqual( GafferScene.ScenePlug.stringToPath( "foo/bar/../.." ), IECore.InternedStringVectorData( [] ) )
self.assertEqual( GafferScene.ScenePlug.stringToPath( "foo/bar/../../.." ), IECore.InternedStringVectorData( [] ) )
self.assertEqual( GafferScene.ScenePlug.stringToPath( "foo/bar/../toto" ), IECore.InternedStringVectorData( [ "foo", "toto" ] ) )
def testPathToString( self ) :
self.assertEqual( GafferScene.ScenePlug.pathToString( IECore.InternedStringVectorData() ), "/" )
self.assertEqual( GafferScene.ScenePlug.pathToString( IECore.InternedStringVectorData( [ "a" ] ) ), "/a" )
self.assertEqual( GafferScene.ScenePlug.pathToString( IECore.InternedStringVectorData( [ "a", "b" ] ) ), "/a/b" )
def testManyStringToPathCalls( self ) :
GafferSceneTest.testManyStringToPathCalls()
def testSetPlugs( self ) :
p = GafferScene.ScenePlug()
self.assertTrue( isinstance( p["setNames"], Gaffer.InternedStringVectorDataPlug ) )
self.assertEqual( p["setNames"].defaultValue(), IECore.InternedStringVectorData() )
self.assertTrue( isinstance( p["set"], Gaffer.PathMatcherDataPlug ) )
self.assertEqual( p["set"].defaultValue(), IECore.PathMatcherData() )
def testGlobalsAccessors( self ) :
p = GafferScene.ScenePlug()
self.assertEqual( p.globals(), p["globals"].getValue() )
self.assertFalse( p.globals().isSame( p["globals"].getValue() ) )
self.assertTrue( p.globals( _copy = False ).isSame( p["globals"].getValue( _copy = False ) ) )
self.assertEqual( p.setNames(), p["setNames"].getValue() )
self.assertFalse( p.setNames().isSame( p["setNames"].getValue() ) )
self.assertTrue( p.setNames( _copy = False ).isSame( p["setNames"].getValue( _copy = False ) ) )
self.assertEqual( p.globalsHash(), p["globals"].hash() )
self.assertEqual( p.setNamesHash(), p["setNames"].hash() )
if __name__ == "__main__":
unittest.main()
|
hradec/gaffer
|
python/GafferSceneTest/ScenePlugTest.py
|
Python
|
bsd-3-clause
| 9,869
|
from .configuration_line_parser import ConfigurationLineParser
from .ert_keywords import ErtKeywords
|
arielalmendral/ert
|
python/python/ert_gui/ide/keywords/__init__.py
|
Python
|
gpl-3.0
| 101
|
#!/usr/bin/python
from otpsetup.shortcuts import DjangoBrokerConnection
from otpsetup.shortcuts import stop_current_instance, build_multi_queue
from otpsetup import settings
from datetime import datetime
import traceback
import handlers
import process_gtfs, transformer
print "Starting Graph Builder Consumer"
queues = build_multi_queue(["create_instance", "rebuild_graph", "build_managed", "build_managed_osm", "process_gtfs"])
def handle(conn, body, message):
key = message.delivery_info['routing_key']
print "handling key "+key
try:
getattr(handlers, key)(conn, body)
except:
print "gb handler error"
now = datetime.now()
errfile = "/var/otp/gb_err_%s_%s" % (key, now.strftime("%F-%T"))
traceback.print_exc(file=open(errfile,"a"))
message.ack()
with DjangoBrokerConnection() as conn:
with conn.Consumer(queues, callbacks=[lambda body, message: handle(conn, body, message)]) as consumer:
# Process messages and handle events on all channels
try:
while True:
conn.drain_events(timeout=600)
except:
print "exited loop"
conn.close()
#stop_current_instance()
|
opentripplanner/OTPSetup
|
otpsetup/graph_builder/main.py
|
Python
|
gpl-3.0
| 1,230
|
import unittest
from System.Windows.Browser.HtmlPage import Document as d
from System.Windows.Browser.HtmlPage import BrowserInformation as browser
class HtmlBridgeRegression(unittest.TestCase):
def setUp(self):
self.div = d.CreateElement('div')
self.div.id = 'testDiv'
a = d.CreateElement('a')
a.id = 'a1'
a.innerHTML = "Link should be enabled..."
self.div.AppendChild(a)
span = d.CreateElement('span')
span.id = 'h3'
span.innerHTML = 'Needs to be updated'
span.SetStyleAttribute('background-color', 'pink')
self.div.AppendChild(span)
d.Body.AppendChild(self.div)
def tearDown(self):
d.Body.RemoveChild(self.div)
self.div = None
def test_GetElementById(self):
elements = ['testDiv', "a1", "h3"]
for e in elements:
self.assertFalse(d.GetElementById(e) is None)
def test_GetStyleAttribute(self):
uas = browser.UserAgent.ToString()
color = 'rgb(255, 192, 203)' if uas.Contains('AppleWebKit') else 'pink'
self.assertEqual(
d.GetElementById('h3').GetStyleAttribute('background-color'),
color)
def test_CreateElement(self):
self.assertTrue(d.CreateElement('div') is not None)
def test_SetAttribute(self):
new_ctl = d.CreateElement('div')
new_ctl.SetAttribute("id", "new_ctl")
self.assertEqual(new_ctl.GetAttribute('id'), "new_ctl")
def test_SetProperty(self):
new_ctl = d.CreateElement('div')
new_value = "This is added by Merlin SL Test!"
new_ctl.SetProperty("innerHTML", new_value)
self.assertEqual(new_ctl.GetProperty("innerHTML"), new_value)
def test_AppendChild(self):
old_cnt = self.div.Children.Count
new_ctl = d.CreateElement('div')
self.div.AppendChild(new_ctl)
self.assertEqual(self.div.Children.Count, old_cnt + 1)
def test_ChildrenCollection(self):
self.div.AppendChild(d.CreateElement("div"))
self.assertEqual(self.div.Children.Count, 3)
|
tempbottle/dlr
|
Src/Hosts/Silverlight/Tests/tests/regressions/test_htmlbridge.py
|
Python
|
apache-2.0
| 2,103
|
from collections import OrderedDict
from plenum.common.messages.fields import LimitedLengthStringField
from plenum.common.messages.client_request import ClientMessageValidator
from plenum.common.messages.node_messages import Propagate
EXPECTED_ORDERED_FIELDS = OrderedDict([
("request", ClientMessageValidator),
("senderClient", LimitedLengthStringField),
])
def test_has_expected_type():
assert Propagate.typename == "PROPAGATE"
def test_has_expected_fields():
actual_field_names = OrderedDict(Propagate.schema).keys()
assert list(actual_field_names) == list(EXPECTED_ORDERED_FIELDS.keys())
def test_has_expected_validators():
schema = dict(Propagate.schema)
for field, validator in EXPECTED_ORDERED_FIELDS.items():
assert isinstance(schema[field], validator)
|
evernym/zeno
|
plenum/test/input_validation/message_validation/test_propagate_message.py
|
Python
|
apache-2.0
| 805
|
#!/usr/bin/env python
# Helpful little script that spits out a comma-separated list of
# language codes for Qt translations that should be included
# in binary bitmark distributions
import glob
import os
import re
import sys
if len(sys.argv) != 3:
sys.exit("Usage: %s $QTDIR/translations $BITMARKDIR/src/qt/locale"%sys.argv[0])
d1 = sys.argv[1]
d2 = sys.argv[2]
l1 = set([ re.search(r'qt_(.*).qm', f).group(1) for f in glob.glob(os.path.join(d1, 'qt_*.qm')) ])
l2 = set([ re.search(r'bitmark_(.*).qm', f).group(1) for f in glob.glob(os.path.join(d2, 'bitmark_*.qm')) ])
print ",".join(sorted(l1.intersection(l2)))
|
project-bitmark/pfennig
|
contrib/qt_translations.py
|
Python
|
mit
| 616
|
import sys
import traceback
import urllib2
from xml.dom.minidom import getDOMImplementation
from django.views.debug import get_safe_settings
from django.conf import settings
from hoptoad import VERSION, NAME, URL
from hoptoad import get_hoptoad_settings
from hoptoad.api.htv1 import _parse_environment, _parse_request, _parse_session
from hoptoad.api.htv1 import _parse_message
def _class_name(class_):
return class_.__class__.__name__
def _handle_errors(request, response):
if response:
code = "Http%s" % response
msg = "%(code)s: %(response)s at %(uri)s" % {
'code': code,
'response': { 'Http403': "Forbidden",
'Http404': "Page not found" }[code],
'uri': request.build_absolute_uri()
}
return (code, msg)
exc, inst = sys.exc_info()[:2]
return _class_name(inst), _parse_message(inst)
def generate_payload(request_tuple):
"""Generate an XML payload for a Hoptoad notification.
Parameters:
request_tuple -- A tuple containing a Django HTTPRequest and a possible
response code.
"""
request, response = request_tuple
hoptoad_settings = get_hoptoad_settings()
p_error_class, p_message = _handle_errors(request, response)
# api v2 from: http://help.hoptoadapp.com/faqs/api-2/notifier-api-v2
xdoc = getDOMImplementation().createDocument(None, "notice", None)
notice = xdoc.firstChild
# /notice/@version -- should be 2.0
notice.setAttribute('version', '2.0')
# /notice/api-key
api_key = xdoc.createElement('api-key')
api_key_data = xdoc.createTextNode(hoptoad_settings['HOPTOAD_API_KEY'])
api_key.appendChild(api_key_data)
notice.appendChild(api_key)
# /notice/notifier/name
# /notice/notifier/version
# /notice/notifier/url
notifier = xdoc.createElement('notifier')
for key, value in zip(["name", "version", "url"], [NAME, VERSION, URL]):
key = xdoc.createElement(key)
value = xdoc.createTextNode(str(value))
key.appendChild(value)
notifier.appendChild(key)
notice.appendChild(notifier)
# /notice/error/class
# /notice/error/message
error = xdoc.createElement('error')
for key, value in zip(["class", "message"], [p_error_class, p_message]):
key = xdoc.createElement(key)
value = xdoc.createTextNode(value)
key.appendChild(value)
error.appendChild(key)
# /notice/error/backtrace/error/line
backtrace = xdoc.createElement('backtrace')
# i do this here because I'm afraid of circular reference..
reversed_backtrace = list(
reversed(traceback.extract_tb(sys.exc_info()[2]))
)
if reversed_backtrace:
for filename, lineno, funcname, text in reversed_backtrace:
line = xdoc.createElement('line')
line.setAttribute('file', str(filename))
line.setAttribute('number', str(lineno))
line.setAttribute('method', str(funcname))
backtrace.appendChild(line)
else:
line = xdoc.createElement('line')
line.setAttribute('file', 'unknown')
line.setAttribute('number', '0')
line.setAttribute('method', 'unknown')
backtrace.appendChild(line)
error.appendChild(backtrace)
notice.appendChild(error)
# /notice/request
xrequest = xdoc.createElement('request')
# /notice/request/url -- request.build_absolute_uri()
xurl = xdoc.createElement('url')
xurl_data = xdoc.createTextNode(request.build_absolute_uri())
xurl.appendChild(xurl_data)
xrequest.appendChild(xurl)
# /notice/request/component -- not sure..
comp = xdoc.createElement('component')
#comp_data = xdoc.createTextNode('')
xrequest.appendChild(comp)
# /notice/request/action -- action which error occured
# ... no fucking clue..
# sjl: "actions" are the Rails equivalent of Django's views
# Is there a way to figure out which view a request object went to
# (if any)? Anyway, it's not GET/POST so I'm commenting it for now.
#action = xdoc.createElement('action') # maybe GET/POST??
#action_data = u"%s %s" % (request.method, request.META['PATH_INFO'])
#action_data = xdoc.createTextNode(action_data)
#action.appendChild(action_data)
#xrequest.appendChild(action)
# /notice/request/params/var -- check request.GET/request.POST
req_params = _parse_request(request).items()
if req_params:
params = xdoc.createElement('params')
for key, value in req_params:
var = xdoc.createElement('var')
var.setAttribute('key', key)
value = xdoc.createTextNode(str(value))
var.appendChild(value)
params.appendChild(var)
xrequest.appendChild(params)
# /notice/request/session/var -- check if sessions is enabled..
sessions = xdoc.createElement('session')
for key, value in _parse_session(request.session).iteritems():
var = xdoc.createElement('var')
var.setAttribute('key', key)
value = xdoc.createTextNode(str(value))
var.appendChild(value)
sessions.appendChild(var)
xrequest.appendChild(sessions)
# /notice/request/cgi-data/var -- all meta data
cgidata = xdoc.createElement('cgi-data')
for key, value in _parse_environment(request).iteritems():
var = xdoc.createElement('var')
var.setAttribute('key', key)
value = xdoc.createTextNode(str(value))
var.appendChild(value)
cgidata.appendChild(var)
xrequest.appendChild(cgidata)
notice.appendChild(xrequest)
# /notice/server-environment
serverenv = xdoc.createElement('server-environment')
# /notice/server-environment/project-root -- default to sys.path[0]
projectroot = xdoc.createElement('project-root')
projectroot.appendChild(xdoc.createTextNode(sys.path[0]))
serverenv.appendChild(projectroot)
# /notice/server-environment/environment-name -- environment name? wtf..
envname = xdoc.createElement('environment-name')
# no idea...
# sjl: This is supposed to be set to something like "test", "staging",
# or "production" to help you group the errors in the web interface.
# I'm still thinking about the best way to support this.
# bmjames: Taking this from a settings variable. I personally have a
# different settings.py for every environment and my deploy
# script puts the correct one in place, so this makes sense.
# But even if one had a single settings.py shared among
# environments, it should be possible to set this variable
# dynamically. It would simply be the responsibility of
# settings.py to do it, rather than the hoptoad middleware.
envname_text = hoptoad_settings.get('HOPTOAD_ENV_NAME', 'Unknown')
envname_data = xdoc.createTextNode(envname_text)
envname.appendChild(envname_data)
serverenv.appendChild(envname)
notice.appendChild(serverenv)
return xdoc.toxml('utf-8')
def _ride_the_toad(payload, timeout, use_ssl):
"""Send a notification (an HTTP POST request) to Hoptoad.
Parameters:
payload -- the XML payload for the request from _generate_payload()
timeout -- the maximum timeout, in seconds, or None to use the default
"""
headers = { 'Content-Type': 'text/xml' }
url_template = '%s://hoptoadapp.com/notifier_api/v2/notices'
notification_url = url_template % ("https" if use_ssl else "http")
# allow the settings to override all urls
notification_url = get_hoptoad_settings().get('HOPTOAD_NOTIFICATION_URL',
notification_url)
r = urllib2.Request(notification_url, payload, headers)
try:
if timeout:
# timeout is 2.6 addition!
response = urllib2.urlopen(r, timeout=timeout)
else:
response = urllib2.urlopen(r)
except urllib2.URLError, err:
pass
else:
try:
# getcode is 2.6 addition!!
status = response.getcode()
except AttributeError:
# default to just code
status = response.code
if status == 403 and use_ssl:
if get_hoptoad_settings().get('HOPTOAD_NO_SSL_FALLBACK', False):
# if we can not use SSL, re-invoke w/o using SSL
_ride_the_toad(payload, timeout, use_ssl=False)
if status == 403 and not use_ssl:
# we were not trying to use SSL but got a 403 anyway
# something else must be wrong (bad API key?)
pass
if status == 422:
# couldn't send to hoptoad..
pass
if status == 500:
# hoptoad is down
pass
def report(payload, timeout):
use_ssl = get_hoptoad_settings().get('HOPTOAD_USE_SSL', False)
return _ride_the_toad(payload, timeout, use_ssl)
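# --- Illustrative sketch, not part of the upstream module ---
# A caller (typically middleware) builds the payload from a
# (request, response) tuple and hands it to report(); the call below is
# hypothetical:
#
#   payload = generate_payload((request, None))
#   report(payload, timeout=5)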
|
sjl/django-hoptoad
|
hoptoad/api/htv2.py
|
Python
|
mit
| 9,161
|
#!/opt/local/bin/pypy -tt
# -*- coding: utf-8 -*-
#Copyright (C) 2014 Chris Hinsley All Rights Reserved
import sys, argparse, router
from copy import deepcopy
from ast import literal_eval
from mymath import *
def main():
parser = argparse.ArgumentParser(description = 'Pcb layout optimizer.', formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument('infile', nargs = '?', type = argparse.FileType('r'), default = sys.stdin, help = 'filename, default stdin')
parser.add_argument('--t', nargs = 1, type = int, default = [600], help = 'timeout in seconds, default 600')
parser.add_argument('--v', nargs = 1, type = int, default = [0], choices = range(0, 2), help = 'verbosity level 0..1, default 0')
parser.add_argument('--s', nargs = 1, type = int, default = [1], help = 'number of samples, default 1')
parser.add_argument('--r', nargs = 1, type = int, default = [1], choices = range(1, 5), help = 'grid resolution 1..4, default 1')
parser.add_argument('--z', nargs = 1, type = int, default = [0], choices = range(0, 2), help = 'minimize vias 0..1, default 0')
parser.add_argument('--d', nargs = 1, type = int, default = [0], choices = range(0, 6), \
help = 'distance metric 0..5, default 0.\n' \
'0 -> manhattan\n1 -> squared_euclidean\n2 -> euclidean\n3 -> chebyshev\n4 -> reciprocal\n5 -> random')
parser.add_argument('--fr', nargs = 1, type = int, default = [2], choices = range(1, 6), help = 'flood range 1..5, default 2')
parser.add_argument('--xr', nargs = 1, type = int, default = [1], choices = range(0, 6), help = 'even layer x range 0..5, default 1')
parser.add_argument('--yr', nargs = 1, type = int, default = [1], choices = range(0, 6), help = 'odd layer y range 0..5, default 1')
args = parser.parse_args()
flood_range = args.fr[0]
flood_range_x_even_layer = args.xr[0]
flood_range_y_odd_layer = args.yr[0]
path_range = flood_range + 0
path_range_x_even_layer = flood_range_x_even_layer + 0
path_range_y_odd_layer = flood_range_y_odd_layer + 0
routing_flood_vectors = [[(x, y, 0) for x in xrange(-flood_range_x_even_layer, flood_range_x_even_layer + 1) for y in xrange(-flood_range, flood_range + 1) \
if length_2d((x, y)) > 0.1 and length_2d((x, y)) <= flood_range] + [(0, 0, -1), (0, 0, 1)], \
[(x, y, 0) for x in xrange(-flood_range, flood_range + 1) for y in xrange(-flood_range_y_odd_layer, flood_range_y_odd_layer + 1) \
if length_2d((x, y)) > 0.1 and length_2d((x, y)) <= flood_range] + [(0, 0, -1), (0, 0, 1)]]
routing_path_vectors = [[(x, y, 0) for x in xrange(-path_range_x_even_layer, path_range_x_even_layer + 1) for y in xrange(-path_range, path_range + 1) \
if length_2d((x, y)) > 0.1 and length_2d((x, y)) <= path_range] + [(0, 0, -1), (0, 0, 1)], \
[(x, y, 0) for x in xrange(-path_range, path_range + 1) for y in xrange(-path_range_y_odd_layer, path_range_y_odd_layer + 1) \
if length_2d((x, y)) > 0.1 and length_2d((x, y)) <= path_range] + [(0, 0, -1), (0, 0, 1)]]
dfunc = [manhattan_distance, squared_euclidean_distance, euclidean_distance, \
chebyshev_distance, reciprical_distance, random_distance][args.d[0]]
dimensions = literal_eval(args.infile.readline().strip())
pcb = router.Pcb(dimensions, routing_flood_vectors, routing_path_vectors, dfunc, args.r[0], args.v[0], args.z[0])
for line in args.infile:
track = literal_eval(line.strip())
if not track:
break
pcb.add_track(track)
args.infile.close()
pcb.print_pcb()
best_cost = None
best_pcb = None
for i in xrange(args.s[0]):
if not pcb.route(args.t[0]):
pcb.shuffle_netlist()
continue
cost = pcb.cost()
if best_cost is None or cost < best_cost:
best_cost = cost
best_pcb = deepcopy(pcb)
pcb.shuffle_netlist()
if best_pcb is not None:
best_pcb.print_netlist()
best_pcb.print_stats()
else:
print []
if __name__ == '__main__':
main()
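# --- Illustrative sketch, not part of the original script ---
# The input stream is parsed with literal_eval: the first line holds the
# board dimensions, then one track per line until a falsy value (e.g. an
# empty tuple) terminates the list. The exact track layout is whatever
# router.Pcb.add_track() expects; the values below are hypothetical:
#
#   (400, 400, 2)   <- dimensions
#   (...)           <- one track per line
#   ()              <- terminator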
|
vygr/Python-PCB
|
pcb.py
|
Python
|
gpl-2.0
| 3,869
|
"""
WSGI config for hue7jip8 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "hue7jip8.settings")
application = get_wsgi_application()
|
sih4sing5hong5/hue7jip8
|
hue7jip8/wsgi.py
|
Python
|
mit
| 394
|
from json import dumps
import random
import time
def strTimeProp(start, end, format, prop):
"""Get a time at a proportion of a range of two formatted times.
start and end should be strings specifying times formatted in the
given format (strftime-style), giving an interval [start, end].
prop specifies what proportion of the interval should be taken after
start. The returned time will be in the specified format.
"""
stime = time.mktime(time.strptime(start, format))
etime = time.mktime(time.strptime(end, format))
ptime = stime + prop * (etime - stime)
return time.strftime(format, time.localtime(ptime))
def randomDate(start, end, prop):
return strTimeProp(start, end, '%Y-%m-%d %H:%M:%S', prop)
# 1) Make batch_log file
f= open("batch_log.json","w")
# First add to batch_log only befriend and unfriend events:
f.write('{"D":"2", "T":"20"}\n')
f.write('{"event_type":"befriend", "timestamp":"2017-06-13 11:33:01", "id1": "1", "id2": "2"}\n')
f.write('{"event_type":"befriend", "timestamp":"2017-06-13 11:33:01", "id1": "1", "id2": "3"}\n')
f.write('{"event_type":"unfriend", "timestamp":"2017-06-13 11:33:01", "id1": "1", "id2": "3"}\n')
f.write('{"event_type":"befriend", "timestamp":"2017-06-13 11:33:01", "id1": "2", "id2": "3"}\n')
f.write('{"event_type":"befriend", "timestamp":"2017-06-13 11:33:01", "id1": "2", "id2": "4"}\n')
f.write('{"event_type":"befriend", "timestamp":"2017-06-13 11:33:01", "id1": "3", "id2": "4"}\n')
# Here batch_log contains only befriend and unfriend
nentries=10000
for ii in range(nentries):
uid=random.randrange(1,5,1) #random number between 1 and 4
amount=random.randrange(1,10) #random number between 1 and 9
timestamp=randomDate("2008-06-13 01:30:01", "2009-06-13 13:50:03", random.random())
purchase={
    "id": uid,
    "amount": amount,
    "event_type": "purchase",
    "timestamp": timestamp}
f.write("{}\n".format(dumps(purchase)))
f.close()
#Now let's fill stream_log.json
f= open("stream_log.json","w")
nentries=2000
for ii in range(nentries):
uid=random.randrange(1,5,1) #random number between 1 and 4
amount=random.randrange(1,10) #random number between 1 and 9
timestamp=randomDate("2008-06-13 01:30:01", "2009-06-13 13:50:03", random.random())
purchase={
    "id": uid,
    "amount": amount,
    "event_type": "purchase",
    "timestamp": timestamp}
f.write("{}\n".format(dumps(purchase)))
# Now add a few additional "flagged" purchases:
nentries=5
for ii in range(1,nentries):
uid=ii
amount=random.randrange(100,2000) #random number between 100 and 1999
purchase={
    "id": uid,
    "amount": amount,
    "event_type": "purchase",
    "timestamp": timestamp}
f.write("{}\n".format(dumps(purchase)))
f.close()
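# --- Illustrative sketch, not part of the original script ---
# Both log files hold one JSON object per line, so a consumer can read
# them back like this (hypothetical snippet):
#
#   from json import loads
#   with open("stream_log.json") as fh:
#       events = [loads(line) for line in fh]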
|
trangel/Data-Science
|
tracking-purchases/insight_testsuite/tests/test_5/log_input/mkdb.py
|
Python
|
gpl-3.0
| 2,917
|
from flask import Blueprint
from flask import Flask, abort
from flask import jsonify
from flask import render_template
from flask import request,send_from_directory
import socket
import config as conf
import helpers
churn_api = Blueprint('churn_api', __name__)
@churn_api.route('')
def getChurn():
return render_template('churn/churn.html')
@churn_api.route('', methods=['POST'])
def submitChurn():
parameters = request.get_json(force=True)
print("Demo Churn:", parameters)
tweet = parameters['input']
# learning_type = request.get_json(force=True)["learning"]
port = conf.churn['e_port']
ip = conf.churn['e_host']
if request.method == 'POST':
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
server_address = (ip, port)
try:
print('sending "%s"' % tweet)
sent = sock.sendto(tweet.encode(), server_address)
print('waiting to receive')
data, server = sock.recvfrom(4096)
answer = {'answer': data.decode()}
print(data.decode())
finally:
sock.close()
return jsonify(answer)
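# --- Illustrative sketch, not part of this controller ---
# The endpoint above speaks plain UDP: it sends the raw tweet text and
# expects a single datagram back with the model's answer. A minimal
# compatible service for local testing could look like this (hypothetical):
#
#   import socket
#   srv = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#   srv.bind((conf.churn['e_host'], conf.churn['e_port']))
#   while True:
#       data, addr = srv.recvfrom(4096)
#       srv.sendto(b"no-churn", addr)  # dummy prediction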
|
nachoaguadoc/aimlx-demos
|
controller/churn_controller.py
|
Python
|
mit
| 1,137
|
#!/usr/bin/python2
# Copyright (c) 2011 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Simple system info tool for bug reports, etc.
"""
# More info at:
#
# http://code.activestate.com/recipes/511491/
# http://www.koders.com/python/fidB436B8043AA994C550C0961247DACC3E04E84734.aspx?s=config
# http://developer.apple.com/documentation/Darwin/Reference/ManPages/man8/sysctl.8.html
# imports
import os
import sys
import time
VERBOSE = 0
def Banner(text):
print '=' * 70
print text
print '=' * 70
# quick hack to keep banner in sync with os.system output
sys.stdout.flush()
def InfoLinux():
Banner('OS:')
os.system('uname -a')
Banner('CPU:')
if VERBOSE:
os.system('cat /proc/cpuinfo')
else:
os.system("egrep 'name|MHz|stepping' /proc/cpuinfo")
Banner('RAM:')
if VERBOSE:
os.system('cat /proc/meminfo')
else:
os.system('cat /proc/meminfo | egrep Mem')
Banner('LOAD:')
os.system('cat /proc/loadavg')
Banner('UPTIME:')
os.system('cat /proc/uptime')
def InfoDarwin():
Banner('OS:')
os.system('sysctl kern | egrep "kern\.os|version"')
Banner('CPU:')
os.system('sysctl hw.machine')
os.system('sysctl hw.model')
os.system('sysctl hw.ncpu')
if VERBOSE:
os.system("sysctl hw")
else:
os.system("sysctl hw | egrep 'cpu'")
Banner('RAM:')
if VERBOSE:
os.system("sysctl hw")
else:
os.system("sysctl hw | egrep 'mem'")
Banner('LOAD:')
print 'TBD'
Banner('UPTIME:')
os.system('sysctl kern | egrep "kern\.boottime"')
def InfoWin32():
import _winreg
def GetRegistryOS( value):
db = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,
'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion')
return _winreg.QueryValueEx(db, value)[0]
Banner('OS:')
for key in ['ProductName',
'CSDVersion',
'CurrentBuildNumber']:
print GetRegistryOS(key)
Banner('CPU:')
db = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,
'HARDWARE\\DESCRIPTION\\System\\CentralProcessor')
for n in range(0, 1000):
try:
cpu = _winreg.EnumKey(db, n)
except Exception:
break
print "\nProcessor :", cpu
db_cpu = _winreg.OpenKey(db, cpu)
for i in range(0, 1000):
try:
name, value, type =_winreg.EnumValue(db_cpu, i)
except Exception:
break
# skip binary data
if type == _winreg.REG_BINARY: continue
if type == _winreg.REG_FULL_RESOURCE_DESCRIPTOR: continue
print name, type, value
Banner('RAM:')
print 'TBD'
# TODO: this is currently broken since ctypes is not available
Banner('LOAD:')
print 'TBD'
Banner('UPTIME:')
print 'TBD'
PLATFORM_INFO = {
'linux2': InfoLinux,
'linux3': InfoLinux,
'darwin': InfoDarwin,
'win32': InfoWin32,
}
def main():
Banner('Python Info:')
print sys.platform
print sys.version
Banner('ENV:')
for e in ['PATH']:
print e, os.getenv(e)
if sys.platform in PLATFORM_INFO:
try:
PLATFORM_INFO[sys.platform]()
except Exception, err:
print 'ERROR: processing sys info', str(err)
else:
print 'ERROR: unknown platform', sys.platform
return 0
sys.exit(main())
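# --- Illustrative sketch, not part of the original script ---
# Supporting another platform is just a matter of registering a new info
# function in PLATFORM_INFO, e.g. (hypothetical):
#
#   def InfoFreeBSD():
#       Banner('OS:')
#       os.system('uname -a')
#
#   PLATFORM_INFO['freebsd9'] = InfoFreeBSD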
|
Lind-Project/native_client
|
tools/sysinfo.py
|
Python
|
bsd-3-clause
| 3,324
|
# Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from UM.Job import Job
## A Job subclass that performs writing.
#
# The writer defines what the result of this job is.
class WriteFileJob(Job):
## Creates a new job for writing.
#
# \param writer The file writer to use, with the correct MIME type.
# \param stream The output stream to write to.
# \param data Whatever it is what we want to write.
#   \param mode Additional information to send to the writer, for example
#   whether to write in binary format or in ASCII format.
def __init__(self, writer, stream, data, mode):
super().__init__()
self._stream = stream
self._writer = writer
self._data = data
self._file_name = ""
self._mode = mode
def setFileName(self, name):
self._file_name = name
def getFileName(self):
return self._file_name
def getStream(self):
return self._stream
def run(self):
Job.yieldThread()
self.setResult(self._writer.write(self._stream, self._data, self._mode))
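# --- Illustrative sketch, not part of the upstream module ---
# Typical use, assuming a writer object exposing write(stream, data, mode)
# (all names below are hypothetical):
#
#   with open("output.dat", "wb") as stream:
#       job = WriteFileJob(writer, stream, data, mode)
#       job.setFileName("output.dat")
#       job.run()  # normally a Job would be queued rather than run directly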
|
onitake/Uranium
|
UM/FileHandler/WriteFileJob.py
|
Python
|
agpl-3.0
| 1,153
|
# Copyright 2013-2015 VPAC
# Copyright 2014 The University of Melbourne
#
# This file is part of Karaage.
#
# Karaage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Karaage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Karaage If not, see <http://www.gnu.org/licenses/>.
from django.conf import settings
from django.contrib.auth.decorators import user_passes_test
from django.core.exceptions import PermissionDenied
def admin_required(function=None):
"""
Decorator for views that checks that the user is an administrator,
redirecting to the log-in page if necessary.
"""
def check_perms(user):
# if user not logged in, show login form
if not user.is_authenticated():
return False
# if this site doesn't allow admin access, fail
if settings.ADMIN_IGNORED:
raise PermissionDenied
# check if the user has admin rights
if not user.is_admin:
raise PermissionDenied
return True
actual_decorator = user_passes_test(check_perms)
if function:
return actual_decorator(function)
return actual_decorator
def login_required(function=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log-in page if necessary.
"""
def check_perms(user):
# if user not logged in, show login form
if not user.is_authenticated():
return False
# if this is the admin site only admin access
if settings.ADMIN_REQUIRED and not user.is_admin:
raise PermissionDenied
return True
actual_decorator = user_passes_test(check_perms)
if function:
return actual_decorator(function)
return actual_decorator
def usage_required(function=None):
"""
Decorator for views that checks that the user may view usage
information, redirecting to the log-in page if necessary.
"""
def check_perms(user):
# if user not logged in, show login form
if not user.is_authenticated():
return False
# if this is the admin site only admin access
if settings.ADMIN_REQUIRED and not user.is_admin:
raise PermissionDenied
if settings.USAGE_IS_PUBLIC:
return True
if user.is_admin:
return True
return False
actual_decorator = user_passes_test(check_perms)
if function:
return actual_decorator(function)
return actual_decorator
def xmlrpc_machine_required(function=None):
"""
Decorator for XML-RPC methods that authenticates the calling machine
from the (machine_name, password) arguments prepended to the call.
"""
def actual_decorator(func):
def wrapper(machine_name, password, *args):
from django_xmlrpc.decorators import AuthenticationFailedException
from karaage.machines.models import Machine
machine = Machine.objects.authenticate(machine_name, password)
if machine is None:
raise AuthenticationFailedException
return func(machine, *args)
if hasattr(func, '_xmlrpc_signature'):
sig = func._xmlrpc_signature
sig['args'] = (['string'] * 2) + sig['args']
wrapper._xmlrpc_signature = sig
if func.__doc__:
wrapper.__doc__ = func.__doc__ + \
"\nNote: Machine authentication is required."
return wrapper
if function:
return actual_decorator(function)
return actual_decorator
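# --- Illustrative sketch, not part of the upstream module ---
# The view decorators wrap ordinary Django views, while
# xmlrpc_machine_required consumes the (machine_name, password) arguments
# and passes the authenticated Machine along (view names are hypothetical):
#
#   @admin_required
#   def machine_list(request):
#       ...
#
#   @xmlrpc_machine_required
#   def get_projects(machine):
#       ...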
|
monash-merc/karaage
|
karaage/common/decorators.py
|
Python
|
gpl-3.0
| 3,954
|
import os
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class DBConnector():
'''
Connects to the payslip database, where every row is the detail of what
one employee was paid for an entire month.
'''
@classmethod
def get_session(cls):
database_path = os.environ["SQL_DATABASE"]
engine = create_engine(database_path)
session = sessionmaker(bind=engine)()
return session
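# --- Illustrative sketch, not part of the original module ---
# Callers obtain a session bound to the engine named by $SQL_DATABASE
# (the URL below is hypothetical):
#
#   os.environ["SQL_DATABASE"] = "sqlite:///payslips.db"
#   session = DBConnector.get_session()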
|
JasonThomasData/payslip_code_test
|
app/models/db_connector.py
|
Python
|
mit
| 498
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
User = get_user_model()
class TimtecUserAdmin(UserAdmin):
model = User
fieldsets = UserAdmin.fieldsets + (
(_('Timtec Info'), {'fields': ('accepted_terms', 'picture', 'cpf',
'occupation', 'institution')}),
)
admin.site.register(User, TimtecUserAdmin)
|
hacklabr/timtec
|
accounts/admin.py
|
Python
|
agpl-3.0
| 501
|
import sys
sys.path.append('../..')
import codestudio
z = codestudio.load('s1level86')
def draw_square():
for count in range(4):
z.move_forward(100)
z.turn_right(90)
def draw_triangle():
for count in range(3):
z.move_forward(100)
z.turn_right(120)
def draw_house():
draw_square()
z.move_forward(100)
z.turn_right(30)
draw_triangle()
draw_house()
z.check()
|
skilstak/code-dot-org-python
|
solutions/stage15-artist4/s1level86.py
|
Python
|
unlicense
| 404
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv, expression
from openerp.tools.translate import _
import openerp.addons.decimal_precision as dp
from openerp.report import report_sxw
from openerp.tools import float_compare, float_round
from openerp.exceptions import UserError, RedirectWarning
from operator import add
import time
class account_bank_statement(osv.osv):
def create(self, cr, uid, vals, context=None):
if vals.get('name', '/') == '/':
journal_id = vals.get('journal_id', self._default_journal_id(cr, uid, context=context))
vals['name'] = self._compute_default_statement_name(cr, uid, journal_id, context=context)
if vals.get('line_ids'):
for idx, line in enumerate(vals['line_ids']):
line[2]['sequence'] = idx + 1
return super(account_bank_statement, self).create(cr, uid, vals, context=context)
def write(self, cr, uid, ids, vals, context=None):
res = super(account_bank_statement, self).write(cr, uid, ids, vals, context=context)
account_bank_statement_line_obj = self.pool.get('account.bank.statement.line')
for statement in self.browse(cr, uid, ids, context):
for idx, line in enumerate(statement.line_ids):
account_bank_statement_line_obj.write(cr, uid, [line.id], {'sequence': idx + 1}, context=context)
return res
def _default_journal_id(self, cr, uid, context=None):
if context is None:
context = {}
journal_pool = self.pool.get('account.journal')
journal_type = context.get('journal_type', False)
company_id = self.pool.get('res.company')._company_default_get(cr, uid, 'account.bank.statement',context=context)
if journal_type:
ids = journal_pool.search(cr, uid, [('type', '=', journal_type),('company_id','=',company_id)])
if ids:
return ids[0]
return False
def _end_balance(self, cursor, user, ids, name, attr, context=None):
res = {}
for statement in self.browse(cursor, user, ids, context=context):
res[statement.id] = statement.balance_start
for line in statement.line_ids:
res[statement.id] += line.amount
return res
def _get_period(self, cr, uid, context=None):
periods = self.pool.get('account.period').find(cr, uid, context=context)
if periods:
return periods[0]
return False
def _compute_default_statement_name(self, cr, uid, journal_id, context=None):
context = dict(context or {})
obj_seq = self.pool.get('ir.sequence')
period = self.pool.get('account.period').browse(cr, uid, self._get_period(cr, uid, context=context), context=context)
context['ir_sequence_date'] = period.date_start
journal = self.pool.get('account.journal').browse(cr, uid, journal_id, None)
return obj_seq.next_by_id(cr, uid, journal.sequence_id.id, context=context)
def _currency(self, cursor, user, ids, name, args, context=None):
res = {}
res_currency_obj = self.pool.get('res.currency')
res_users_obj = self.pool.get('res.users')
default_currency = res_users_obj.browse(cursor, user,
user, context=context).company_id.currency_id
for statement in self.browse(cursor, user, ids, context=context):
currency = statement.journal_id.currency
if not currency:
currency = default_currency
res[statement.id] = currency.id
currency_names = {}
for currency_id, currency_name in res_currency_obj.name_get(cursor,
user, [x for x in res.values()], context=context):
currency_names[currency_id] = currency_name
for statement_id in res.keys():
currency_id = res[statement_id]
res[statement_id] = (currency_id, currency_names[currency_id])
return res
def _get_statement(self, cr, uid, ids, context=None):
result = {}
for line in self.pool.get('account.bank.statement.line').browse(cr, uid, ids, context=context):
result[line.statement_id.id] = True
return result.keys()
def _all_lines_reconciled(self, cr, uid, ids, name, args, context=None):
res = {}
for statement in self.browse(cr, uid, ids, context=context):
res[statement.id] = all([line.journal_entry_ids.ids for line in statement.line_ids])
return res
_order = "date desc, id desc"
_name = "account.bank.statement"
_description = "Bank Statement"
_inherit = ['mail.thread']
_columns = {
'name': fields.char(
'Reference', states={'draft': [('readonly', False)]},
readonly=True, # readonly for account_cash_statement
copy=False,
help='If you give a name other than /, the created accounting entries '
'will have the same name as the statement. '
'This allows the statement entries to have the same reference as the '
'statement itself'),
'date': fields.date('Date', required=True, states={'confirm': [('readonly', True)]},
select=True, copy=False),
'journal_id': fields.many2one('account.journal', 'Journal', required=True,
readonly=True, states={'draft':[('readonly',False)]}),
'period_id': fields.many2one('account.period', 'Period', required=True,
states={'confirm':[('readonly', True)]}),
'balance_start': fields.float('Starting Balance', digits_compute=dp.get_precision('Account'),
states={'confirm':[('readonly',True)]}),
'balance_end_real': fields.float('Ending Balance', digits_compute=dp.get_precision('Account'),
states={'confirm': [('readonly', True)]}, help="Computed using the cash control lines"),
'balance_end': fields.function(_end_balance,
store = {
'account.bank.statement': (lambda self, cr, uid, ids, c={}: ids, ['line_ids','move_line_ids','balance_start'], 10),
'account.bank.statement.line': (_get_statement, ['amount'], 10),
},
string="Computed Balance", help='Balance as calculated based on Opening Balance and transaction lines'),
'company_id': fields.related('journal_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True),
'line_ids': fields.one2many('account.bank.statement.line',
'statement_id', 'Statement lines',
states={'confirm':[('readonly', True)]}, copy=True),
'move_line_ids': fields.one2many('account.move.line', 'statement_id',
'Entry lines', states={'confirm':[('readonly',True)]}),
'state': fields.selection([('draft', 'New'),
('open','Open'), # used by cash statements
('confirm', 'Closed')],
'Status', required=True, readonly="1",
copy=False,
help='When a new statement is created, the status will be \'Draft\'.\n'
'After getting confirmation from the bank, it will be in \'Confirmed\' status.'),
'currency': fields.function(_currency, string='Currency',
type='many2one', relation='res.currency'),
'account_id': fields.related('journal_id', 'default_debit_account_id', type='many2one', relation='account.account', string='Account used in this journal', readonly=True, help='used in statement reconciliation domain, but shouldn\'t be used elsewhere.'),
'cash_control': fields.related('journal_id', 'cash_control' , type='boolean', relation='account.journal',string='Cash control'),
'all_lines_reconciled': fields.function(_all_lines_reconciled, string='All lines reconciled', type='boolean'),
}
_defaults = {
'name': '/',
'date': fields.date.context_today,
'state': 'draft',
'journal_id': _default_journal_id,
'period_id': _get_period,
'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'account.bank.statement',context=c),
}
def _check_company_id(self, cr, uid, ids, context=None):
for statement in self.browse(cr, uid, ids, context=context):
if statement.company_id.id != statement.period_id.company_id.id:
return False
return True
_constraints = [
(_check_company_id, 'The journal and period chosen have to belong to the same company.', ['journal_id','period_id']),
]
def onchange_date(self, cr, uid, ids, date, company_id, context=None):
"""
Find the correct period to use for the given date and company_id, return it and set it in the context
"""
res = {}
period_pool = self.pool.get('account.period')
if context is None:
context = {}
ctx = context.copy()
ctx.update({'company_id': company_id})
pids = period_pool.find(cr, uid, dt=date, context=ctx)
if pids:
res.update({'period_id': pids[0]})
context = dict(context, period_id=pids[0])
return {
'value':res,
'context':context,
}
def button_dummy(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {}, context=context)
def _prepare_move(self, cr, uid, st_line, st_line_number, context=None):
"""Prepare the dict of values to create the move from a
statement line. This method may be overridden to implement custom
move generation (making sure to call super() to establish
a clean extension chain).
:param browse_record st_line: account.bank.statement.line record to
create the move from.
:param char st_line_number: will be used as the name of the generated account move
:return: dict of value to create() the account.move
"""
return {
'statement_line_id': st_line.id,
'journal_id': st_line.statement_id.journal_id.id,
'period_id': st_line.statement_id.period_id.id,
'date': st_line.date,
'name': st_line_number,
'ref': st_line.ref,
}
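    # --- Illustrative sketch, not part of the upstream module ---
    # As the docstring above suggests, a custom module can extend the
    # generated move by overriding and calling super() (class name and
    # extra value are hypothetical):
    #
    #   class my_bank_statement(osv.osv):
    #       _inherit = 'account.bank.statement'
    #
    #       def _prepare_move(self, cr, uid, st_line, st_line_number, context=None):
    #           vals = super(my_bank_statement, self)._prepare_move(
    #               cr, uid, st_line, st_line_number, context=context)
    #           vals['narration'] = st_line.name
    #           return vals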
def _get_counter_part_account(self, cr, uid, st_line, context=None):
"""Retrieve the account to use in the counterpart move.
:param browse_record st_line: account.bank.statement.line record to create the move from.
:return: int/long of the account.account to use as counterpart
"""
if st_line.amount >= 0:
return st_line.statement_id.journal_id.default_credit_account_id.id
return st_line.statement_id.journal_id.default_debit_account_id.id
def _get_counter_part_partner(self, cr, uid, st_line, context=None):
"""Retrieve the partner to use in the counterpart move.
:param browse_record st_line: account.bank.statement.line record to create the move from.
:return: int/long of the res.partner to use as counterpart
"""
return st_line.partner_id and st_line.partner_id.id or False
def _prepare_bank_move_line(self, cr, uid, st_line, move_id, amount, company_currency, context=None):
"""Compute the args to build the dict of values to create the counter part move line from a
statement line by calling the _prepare_move_line_vals.
:param browse_record st_line: account.bank.statement.line record to create the move from.
:param int/long move_id: ID of the account.move to link the move line
:param float amount: amount of the move line
:param browse_record company_currency: res.currency record of the concerned company
:return: dict of value to create() the bank account.move.line
"""
currency_obj = self.pool.get('res.currency')
if context is None:
context = {}
ctx = context.copy()
ctx['date'] = st_line.date
account_id = self._get_counter_part_account(cr, uid, st_line, context=context)
partner_id = self._get_counter_part_partner(cr, uid, st_line, context=context)
debit = ((amount > 0) and amount) or 0.0
credit = ((amount < 0) and -amount) or 0.0
cur_id = False
amt_cur = False
if st_line.statement_id.currency.id != company_currency.id or (st_line.currency_id and st_line.amount_currency):
#the amount given is in foreign currency and needs to be converted at the ratio of the transaction
st_line_currency_rate = st_line.currency_id and (st_line.amount_currency / st_line.amount) or 1
if st_line.currency_id != company_currency:
debit = currency_obj.round(cr, uid, company_currency, debit / st_line_currency_rate)
credit = currency_obj.round(cr, uid, company_currency, credit / st_line_currency_rate)
amt_cur = amount
if st_line.statement_id.currency != company_currency:
amt_cur = amount / st_line_currency_rate
cur_id = st_line.statement_id.currency.id != company_currency.id and st_line.statement_id.currency.id or st_line.currency_id.id
if st_line.statement_id.currency.id != company_currency.id and st_line.currency_id != company_currency:
debit = currency_obj.compute(cr, uid, st_line.statement_id.currency.id, company_currency.id, debit, context=ctx)
credit = currency_obj.compute(cr, uid, st_line.statement_id.currency.id, company_currency.id, credit, context=ctx)
return self._prepare_move_line_vals(cr, uid, st_line, move_id, debit, credit,
amount_currency=amt_cur, currency_id=cur_id, account_id=account_id,
partner_id=partner_id, context=context)
def _prepare_move_line_vals(self, cr, uid, st_line, move_id, debit, credit, currency_id=False,
amount_currency=False, account_id=False, partner_id=False, context=None):
"""Prepare the dict of values to create the move line from a
statement line.
:param browse_record st_line: account.bank.statement.line record to
create the move from.
:param int/long move_id: ID of the account.move to link the move line
:param float debit: debit amount of the move line
:param float credit: credit amount of the move line
:param int/long currency_id: ID of currency of the move line to create
:param float amount_currency: amount of the debit/credit expressed in the currency_id
:param int/long account_id: ID of the account to use in the move line if different
from the statement line account ID
:param int/long partner_id: ID of the partner to put on the move line
:return: dict of value to create() the account.move.line
"""
acc_id = account_id or st_line.account_id.id
cur_id = currency_id or st_line.statement_id.currency.id
par_id = partner_id or (((st_line.partner_id) and st_line.partner_id.id) or False)
return {
'name': st_line.name,
'date': st_line.date,
'ref': st_line.ref,
'move_id': move_id,
'partner_id': par_id,
'account_id': acc_id,
'credit': credit,
'debit': debit,
'statement_id': st_line.statement_id.id,
'journal_id': st_line.statement_id.journal_id.id,
'period_id': st_line.statement_id.period_id.id,
'currency_id': amount_currency and cur_id,
'amount_currency': amount_currency,
}
def balance_check(self, cr, uid, st_id, journal_type='bank', context=None):
st = self.browse(cr, uid, st_id, context=context)
if not (abs((st.balance_end or 0.0) - st.balance_end_real) < 0.0001):
raise UserError(_('The statement balance is incorrect !\nThe expected balance (%.2f) is different than the computed one. (%.2f)') % (st.balance_end_real, st.balance_end))
return True
def statement_close(self, cr, uid, ids, journal_type='bank', context=None):
return self.write(cr, uid, ids, {'state':'confirm'}, context=context)
def check_status_condition(self, cr, uid, state, journal_type='bank'):
return state in ('draft','open')
def button_confirm_bank(self, cr, uid, ids, context=None):
if context is None:
context = {}
for st in self.browse(cr, uid, ids, context=context):
j_type = st.journal_id.type
if not self.check_status_condition(cr, uid, st.state, journal_type=j_type):
continue
self.balance_check(cr, uid, st.id, journal_type=j_type, context=context)
if (not st.journal_id.default_credit_account_id) \
or (not st.journal_id.default_debit_account_id):
raise UserError(_('Please verify that an account is defined in the journal.'))
for line in st.move_line_ids:
if line.state != 'valid':
raise UserError(_('The account entries lines are not in valid state.'))
move_ids = []
for st_line in st.line_ids:
if not st_line.amount:
continue
if st_line.account_id and not st_line.journal_entry_ids.ids:
#make an account move as before
vals = {
'debit': st_line.amount < 0 and -st_line.amount or 0.0,
'credit': st_line.amount > 0 and st_line.amount or 0.0,
'account_id': st_line.account_id.id,
'name': st_line.name
}
self.pool.get('account.bank.statement.line').process_reconciliation(cr, uid, st_line.id, [vals], context=context)
elif not st_line.journal_entry_ids.ids:
raise UserError(_('All the account entries lines must be processed in order to close the statement.'))
move_ids += st_line.journal_entry_ids.ids
if move_ids:
self.pool.get('account.move').post(cr, uid, move_ids, context=context)
self.message_post(cr, uid, [st.id], body=_('Statement %s confirmed, journal items were created.') % (st.name,), context=context)
self.link_bank_to_partner(cr, uid, ids, context=context)
return self.write(cr, uid, ids, {'state': 'confirm', 'closing_date': time.strftime("%Y-%m-%d %H:%M:%S")}, context=context)
def button_cancel(self, cr, uid, ids, context=None):
bnk_st_line_ids = []
for st in self.browse(cr, uid, ids, context=context):
bnk_st_line_ids += [line.id for line in st.line_ids]
self.pool.get('account.bank.statement.line').cancel(cr, uid, bnk_st_line_ids, context=context)
return self.write(cr, uid, ids, {'state': 'draft'}, context=context)
def _compute_balance_end_real(self, cr, uid, journal_id, context=None):
res = False
if journal_id:
journal = self.pool.get('account.journal').browse(cr, uid, journal_id, context=context)
if journal.with_last_closing_balance:
cr.execute('SELECT balance_end_real \
FROM account_bank_statement \
WHERE journal_id = %s AND NOT state = %s \
ORDER BY date DESC,id DESC LIMIT 1', (journal_id, 'draft'))
res = cr.fetchone()
return res and res[0] or 0.0
def onchange_journal_id(self, cr, uid, statement_id, journal_id, context=None):
if not journal_id:
return {}
balance_start = self._compute_balance_end_real(cr, uid, journal_id, context=context)
journal = self.pool.get('account.journal').browse(cr, uid, journal_id, context=context)
currency = journal.currency or journal.company_id.currency_id
res = {'balance_start': balance_start, 'company_id': journal.company_id.id, 'currency': currency.id}
if journal.type == 'cash':
res['cash_control'] = journal.cash_control
return {'value': res}
def unlink(self, cr, uid, ids, context=None):
statement_line_obj = self.pool['account.bank.statement.line']
for item in self.browse(cr, uid, ids, context=context):
if item.state != 'draft':
raise UserError(_('In order to delete a bank statement, you must first cancel it to delete related journal items.'))
# Explicitly unlink bank statement lines
# so it will check that the related journal entries have
# been deleted first
statement_line_obj.unlink(cr, uid, [line.id for line in item.line_ids], context=context)
return super(account_bank_statement, self).unlink(cr, uid, ids, context=context)
def button_journal_entries(self, cr, uid, ids, context=None):
ctx = (context or {}).copy()
ctx['journal_id'] = self.browse(cr, uid, ids[0], context=context).journal_id.id
return {
'name': _('Journal Items'),
'view_type':'form',
'view_mode':'tree',
'res_model':'account.move.line',
'view_id':False,
'type':'ir.actions.act_window',
'domain':[('statement_id','in',ids)],
'context':ctx,
}
def number_of_lines_reconciled(self, cr, uid, ids, context=None):
bsl_obj = self.pool.get('account.bank.statement.line')
return bsl_obj.search_count(cr, uid, [('statement_id', 'in', ids), ('journal_entry_ids', '!=', [])], context=context)
def link_bank_to_partner(self, cr, uid, ids, context=None):
for statement in self.browse(cr, uid, ids, context=context):
for st_line in statement.line_ids:
if st_line.bank_account_id and st_line.partner_id and st_line.bank_account_id.partner_id.id != st_line.partner_id.id:
# Update the partner informations of the bank account, possibly overriding existing ones
bank_obj = self.pool.get('res.partner.bank')
bank_vals = bank_obj.onchange_partner_id(cr, uid, [st_line.bank_account_id.id], st_line.partner_id.id, context=context)['value']
bank_vals.update({'partner_id': st_line.partner_id.id})
bank_obj.write(cr, uid, [st_line.bank_account_id.id], bank_vals, context=context)
class account_bank_statement_line(osv.osv):
def create(self, cr, uid, vals, context=None):
if vals.get('amount_currency', 0) and not vals.get('amount', 0):
raise UserError(_('If "Amount Currency" is specified, then "Amount" must be as well.'))
return super(account_bank_statement_line, self).create(cr, uid, vals, context=context)
def unlink(self, cr, uid, ids, context=None):
for item in self.browse(cr, uid, ids, context=context):
if item.journal_entry_ids:
raise UserError(_("""In order to delete a bank statement line, you must first cancel it to delete related journal items."""))
return super(account_bank_statement_line, self).unlink(cr, uid, ids, context=context)
def cancel(self, cr, uid, ids, context=None):
account_move_obj = self.pool.get('account.move')
move_ids = []
for line in self.browse(cr, uid, ids, context=context):
if line.journal_entry_ids.ids :
move_ids += line.journal_entry_ids.ids
account_move_obj.write(cr, uid, move_ids, {'statement_line_id': False}, context=context)
for move in account_move_obj.browse(cr, uid, move_ids, context=context):
for aml in move.line_id:
if aml.reconcile_id:
move_lines = [l.id for l in aml.reconcile_id.line_id]
move_lines.remove(aml.id)
self.pool.get('account.move.reconcile').unlink(cr, uid, [aml.reconcile_id.id], context=context)
if len(move_lines) >= 2:
self.pool.get('account.move.line').reconcile_partial(cr, uid, move_lines, 'auto', context=context)
if move_ids:
account_move_obj.button_cancel(cr, uid, move_ids, context=context)
account_move_obj.unlink(cr, uid, move_ids, context)
def get_data_for_reconciliations(self, cr, uid, ids, excluded_ids=None, search_reconciliation_proposition=True, context=None):
""" Returns the data required to display a reconciliation, for each statement line id in ids """
ret = []
if excluded_ids is None:
excluded_ids = []
for st_line in self.browse(cr, uid, ids, context=context):
reconciliation_data = {}
if search_reconciliation_proposition:
reconciliation_proposition = self.get_reconciliation_proposition(cr, uid, st_line, excluded_ids=excluded_ids, context=context)
for mv_line in reconciliation_proposition:
excluded_ids.append(mv_line['id'])
reconciliation_data['reconciliation_proposition'] = reconciliation_proposition
else:
reconciliation_data['reconciliation_proposition'] = []
st_line = self.get_statement_line_for_reconciliation(cr, uid, st_line, context=context)
reconciliation_data['st_line'] = st_line
ret.append(reconciliation_data)
return ret
def get_statement_line_for_reconciliation(self, cr, uid, st_line, context=None):
""" Returns the data required by the bank statement reconciliation widget to display a statement line """
if context is None:
context = {}
statement_currency = st_line.journal_id.currency or st_line.journal_id.company_id.currency_id
rml_parser = report_sxw.rml_parse(cr, uid, 'reconciliation_widget_asl', context=context)
if st_line.amount_currency and st_line.currency_id:
amount = st_line.amount_currency
amount_currency = st_line.amount
amount_currency_str = amount_currency > 0 and amount_currency or -amount_currency
amount_currency_str = rml_parser.formatLang(amount_currency_str, currency_obj=statement_currency)
else:
amount = st_line.amount
amount_currency_str = ""
amount_str = amount > 0 and amount or -amount
amount_str = rml_parser.formatLang(amount_str, currency_obj=st_line.currency_id or statement_currency)
data = {
'id': st_line.id,
'ref': st_line.ref,
'note': st_line.note or "",
'name': st_line.name,
'date': st_line.date,
'amount': amount,
'amount_str': amount_str, # Amount in the statement line currency
'currency_id': st_line.currency_id.id or statement_currency.id,
'partner_id': st_line.partner_id.id,
'statement_id': st_line.statement_id.id,
'account_code': st_line.journal_id.default_debit_account_id.code,
'account_name': st_line.journal_id.default_debit_account_id.name,
'partner_name': st_line.partner_id.name,
'communication_partner_name': st_line.partner_name,
'amount_currency_str': amount_currency_str, # Amount in the statement currency
'has_no_partner': not st_line.partner_id.id,
}
if st_line.partner_id.id:
if amount > 0:
data['open_balance_account_id'] = st_line.partner_id.property_account_receivable.id
else:
data['open_balance_account_id'] = st_line.partner_id.property_account_payable.id
return data
def _domain_reconciliation_proposition(self, cr, uid, st_line, excluded_ids=None, context=None):
if excluded_ids is None:
excluded_ids = []
domain = [('ref', '=', st_line.name),
('reconcile_id', '=', False),
('state', '=', 'valid'),
('account_id.reconcile', '=', True),
('id', 'not in', excluded_ids)]
return domain
def get_reconciliation_proposition(self, cr, uid, st_line, excluded_ids=None, context=None):
""" Returns move lines that constitute the best guess to reconcile a statement line. """
mv_line_pool = self.pool.get('account.move.line')
# Look for structured communication
if st_line.name:
domain = self._domain_reconciliation_proposition(cr, uid, st_line, excluded_ids=excluded_ids, context=context)
match_id = mv_line_pool.search(cr, uid, domain, offset=0, limit=1, context=context)
if match_id:
mv_line_br = mv_line_pool.browse(cr, uid, match_id, context=context)
target_currency = st_line.currency_id or st_line.journal_id.currency or st_line.journal_id.company_id.currency_id
mv_line = mv_line_pool.prepare_move_lines_for_reconciliation_widget(cr, uid, mv_line_br, target_currency=target_currency, target_date=st_line.date, context=context)[0]
mv_line['has_no_partner'] = not bool(st_line.partner_id.id)
# If the structured communication matches a move line that is associated with a partner, we can safely associate the statement line with the partner
if (mv_line['partner_id']):
self.write(cr, uid, st_line.id, {'partner_id': mv_line['partner_id']}, context=context)
mv_line['has_no_partner'] = False
return [mv_line]
# How to compare statement line amount and move lines amount
precision_digits = self.pool.get('decimal.precision').precision_get(cr, uid, 'Account')
currency_id = st_line.currency_id.id or st_line.journal_id.currency.id
# NB : amount can't be == 0 ; so float precision is not an issue for amount > 0 or amount < 0
amount = st_line.amount_currency or st_line.amount
domain = [('reconcile_partial_id', '=', False)]
if currency_id:
domain += [('currency_id', '=', currency_id)]
sign = 1 # correct the fact that st_line.amount is signed and debit/credit is not
amount_field = 'debit'
if currency_id == False:
if amount < 0:
amount_field = 'credit'
sign = -1
else:
amount_field = 'amount_currency'
# Look for a matching amount
domain_exact_amount = domain + [(amount_field, '=', float_round(sign * amount, precision_digits=precision_digits))]
match_id = self.get_move_lines_for_reconciliation(cr, uid, st_line, excluded_ids=excluded_ids, offset=0, limit=1, additional_domain=domain_exact_amount)
if match_id:
return match_id
if not st_line.partner_id.id:
return []
# Select move lines until their total amount is greater than the statement line amount
domain += [('account_id.type', 'in', ((amount > 0 and 'receivable' or 'payable'), 'liquidity'))] # Make sure we can't mix receivable and payable
domain += (amount_field == 'amount_currency' and amount < 0) and [(amount_field, '<', 0)] or [(amount_field, '>', 0)]
mv_lines = self.get_move_lines_for_reconciliation(cr, uid, st_line, excluded_ids=excluded_ids, limit=5, additional_domain=domain)
ret = []
total = 0
for line in mv_lines:
total += abs(line['debit'] - line['credit'])
ret.append(line)
if float_compare(total, abs(amount), precision_digits=precision_digits) != -1:
break
return ret
def get_move_lines_for_reconciliation_by_statement_line_id(self, cr, uid, st_line_id, excluded_ids=None, str=False, offset=0, limit=None, count=False, additional_domain=None, context=None):
""" Bridge between the web client reconciliation widget and get_move_lines_for_reconciliation (which expects a browse record) """
if excluded_ids is None:
excluded_ids = []
if additional_domain is None:
additional_domain = []
st_line = self.browse(cr, uid, st_line_id, context=context)
return self.get_move_lines_for_reconciliation(cr, uid, st_line, excluded_ids, str, offset, limit, count, additional_domain, context=context)
def _domain_move_lines_for_reconciliation(self, cr, uid, st_line, excluded_ids=None, str=False, additional_domain=None, context=None):
if excluded_ids is None:
excluded_ids = []
if additional_domain is None:
additional_domain = []
else:
additional_domain = expression.normalize_domain(additional_domain)
# Domain to fetch reconciled move lines (use case where you register a payment before you get the bank statement)
domain_rapprochement = ['&', ('statement_id', '=', False), ('account_id', 'in', [st_line.journal_id.default_credit_account_id.id, st_line.journal_id.default_debit_account_id.id])]
# Domain to fetch unreconciled move lines (the bank reconciliation process will create a payment journal entry to reconcile with)
domain_reconciliation = [('reconcile_id', '=', False)]
if st_line.partner_id.id:
domain_reconciliation = expression.AND([domain_reconciliation, [('account_id.type', 'in', ['payable', 'receivable'])]])
else:
domain_reconciliation = expression.AND([domain_reconciliation, [('account_id.reconcile', '=', True)]])
# Let's add what applies to both
domain = expression.OR([domain_rapprochement, domain_reconciliation])
domain = expression.AND([domain, [('state', '=', 'valid')]])
if st_line.partner_id.id:
domain = expression.AND([domain, [('partner_id', '=', st_line.partner_id.id)]])
if excluded_ids:
domain = expression.AND([domain, [('id', 'not in', excluded_ids)]])
if str:
str_domain = [
'|', ('move_id.name', 'ilike', str),
'|', ('move_id.ref', 'ilike', str),
'|', ('date_maturity', 'like', str),
'&', ('name', '!=', '/'), ('name', 'ilike', str)
]
if not st_line.partner_id.id:
str_domain = expression.OR([str_domain, [('partner_id.name', 'ilike', str)]])
domain = expression.AND([domain, str_domain])
return expression.AND([additional_domain, domain])
def get_move_lines_for_reconciliation(self, cr, uid, st_line, excluded_ids=None, str=False, offset=0, limit=None, count=False, additional_domain=None, context=None):
""" Find the move lines that could be used to reconcile a statement line. If count is true, only returns the count.
:param st_line: the browse record of the statement line
:param integers list excluded_ids: ids of move lines that should not be fetched
:param boolean count: just return the number of records
:param tuples list additional_domain: additional domain restrictions
"""
mv_line_pool = self.pool.get('account.move.line')
domain = self._domain_move_lines_for_reconciliation(cr, uid, st_line, excluded_ids=excluded_ids, str=str, additional_domain=additional_domain, context=context)
# Get move lines ; in case of a partial reconciliation, only keep one line (the first whose amount is greater than
# the residual amount because it is presumably the invoice, which is the relevant item in this situation)
filtered_lines = []
reconcile_partial_ids = []
actual_offset = offset
while True:
line_ids = mv_line_pool.search(cr, uid, domain, offset=actual_offset, limit=limit, order="date_maturity asc, id asc", context=context)
lines = mv_line_pool.browse(cr, uid, line_ids, context=context)
make_one_more_loop = False
for line in lines:
if line.reconcile_partial_id and \
(line.reconcile_partial_id.id in reconcile_partial_ids or \
abs(line.debit - line.credit) < abs(line.amount_residual)):
#if we filtered a line because it is partially reconciled with an already selected line, we must do one more loop
#in order to get the right number of items in the pager
make_one_more_loop = True
continue
filtered_lines.append(line)
if line.reconcile_partial_id:
reconcile_partial_ids.append(line.reconcile_partial_id.id)
if not limit or not make_one_more_loop or len(filtered_lines) >= limit:
break
actual_offset = actual_offset + limit
lines = limit and filtered_lines[:limit] or filtered_lines
# Either return number of lines
if count:
return len(lines)
# Or return list of dicts representing the formatted move lines
else:
target_currency = st_line.currency_id or st_line.journal_id.currency or st_line.journal_id.company_id.currency_id
mv_lines = mv_line_pool.prepare_move_lines_for_reconciliation_widget(cr, uid, lines, target_currency=target_currency, target_date=st_line.date, context=context)
has_no_partner = not bool(st_line.partner_id.id)
for line in mv_lines:
line['has_no_partner'] = has_no_partner
return mv_lines
def get_currency_rate_line(self, cr, uid, st_line, currency_diff, move_id, context=None):
if currency_diff < 0:
account_id = st_line.company_id.expense_currency_exchange_account_id.id
if not account_id:
model, action_id = self.pool['ir.model.data'].get_object_reference(cr, uid, 'account', 'action_account_config')
msg = _("You need to configure the 'Loss Exchange Rate Account' in order to manage automatically the booking of accounting entries related to differences between exchange rates.")
raise RedirectWarning(msg, action_id, _('Go to Account Configuration'))
else:
account_id = st_line.company_id.income_currency_exchange_account_id.id
if not account_id:
raise UserError(_("You should configure the 'Gain Exchange Rate Account' in the accounting settings, to manage automatically the booking of accounting entries related to differences between exchange rates."))
return {
'move_id': move_id,
'name': _('change') + ': ' + (st_line.name or '/'),
'period_id': st_line.statement_id.period_id.id,
'journal_id': st_line.journal_id.id,
'partner_id': st_line.partner_id.id,
'company_id': st_line.company_id.id,
'statement_id': st_line.statement_id.id,
'debit': currency_diff < 0 and -currency_diff or 0,
'credit': currency_diff > 0 and currency_diff or 0,
'amount_currency': 0.0,
'date': st_line.date,
'account_id': account_id
}
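    # Worked example (hypothetical figures) for get_currency_rate_line above: a
    # counterpart originally booked for 100.0 that is now worth only 98.0 gives
    # currency_diff = -2.0, i.e. a 2.0 debit posted on the loss account.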
def process_reconciliations(self, cr, uid, data, context=None):
for datum in data:
self.process_reconciliation(cr, uid, datum[0], datum[1], context=context)
def process_reconciliation(self, cr, uid, id, mv_line_dicts, context=None):
""" Creates a move line for each item of mv_line_dicts and for the statement line. Reconcile a new move line with its counterpart_move_line_id if specified.
Finally, mark the statement line as reconciled by putting the reconciled moves ids in the column journal_entry_ids.
:param int id: id of the bank statement line
:param list of dicts mv_line_dicts: move lines to create. If counterpart_move_line_id is specified, reconcile with it
"""
if context is None:
context = {}
st_line = self.browse(cr, uid, id, context=context)
company_currency = st_line.journal_id.company_id.currency_id
statement_currency = st_line.journal_id.currency or company_currency
bs_obj = self.pool.get('account.bank.statement')
am_obj = self.pool.get('account.move')
aml_obj = self.pool.get('account.move.line')
currency_obj = self.pool.get('res.currency')
# Checks
if st_line.journal_entry_ids.ids:
raise UserError(_('The bank statement line was already reconciled.'))
for mv_line_dict in mv_line_dicts:
for field in ['debit', 'credit', 'amount_currency']:
if field not in mv_line_dict:
mv_line_dict[field] = 0.0
if mv_line_dict.get('counterpart_move_line_id'):
mv_line = aml_obj.browse(cr, uid, mv_line_dict.get('counterpart_move_line_id'), context=context)
if mv_line.reconcile_id:
raise UserError(_('A selected move line was already reconciled.'))
# When the move line is already paid (fully reconciled move), we just link them to the bank statement
to_reconcile_mv_line_ids = [x['counterpart_move_line_id'] for x in mv_line_dicts if x.get('already_paid', False)]
if to_reconcile_mv_line_ids:
aml_obj.write(cr, uid, to_reconcile_mv_line_ids, {'statement_id': st_line.statement_id.id}, context=context)
for aml in aml_obj.browse(cr, uid, to_reconcile_mv_line_ids, context=context):
am_obj.write(cr, uid, aml.move_id.id, {'statement_line_id': st_line.id}, context=context)
# Otherwise, we create a move line. Either matching an existing journal entry (eg. invoice), in which
# case we reconcile the existing and the new move lines together, or being a write-off.
to_create_mv_line_dicts = [x for x in mv_line_dicts if not x.get('already_paid', False)]
if to_create_mv_line_dicts:
# Create the move
move_name = (st_line.statement_id.name or st_line.name) + "/" + str(st_line.sequence)
move_vals = bs_obj._prepare_move(cr, uid, st_line, move_name, context=context)
move_id = am_obj.create(cr, uid, move_vals, context=context)
# Create the move line for the statement line
st_line_amount = st_line.currency_id and st_line.amount_currency or st_line.amount
if to_reconcile_mv_line_ids:
                already_reconciled_amount = sum(x['credit'] - x['debit'] for x in mv_line_dicts if x.get('already_paid', False))
st_line_amount -= already_reconciled_amount
bank_st_move_vals = bs_obj._prepare_bank_move_line(cr, uid, st_line, move_id, st_line_amount, company_currency, context=context)
aml_obj.create(cr, uid, bank_st_move_vals, context=context)
# Complete the dicts
st_line_currency = st_line.currency_id or statement_currency
st_line_currency_rate = st_line.currency_id and (st_line.amount_currency / st_line.amount) or 1
to_create = []
for mv_line_dict in to_create_mv_line_dicts:
mv_line_dict.pop('already_paid', None)
mv_line_dict['ref'] = move_name
mv_line_dict['move_id'] = move_id
mv_line_dict['period_id'] = st_line.statement_id.period_id.id
mv_line_dict['journal_id'] = st_line.journal_id.id
mv_line_dict['company_id'] = st_line.company_id.id
mv_line_dict['statement_id'] = st_line.statement_id.id
if mv_line_dict.get('counterpart_move_line_id'):
mv_line = aml_obj.browse(cr, uid, mv_line_dict['counterpart_move_line_id'], context=context)
mv_line_dict['partner_id'] = mv_line.partner_id.id or st_line.partner_id.id
mv_line_dict['account_id'] = mv_line.account_id.id
if mv_line.currency_id:
ctx = context.copy()
ctx['date'] = mv_line.date
mv_line_dict['amount_currency'] = currency_obj.compute(cr, uid, company_currency.id, mv_line.currency_id.id, mv_line_dict['debit'] - mv_line_dict['credit'], context=ctx)
mv_line_dict['currency_id'] = mv_line.currency_id.id
if st_line_currency.id != company_currency.id:
ctx = context.copy()
ctx['date'] = st_line.date
mv_line_dict['amount_currency'] = mv_line_dict['debit'] - mv_line_dict['credit']
mv_line_dict['currency_id'] = st_line_currency.id
if st_line.currency_id and statement_currency.id == company_currency.id:
#statement is in company currency but the transaction is in foreign currency
                        debit_at_current_rate = currency_obj.round(cr, uid, company_currency, mv_line_dict['debit'] / st_line_currency_rate)
                        credit_at_current_rate = currency_obj.round(cr, uid, company_currency, mv_line_dict['credit'] / st_line_currency_rate)
elif st_line.currency_id:
#statement is in foreign currency and the transaction is in another one
debit_at_current_rate = currency_obj.compute(cr, uid, statement_currency.id, company_currency.id, mv_line_dict['debit'] / st_line_currency_rate, context=ctx)
credit_at_current_rate = currency_obj.compute(cr, uid, statement_currency.id, company_currency.id, mv_line_dict['credit'] / st_line_currency_rate, context=ctx)
else:
#statement is in foreign currency and no extra currency is given for the transaction
debit_at_current_rate = currency_obj.compute(cr, uid, st_line_currency.id, company_currency.id, mv_line_dict['debit'] / st_line_currency_rate, context=ctx)
credit_at_current_rate = currency_obj.compute(cr, uid, st_line_currency.id, company_currency.id, mv_line_dict['credit'] / st_line_currency_rate, context=ctx)
if mv_line_dict.get('counterpart_move_line_id'):
                        #post an account line that uses the same currency rate as the counterpart (to balance the account) and post the difference in another line
ctx['date'] = mv_line.date
debit_at_old_rate = currency_obj.compute(cr, uid, st_line_currency.id, company_currency.id, mv_line_dict['debit'], context=ctx)
credit_at_old_rate = currency_obj.compute(cr, uid, st_line_currency.id, company_currency.id, mv_line_dict['credit'], context=ctx)
mv_line_dict['credit'] = credit_at_old_rate
mv_line_dict['debit'] = debit_at_old_rate
if debit_at_old_rate - debit_at_current_rate:
currency_diff = debit_at_current_rate - debit_at_old_rate
to_create.append(self.get_currency_rate_line(cr, uid, st_line, -currency_diff, move_id, context=context))
if credit_at_old_rate - credit_at_current_rate:
currency_diff = credit_at_current_rate - credit_at_old_rate
to_create.append(self.get_currency_rate_line(cr, uid, st_line, currency_diff, move_id, context=context))
else:
mv_line_dict['debit'] = debit_at_current_rate
mv_line_dict['credit'] = credit_at_current_rate
elif statement_currency.id != company_currency.id:
#statement is in foreign currency but the transaction is in company currency
prorata_factor = (mv_line_dict['debit'] - mv_line_dict['credit']) / st_line.amount_currency
mv_line_dict['amount_currency'] = prorata_factor * st_line.amount
if mv_line_dict.get('counterpart_move_line_id'):
counterpart_line = aml_obj.browse(cr, uid, mv_line_dict['counterpart_move_line_id'], context=context)
if counterpart_line.currency_id and counterpart_line.currency_id.id == statement_currency.id:
                            #post an account line that uses the same currency rate as the counterpart (to balance the account) and post the difference in another line
ctx = context.copy()
ctx['date'] = mv_line.date
debit_at_old_rate = mv_line_dict['amount_currency'] > 0 and currency_obj.compute(cr, uid, statement_currency.id, company_currency.id, mv_line_dict['amount_currency'], context=ctx) or 0.0
credit_at_old_rate = mv_line_dict['amount_currency'] < 0 and currency_obj.compute(cr, uid, statement_currency.id, company_currency.id, -mv_line_dict['amount_currency'], context=ctx) or 0.0
if debit_at_old_rate - mv_line_dict['debit']:
currency_diff = mv_line_dict['debit'] - debit_at_old_rate
to_create.append(self.get_currency_rate_line(cr, uid, st_line, -currency_diff, move_id, context=context))
if credit_at_old_rate - mv_line_dict['credit']:
currency_diff = mv_line_dict['credit'] - credit_at_old_rate
to_create.append(self.get_currency_rate_line(cr, uid, st_line, currency_diff, move_id, context=context))
mv_line_dict['debit'] = debit_at_old_rate
mv_line_dict['credit'] = credit_at_old_rate
to_create.append(mv_line_dict)
# If the reconciliation is performed in another currency than the company currency, the amounts are converted to get the right debit/credit.
            # If there is more than 1 debit and 1 credit, this can induce a rounding error, which we put in the foreign exchange gain/loss account.
if st_line_currency.id != company_currency.id:
diff_amount = bank_st_move_vals['debit'] - bank_st_move_vals['credit'] \
+ sum(aml['debit'] for aml in to_create) - sum(aml['credit'] for aml in to_create)
if not company_currency.is_zero(diff_amount):
diff_aml = self.get_currency_rate_line(cr, uid, st_line, diff_amount, move_id, context=context)
diff_aml['name'] = _('Rounding error from currency conversion')
to_create.append(diff_aml)
# Create move lines
move_line_pairs_to_reconcile = []
for mv_line_dict in to_create:
counterpart_move_line_id = None # NB : this attribute is irrelevant for aml_obj.create() and needs to be removed from the dict
if mv_line_dict.get('counterpart_move_line_id'):
counterpart_move_line_id = mv_line_dict['counterpart_move_line_id']
del mv_line_dict['counterpart_move_line_id']
new_aml_id = aml_obj.create(cr, uid, mv_line_dict, context=context)
                if counterpart_move_line_id is not None:
move_line_pairs_to_reconcile.append([new_aml_id, counterpart_move_line_id])
# Reconcile
for pair in move_line_pairs_to_reconcile:
aml_obj.reconcile_partial(cr, uid, pair, context=context)
# FIXME : if it wasn't for the multicompany security settings in account_security.xml, the method would just
# return [('journal_entry_ids', '!=', True)]
# Unfortunately, that spawns a "no access rights" error ; it shouldn't.
def _needaction_domain_get(self, cr, uid, context=None):
user = self.pool.get("res.users").browse(cr, uid, uid)
return ['|', ('company_id', '=', False), ('company_id', 'child_of', [user.company_id.id]), ('journal_entry_ids', '=', False), ('account_id', '=', False)]
_order = "statement_id desc, sequence"
_name = "account.bank.statement.line"
_description = "Bank Statement Line"
_inherit = ['ir.needaction_mixin']
_columns = {
'name': fields.char('Communication', required=True),
'date': fields.date('Date', required=True),
'amount': fields.float('Amount', digits_compute=dp.get_precision('Account')),
'partner_id': fields.many2one('res.partner', 'Partner'),
'bank_account_id': fields.many2one('res.partner.bank','Bank Account'),
'account_id': fields.many2one('account.account', 'Account', help="This technical field can be used at the statement line creation/import time in order to avoid the reconciliation process on it later on. The statement line will simply create a counterpart on this account"),
'statement_id': fields.many2one('account.bank.statement', 'Statement', select=True, required=True, ondelete='restrict'),
'journal_id': fields.related('statement_id', 'journal_id', type='many2one', relation='account.journal', string='Journal', store=True, readonly=True),
'partner_name': fields.char('Partner Name', help="This field is used to record the third party name when importing bank statement in electronic format, when the partner doesn't exist yet in the database (or cannot be found)."),
'ref': fields.char('Reference'),
'note': fields.text('Notes'),
'sequence': fields.integer('Sequence', select=True, help="Gives the sequence order when displaying a list of bank statement lines."),
'company_id': fields.related('statement_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True),
'journal_entry_ids': fields.one2many('account.move', 'statement_line_id', 'Journal Entries', copy=False, readonly=True),
'amount_currency': fields.float('Amount Currency', help="The amount expressed in an optional other currency if it is a multi-currency entry.", digits_compute=dp.get_precision('Account')),
'currency_id': fields.many2one('res.currency', 'Currency', help="The optional other currency if it is a multi-currency entry."),
}
_defaults = {
'date': lambda self,cr,uid,context={}: context.get('date', fields.date.context_today(self,cr,uid,context=context)),
}
class account_statement_operation_template(osv.osv):
_name = "account.statement.operation.template"
_description = "Preset for the lines that can be created in a bank statement reconciliation"
_columns = {
'name': fields.char('Button Label', required=True),
'account_id': fields.many2one('account.account', 'Account', ondelete='cascade', domain=[('type', 'not in', ('view', 'closed', 'consolidation'))]),
'label': fields.char('Journal Item Label'),
'amount_type': fields.selection([('fixed', 'Fixed'),('percentage_of_total','Percentage of total amount'),('percentage_of_balance', 'Percentage of open balance')], 'Amount type', required=True),
'amount': fields.float('Amount', digits_compute=dp.get_precision('Account'), required=True, help="The amount will count as a debit if it is negative, as a credit if it is positive (except if amount type is 'Percentage of open balance')."),
'tax_id': fields.many2one('account.tax', 'Tax', ondelete='restrict', domain=[('type_tax_use','in',('purchase','all')), ('parent_id','=',False)]),
'analytic_account_id': fields.many2one('account.analytic.account', 'Analytic Account', ondelete='set null', domain=[('type','!=','view'), ('state','not in',('close','cancelled'))]),
'company_id': fields.many2one('res.company', 'Company', required=True),
}
_defaults = {
'amount_type': 'percentage_of_balance',
'amount': 100.0,
'company_id': lambda self, cr, uid, c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.id,
}
|
Grirrane/odoo
|
addons/account/account_bank_statement.py
|
Python
|
agpl-3.0
| 57,413
|
from . import (
code_blocks, headings, plain_summary, featured_image, media, tables,
authors, outdated_article,
)
PLUGINS = [
code_blocks, headings, plain_summary, featured_image, media, tables,
authors, outdated_article
]
|
honzajavorek/danube-delta
|
danube_delta/plugins/__init__.py
|
Python
|
mit
| 241
|
# Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import os
import re
from glanceclient import exc as glance_exc
from novaclient import exceptions as nova_exc
import six
from rally.common.i18n import _
from rally.common import objects
from rally import consts
from rally import exceptions
from rally import osclients
from rally.plugins.openstack.context.nova import flavors as flavors_ctx
from rally.task import types
from rally.verification.tempest import tempest
# TODO(boris-42): make the validators usable as a functions as well.
# At the moment validators can only be used as decorators.
class ValidationResult(object):
def __init__(self, is_valid, msg=None):
self.is_valid = is_valid
self.msg = msg
def validator(fn):
"""Decorator that constructs a scenario validator from given function.
Decorated function should return ValidationResult on error.
:param fn: function that performs validation
:returns: rally scenario validator
"""
def wrap_given(*args, **kwargs):
"""Dynamic validation decorator for scenario.
:param args: the arguments of the decorator of the benchmark scenario
ex. @my_decorator("arg1"), then args = ("arg1",)
:param kwargs: the keyword arguments of the decorator of the scenario
ex. @my_decorator(kwarg1="kwarg1"), then kwargs = {"kwarg1": "kwarg1"}
"""
@functools.wraps(fn)
def wrap_validator(config, clients, deployment):
# NOTE(amaretskiy): validator is successful by default
return (fn(config, clients, deployment, *args, **kwargs) or
ValidationResult(True))
def wrap_scenario(scenario):
# TODO(boris-42): remove this in future.
wrap_validator.permission = getattr(fn, "permission",
consts.EndpointPermission.USER)
scenario._meta_setdefault("validators", [])
scenario._meta_get("validators").append(wrap_validator)
return scenario
return wrap_scenario
return wrap_given
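# Illustrative sketch (not part of the original module): a custom validator
# built with the decorator above; it would then be applied to a scenario as
# @_example_non_empty("some_param"). Names here are hypothetical.
@validator
def _example_non_empty(config, clients, deployment, param_name):
    """Fail validation when `param_name` is missing or empty in the config."""
    if not config.get("args", {}).get(param_name):
        return ValidationResult(False, "%s must be non-empty" % param_name)
    # Returning None is treated as success by wrap_validator above.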
@validator
def number(config, clients, deployment, param_name, minval=None, maxval=None,
nullable=False, integer_only=False):
"""Checks that parameter is number that pass specified condition.
Ensure a parameter is within the range [minval, maxval]. This is a
closed interval so the end points are included.
:param param_name: Name of parameter to validate
:param minval: Lower endpoint of valid interval
:param maxval: Upper endpoint of valid interval
:param nullable: Allow parameter not specified, or parameter=None
:param integer_only: Only accept integers
"""
val = config.get("args", {}).get(param_name)
num_func = float
if integer_only:
# NOTE(boris-42): Force check that passed value is not float, this is
# important cause int(float_numb) won't raise exception
        if isinstance(val, float):
return ValidationResult(False,
"%(name)s is %(val)s which hasn't int type"
% {"name": param_name, "val": val})
num_func = int
# None may be valid if the scenario sets a sensible default.
if nullable and val is None:
return ValidationResult(True)
try:
number = num_func(val)
if minval is not None and number < minval:
return ValidationResult(
False,
"%(name)s is %(val)s which is less than the minimum "
"(%(min)s)"
% {"name": param_name, "val": number, "min": minval})
if maxval is not None and number > maxval:
return ValidationResult(
False,
"%(name)s is %(val)s which is greater than the maximum "
"(%(max)s)"
% {"name": param_name, "val": number, "max": maxval})
return ValidationResult(True)
except (ValueError, TypeError):
return ValidationResult(
False,
"%(name)s is %(val)s which is not a valid %(type)s"
% {"name": param_name, "val": val, "type": num_func.__name__})
def _file_access_ok(filename, mode, param_name, required=True):
if not filename:
return ValidationResult(not required,
"Parameter %s required" % param_name)
if not os.access(os.path.expanduser(filename), mode):
return ValidationResult(
False, "Could not open %(filename)s with mode %(mode)s "
"for parameter %(param_name)s"
% {"filename": filename, "mode": mode, "param_name": param_name})
return ValidationResult(True)
@validator
def file_exists(config, clients, deployment, param_name, mode=os.R_OK,
required=True):
"""Validator checks parameter is proper path to file with proper mode.
Ensure a file exists and can be accessed with the specified mode.
Note that path to file will be expanded before access checking.
:param param_name: Name of parameter to validate
:param mode: Access mode to test for. This should be one of:
* os.F_OK (file exists)
* os.R_OK (file is readable)
* os.W_OK (file is writable)
* os.X_OK (file is executable)
If multiple modes are required they can be added, eg:
mode=os.R_OK+os.W_OK
:param required: Boolean indicating whether this argument is required.
"""
return _file_access_ok(config.get("args", {}).get(param_name), mode,
param_name, required)
def check_command_dict(command):
"""Check command-specifying dict `command', raise ValueError on error."""
# NOTE(pboldin): Here we check for the values not for presence of the keys
# due to template-driven configuration generation that can leave keys
# defined but values empty.
if command.get("interpreter"):
script_file = command.get("script_file")
if script_file:
command["script_file"] = os.path.expanduser(script_file)
if "script_inline" in command:
raise ValueError(
"Exactly one of script_inline or script_file with "
"interpreter is expected: %r" % command)
# User tries to upload a shell? Make sure it is same as interpreter
interpreter = command.get("interpreter")
interpreter = (interpreter[-1]
if isinstance(interpreter, (tuple, list))
else interpreter)
if (command.get("local_path") and
command.get("remote_path") != interpreter):
raise ValueError(
"When uploading an interpreter its path should be as well"
" specified as the `remote_path' string: %r" % command)
elif not command.get("remote_path"):
# No interpreter and no remote command to execute is given
raise ValueError(
"Supplied dict specifies no command to execute,"
" either interpreter or remote_path is required: %r" % command)
@validator
def valid_command(config, clients, deployment, param_name, required=True):
"""Checks that parameter is a proper command-specifying dictionary.
Ensure that the command dictionary is a proper command-specifying
dictionary described in `vmtasks.VMTasks.boot_runcommand_delete' docstring.
:param param_name: Name of parameter to validate
:param required: Boolean indicating that the command dictionary is required
"""
# TODO(pboldin): Make that a `jsonschema' check once generic validator
# is available.
command = config.get("args", {}).get(param_name)
if command is None:
return ValidationResult(not required,
"Command dicitionary is required")
try:
check_command_dict(command)
except ValueError as e:
return ValidationResult(False, str(e))
for key in "script_file", "local_path":
if command.get(key):
return _file_access_ok(
filename=command[key],
mode=os.R_OK,
param_name=param_name + "." + key,
required=True)
return ValidationResult(True)
def _get_validated_image(config, clients, param_name):
image_context = config.get("context", {}).get("images", {})
image_args = config.get("args", {}).get(param_name)
image_ctx_name = image_context.get("image_name")
if not image_args:
msg = _("Parameter %s is not specified.") % param_name
return (ValidationResult(False, msg), None)
if "image_name" in image_context:
# NOTE(rvasilets) check string is "exactly equal to" a regex
# or image name from context equal to image name from args
if "regex" in image_args:
match = re.match(image_args.get("regex"), image_ctx_name)
if image_ctx_name == image_args.get("name") or (
"regex" in image_args and match):
image = {
"size": image_context.get("min_disk", 0),
"min_ram": image_context.get("min_ram", 0),
"min_disk": image_context.get("min_disk", 0)
}
return (ValidationResult(True), image)
try:
image_id = types.ImageResourceType.transform(
clients=clients, resource_config=image_args)
image = clients.glance().images.get(image=image_id).to_dict()
return (ValidationResult(True), image)
except (glance_exc.HTTPNotFound, exceptions.InvalidScenarioArgument):
message = _("Image '%s' not found") % image_args
return (ValidationResult(False, message), None)
def _get_flavor_from_context(config, flavor_value):
if "flavors" not in config.get("context", {}):
raise exceptions.InvalidScenarioArgument("No flavors context")
flavors = [flavors_ctx.FlavorConfig(**f)
for f in config["context"]["flavors"]]
resource = types.obj_from_name(resource_config=flavor_value,
resources=flavors, typename="flavor")
flavor = flavors_ctx.FlavorConfig(**resource)
flavor.id = "<context flavor: %s>" % flavor.name
return (ValidationResult(True), flavor)
def _get_validated_flavor(config, clients, param_name):
flavor_value = config.get("args", {}).get(param_name)
if not flavor_value:
msg = "Parameter %s is not specified." % param_name
return (ValidationResult(False, msg), None)
try:
flavor_id = types.FlavorResourceType.transform(
clients=clients, resource_config=flavor_value)
flavor = clients.nova().flavors.get(flavor=flavor_id)
return (ValidationResult(True), flavor)
except (nova_exc.NotFound, exceptions.InvalidScenarioArgument):
try:
return _get_flavor_from_context(config, flavor_value)
except exceptions.InvalidScenarioArgument:
pass
message = _("Flavor '%s' not found") % flavor_value
return (ValidationResult(False, message), None)
@validator
def validate_share_proto(config, clients, deployment):
"""Validates value of share protocol for creation of Manila share."""
allowed = ("NFS", "CIFS", "GLUSTERFS", "HDFS", )
share_proto = config.get("args", {}).get("share_proto")
if six.text_type(share_proto).upper() not in allowed:
message = _("Share protocol '%(sp)s' is invalid, allowed values are "
"%(allowed)s.") % {"sp": share_proto,
"allowed": "', '".join(allowed)}
return ValidationResult(False, message)
@validator
def image_exists(config, clients, deployment, param_name, nullable=False):
"""Returns validator for image_id
:param param_name: defines which variable should be used
to get image id value.
    :param nullable: defines whether the image id param is optional
"""
image_value = config.get("args", {}).get(param_name)
if not image_value and nullable:
return ValidationResult(True)
return _get_validated_image(config, clients, param_name)[0]
@validator
def flavor_exists(config, clients, deployment, param_name):
"""Returns validator for flavor
:param param_name: defines which variable should be used
to get flavor id value.
"""
return _get_validated_flavor(config, clients, param_name)[0]
@validator
def image_valid_on_flavor(config, clients, deployment, flavor_name,
image_name):
"""Returns validator for image could be used for current flavor
:param flavor_name: defines which variable should be used
to get flavor id value.
:param image_name: defines which variable should be used
to get image id value.
"""
valid_result, flavor = _get_validated_flavor(config, clients, flavor_name)
if not valid_result.is_valid:
return valid_result
valid_result, image = _get_validated_image(config, clients, image_name)
if not valid_result.is_valid:
return valid_result
if flavor.ram < (image["min_ram"] or 0):
message = _("The memory size for flavor '%s' is too small "
"for requested image '%s'") % (flavor.id, image["id"])
return ValidationResult(False, message)
if flavor.disk:
if (image["size"] or 0) > flavor.disk * (1024 ** 3):
message = _("The disk size for flavor '%s' is too small "
"for requested image '%s'") % (flavor.id, image["id"])
return ValidationResult(False, message)
if (image["min_disk"] or 0) > flavor.disk:
message = _("The disk size for flavor '%s' is too small "
"for requested image '%s'") % (flavor.id, image["id"])
return ValidationResult(False, message)
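# Worked example (hypothetical sizes) for the disk checks above: a flavor with
# a 20 GB disk accepts images whose size is at most 20 * 1024 ** 3 bytes and
# whose min_disk is at most 20; a 25 GB image fails both checks.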
@validator
def network_exists(config, clients, deployment, network_name):
"""Validator checks that network with network_name exist."""
network = config.get("args", {}).get(network_name, "private")
networks = [net.label for net in
clients.nova().networks.list()]
if network not in networks:
message = _("Network with name %(network)s not found. "
"Available networks: %(networks)s") % {
"network": network,
"networks": networks
}
return ValidationResult(False, message)
@validator
def external_network_exists(config, clients, deployment, network_name):
"""Validator checks that external network with given name exists."""
ext_network = config.get("args", {}).get(network_name)
if not ext_network:
return ValidationResult(True)
networks = [net.name for net in clients.nova().floating_ip_pools.list()]
if networks and isinstance(networks[0], dict):
networks = [n["name"] for n in networks]
if ext_network not in networks:
message = _("External (floating) network with name %(network)s "
"not found. "
"Available networks: %(networks)s") % {
"network": ext_network,
"networks": networks}
return ValidationResult(False, message)
@validator
def tempest_tests_exists(config, clients, deployment):
"""Validator checks that specified test exists."""
args = config.get("args", {})
if "test_name" in args:
tests = [args["test_name"]]
else:
tests = args.get("test_names", [])
if not tests:
return ValidationResult(False,
_("Parameter 'test_name' or 'test_names' "
"should be specified."))
verifier = tempest.Tempest(
deployment["uuid"],
source=config.get("context", {}).get("tempest", {}).get("source"))
if not verifier.is_installed():
try:
verifier.install()
except tempest.TempestSetupFailure as e:
return ValidationResult(False, e)
if not verifier.is_configured():
verifier.generate_config_file()
allowed_tests = verifier.discover_tests()
for i, test in enumerate(tests):
if not test.startswith("tempest.api."):
tests[i] = "tempest.api." + test
wrong_tests = set(tests) - allowed_tests
if wrong_tests:
message = (_("One or more tests not found: '%s'") %
"', '".join(sorted(wrong_tests)))
return ValidationResult(False, message)
@validator
def tempest_set_exists(config, clients, deployment):
"""Validator that check that tempest set_name is valid."""
set_name = config.get("args", {}).get("set_name")
if not set_name:
return ValidationResult(False, "`set_name` is not specified.")
if set_name not in (list(consts.TempestTestsSets) +
list(consts.TempestTestsAPI)):
message = _("There is no tempest set with name '%s'.") % set_name
return ValidationResult(False, message)
@validator
def required_parameters(config, clients, deployment, *required_params):
"""Validator for checking required parameters are specified.
:param *required_params: list of required parameters
"""
missing = set(required_params) - set(config.get("args", {}))
if missing:
message = _("%s parameters are not defined in "
"the benchmark config file") % ", ".join(missing)
return ValidationResult(False, message)
@validator
def required_services(config, clients, deployment, *required_services):
"""Validator checks if specified OpenStack services are available.
:param *required_services: list of services names
"""
available_services = list(clients.services().values())
if consts.Service.NOVA_NET in required_services:
nova = osclients.Clients(
objects.Credential(**deployment["admin"])).nova()
for service in nova.services.list():
if (service.binary == consts.Service.NOVA_NET and
service.status == "enabled"):
available_services.append(consts.Service.NOVA_NET)
for service in required_services:
# NOTE(andreykurilin): validator should ignore services configured via
# context(a proper validation should be in context)
service_config = config.get("context", {}).get(
"api_versions", {}).get(service, {})
if (service not in available_services and
not ("service_type" in service_config or
"service_name" in service_config)):
return ValidationResult(
False, _("'{0}' service is not available. Hint: If '{0}' "
"service has non-default service_type, try to setup "
"it via 'api_versions' context.").format(service))
@validator
def required_neutron_extensions(config, clients, deployment,
*required_extensions):
"""Validator checks if the specified Neutron extension is available
:param required_extensions: list of Neutron extensions
"""
extensions = clients.neutron().list_extensions().get("extensions", [])
aliases = map(lambda x: x["alias"], extensions)
for extension in required_extensions:
if extension not in aliases:
msg = (_("Neutron extension %s is not configured") % extension)
return ValidationResult(False, msg)
@validator
def required_cinder_services(config, clients, deployment, service_name):
"""Validator checks that specified Cinder service is available.
    It uses the Cinder client with admin permissions to issue a
    'cinder service-list' call.
:param service_name: Cinder service name
"""
admin_client = osclients.Clients(
objects.Credential(**deployment["admin"])).cinder()
for service in admin_client.services.list():
if (service.binary == six.text_type(service_name) and
service.state == six.text_type("up")):
return ValidationResult(True)
msg = _("%s service is not available") % service_name
return ValidationResult(False, msg)
@validator
def required_clients(config, clients, deployment, *components, **kwargs):
"""Validator checks if specified OpenStack clients are available.
:param *components: list of client components names
:param **kwargs: optional parameters:
admin - bool, whether to use admin clients
"""
if kwargs.get("admin", False):
clients = osclients.Clients(objects.Credential(**deployment["admin"]))
for client_component in components:
try:
getattr(clients, client_component)()
except ImportError:
return ValidationResult(
False,
_("Client for %s is not installed. To install it run "
"`pip install -r"
" optional-requirements.txt`") % client_component)
@validator
def required_contexts(config, clients, deployment, *context_names):
"""Validator checks if required benchmark contexts are specified.
:param *context_names: list of context names that should be specified
"""
missing_contexts = set(context_names) - set(config.get("context", {}))
if missing_contexts:
message = (_("The following contexts are required but missing from "
"the benchmark configuration file: %s") %
", ".join(missing_contexts))
return ValidationResult(False, message)
@validator
def required_openstack(config, clients, deployment, admin=False, users=False):
"""Validator that requires OpenStack admin or (and) users.
    This allows us to create 4 kinds of benchmarks:
1) not OpenStack related (validator is not specified)
2) requires OpenStack admin
3) requires OpenStack admin + users
4) requires OpenStack users
:param admin: requires OpenStack admin
:param users: requires OpenStack users
"""
if not (admin or users):
return ValidationResult(
False, _("You should specify admin=True or users=True or both."))
if deployment["admin"] and deployment["users"]:
return ValidationResult(True)
if deployment["admin"]:
if users and not config.get("context", {}).get("users"):
return ValidationResult(False,
_("You should specify 'users' context"))
return ValidationResult(True)
if deployment["users"] and admin:
return ValidationResult(False, _("Admin credentials required"))
@validator
def volume_type_exists(config, clients, deployment, param_name):
"""Returns validator for volume types.
    :param param_name: defines the variable used as a flag to determine
        whether volume types should be checked for existence.
"""
val = config.get("args", {}).get(param_name)
if val:
volume_types_list = clients.cinder().volume_types.list()
if len(volume_types_list) < 1:
message = (_("Must have at least one volume type created "
"when specifying use of volume types."))
return ValidationResult(False, message)
@validator
def restricted_parameters(config, clients, deployment, param_names,
subdict=None):
"""Validates that parameters is not set.
:param param_names: parameter or parameters list to be validated.
:param subdict: sub-dict of "config" to search for param_names. if
not defined - will search in "config"
"""
if not isinstance(param_names, (list, tuple)):
param_names = [param_names]
restricted_params = []
for param_name in param_names:
args = config.get("args", {})
a_dict, a_key = (args, subdict) if subdict else (config, "args")
if param_name in a_dict.get(a_key, {}):
restricted_params.append(param_name)
if restricted_params:
msg = (_("You can't specify parameters '%(params)s' in '%(a_dict)s'")
% {"params": ", ".join(restricted_params),
"a_dict": subdict if subdict else "args"})
return ValidationResult(False, msg)
|
group-policy/rally
|
rally/task/validation.py
|
Python
|
apache-2.0
| 24,903
|
from .base import SQLAdapter
from . import adapters, with_connection_or_raise
@adapters.register_for("informix")
class Informix(SQLAdapter):
dbengine = "informix"
drivers = ("informixdb",)
def _initialize_(self):
super(Informix, self)._initialize_()
ruri = self.uri.split("://", 1)[1]
m = self.REGEX_URI.match(ruri)
if not m:
raise SyntaxError("Invalid URI string in DAL")
user = self.credential_decoder(m.group("user"))
if not user:
raise SyntaxError("User required")
password = self.credential_decoder(m.group("password"))
if not password:
password = ""
host = m.group("host")
if not host:
raise SyntaxError("Host name required")
db = m.group("db")
if not db:
raise SyntaxError("Database name required")
self.dsn = "%s@%s" % (db, host)
self.driver_args.update(user=user, password=password)
self.get_connection()
def connector(self):
return self.driver.connect(self.dsn, **self.driver_args)
def _after_first_connection(self):
self.dbms_version = int(self.connection.dbms_version.split(".")[0])
@with_connection_or_raise
def execute(self, *args, **kwargs):
command = self.filter_sql_command(args[0])
if command[-1:] == ";":
command = command[:-1]
handlers = self._build_handlers_for_execution()
for handler in handlers:
handler.before_execute(command)
rv = self.cursor.execute(command, *args[1:], **kwargs)
for handler in handlers:
handler.after_execute(command)
return rv
def test_connection(self):
self.execute("SELECT COUNT(*) FROM systables;")
def lastrowid(self, table):
return self.cursor.sqlerrd[1]
@adapters.register_for("informix-se")
class InformixSE(Informix):
def rowslice(self, rows, minimum=0, maximum=None):
if maximum is None:
return rows[minimum:]
return rows[minimum:maximum]
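# Illustrative only (hypothetical host, db and credentials): the URI shape
# parsed by Informix._initialize_ above, as it would be passed to pydal's DAL:
#   DAL("informix://user:secret@ifxhost/mydb")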
|
web2py/pydal
|
pydal/adapters/informix.py
|
Python
|
bsd-3-clause
| 2,076
|
"""
MIT License
Copyright (c) 2017 cgalleguillosm, AlessioNetti
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import logging
from sys import maxsize
from random import seed
from abc import abstractmethod, ABC
from sortedcontainers.sortedlist import SortedListWithKey
from enum import Enum
from copy import deepcopy
from accasim.base.resource_manager_class import ResourceManager
from accasim.base.allocator_class import AllocatorBase
class DispatcherError(Exception):
pass
class JobVerification(Enum):
REJECT = -1 # All jobs are rejected
NO_CHECK = 0 # No verification
CHECK_TOTAL = 1 # Total requested resources are verified
CHECK_REQUEST = 2 # Each node x resources are verified
class SchedulerBase(ABC):
"""
    This class allows dispatching methods to be implemented by combining an implementation of this class with an allocator (:class:`accasim.base.allocator_class.AllocatorBase`).
    An implementation of this class can also serve as an entire dispatching method if no allocator is used (:class:`.allocator` = None), but then the resource manager must
    be set on the allocator using :func:`accasim.base.allocator_class.AllocatorBase.set_resource_manager`.
"""
MAXSIZE = maxsize
ALLOW_MAPPING_SAME_NODE = True
def __init__(self, _seed, allocator=None, job_check=JobVerification.CHECK_REQUEST, **kwargs):
"""
Construct a scheduler
        :param _seed: Seed for the random state
        :param resource_manager: A Resource Manager object for dealing with system resources.
        :param allocator: Allocator object used by the scheduler to allocate jobs after schedule generation. If no allocator is defined, the scheduler class must generate the entire dispatching plan.
        :param job_check: A job may be rejected if it doesn't comply with:
            - JobVerification.REJECT: Any job is rejected
            - JobVerification.NO_CHECK: All jobs are accepted
            - JobVerification.CHECK_TOTAL: the job is rejected if it requires more resources than are available in the whole system.
            - JobVerification.CHECK_REQUEST: the job is rejected if an individual per-node request asks for more resources than any single node provides.
        :param kwargs:
            - skip_jobs_on_allocation: If the allocator is predefined and this parameter is true, the allocator will try to allocate as many jobs as possible.
                Otherwise, the allocation stops after the first failure.
"""
seed(_seed)
self._counter = 0
self.allocator = None
self._logger = logging.getLogger('accasim')
self._system_capacity = None
self._nodes_capacity = None
self.resource_manager = None
if allocator:
assert isinstance(allocator, AllocatorBase), 'Allocator not valid for scheduler'
self.allocator = allocator
# self.set_resource_manager(resource_manager)
assert(isinstance(job_check, JobVerification)), 'job_check invalid type. {}'.format(job_check.__class__)
if job_check == JobVerification.REJECT:
print('All jobs will be rejected, and for performance purposes the rejection messages will be omitted.')
self._job_check = job_check
# Check resources
        self._min_required_availability = kwargs.pop('min_resources', None)  # e.g. ['core', 'mem']
# Skip jobs during allocation
self.skip_jobs_on_allocation = kwargs.pop('skip_jobs_on_allocation', False)
@property
def name(self):
"""
        Name of the scheduling method
"""
raise NotImplementedError
@abstractmethod
def get_id(self):
"""
Must return the full ID of the scheduler, including policy and allocator.
:return: the scheduler's id.
"""
raise NotImplementedError
@abstractmethod
def scheduling_method(self, cur_time, es_dict, es):
"""
This function must map the queued events to available nodes at the current time.
:param cur_time: current time
:param es_dict: dictionary with full data of the job events
:param es: events to be scheduled
        :return: a tuple of (time to schedule, event id, list of assigned nodes), and an array of rejected job ids
"""
        raise NotImplementedError('This function must be implemented!')
def set_resource_manager(self, resource_manager):
"""
Set a resource manager.
:param resource_manager: An instantiation of a resource_manager class or None
"""
if resource_manager:
if self.allocator:
self.allocator.set_resource_manager(resource_manager)
assert isinstance(resource_manager, ResourceManager), 'Resource Manager not valid for scheduler'
self.resource_manager = resource_manager
else:
self.resource_manager = None
def schedule(self, cur_time, es_dict, es):
"""
Method for schedule. It calls the specific scheduling method.
:param cur_time: current time
:param es_dict: dictionary with full data of the events
:param es: events to be scheduled
:return: a tuple of (time to schedule, event id, list of assigned nodes), array of rejected job ids.
"""
        assert(self.resource_manager is not None), 'The resource manager is not defined. It must be defined prior to running the simulation.'
self._counter += 1
self._logger.debug("{} Dispatching: #{} decision".format(cur_time, self._counter))
self._logger.debug('{} Dispatching: {} queued jobs'.format(cur_time, len(es)))
self._logger.debug('{} Dispatching: {}'.format(cur_time, self.resource_manager.current_usage))
rejected = []
        # A job needs at least 1 core and 1 kb/mb/gb of mem to run
if self._min_required_availability and any([self.resource_manager.resources.full[res] for res in self._min_required_availability]):
self._logger.debug("There is no availability of one of the min required resource to run a job. The dispatching process will be delayed until there is enough resources.")
return [(None, e, []) for e in es], rejected
accepted = []
# Verify jobs with the defined Job Policy
for e in es:
job = es_dict[e]
if not job.get_checked() and not self._check_job_request(job):
if self._job_check != JobVerification.REJECT:
self._logger.warning('{} has been rejected by the dispatcher. ({})'.format(e, self._job_check))
rejected.append(e)
continue
accepted.append(job)
to_allocate = []
# On accepted jobs by policy, try to schedule with the scheduling policy
if accepted:
to_allocate, to_reject = self.scheduling_method(cur_time, accepted, es_dict)
rejected += to_reject
for e in to_reject:
self._logger.warning('{} has been rejected by the dispatcher. (Scheduling policy)'.format(e))
# If there are scheduled jobs and an allocator defined, try to allocate the scheduled jobs.
if to_allocate and self.allocator:
dispatching_plan = self.allocator.allocate(to_allocate, cur_time, skip=self.skip_jobs_on_allocation)
else:
dispatching_plan = to_allocate
return dispatching_plan, rejected
def _check_job_request(self, _job):
"""
Simple method that checks if the loaded _job violates the system's resource constraints.
:param _job: Job object
:return: True if the _job is valid, false otherwise
"""
_job.set_checked(True)
if self._job_check == JobVerification.REJECT:
return False
elif self._job_check == JobVerification.NO_CHECK:
return True
elif self._job_check == JobVerification.CHECK_TOTAL:
            # We verify that the _job does not violate the system's resource constraints by comparing the total request against the total system capacity
if not self._system_capacity:
self._system_capacity = self.resource_manager.system_capacity('total')
return not any([_job.requested_resources[res] * _job.requested_nodes > self._system_capacity[res] for res in _job.requested_resources.keys()])
elif self._job_check == JobVerification.CHECK_REQUEST:
if not self._nodes_capacity:
self._nodes_capacity = self.resource_manager.system_capacity('nodes')
# We verify the _job request can be fitted in the system
_requested_resources = _job.requested_resources
_requested_nodes = _job.requested_nodes
_fits = 0
_diff_node = 0
for _node, _attrs in self._nodes_capacity.items():
                # How many times the request fits on the node
_nfits = min([_attrs[_attr] // req for _attr, req in _requested_resources.items() if req > 0 ])
# Update current number of times the current job fits in the nodes
if _nfits > 0:
_fits += _nfits
_diff_node += 1
if self.ALLOW_MAPPING_SAME_NODE:
# Since _fits >> _diff_node this logical comparison is omitted.
if _fits >= _requested_nodes:
return True
else:
if _diff_node >= _requested_nodes:
return True
return False
raise DispatcherError('Invalid option.')
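    # Worked example (hypothetical numbers) for CHECK_REQUEST above: with nodes
    # offering 16 cores each and a job requesting 4 cores on 3 nodes, each node
    # fits the per-node request 16 // 4 = 4 times; with ALLOW_MAPPING_SAME_NODE
    # a single node already yields _fits = 4 >= 3, otherwise 3 distinct fitting
    # nodes are required.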
def __str__(self):
return self.get_id()
class SimpleHeuristic(SchedulerBase):
"""
    Simple scheduler that sorts the events depending on the chosen policy.
    If a single job allocation fails, all subsequent jobs fail too.
    Sorting is configured via the name and sorting function parameters.
"""
def __init__(self, seed, allocator, name, sorting_parameters, **kwargs):
SchedulerBase.__init__(self, seed, allocator, **kwargs)
self.name = name
self.sorting_parameters = sorting_parameters
def get_id(self):
"""
Returns the full ID of the scheduler, including policy and allocator.
:return: the scheduler's id.
"""
return '-'.join([self.__class__.__name__, self.name, self.allocator.get_id()])
def scheduling_method(self, cur_time, jobs, es_dict):
"""
This function must map the queued events to available nodes at the current time.
:param cur_time: current time
:param es_dict: dictionary with full data of the events
:param es: events to be scheduled
:return: a tuple of (time to schedule, event id, list of assigned nodes), an array jobs id of rejected jobs
"""
to_reject = []
to_schedule = SortedListWithKey(jobs, **self.sorting_parameters)
return to_schedule, to_reject
class FirstInFirstOut(SimpleHeuristic):
"""
**FirstInFirstOut scheduling policy.**
    First come, first served (FIFO ‒ first in, first out) is the simplest
    process scheduling algorithm.
"""
name = 'FIFO'
""" Name of the Scheduler policy. """
sorting_arguments = {
'key': lambda x: x.queued_time
}
""" This sorting function allows to sort the jobs in relation of the scheduling policy. """
def __init__(self, _allocator, _seed=0, **kwargs):
"""
FirstInFirstOut Constructor
"""
SimpleHeuristic.__init__(self, _seed, _allocator, self.name, self.sorting_arguments, **kwargs)
class LongestJobFirst(SimpleHeuristic):
"""
**LJF scheduling policy.**
Longest Job First (LJF) sorts the jobs, where the longest jobs are preferred over the shortest ones.
"""
name = 'LJF'
""" Name of the Scheduler policy. """
sorting_arguments = {
'key': lambda x:-x.expected_duration
}
""" This sorting function allows to sort the jobs in relation of the scheduling policy. """
def __init__(self, _allocator, _resource_manager=None, _seed=0, **kwargs):
"""
LJF Constructor
"""
SimpleHeuristic.__init__(self, _seed, _allocator, self.name, self.sorting_arguments, **kwargs)
class ShortestJobFirst(SimpleHeuristic):
"""
**SJF scheduling policy.**
Shortest Job First (SJF) sorts the jobs, where the shortest jobs are preferred over the longest ones.
"""
name = 'SJF'
""" Name of the Scheduler policy. """
sorting_arguments = {
'key': lambda x: x.expected_duration
}
""" This sorting function allows to sort the jobs in relation of the scheduling policy. """
def __init__(self, _allocator, _resource_manager=None, _seed=0, **kwargs):
"""
SJF Constructor
"""
SimpleHeuristic.__init__(self, _seed, _allocator, self.name, self.sorting_arguments, **kwargs)
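# Illustrative usage sketch (the allocator class name below is hypothetical):
#   from accasim.base.allocator_class import FirstFit
#   dispatcher = ShortestJobFirst(FirstFit(), _seed=42)
#   dispatcher.set_resource_manager(resource_manager)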
class EASYBackfilling(SchedulerBase):
"""
EASY Backfilling scheduler.
Whenever a job cannot be allocated, a reservation is made for it. After this, the following jobs are used to
backfill the schedule, not allowing them to use the reserved nodes.
    This dispatching method includes its own calls to the allocator during the dispatching process,
    so it does not use the automatic allocator call after schedule generation.
"""
name = 'EBF'
""" Name of the Scheduler policy. """
def __init__(self, allocator, seed=0, **kwargs):
"""
Easy BackFilling Constructor
"""
SchedulerBase.__init__(self, seed, allocator=None, **kwargs)
self._blocked_job_id = None
self._reserved_slot = (None, [],)
self.nonauto_allocator = allocator
self.allocator_rm_set = False
# self.nonauto_allocator.set_resource_manager(resource_manager)
def get_id(self):
"""
Returns the full ID of the scheduler, including policy and allocator.
:return: the scheduler's id.
"""
return '-'.join([self.name, self.nonauto_allocator.name])
def scheduling_method(self, cur_time, queued_jobs, es_dict):
"""
This function must map the queued events to available nodes at the current time.
:param cur_time: current time
:param queued_jobs: Jobs to be dispatched
:param es_dict: dictionary with full data of the events
:return: a list of tuples (time to schedule, event id, list of assigned nodes), and a list of rejected job ids
"""
if not self.allocator_rm_set:
self.nonauto_allocator.set_resource_manager(self.resource_manager)
self.allocator_rm_set = True
avl_resources = self.resource_manager.current_availability
self.nonauto_allocator.set_resources(avl_resources)
to_dispatch = []
to_reject = []
_to_fill = []
_prev_blocked = None
_time_reached = False
if self._reserved_slot[0] and self._reserved_slot[0] <= cur_time:
_time_reached = True
# Tries to allocate the blocked job
self._logger.trace('There is a blocked job {} with {}'.format(self._blocked_job_id, self._reserved_slot))
# assert(self._blocked_job_id == queued_jobs[0].id), 'The first element is not the blocked one. ({} != {})'.format(self._blocked_job_id, queued_jobs[0].id)
blocked_job = queued_jobs[0]
queued_jobs = queued_jobs[1:]
allocation = self.nonauto_allocator.allocating_method(blocked_job, cur_time, skip=False)
if allocation[-1]:
self._logger.trace('{}: {} blocked job can be allocated. Unblocking'.format(cur_time, self._blocked_job_id))
self._blocked_job_id = None
self._reserved_slot = (None, [])
_prev_blocked = [allocation]
else:
# There are jobs still using the reserved nodes
self._logger.trace('{} job is still blocked. Reservation {}'.format(self._blocked_job_id, self._reserved_slot))
# Add the current allocation for the (un)blocked job.
to_dispatch += [allocation]
if self._blocked_job_id is None and queued_jobs:
# Tries to perform a FIFO allocation if there is no blocked job
# Returns the (partial) allocation and the idx for the blocked job, also sets the self._blocked_job_id var
_allocated_jobs, blocked_idx = self._try_fifo_allocation(queued_jobs, cur_time)
# There is a blocked job
            if blocked_idx is not None:
                # If there is no reservation yet, calculate one for the blocked job
if not self._reserved_slot[0]:
blocked_job = queued_jobs[blocked_idx]
self._logger.trace('Blocked {} Job: Calculate the reservation'.format(self._blocked_job_id))
# Current reservation (future time, reserved nodes)
self._reserved_slot = self._calculate_slot(cur_time, deepcopy(avl_resources), _allocated_jobs[:blocked_idx], _prev_blocked, blocked_job, es_dict)
self._logger.trace('Blocked {} Job: Nodes {} are reserved at {}'.format(self._blocked_job_id, self._reserved_slot[1], self._reserved_slot[0]))
# Include the blocked job
to_dispatch += _allocated_jobs[:blocked_idx + 1]
_to_fill = queued_jobs[blocked_idx + 1:]
else:
to_dispatch += _allocated_jobs
else:
if not _time_reached:
# The blocked job
to_dispatch += [(None, self._blocked_job_id, [])]
# All the remaining queued jobs
_to_fill = queued_jobs[1:]
else:
# The remaining queued jobs
_to_fill = queued_jobs
if _to_fill:
self._logger.trace('Blocked job {}. {} jobs candidates to fill the gap'.format(self._blocked_job_id, len(_to_fill)))
# Filling the gap between cur_time and res_time
(reserved_time, reserved_nodes) = self._reserved_slot
            filling_allocation = self.nonauto_allocator.allocating_method(
                _to_fill, cur_time,
                reserved_time=reserved_time,
                reserved_nodes=reserved_nodes,
                skip=True
            )
# Include the remaining jobs
to_dispatch += filling_allocation
return to_dispatch, to_reject
def _try_fifo_allocation(self, queued_jobs, cur_time):
"""
        Allocates as many jobs as possible in FIFO order. As soon as one allocation fails, all subsequent jobs are left unallocated.
        The return value contains the attempted allocations (with their assigned nodes) and the position of the blocked job.
        :param queued_jobs: List of job objects
        :param cur_time: current time
        :return: the job allocations, and the position of the blocked job in the list (None if no job is blocked)
"""
# Try to allocate jobs as in FIFO
_allocated_jobs = self.nonauto_allocator.allocating_method(queued_jobs, cur_time, skip=False)
# Check if there is a blocked job (a job without an allocation)
blocked_idx = None
for i, (_, job_id, allocated_nodes) in enumerate(_allocated_jobs):
if not allocated_nodes:
self._blocked_job_id = job_id
blocked_idx = i
break
return _allocated_jobs, blocked_idx
def _calculate_slot(self, cur_time, avl_resources, decided_allocations, prev_blocked, blocked_job, es_dict):
"""
Computes a reservation for the blocked job, by releasing incrementally the resources used by the running
events and recently allocated jobs. The earliest slot in which blocked_job fits is chosen.
        :param cur_time: Current time
        :param avl_resources: Currently available resources
        :param decided_allocations: Jobs allocated during the current iteration
        :param prev_blocked: Allocation of the previously blocked job, if it was unblocked during this iteration
        :param blocked_job: Job to be fitted into the time slot
        :param es_dict: Dictionary with the full data of the events
        :return: a tuple (slot time, reserved nodes)
"""
current_allocations = self.resource_manager.current_allocations
        # Create a list of the jobs sorted by earliest ending time first
        future_endings = SortedListWithKey(key=lambda x: x[1])
# Running jobs
for job_id, resources in current_allocations.items():
future_endings.add((job_id, es_dict[job_id].start_time + es_dict[job_id].expected_duration, resources))
# Previous blocked job has been scheduled
if prev_blocked:
decided_allocations += prev_blocked
        # Jobs allocated during the current iteration
for (_, job_id, nodes) in decided_allocations:
_dec_alloc = {}
for node in nodes:
                if node not in _dec_alloc:
                    _dec_alloc[node] = dict(es_dict[job_id].requested_resources)
else:
for res, v in es_dict[job_id].requested_resources.items():
_dec_alloc[node][res] += v
future_endings.add((job_id, cur_time + es_dict[job_id].expected_duration, _dec_alloc))
_required_alloc = blocked_job.requested_nodes
_requested_resources = blocked_job.requested_resources
_partial_alloc = {}
# Calculate the partial allocation on the current system state
for node, resources in avl_resources.items():
new_alloc = min([resources[req] // _requested_resources[req] for req in _requested_resources])
if new_alloc > 0:
_partial_alloc[node] = new_alloc
# Calculate the partial allocation on the next future endings
for (job_id, res_time, used_nodes) in future_endings:
for node, used_resources in used_nodes.items():
                if node not in avl_resources:
                    avl_resources[node] = {r: 0 for r in _requested_resources}
for r, v in used_resources.items():
avl_resources[node][r] += v
cur_alloc = _partial_alloc.get(node, 0)
new_alloc = min([avl_resources[node][req] // _requested_resources[req] for req in _requested_resources])
_diff = new_alloc - cur_alloc
if _diff > 0:
_partial_alloc[node] = _partial_alloc.get(node, 0) + _diff
# At this point the blocked job can be allocated
if sum(_partial_alloc.values()) >= _required_alloc:
ctimes = 0
nodes = []
for node, times in _partial_alloc.items():
ctimes += times
nodes.append(node)
if ctimes >= _required_alloc:
break
return (res_time, nodes,)
        raise DispatcherError('Unable to find a slot: no future job ending releases enough resources for the blocked job')
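# A minimal, self-contained sketch (plain dicts instead of accasim objects;
# all names are illustrative) of the incremental-release search performed by
# _calculate_slot above: resources freed at each future job ending are added
# back until the blocked job fits, and the earliest such ending time is the slot.
def _earliest_slot_sketch(avail, endings, needed_units):
    # avail: {node: free units}; endings: [(end time, {node: freed units})] sorted by time
    for end_time, freed in endings:
        for node, units in freed.items():
            avail[node] = avail.get(node, 0) + units
        if sum(avail.values()) >= needed_units:
            return end_time
    raise RuntimeError('No feasible slot: no ending releases enough resources')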
|
cgalleguillosm/accasim
|
accasim/base/scheduler_class.py
|
Python
|
mit
| 26,009
|
import threading
# Create a global ThreadLocal object:
local_school = threading.local()
def process_student():
    # Get the student bound to the current thread:
std = local_school.student
print('Hello, %s (in %s)' % (std, threading.current_thread().name))
def process_thread(name):
    # Bind the student to the current thread's ThreadLocal:
local_school.student = name
process_student()
t1 = threading.Thread(target=process_thread, args=('Alice',), name='Thread-A')
t2 = threading.Thread(target=process_thread, args=('Bob',), name='Thread-B')
t1.start()
t2.start()
t1.join()
t2.join()
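# Expected output (the two lines may appear in either order):
#   Hello, Alice (in Thread-A)
#   Hello, Bob (in Thread-B)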
|
IIIIIIIIll/sdy_notes_liaoxf
|
LiaoXueFeng/multitasking_multithreading/localThread.py
|
Python
|
gpl-3.0
| 593
|
#!/usr/bin/env python
# Copyright 2007 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test case that runs a checker on a file, matching errors against annotations.
Runs the given checker on the given file, accumulating all errors. The list
of errors is then matched against those annotated in the file. Based heavily
on devtools/javascript/gpylint/full_test.py.
"""
__author__ = ('robbyw@google.com (Robert Walker)',
'ajp@google.com (Andy Perelson)')
import re
import gflags as flags
import unittest as googletest
from closure_linter.common import erroraccumulator
class AnnotatedFileTestCase(googletest.TestCase):
"""Test case to run a linter against a single file."""
  # Matches an all-caps letters + underscores error identifier
_MESSAGE = {'msg': '[A-Z][A-Z_]+'}
# Matches a //, followed by an optional line number with a +/-, followed by a
# list of message IDs. Used to extract expected messages from testdata files.
# TODO(robbyw): Generalize to use different commenting patterns.
_EXPECTED_RE = re.compile(r'\s*//\s*(?:(?P<line>[+-]?[0-9]+):)?'
r'\s*(?P<msgs>%(msg)s(?:,\s*%(msg)s)*)' % _MESSAGE)
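  # For example, lines like the following in a testdata file would match
  # (the error identifiers are illustrative):
  #   var x = 1 // MISSING_SEMICOLON
  #   foo() // +1: LINE_TOO_LONG, EXTRA_SPACE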
def __init__(self, filename, lint_callable, converter):
"""Create a single file lint test case.
Args:
filename: Filename to test.
lint_callable: Callable that lints a file. This is usually runner.Run().
converter: Function taking an error string and returning an error code.
"""
googletest.TestCase.__init__(self, 'runTest')
self._filename = filename
self._messages = []
self._lint_callable = lint_callable
self._converter = converter
def setUp(self):
flags.FLAGS.dot_on_next_line = True
def tearDown(self):
flags.FLAGS.dot_on_next_line = False
def shortDescription(self):
"""Provides a description for the test."""
return 'Run linter on %s' % self._filename
def runTest(self):
"""Runs the test."""
try:
filename = self._filename
stream = open(filename)
except IOError as ex:
raise IOError('Could not find testdata resource for %s: %s' %
(self._filename, ex))
    try:
      expected = self._GetExpectedMessages(stream)
    finally:
      stream.close()
    got = self._ProcessFileAndGetMessages(filename)
self.assertEqual(expected, got)
def _GetExpectedMessages(self, stream):
"""Parse a file and get a sorted list of expected messages."""
messages = []
for i, line in enumerate(stream):
match = self._EXPECTED_RE.search(line)
if match:
line = match.group('line')
msg_ids = match.group('msgs')
if line is None:
line = i + 1
elif line.startswith('+') or line.startswith('-'):
line = i + 1 + int(line)
else:
line = int(line)
for msg_id in msg_ids.split(','):
# Ignore a spurious message from the license preamble.
if msg_id != 'WITHOUT':
messages.append((line, self._converter(msg_id.strip())))
stream.seek(0)
messages.sort()
return messages
def _ProcessFileAndGetMessages(self, filename):
"""Trap gjslint's output parse it to get messages added."""
error_accumulator = erroraccumulator.ErrorAccumulator()
self._lint_callable(filename, error_accumulator)
errors = error_accumulator.GetErrors()
# Convert to expected tuple format.
error_msgs = [(error.token.line_number, error.code) for error in errors]
error_msgs.sort()
return error_msgs
|
SummerLW/Perf-Insight-Report
|
third_party/closure_linter/closure_linter/common/filetestcase.py
|
Python
|
bsd-3-clause
| 4,040
|
# coding: utf-8
try:
    from urllib import quote, urlencode  # , unquote_plus
except ImportError:
    from urllib.parse import quote, urlencode  # , unquote_plus
from grab.tools.lxml_tools import get_node_text
import logging
from .encoding import smart_str
class CaptchaError(Exception):
    """
    Raised when yandex shows captcha.
    """
class ParsingError(Exception):
    """
    Raised when the format of the yandex page can not be recognized.
    """
def is_banned(grab):
if grab.xpath_exists('//input[@class="b-captcha__input"]'):
return True
if grab.xpath_text('//title') == '403':
return True
return False
def build_search_url(query, page=1, per_page=None, lang='en', filter=True,
region=213, **kwargs):
"""
Build yandex search url with specified query and pagination options.
    :param per_page: results per page; allowed values: 10, 20, 30, 50, 100
    :param region: yandex region id (213 is Moscow)
"""
query = smart_str(query)
url = 'http://yandex.ru/yandsearch?text=%s&lr=%s' % (
quote(query), region)
if kwargs:
url += '&' + urlencode(kwargs)
url += '&p=%d' % (page - 1)
return url
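# Example (illustrative query):
#   build_search_url(u'python grab', page=2)
#   # -> 'http://yandex.ru/yandsearch?text=python%20grab&lr=213&p=1'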
def is_last_page(grab):
"""
    Detect if the fetched page is the last page of search results.
"""
    try:
        grab.xpath_one('//a[contains(@class, "b-pager__next")]')
    except IndexError:
        logging.debug('No next-page link found, this is the last page')
        return True
    else:
        return False
def parse_search_results(grab, parse_index_size=False, strict_query=False):
"""
Parse yandex search results page content.
"""
if is_banned(grab):
raise CaptchaError('Captcha found')
elif grab.xpath_exists('//div[contains(@class, "b-error")]'):
err_msg = grab.xpath_text('//div[contains(@class, "b-error")]')
logging.debug('Found error message: %s' % err_msg)
return []
elif grab.xpath_exists('//ol[contains(@class, "b-serp-list")]'):
# TODO:
#if (strict_query and (
#grab.search(u'Нет результатов для') or grab.search(u'No results found for'))):
#pass
#logging.debug('Query modified')
        # TODO: parse_index_size
        # Yield found results
        results = []
page_num = int(grab.xpath_text('//b[contains(@class, "b-pager__current")]'))
for elem in grab.xpath_list('//li[contains(@class, "b-serp-item")]'):
try:
try:
title_elem = elem.xpath('.//h2/a')[0]
snippet = get_node_text(
elem.xpath('.//div[contains(@class, "b-serp-item__text")]')[0])
except IndexError:
# this is video item or something like that
pass
else:
item = {
'page': page_num,
}
# url
item['url'] = title_elem.get('href')
#if url.startswith('/url?'):
#url = url.split('?q=')[1].split('&')[0]
#url = unquote_plus(url)
item['position'] = int(elem.xpath(
'.//h2/b[contains(@class, "b-serp-item__number")]/text()')[0])
# title
item['title'] = get_node_text(title_elem)
item['snippet'] = snippet
results.append(item)
except Exception as ex:
logging.error('', exc_info=ex)
return results
else:
        logging.error('Could not identify yandex page format')
        raise ParsingError('Could not identify yandex page format')
|
boooka/GeoPowerOff
|
venv/lib/python2.7/site-packages/grab/tools/yandex.py
|
Python
|
apache-2.0
| 3,550
|
# Copyright (c) 2014 Alexander Bredo
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the
# following conditions are met:
#
# 1. Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# -*- coding: utf-8 -*-
from base.applog import *
import urllib3
import json
from base.text import clean
'''
TODO:
- BUG: Illegal unquoted character ((CTRL-CHAR, code 8)): has to be escaped using backslash to be included in string value
'''
class ElasticsearchClient():
def __init__(self, host='127.0.0.1', port=9200, index='default', doctype='doc', ttl='1w'):
self.http = urllib3.PoolManager()
self.index = index
self.doctype = doctype
self.host = host
self.port = port
self.ttl = ttl
self.setup()
def setup(self):
if not self.exists_index():
log.info("Elasticsearch-Index '%s' does not exist. Trying to create now." % self.index)
self.create_index_mapping()
else:
log.info("Elasticsearch-Index '%s' present." % self.index)
def saveOne(self, data, doctype):
nice_data = json.dumps(clean(data), indent=4, separators=(',', ': '))
r = self.http.urlopen('POST',
'http://%s:%i/%s/%s' % (self.host, self.port, self.index, doctype),
headers = {'Content-Type':'application/json'},
body = nice_data
)
if int(r.status/100) == 2:
log.debug("Element %s has been saved." % nice_data)
else:
log.error("Element could not be saved: %s. Error: %s" % (nice_data, r.data))
def saveMany(self, data, doctype):
log.debug("Trying to save %d items to Elasticsearch." % len(data))
		serialized_data = [self.__makeStringsFromDict(x) for x in data]
		# Build the bulk body with json.dumps: serializing via str() plus quote
		# replacement breaks on values that themselves contain quotes.
		head = json.dumps({"index": {"_index": self.index, "_type": doctype}})
		dataAsStr = '\n'.join(head + '\n' + json.dumps(x) for x in serialized_data) + '\n'
r = self.http.urlopen('POST',
'http://%s:%i/%s/%s/_bulk' % (self.host, self.port, self.index, doctype),
headers = {'Content-Type':'application/json'},
body = dataAsStr
)
if int(r.status/100) == 2:
log.debug("%s Elements has been saved." % len(data))
else:
log.error("Elements could not be saved: %s. Error: %s" % (dataAsStr, r.data))
def __makeStringsFromDict(self, dictionary):
try:
for key in dictionary.keys(): # Native Datatypes: No Objects!
if isinstance(dictionary[key], dict): # nested...
dictionary[key] = self.__makeStringsFromDict(dictionary[key])
elif isinstance(dictionary[key], str):
dictionary[key] = dictionary[key].__str__()
#elif isinstance(dictionary[key], int) and isinstance(dictionary[key], float):
# dictionary[key] = dictionary[key]
return dictionary
except Exception as e:
log.error(e)
def deleteIndex(self):
r = self.http.request('DELETE', 'http://%s:%i/%s/' % (self.host, self.port, self.index))
if int(r.status/100) == 2:
log.info("Elasticsearch-Index '%s' was removed." % self.index)
return True
else:
log.warning("Elasticsearch-Index '%s' does not exist." % self.index)
return False # print r.data
def exists_index(self):
r = self.http.request('GET', 'http://%s:%i/%s/_mapping' % (self.host, self.port, self.index))
if int(r.status/100) == 2:
return True
else:
return False # print r.data
def create_index_mapping(self):
# POST /index/
data = """{
"mappings" : {
"_default_" : {
"_ttl": {
"enabled": "true",
"default": "%s"
},
"properties" : {
"sourceIPv6Address": { "type": "string", "index": "not_analyzed" },
"destinationIPv6Address": { "type": "string", "index": "not_analyzed" },
"sourceHostname" : {"type" : "string", "index" : "not_analyzed"},
"destinationHostname" : {"type" : "string", "index" : "not_analyzed"},
"destinationTransportPortName" : {"type" : "string", "index" : "not_analyzed"},
"sourceTransportPortName" : {"type" : "string", "index" : "not_analyzed"},
"protocolIdentifierName" : {"type" : "string", "index" : "not_analyzed"},
"networkLocation" : {"type" : "string", "index" : "not_analyzed"},
"command" : {"type" : "string", "index" : "not_analyzed"},
"session" : {"type" : "string", "index" : "not_analyzed"}
}
}
}
}""" % self.ttl
r = self.http.urlopen('POST',
'http://%s:%i/%s/' % (self.host, self.port, self.index),
headers = {'Content-Type':'application/json'},
body = data
)
if int(r.status/100) == 2:
log.info("Elasticsearch-Index '%s' has been created." % self.index)
else:
log.error("Elasticsearch-Index '%s' has NOT been created. (%s)" % (self.index, r.data))
if __name__ == '__main__':
ec = ElasticsearchClient('lnx06-elasticsearch1', 9200, 'honeypot')
ec.deleteIndex()
#ec.saveOne({'ab':1, 'cd':'blub'}, 'intrusion')
#ec.saveMany([{'ab':1, 'cd':'blub'}, {'ddd':22, 'dfd':'fdgg'}], 'intrusion')
|
alexbredo/site-packages3
|
handler/elasticsearch.py
|
Python
|
bsd-2-clause
| 6,002
|
# Generated by Django 2.2.25 on 2021-12-30 13:39
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("letters", "0012_letter_created_by_staff"),
]
operations = [
migrations.AlterField(
model_name="letter",
name="created_by_is_staff",
field=models.BooleanField(verbose_name="Created by staff member"),
),
]
|
watchdogpolska/poradnia
|
poradnia/letters/migrations/0013_auto_20211230_1439.py
|
Python
|
mit
| 433
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-short-docstring-punctuation
"""Asserts and Boolean Checks.
See the @{$python/check_ops} guide.
@@assert_negative
@@assert_positive
@@assert_non_negative
@@assert_non_positive
@@assert_equal
@@assert_none_equal
@@assert_less
@@assert_less_equal
@@assert_greater
@@assert_greater_equal
@@assert_rank
@@assert_rank_at_least
@@assert_rank_in
@@assert_type
@@assert_integer
@@assert_proper_iterable
@@assert_same_float_dtype
@@assert_scalar
@@is_non_decreasing
@@is_numeric_tensor
@@is_strictly_increasing
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.util import compat
NUMERIC_TYPES = frozenset(
[dtypes.float32, dtypes.float64, dtypes.int8, dtypes.int16, dtypes.int32,
dtypes.int64, dtypes.uint8, dtypes.qint8, dtypes.qint32, dtypes.quint8,
dtypes.complex64])
__all__ = [
'assert_negative',
'assert_positive',
'assert_proper_iterable',
'assert_non_negative',
'assert_non_positive',
'assert_equal',
'assert_none_equal',
'assert_integer',
'assert_less',
'assert_less_equal',
'assert_greater',
'assert_greater_equal',
'assert_rank',
'assert_rank_at_least',
'assert_rank_in',
'assert_same_float_dtype',
'assert_scalar',
'assert_type',
'is_non_decreasing',
'is_numeric_tensor',
'is_strictly_increasing',
]
def _maybe_constant_value_string(t):
if not isinstance(t, ops.Tensor):
return str(t)
const_t = tensor_util.constant_value(t)
if const_t is not None:
return str(const_t)
return t
def _assert_static(condition, data):
"""Raises a static ValueError with as much information as possible."""
if not condition:
data_static = [_maybe_constant_value_string(x) for x in data]
raise ValueError('\n'.join(data_static))
def assert_proper_iterable(values):
"""Static assert that values is a "proper" iterable.
`Ops` that expect iterables of `Tensor` can call this to validate input.
  Useful since `Tensor`, `ndarray`, byte/text types are all iterables themselves.
Args:
values: Object to be checked.
Raises:
TypeError: If `values` is not iterable or is one of
`Tensor`, `SparseTensor`, `np.array`, `tf.compat.bytes_or_text_types`.
"""
unintentional_iterables = (
(ops.Tensor, sparse_tensor.SparseTensor, np.ndarray)
+ compat.bytes_or_text_types
)
if isinstance(values, unintentional_iterables):
raise TypeError(
'Expected argument "values" to be a "proper" iterable. Found: %s' %
type(values))
if not hasattr(values, '__iter__'):
raise TypeError(
'Expected argument "values" to be iterable. Found: %s' % type(values))
def assert_negative(x, data=None, summarize=None, message=None, name=None):
"""Assert the condition `x < 0` holds element-wise.
Example of adding a dependency to an operation:
```python
with tf.control_dependencies([tf.assert_negative(x)]):
output = tf.reduce_sum(x)
```
Negative means, for every element `x[i]` of `x`, we have `x[i] < 0`.
If `x` is empty this is trivially satisfied.
Args:
x: Numeric `Tensor`.
data: The tensors to print out if the condition is False. Defaults to
error message and first few entries of `x`.
summarize: Print this many entries of each tensor.
message: A string to prefix to the default message.
name: A name for this operation (optional). Defaults to "assert_negative".
Returns:
Op raising `InvalidArgumentError` unless `x` is all negative.
"""
message = message or ''
with ops.name_scope(name, 'assert_negative', [x, data]):
x = ops.convert_to_tensor(x, name='x')
if data is None:
data = [
message,
'Condition x < 0 did not hold element-wise:',
'x (%s) = ' % x.name, x]
zero = ops.convert_to_tensor(0, dtype=x.dtype)
return assert_less(x, zero, data=data, summarize=summarize)
def assert_positive(x, data=None, summarize=None, message=None, name=None):
"""Assert the condition `x > 0` holds element-wise.
Example of adding a dependency to an operation:
```python
with tf.control_dependencies([tf.assert_positive(x)]):
output = tf.reduce_sum(x)
```
Positive means, for every element `x[i]` of `x`, we have `x[i] > 0`.
If `x` is empty this is trivially satisfied.
Args:
x: Numeric `Tensor`.
data: The tensors to print out if the condition is False. Defaults to
error message and first few entries of `x`.
summarize: Print this many entries of each tensor.
message: A string to prefix to the default message.
name: A name for this operation (optional). Defaults to "assert_positive".
Returns:
Op raising `InvalidArgumentError` unless `x` is all positive.
"""
message = message or ''
with ops.name_scope(name, 'assert_positive', [x, data]):
x = ops.convert_to_tensor(x, name='x')
if data is None:
data = [
message, 'Condition x > 0 did not hold element-wise:',
'x (%s) = ' % x.name, x]
zero = ops.convert_to_tensor(0, dtype=x.dtype)
return assert_less(zero, x, data=data, summarize=summarize)
def assert_non_negative(x, data=None, summarize=None, message=None, name=None):
"""Assert the condition `x >= 0` holds element-wise.
Example of adding a dependency to an operation:
```python
with tf.control_dependencies([tf.assert_non_negative(x)]):
output = tf.reduce_sum(x)
```
Non-negative means, for every element `x[i]` of `x`, we have `x[i] >= 0`.
If `x` is empty this is trivially satisfied.
Args:
x: Numeric `Tensor`.
data: The tensors to print out if the condition is False. Defaults to
error message and first few entries of `x`.
summarize: Print this many entries of each tensor.
message: A string to prefix to the default message.
name: A name for this operation (optional).
Defaults to "assert_non_negative".
Returns:
Op raising `InvalidArgumentError` unless `x` is all non-negative.
"""
message = message or ''
with ops.name_scope(name, 'assert_non_negative', [x, data]):
x = ops.convert_to_tensor(x, name='x')
if data is None:
data = [
message,
'Condition x >= 0 did not hold element-wise:',
'x (%s) = ' % x.name, x]
zero = ops.convert_to_tensor(0, dtype=x.dtype)
return assert_less_equal(zero, x, data=data, summarize=summarize)
def assert_non_positive(x, data=None, summarize=None, message=None, name=None):
"""Assert the condition `x <= 0` holds element-wise.
Example of adding a dependency to an operation:
```python
with tf.control_dependencies([tf.assert_non_positive(x)]):
output = tf.reduce_sum(x)
```
Non-positive means, for every element `x[i]` of `x`, we have `x[i] <= 0`.
If `x` is empty this is trivially satisfied.
Args:
x: Numeric `Tensor`.
data: The tensors to print out if the condition is False. Defaults to
error message and first few entries of `x`.
summarize: Print this many entries of each tensor.
message: A string to prefix to the default message.
name: A name for this operation (optional).
Defaults to "assert_non_positive".
Returns:
Op raising `InvalidArgumentError` unless `x` is all non-positive.
"""
message = message or ''
with ops.name_scope(name, 'assert_non_positive', [x, data]):
x = ops.convert_to_tensor(x, name='x')
if data is None:
data = [
message,
          'Condition x <= 0 did not hold element-wise:',
'x (%s) = ' % x.name, x]
zero = ops.convert_to_tensor(0, dtype=x.dtype)
return assert_less_equal(x, zero, data=data, summarize=summarize)
def assert_equal(x, y, data=None, summarize=None, message=None, name=None):
"""Assert the condition `x == y` holds element-wise.
Example of adding a dependency to an operation:
```python
with tf.control_dependencies([tf.assert_equal(x, y)]):
output = tf.reduce_sum(x)
```
This condition holds if for every pair of (possibly broadcast) elements
`x[i]`, `y[i]`, we have `x[i] == y[i]`.
If both `x` and `y` are empty, this is trivially satisfied.
Args:
x: Numeric `Tensor`.
y: Numeric `Tensor`, same dtype as and broadcastable to `x`.
data: The tensors to print out if the condition is False. Defaults to
error message and first few entries of `x`, `y`.
summarize: Print this many entries of each tensor.
message: A string to prefix to the default message.
name: A name for this operation (optional). Defaults to "assert_equal".
Returns:
Op that raises `InvalidArgumentError` if `x == y` is False.
"""
message = message or ''
with ops.name_scope(name, 'assert_equal', [x, y, data]):
x = ops.convert_to_tensor(x, name='x')
y = ops.convert_to_tensor(y, name='y')
if data is None:
data = [
message,
'Condition x == y did not hold element-wise:',
'x (%s) = ' % x.name, x,
'y (%s) = ' % y.name, y
]
condition = math_ops.reduce_all(math_ops.equal(x, y))
x_static = tensor_util.constant_value(x)
y_static = tensor_util.constant_value(y)
if x_static is not None and y_static is not None:
condition_static = (x_static == y_static).all()
_assert_static(condition_static, data)
return control_flow_ops.Assert(condition, data, summarize=summarize)
def assert_none_equal(
x, y, data=None, summarize=None, message=None, name=None):
"""Assert the condition `x != y` holds for all elements.
Example of adding a dependency to an operation:
```python
with tf.control_dependencies([tf.assert_none_equal(x, y)]):
output = tf.reduce_sum(x)
```
This condition holds if for every pair of (possibly broadcast) elements
`x[i]`, `y[i]`, we have `x[i] != y[i]`.
If both `x` and `y` are empty, this is trivially satisfied.
Args:
x: Numeric `Tensor`.
y: Numeric `Tensor`, same dtype as and broadcastable to `x`.
data: The tensors to print out if the condition is False. Defaults to
error message and first few entries of `x`, `y`.
summarize: Print this many entries of each tensor.
message: A string to prefix to the default message.
name: A name for this operation (optional).
Defaults to "assert_none_equal".
Returns:
Op that raises `InvalidArgumentError` if `x != y` is ever False.
"""
message = message or ''
with ops.name_scope(name, 'assert_none_equal', [x, y, data]):
x = ops.convert_to_tensor(x, name='x')
y = ops.convert_to_tensor(y, name='y')
if data is None:
data = [
message,
          'Condition x != y did not hold for every single element:',
'x (%s) = ' % x.name, x,
'y (%s) = ' % y.name, y
]
condition = math_ops.reduce_all(math_ops.not_equal(x, y))
return control_flow_ops.Assert(condition, data, summarize=summarize)
def assert_less(x, y, data=None, summarize=None, message=None, name=None):
"""Assert the condition `x < y` holds element-wise.
Example of adding a dependency to an operation:
```python
with tf.control_dependencies([tf.assert_less(x, y)]):
output = tf.reduce_sum(x)
```
This condition holds if for every pair of (possibly broadcast) elements
`x[i]`, `y[i]`, we have `x[i] < y[i]`.
If both `x` and `y` are empty, this is trivially satisfied.
Args:
x: Numeric `Tensor`.
y: Numeric `Tensor`, same dtype as and broadcastable to `x`.
data: The tensors to print out if the condition is False. Defaults to
error message and first few entries of `x`, `y`.
summarize: Print this many entries of each tensor.
message: A string to prefix to the default message.
name: A name for this operation (optional). Defaults to "assert_less".
Returns:
Op that raises `InvalidArgumentError` if `x < y` is False.
"""
message = message or ''
with ops.name_scope(name, 'assert_less', [x, y, data]):
x = ops.convert_to_tensor(x, name='x')
y = ops.convert_to_tensor(y, name='y')
if data is None:
data = [
message,
          'Condition x < y did not hold element-wise:',
'x (%s) = ' % x.name, x, 'y (%s) = ' % y.name, y
]
condition = math_ops.reduce_all(math_ops.less(x, y))
return control_flow_ops.Assert(condition, data, summarize=summarize)
def assert_less_equal(x, y, data=None, summarize=None, message=None, name=None):
"""Assert the condition `x <= y` holds element-wise.
Example of adding a dependency to an operation:
```python
with tf.control_dependencies([tf.assert_less_equal(x, y)]):
output = tf.reduce_sum(x)
```
This condition holds if for every pair of (possibly broadcast) elements
`x[i]`, `y[i]`, we have `x[i] <= y[i]`.
If both `x` and `y` are empty, this is trivially satisfied.
Args:
x: Numeric `Tensor`.
y: Numeric `Tensor`, same dtype as and broadcastable to `x`.
data: The tensors to print out if the condition is False. Defaults to
error message and first few entries of `x`, `y`.
summarize: Print this many entries of each tensor.
message: A string to prefix to the default message.
name: A name for this operation (optional). Defaults to "assert_less_equal"
Returns:
Op that raises `InvalidArgumentError` if `x <= y` is False.
"""
message = message or ''
with ops.name_scope(name, 'assert_less_equal', [x, y, data]):
x = ops.convert_to_tensor(x, name='x')
y = ops.convert_to_tensor(y, name='y')
if data is None:
data = [
message,
          'Condition x <= y did not hold element-wise:',
'x (%s) = ' % x.name, x, 'y (%s) = ' % y.name, y
]
condition = math_ops.reduce_all(math_ops.less_equal(x, y))
return control_flow_ops.Assert(condition, data, summarize=summarize)
def assert_greater(x, y, data=None, summarize=None, message=None, name=None):
"""Assert the condition `x > y` holds element-wise.
Example of adding a dependency to an operation:
```python
with tf.control_dependencies([tf.assert_greater(x, y)]):
output = tf.reduce_sum(x)
```
This condition holds if for every pair of (possibly broadcast) elements
`x[i]`, `y[i]`, we have `x[i] > y[i]`.
If both `x` and `y` are empty, this is trivially satisfied.
Args:
x: Numeric `Tensor`.
y: Numeric `Tensor`, same dtype as and broadcastable to `x`.
data: The tensors to print out if the condition is False. Defaults to
error message and first few entries of `x`, `y`.
summarize: Print this many entries of each tensor.
message: A string to prefix to the default message.
name: A name for this operation (optional). Defaults to "assert_greater".
Returns:
Op that raises `InvalidArgumentError` if `x > y` is False.
"""
message = message or ''
with ops.name_scope(name, 'assert_greater', [x, y, data]):
x = ops.convert_to_tensor(x, name='x')
y = ops.convert_to_tensor(y, name='y')
if data is None:
data = [
message,
          'Condition x > y did not hold element-wise:',
'x (%s) = ' % x.name, x, 'y (%s) = ' % y.name, y
]
condition = math_ops.reduce_all(math_ops.greater(x, y))
return control_flow_ops.Assert(condition, data, summarize=summarize)
def assert_greater_equal(x, y, data=None, summarize=None, message=None,
name=None):
"""Assert the condition `x >= y` holds element-wise.
Example of adding a dependency to an operation:
```python
with tf.control_dependencies([tf.assert_greater_equal(x, y)]):
output = tf.reduce_sum(x)
```
This condition holds if for every pair of (possibly broadcast) elements
`x[i]`, `y[i]`, we have `x[i] >= y[i]`.
If both `x` and `y` are empty, this is trivially satisfied.
Args:
x: Numeric `Tensor`.
y: Numeric `Tensor`, same dtype as and broadcastable to `x`.
data: The tensors to print out if the condition is False. Defaults to
error message and first few entries of `x`, `y`.
summarize: Print this many entries of each tensor.
message: A string to prefix to the default message.
name: A name for this operation (optional). Defaults to
"assert_greater_equal"
Returns:
Op that raises `InvalidArgumentError` if `x >= y` is False.
"""
message = message or ''
with ops.name_scope(name, 'assert_greater_equal', [x, y, data]):
x = ops.convert_to_tensor(x, name='x')
y = ops.convert_to_tensor(y, name='y')
if data is None:
data = [
message,
          'Condition x >= y did not hold element-wise:',
'x (%s) = ' % x.name, x, 'y (%s) = ' % y.name, y
]
condition = math_ops.reduce_all(math_ops.greater_equal(x, y))
return control_flow_ops.Assert(condition, data, summarize=summarize)
def _assert_rank_condition(
x, rank, static_condition, dynamic_condition, data, summarize):
"""Assert `x` has a rank that satisfies a given condition.
Args:
x: Numeric `Tensor`.
rank: Scalar `Tensor`.
static_condition: A python function that takes `[actual_rank, given_rank]`
and returns `True` if the condition is satisfied, `False` otherwise.
dynamic_condition: An `op` that takes [actual_rank, given_rank]
and return `True` if the condition is satisfied, `False` otherwise.
data: The tensors to print out if the condition is false. Defaults to
error message and first few entries of `x`.
summarize: Print this many entries of each tensor.
Returns:
Op raising `InvalidArgumentError` if `x` fails dynamic_condition.
Raises:
ValueError: If static checks determine `x` fails static_condition.
"""
assert_type(rank, dtypes.int32)
  # Attempt to statically determine the rank.
rank_static = tensor_util.constant_value(rank)
if rank_static is not None:
if rank_static.ndim != 0:
raise ValueError('Rank must be a scalar.')
x_rank_static = x.get_shape().ndims
if x_rank_static is not None:
if not static_condition(x_rank_static, rank_static):
raise ValueError(
'Static rank condition failed', x_rank_static, rank_static)
return control_flow_ops.no_op(name='static_checks_determined_all_ok')
condition = dynamic_condition(array_ops.rank(x), rank)
# Add the condition that `rank` must have rank zero. Prevents the bug where
# someone does assert_rank(x, [n]), rather than assert_rank(x, n).
if rank_static is None:
this_data = ['Rank must be a scalar. Received rank: ', rank]
rank_check = assert_rank(rank, 0, data=this_data)
condition = control_flow_ops.with_dependencies([rank_check], condition)
return control_flow_ops.Assert(condition, data, summarize=summarize)
def assert_rank(x, rank, data=None, summarize=None, message=None, name=None):
"""Assert `x` has rank equal to `rank`.
Example of adding a dependency to an operation:
```python
with tf.control_dependencies([tf.assert_rank(x, 2)]):
output = tf.reduce_sum(x)
```
Args:
x: Numeric `Tensor`.
rank: Scalar integer `Tensor`.
data: The tensors to print out if the condition is False. Defaults to
error message and first few entries of `x`.
summarize: Print this many entries of each tensor.
message: A string to prefix to the default message.
name: A name for this operation (optional). Defaults to "assert_rank".
Returns:
Op raising `InvalidArgumentError` unless `x` has specified rank.
If static checks determine `x` has correct rank, a `no_op` is returned.
Raises:
ValueError: If static checks determine `x` has wrong rank.
"""
with ops.name_scope(name, 'assert_rank', (x, rank) + tuple(data or [])):
x = ops.convert_to_tensor(x, name='x')
rank = ops.convert_to_tensor(rank, name='rank')
message = message or ''
static_condition = lambda actual_rank, given_rank: actual_rank == given_rank
dynamic_condition = math_ops.equal
if data is None:
data = [
message,
'Tensor %s must have rank' % x.name, rank, 'Received shape: ',
array_ops.shape(x)
]
try:
assert_op = _assert_rank_condition(x, rank, static_condition,
dynamic_condition, data, summarize)
except ValueError as e:
if e.args[0] == 'Static rank condition failed':
raise ValueError(
'%s. Tensor %s must have rank %d. Received rank %d, shape %s' %
(message, x.name, e.args[2], e.args[1], x.get_shape()))
else:
raise
return assert_op
def assert_rank_at_least(
x, rank, data=None, summarize=None, message=None, name=None):
"""Assert `x` has rank equal to `rank` or higher.
Example of adding a dependency to an operation:
```python
with tf.control_dependencies([tf.assert_rank_at_least(x, 2)]):
output = tf.reduce_sum(x)
```
Args:
x: Numeric `Tensor`.
rank: Scalar `Tensor`.
data: The tensors to print out if the condition is False. Defaults to
error message and first few entries of `x`.
summarize: Print this many entries of each tensor.
message: A string to prefix to the default message.
name: A name for this operation (optional).
Defaults to "assert_rank_at_least".
Returns:
Op raising `InvalidArgumentError` unless `x` has specified rank or higher.
If static checks determine `x` has correct rank, a `no_op` is returned.
Raises:
ValueError: If static checks determine `x` has wrong rank.
"""
with ops.name_scope(
name, 'assert_rank_at_least', (x, rank) + tuple(data or [])):
x = ops.convert_to_tensor(x, name='x')
rank = ops.convert_to_tensor(rank, name='rank')
message = message or ''
static_condition = lambda actual_rank, given_rank: actual_rank >= given_rank
dynamic_condition = math_ops.greater_equal
if data is None:
data = [
message,
'Tensor %s must have rank at least' % x.name, rank,
'Received shape: ', array_ops.shape(x)
]
try:
assert_op = _assert_rank_condition(x, rank, static_condition,
dynamic_condition, data, summarize)
except ValueError as e:
if e.args[0] == 'Static rank condition failed':
raise ValueError(
'%s. Tensor %s must have rank at least %d. Received rank %d, '
'shape %s' % (message, x.name, e.args[2], e.args[1], x.get_shape()))
else:
raise
return assert_op
def _static_rank_in(actual_rank, given_ranks):
return actual_rank in given_ranks
def _dynamic_rank_in(actual_rank, given_ranks):
if len(given_ranks) < 1:
return ops.convert_to_tensor(False)
result = math_ops.equal(given_ranks[0], actual_rank)
for given_rank in given_ranks[1:]:
result = math_ops.logical_or(
result, math_ops.equal(given_rank, actual_rank))
return result
def _assert_ranks_condition(
x, ranks, static_condition, dynamic_condition, data, summarize):
"""Assert `x` has a rank that satisfies a given condition.
Args:
x: Numeric `Tensor`.
    ranks: Iterable of scalar `Tensor` objects.
static_condition: A python function that takes
`[actual_rank, given_ranks]` and returns `True` if the condition is
satisfied, `False` otherwise.
dynamic_condition: An `op` that takes [actual_rank, given_ranks]
and return `True` if the condition is satisfied, `False` otherwise.
data: The tensors to print out if the condition is false. Defaults to
error message and first few entries of `x`.
summarize: Print this many entries of each tensor.
Returns:
Op raising `InvalidArgumentError` if `x` fails dynamic_condition.
Raises:
ValueError: If static checks determine `x` fails static_condition.
"""
for rank in ranks:
assert_type(rank, dtypes.int32)
  # Attempt to statically determine the ranks.
ranks_static = tuple([tensor_util.constant_value(rank) for rank in ranks])
if not any(r is None for r in ranks_static):
for rank_static in ranks_static:
if rank_static.ndim != 0:
raise ValueError('Rank must be a scalar.')
x_rank_static = x.get_shape().ndims
if x_rank_static is not None:
if not static_condition(x_rank_static, ranks_static):
raise ValueError(
'Static rank condition failed', x_rank_static, ranks_static)
return control_flow_ops.no_op(name='static_checks_determined_all_ok')
condition = dynamic_condition(array_ops.rank(x), ranks)
# Add the condition that `rank` must have rank zero. Prevents the bug where
# someone does assert_rank(x, [n]), rather than assert_rank(x, n).
for rank, rank_static in zip(ranks, ranks_static):
if rank_static is None:
this_data = ['Rank must be a scalar. Received rank: ', rank]
rank_check = assert_rank(rank, 0, data=this_data)
condition = control_flow_ops.with_dependencies([rank_check], condition)
return control_flow_ops.Assert(condition, data, summarize=summarize)
def assert_rank_in(
x, ranks, data=None, summarize=None, message=None, name=None):
"""Assert `x` has rank in `ranks`.
Example of adding a dependency to an operation:
```python
with tf.control_dependencies([tf.assert_rank_in(x, (2, 4))]):
output = tf.reduce_sum(x)
```
Args:
x: Numeric `Tensor`.
ranks: Iterable of scalar `Tensor` objects.
data: The tensors to print out if the condition is False. Defaults to
error message and first few entries of `x`.
summarize: Print this many entries of each tensor.
message: A string to prefix to the default message.
name: A name for this operation (optional).
Defaults to "assert_rank_in".
Returns:
Op raising `InvalidArgumentError` unless rank of `x` is in `ranks`.
If static checks determine `x` has matching rank, a `no_op` is returned.
Raises:
ValueError: If static checks determine `x` has mismatched rank.
"""
with ops.name_scope(
name, 'assert_rank_in', (x,) + tuple(ranks) + tuple(data or [])):
x = ops.convert_to_tensor(x, name='x')
ranks = tuple([ops.convert_to_tensor(rank, name='rank') for rank in ranks])
message = message or ''
if data is None:
data = [
message, 'Tensor %s must have rank in' % x.name
] + list(ranks) + [
'Received shape: ', array_ops.shape(x)
]
try:
assert_op = _assert_ranks_condition(x, ranks, _static_rank_in,
_dynamic_rank_in, data, summarize)
except ValueError as e:
if e.args[0] == 'Static rank condition failed':
raise ValueError(
'%s. Tensor %s must have rank in %s. Received rank %d, '
'shape %s' % (message, x.name, e.args[2], e.args[1], x.get_shape()))
else:
raise
return assert_op
def assert_integer(x, message=None, name=None):
"""Assert that `x` is of integer dtype.
Example of adding a dependency to an operation:
```python
with tf.control_dependencies([tf.assert_integer(x)]):
output = tf.reduce_sum(x)
```
Args:
x: `Tensor` whose basetype is integer and is not quantized.
message: A string to prefix to the default message.
name: A name for this operation (optional). Defaults to "assert_integer".
Raises:
TypeError: If `x.dtype` is anything other than non-quantized integer.
Returns:
A `no_op` that does nothing. Type can be determined statically.
"""
message = message or ''
with ops.name_scope(name, 'assert_integer', [x]):
x = ops.convert_to_tensor(x, name='x')
if not x.dtype.is_integer:
err_msg = (
'%s Expected "x" to be integer type. Found: %s of dtype %s'
% (message, x.name, x.dtype))
raise TypeError(err_msg)
return control_flow_ops.no_op('statically_determined_was_integer')
def assert_type(tensor, tf_type, message=None, name=None):
"""Statically asserts that the given `Tensor` is of the specified type.
Args:
tensor: A tensorflow `Tensor`.
tf_type: A tensorflow type (`dtypes.float32`, `tf.int64`, `dtypes.bool`,
etc).
message: A string to prefix to the default message.
name: A name to give this `Op`. Defaults to "assert_type"
Raises:
    TypeError: If the tensor's data type doesn't match `tf_type`.
Returns:
A `no_op` that does nothing. Type can be determined statically.
"""
message = message or ''
with ops.name_scope(name, 'assert_type', [tensor]):
tensor = ops.convert_to_tensor(tensor, name='tensor')
if tensor.dtype != tf_type:
if context.in_graph_mode():
raise TypeError(
'%s %s must be of type %s' % (message, tensor.name, tf_type))
else:
raise TypeError(
'%s tensor must be of type %s' % (message, tf_type))
return control_flow_ops.no_op('statically_determined_correct_type')
# pylint: disable=line-too-long
def _get_diff_for_monotonic_comparison(x):
"""Gets the difference x[1:] - x[:-1]."""
x = array_ops.reshape(x, [-1])
if not is_numeric_tensor(x):
raise TypeError('Expected x to be numeric, instead found: %s' % x)
# If x has less than 2 elements, there is nothing to compare. So return [].
is_shorter_than_two = math_ops.less(array_ops.size(x), 2)
short_result = lambda: ops.convert_to_tensor([], dtype=x.dtype)
# With 2 or more elements, return x[1:] - x[:-1]
s_len = array_ops.shape(x) - 1
  diff = lambda: (array_ops.strided_slice(x, [1], [1] + s_len)
                  - array_ops.strided_slice(x, [0], s_len))
return control_flow_ops.cond(is_shorter_than_two, short_result, diff)
def is_numeric_tensor(tensor):
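  """Returns `True` if `tensor` is a `Tensor` whose dtype is in `NUMERIC_TYPES`."""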
return isinstance(tensor, ops.Tensor) and tensor.dtype in NUMERIC_TYPES
def is_non_decreasing(x, name=None):
"""Returns `True` if `x` is non-decreasing.
Elements of `x` are compared in row-major order. The tensor `[x[0],...]`
is non-decreasing if for every adjacent pair we have `x[i] <= x[i+1]`.
If `x` has less than two elements, it is trivially non-decreasing.
See also: `is_strictly_increasing`
Args:
x: Numeric `Tensor`.
name: A name for this operation (optional). Defaults to "is_non_decreasing"
Returns:
Boolean `Tensor`, equal to `True` iff `x` is non-decreasing.
Raises:
TypeError: if `x` is not a numeric tensor.
"""
with ops.name_scope(name, 'is_non_decreasing', [x]):
diff = _get_diff_for_monotonic_comparison(x)
# When len(x) = 1, diff = [], less_equal = [], and reduce_all([]) = True.
zero = ops.convert_to_tensor(0, dtype=diff.dtype)
return math_ops.reduce_all(math_ops.less_equal(zero, diff))
def is_strictly_increasing(x, name=None):
"""Returns `True` if `x` is strictly increasing.
Elements of `x` are compared in row-major order. The tensor `[x[0],...]`
is strictly increasing if for every adjacent pair we have `x[i] < x[i+1]`.
If `x` has less than two elements, it is trivially strictly increasing.
See also: `is_non_decreasing`
Args:
x: Numeric `Tensor`.
name: A name for this operation (optional).
Defaults to "is_strictly_increasing"
Returns:
Boolean `Tensor`, equal to `True` iff `x` is strictly increasing.
Raises:
TypeError: if `x` is not a numeric tensor.
"""
with ops.name_scope(name, 'is_strictly_increasing', [x]):
diff = _get_diff_for_monotonic_comparison(x)
# When len(x) = 1, diff = [], less = [], and reduce_all([]) = True.
zero = ops.convert_to_tensor(0, dtype=diff.dtype)
return math_ops.reduce_all(math_ops.less(zero, diff))
def _assert_same_base_type(items, expected_type=None):
r"""Asserts all items are of the same base type.
Args:
items: List of graph items (e.g., `Variable`, `Tensor`, `SparseTensor`,
`Operation`, or `IndexedSlices`). Can include `None` elements, which
will be ignored.
expected_type: Expected type. If not specified, assert all items are
of the same base type.
Returns:
Validated type, or none if neither expected_type nor items provided.
Raises:
ValueError: If any types do not match.
"""
original_item_str = None
for item in items:
if item is not None:
item_type = item.dtype.base_dtype
if not expected_type:
expected_type = item_type
original_item_str = item.name if hasattr(item, 'name') else str(item)
elif expected_type != item_type:
raise ValueError('%s, type=%s, must be of the same type (%s)%s.' % (
item.name if hasattr(item, 'name') else str(item),
item_type, expected_type,
(' as %s' % original_item_str) if original_item_str else ''))
return expected_type
def assert_same_float_dtype(tensors=None, dtype=None):
"""Validate and return float type based on `tensors` and `dtype`.
For ops such as matrix multiplication, inputs and weights must be of the
same float type. This function validates that all `tensors` are the same type,
validates that type is `dtype` (if supplied), and returns the type. Type must
be a floating point type. If neither `tensors` nor `dtype` is supplied,
the function will return `dtypes.float32`.
Args:
tensors: Tensors of input values. Can include `None` elements, which will be
ignored.
dtype: Expected type.
Returns:
Validated type.
Raises:
ValueError: if neither `tensors` nor `dtype` is supplied, or result is not
float, or the common type of the inputs is not a floating point type.
"""
if tensors:
dtype = _assert_same_base_type(tensors, dtype)
if not dtype:
dtype = dtypes.float32
elif not dtype.is_floating:
raise ValueError('Expected floating point type, got %s.' % dtype)
return dtype
def assert_scalar(tensor, name=None):
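  """Asserts statically that the given `tensor` is a scalar (rank 0).
  Args:
    tensor: A `Tensor`.
    name: A name for this operation (optional). Defaults to "assert_scalar".
  Returns:
    The input, converted to a `Tensor`.
  Raises:
    ValueError: If the tensor's static shape is not rank 0.
  """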
with ops.name_scope(name, 'assert_scalar', [tensor]) as name_scope:
tensor = ops.convert_to_tensor(tensor, name=name_scope)
shape = tensor.get_shape()
if shape.ndims != 0:
raise ValueError('Expected scalar shape for %s, saw shape: %s.'
% (tensor.name, shape))
return tensor
|
tornadozou/tensorflow
|
tensorflow/python/ops/check_ops.py
|
Python
|
apache-2.0
| 35,158
|
from django.contrib.auth import (authenticate,
logout as auth_logout,
login as auth_login)
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.shortcuts import redirect, render_to_response
from django.contrib.auth.forms import PasswordResetForm
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
from rest_framework import decorators, status
from rest_framework.response import Response
from rest_framework.permissions import AllowAny
from apps.user.models import ItsiUser
from apps.user.itsi import ItsiService
@decorators.api_view(['POST', 'GET'])
@decorators.permission_classes((AllowAny, ))
def login(request):
response_data = {}
status_code = status.HTTP_200_OK
if request.method == 'POST':
user = authenticate(username=request.REQUEST.get('username'),
password=request.REQUEST.get('password'))
if user is not None:
if user.is_active:
auth_login(request, user)
response_data['result'] = 'success'
response_data['username'] = user.username
response_data['guest'] = False
response_data['id'] = user.id
else:
response_data['errors'] = ['Please activate your account']
response_data['guest'] = True
response_data['id'] = 0
status_code = status.HTTP_400_BAD_REQUEST
else:
response_data['errors'] = ['Invalid username or password']
response_data['guest'] = True
response_data['id'] = 0
status_code = status.HTTP_400_BAD_REQUEST
elif request.method == 'GET':
user = request.user
if user.is_authenticated() and user.is_active:
response_data['username'] = user.username
response_data['guest'] = False
response_data['id'] = user.id
else:
response_data['guest'] = True
response_data['id'] = 0
response_data['result'] = 'success'
status_code = status.HTTP_200_OK
return Response(data=response_data, status=status_code)
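# Illustrative success payload returned by this view (values are examples):
#   {"result": "success", "username": "alice", "guest": false, "id": 42}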
@decorators.api_view(['GET'])
@decorators.permission_classes((AllowAny, ))
def logout(request):
auth_logout(request)
if request.is_ajax():
response_data = {
'guest': True,
'result': 'success',
'id': 0
}
return Response(data=response_data)
else:
return render_to_response('user/logout.html')
itsi = ItsiService()
def itsi_login(request):
redirect_uri = request.build_absolute_uri(reverse('itsi_auth'))
params = {'redirect_uri': redirect_uri}
auth_url = itsi.get_authorize_url(**params)
return redirect(auth_url)
def itsi_auth(request):
code = request.GET.get('code', None)
# Basic validation
if code is None:
return redirect('/error/itsi')
try:
session = itsi.get_session_from_code(code)
itsi_user = session.get_user()
    except Exception:
        # In case we are unable to reach ITSI or get an unexpected response
return redirect('/error/itsi')
user = authenticate(itsi_id=itsi_user['id'])
if user is not None and user.is_active:
auth_login(request, user)
return redirect('/')
else:
# User did not authenticate. Save their ITSI ID and send to /register
request.session['itsi_id'] = itsi_user['id']
return redirect(
'/sign-up/itsi/{username}/{first_name}/{last_name}'.format(
**itsi_user['extra']
)
)
@decorators.api_view(['POST'])
@decorators.permission_classes((AllowAny, ))
def itsi_sign_up(request):
# Validate request
errors = []
if 'itsi_id' not in request.session:
errors.append("There was an error in authenticating you with ITSI")
if 'username' not in request.POST or not request.POST.get('username'):
errors.append("Username must be specified")
elif User.objects.filter(username=request.POST.get('username')).exists():
errors.append("Username already exists")
if 'first_name' not in request.POST or not request.POST.get('first_name'):
errors.append("First name must be specified")
if 'last_name' not in request.POST or not request.POST.get('last_name'):
errors.append("Last name must be specified")
if 'agreed' not in request.POST or not request.POST.get('agreed'):
errors.append("You must agree to the terms")
if len(errors) > 0:
response_data = {"errors": errors}
return Response(data=response_data,
status=status.HTTP_400_BAD_REQUEST)
itsi_id = request.session['itsi_id']
# Create new user with given details and no email address or password
# since they will be authenticated using ITSI credentials
user = User.objects.create_user(
request.POST.get('username'),
email=None,
password=None,
first_name=request.POST.get('first_name'),
last_name=request.POST.get('last_name'),
)
user.save()
# Create corresponding itsi_user object that links to ITSI account
itsi_user = ItsiUser.objects.create_itsi_user(user, itsi_id)
itsi_user.save()
# Authenticate and log new user in
user = authenticate(itsi_id=itsi_id)
auth_login(request, user)
response_data = {'result': 'success',
'username': user.username,
'guest': False}
return Response(data=response_data,
status=status.HTTP_200_OK)
@decorators.api_view(['POST'])
@decorators.permission_classes((AllowAny, ))
def sign_up(request):
view = RegistrationView()
form = RegistrationFormUniqueEmail(request.POST)
if form.is_valid():
user = view.register(request, **form.cleaned_data)
response_data = {'result': 'success',
'username': user.username,
'guest': False}
return Response(data=response_data,
status=status.HTTP_200_OK)
else:
errors = []
if 'username' not in form.cleaned_data:
errors.append("Username is invalid or already in use")
if 'password1' not in form.cleaned_data:
errors.append("Password must be specified")
if 'password2' not in form.cleaned_data or \
form.cleaned_data['password1'] != form.cleaned_data['password2']:
errors.append("Passwords do not match")
if 'email' not in form.cleaned_data:
errors.append("Email is invalid or already in use")
if len(errors) == 0:
errors.append("Invalid data submitted")
response_data = {"errors": errors}
return Response(data=response_data,
status=status.HTTP_400_BAD_REQUEST)
@decorators.api_view(['POST'])
@decorators.permission_classes((AllowAny, ))
def forgot(request):
form = PasswordResetForm(request.POST)
if form.is_valid():
email = form.cleaned_data['email']
try:
# If there are active user(s) that match email
next(form.get_users(email))
form.save(request=request)
response_data = {'result': 'success',
'guest': True}
status_code = status.HTTP_200_OK
except StopIteration:
response_data = {'errors': ["Email cannot be found"]}
status_code = status.HTTP_400_BAD_REQUEST
else:
response_data = {'errors': ["Email is invalid"]}
status_code = status.HTTP_400_BAD_REQUEST
return Response(data=response_data, status=status_code)
|
mmcfarland/model-my-watershed
|
src/mmw/apps/user/views.py
|
Python
|
apache-2.0
| 7,827
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Activity analysis."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import gast
from tensorflow.contrib.autograph.pyct import anno
from tensorflow.contrib.autograph.pyct import transformer
from tensorflow.contrib.autograph.pyct.qual_names import QN
from tensorflow.contrib.autograph.pyct.static_analysis.annos import NodeAnno
# TODO(mdan): Add support for PY3 (e.g. Param vs arg).
class Scope(object):
"""Encloses local symbol definition and usage information.
  This can track, for instance, whether a symbol is modified in the current scope.
Note that scopes do not necessarily align with Python's scopes. For example,
the body of an if statement may be considered a separate scope.
Attributes:
modified: identifiers modified in this scope
created: identifiers created in this scope
used: identifiers referenced in this scope
"""
def __init__(self, parent, isolated=True):
"""Create a new scope.
Args:
parent: A Scope or None.
      isolated: Whether the scope is isolated, that is, whether variables
        created in this scope are kept local rather than being made visible
        to the parent scope.
"""
self.isolated = isolated
self.parent = parent
self.modified = set()
self.created = set()
self.used = set()
self.params = set()
self.returned = set()
# TODO(mdan): Rename to `locals`
@property
def referenced(self):
if not self.isolated and self.parent is not None:
return self.used | self.parent.referenced
return self.used
def __repr__(self):
return 'Scope{r=%s, c=%s, w=%s}' % (tuple(self.used), tuple(self.created),
tuple(self.modified))
def copy_from(self, other):
"""Recursively copies the contents of this scope from another scope."""
if (self.parent is None) != (other.parent is None):
raise ValueError('cannot copy scopes of different structures')
if other.parent is not None:
self.parent.copy_from(other.parent)
self.isolated = other.isolated
self.modified = copy.copy(other.modified)
self.created = copy.copy(other.created)
self.used = copy.copy(other.used)
self.params = copy.copy(other.params)
self.returned = copy.copy(other.returned)
@classmethod
def copy_of(cls, other):
if other.parent is not None:
parent = cls.copy_of(other.parent)
else:
parent = None
new_copy = cls(parent)
new_copy.copy_from(other)
return new_copy
def merge_from(self, other):
if (self.parent is None) != (other.parent is None):
raise ValueError('cannot merge scopes of different structures')
if other.parent is not None:
self.parent.merge_from(other.parent)
self.modified |= other.modified
self.created |= other.created
self.used |= other.used
self.params |= other.params
self.returned |= other.returned
def has(self, name):
if name in self.modified or name in self.params:
return True
elif self.parent is not None:
return self.parent.has(name)
return False
def is_modified_since_entry(self, name):
if name in self.modified:
return True
elif self.parent is not None and not self.isolated:
return self.parent.is_modified_since_entry(name)
return False
def is_param(self, name):
if name in self.params:
return True
elif self.parent is not None and not self.isolated:
return self.parent.is_param(name)
return False
def mark_read(self, name):
self.used.add(name)
if self.parent is not None and name not in self.created:
self.parent.mark_read(name)
def mark_param(self, name):
self.params.add(name)
def mark_creation(self, name):
if name.is_composite():
parent = name.parent
if self.has(parent):
# This is considered mutation of the parent, not creation.
# TODO(mdan): Is that really so?
return
else:
raise ValueError('Unknown symbol "%s".' % parent)
self.created.add(name)
def mark_write(self, name):
self.modified.add(name)
if self.isolated:
self.mark_creation(name)
else:
if self.parent is None:
self.mark_creation(name)
else:
if not self.parent.has(name):
self.mark_creation(name)
self.parent.mark_write(name)
def mark_returned(self, name):
self.returned.add(name)
if not self.isolated and self.parent is not None:
self.parent.mark_returned(name)
class ActivityAnalyzer(transformer.Base):
"""Annotates nodes with local scope information. See Scope."""
def __init__(self, context, parent_scope):
    super(ActivityAnalyzer, self).__init__(context)
self.scope = Scope(parent_scope)
self._in_return_statement = False
def _track_symbol(self, node):
# This can happen when we have an attribute (or subscript) on a function
# call. Example: a().b
if not anno.hasanno(node, anno.Basic.QN):
return
qn = anno.getanno(node, anno.Basic.QN)
if isinstance(node.ctx, gast.Store):
self.scope.mark_write(qn)
elif isinstance(node.ctx, gast.Load):
self.scope.mark_read(qn)
elif isinstance(node.ctx, gast.Param):
# Param contexts appear in function defs, so they have the meaning of
# defining a variable.
      # TODO(mdan): This may be incorrect with nested functions.
# For nested functions, we'll have to add the notion of hiding args from
# the parent scope, not writing to them.
self.scope.mark_creation(qn)
self.scope.mark_param(qn)
else:
raise ValueError('Unknown context %s for node %s.' % (type(node.ctx), qn))
anno.setanno(node, NodeAnno.IS_LOCAL, self.scope.has(qn))
anno.setanno(node, NodeAnno.IS_MODIFIED_SINCE_ENTRY,
self.scope.is_modified_since_entry(qn))
anno.setanno(node, NodeAnno.IS_PARAM, self.scope.is_param(qn))
if self._in_return_statement:
self.scope.mark_returned(qn)
def visit_Name(self, node):
self.generic_visit(node)
self._track_symbol(node)
return node
def visit_Attribute(self, node):
self.generic_visit(node)
self._track_symbol(node)
return node
def visit_Print(self, node):
current_scope = self.scope
args_scope = Scope(current_scope)
self.scope = args_scope
for n in node.values:
self.visit(n)
anno.setanno(node, NodeAnno.ARGS_SCOPE, args_scope)
self.scope = current_scope
return node
def visit_Call(self, node):
current_scope = self.scope
args_scope = Scope(current_scope, isolated=False)
self.scope = args_scope
for n in node.args:
self.visit(n)
    # TODO(mdan): Account for starargs and kwargs.
for n in node.keywords:
self.visit(n)
anno.setanno(node, NodeAnno.ARGS_SCOPE, args_scope)
self.scope = current_scope
self.visit(node.func)
return node
def _process_block_node(self, node, block, scope_name):
current_scope = self.scope
block_scope = Scope(current_scope, isolated=False)
self.scope = block_scope
for n in block:
self.visit(n)
anno.setanno(node, scope_name, block_scope)
self.scope = current_scope
return node
def _process_parallel_blocks(self, parent, children):
# Because the scopes are not isolated, processing any child block
# modifies the parent state causing the other child blocks to be
# processed incorrectly. So we need to checkpoint the parent scope so that
# each child sees the same context.
before_parent = Scope.copy_of(self.scope)
after_children = []
for child, scope_name in children:
self.scope.copy_from(before_parent)
parent = self._process_block_node(parent, child, scope_name)
after_child = Scope.copy_of(self.scope)
after_children.append(after_child)
for after_child in after_children:
self.scope.merge_from(after_child)
return parent
def visit_FunctionDef(self, node):
if self.scope:
qn = QN(node.name)
self.scope.mark_write(qn)
current_scope = self.scope
body_scope = Scope(current_scope, isolated=True)
self.scope = body_scope
self.generic_visit(node)
anno.setanno(node, NodeAnno.BODY_SCOPE, body_scope)
self.scope = current_scope
return node
def visit_With(self, node):
current_scope = self.scope
with_scope = Scope(current_scope, isolated=False)
self.scope = with_scope
self.generic_visit(node)
anno.setanno(node, NodeAnno.BODY_SCOPE, with_scope)
self.scope = current_scope
return node
def visit_If(self, node):
current_scope = self.scope
cond_scope = Scope(current_scope, isolated=False)
self.scope = cond_scope
self.visit(node.test)
anno.setanno(node, NodeAnno.COND_SCOPE, cond_scope)
self.scope = current_scope
node = self._process_parallel_blocks(node,
((node.body, NodeAnno.BODY_SCOPE),
(node.orelse, NodeAnno.ORELSE_SCOPE)))
return node
def visit_For(self, node):
self.visit(node.target)
self.visit(node.iter)
node = self._process_parallel_blocks(node,
((node.body, NodeAnno.BODY_SCOPE),
(node.orelse, NodeAnno.ORELSE_SCOPE)))
return node
def visit_While(self, node):
current_scope = self.scope
cond_scope = Scope(current_scope, isolated=False)
self.scope = cond_scope
self.visit(node.test)
anno.setanno(node, NodeAnno.COND_SCOPE, cond_scope)
self.scope = current_scope
node = self._process_parallel_blocks(node,
((node.body, NodeAnno.BODY_SCOPE),
(node.orelse, NodeAnno.ORELSE_SCOPE)))
return node
def visit_Return(self, node):
self._in_return_statement = True
node = self.generic_visit(node)
self._in_return_statement = False
return node
def resolve(node, context, parent_scope=None):
  return ActivityAnalyzer(context, parent_scope).visit(node)
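# Minimal sketch (hedged) of the Scope bookkeeping above, using only the Scope
# class and QN as imported in this module; intended as a REPL illustration:
#
#   parent = Scope(None)
#   child = Scope(parent, isolated=False)
#   child.mark_write(QN('x'))               # created locally, write propagates up
#   child.mark_read(QN('y'))                # a read with no local creation
#   child.is_modified_since_entry(QN('x'))  # -> True
#   QN('y') in child.referenced             # -> True: non-isolated scopes union
#                                           #    their parent's reads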
|
allenlavoie/tensorflow
|
tensorflow/contrib/autograph/pyct/static_analysis/activity.py
|
Python
|
apache-2.0
| 10,778
|
from dgs2.discogs_client.exceptions import HTTPError
from dgs2.discogs_client.utils import parse_timestamp, update_qs, omit_none
class SimpleFieldDescriptor(object):
"""
An attribute that determines its value using the object's fetch() method.
If transform is a callable, the value will be passed through transform when
read. Useful for strings that should be ints, parsing timestamps, etc.
Shorthand for:
@property
def foo(self):
return self.fetch('foo')
"""
def __init__(self, name, writable=False, transform=None):
self.name = name
self.writable = writable
self.transform = transform
def __get__(self, instance, owner):
if instance is None:
return self
value = instance.fetch(self.name)
if self.transform:
value = self.transform(value)
return value
def __set__(self, instance, value):
if self.writable:
instance.changes[self.name] = value
return
raise AttributeError("can't set attribute")
class ObjectFieldDescriptor(object):
"""
An attribute that determines its value using the object's fetch() method,
and passes the resulting value through an APIObject.
If optional = True, the value will be None (rather than an APIObject
instance) if the key is missing from the response.
If as_id = True, the value is treated as an ID for the new APIObject rather
than a partial dict of the APIObject.
Shorthand for:
@property
def baz(self):
return BazClass(self.client, self.fetch('baz'))
"""
def __init__(self, name, class_name, optional=False, as_id=False):
self.name = name
self.class_name = class_name
self.optional = optional
self.as_id = as_id
def __get__(self, instance, owner):
if instance is None:
return self
wrapper_class = CLASS_MAP[self.class_name.lower()]
response_dict = instance.fetch(self.name)
if self.optional and not response_dict:
return None
if self.as_id:
# Response_dict wasn't really a dict. Make it so.
response_dict = {'id': response_dict}
return wrapper_class(instance.client, response_dict)
def __set__(self, instance, value):
raise AttributeError("can't set attribute")
class ListFieldDescriptor(object):
"""
An attribute that determines its value using the object's fetch() method,
and passes each item in the resulting list through an APIObject.
Shorthand for:
@property
def bar(self):
return [BarClass(self.client, d) for d in self.fetch('bar', [])]
"""
def __init__(self, name, class_name):
self.name = name
self.class_name = class_name
def __get__(self, instance, owner):
if instance is None:
return self
wrapper_class = CLASS_MAP[self.class_name.lower()]
return [wrapper_class(instance.client, d) for d in instance.fetch(self.name, [])]
def __set__(self, instance, value):
raise AttributeError("can't set attribute")
class ObjectCollectionDescriptor(object):
"""
An attribute that determines its value by fetching a URL to a paginated
list of related objects, and passes each item in the resulting list through
an APIObject.
Shorthand for:
@property
def frozzes(self):
return PaginatedList(self.client, self.fetch('frozzes_url'), 'frozzes', FrozClass)
"""
def __init__(self, name, class_name, url_key=None, list_class=None):
self.name = name
self.class_name = class_name
if url_key is None:
url_key = name + '_url'
self.url_key = url_key
if list_class is None:
list_class = PaginatedList
self.list_class = list_class
def __get__(self, instance, owner):
if instance is None:
return self
wrapper_class = CLASS_MAP[self.class_name.lower()]
return self.list_class(instance.client, instance.fetch(self.url_key), self.name, wrapper_class)
def __set__(self, instance, value):
raise AttributeError("can't set attribute")
class Field(object):
"""
A placeholder for a descriptor. Is transformed into a descriptor by the
APIObjectMeta metaclass when the APIObject classes are created.
"""
_descriptor_class = None
def __init__(self, *args, **kwargs):
self.key = kwargs.pop('key', None)
self.args = args
self.kwargs = kwargs
def to_descriptor(self, attr_name):
return self._descriptor_class(self.key or attr_name, *self.args, **self.kwargs)
class SimpleField(Field):
"""A field that just returns the value of a given JSON key."""
_descriptor_class = SimpleFieldDescriptor
class ListField(Field):
"""A field that returns a list of APIObjects."""
_descriptor_class = ListFieldDescriptor
class ObjectField(Field):
"""A field that returns a single APIObject."""
_descriptor_class = ObjectFieldDescriptor
class ObjectCollection(Field):
"""A field that returns a paginated list of APIObjects."""
_descriptor_class = ObjectCollectionDescriptor
class APIObjectMeta(type):
def __new__(cls, name, bases, dict_):
for k, v in dict_.iteritems():
if isinstance(v, Field):
dict_[k] = v.to_descriptor(k)
return super(APIObjectMeta, cls).__new__(cls, name, bases, dict_)
class APIObject(object):
__metaclass__ = APIObjectMeta
class PrimaryAPIObject(APIObject):
"""A first-order API object that has a canonical endpoint of its own."""
def __init__(self, client, dict_):
self.data = dict_
self.client = client
self._known_invalid_keys = []
self.changes = {}
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.id == other.id
return NotImplemented
def __ne__(self, other):
equal = self.__eq__(other)
return NotImplemented if equal is NotImplemented else not equal
def refresh(self):
if self.data.get('resource_url'):
data = self.client._get(self.data['resource_url'])
self.data.update(data)
self.changes = {}
def save(self):
if self.data.get('resource_url'):
# TODO: This should be PATCH
self.client._post(self.data['resource_url'], self.changes)
# Refresh the object, in case there were side-effects
self.refresh()
def delete(self):
if self.data.get('resource_url'):
self.client._delete(self.data['resource_url'])
def fetch(self, key, default=None):
if key in self._known_invalid_keys:
return default
try:
# First, look in the cache of pending changes
return self.changes[key]
except KeyError:
pass
try:
# Next, look in the potentially incomplete local cache
return self.data[key]
except KeyError:
pass
# Now refresh the object from its resource_url.
# The key might exist but not be in our cache.
self.refresh()
        try:
            return self.data[key]
        except KeyError:
            # Still missing after a refresh; remember that to avoid re-fetching.
            self._known_invalid_keys.append(key)
            return default
# This is terribly cheesy, but makes the client API more consistent
class SecondaryAPIObject(APIObject):
"""
An object that wraps parts of a response and doesn't have its own
endpoint.
"""
def __init__(self, client, dict_):
self.client = client
self.data = dict_
def fetch(self, key, default=None):
return self.data.get(key, default)
class BasePaginatedResponse(object):
"""Base class for lists of objects spread across many URLs."""
def __init__(self, client, url):
self.client = client
self.url = url
self._num_pages = None
self._num_items = None
self._pages = {}
self._per_page = 50
self._list_key = 'items'
self._sort_key = None
self._sort_order = 'asc'
self._filters = {}
@property
def per_page(self):
return self._per_page
@per_page.setter
def per_page(self, value):
self._per_page = value
self._invalidate()
def _invalidate(self):
self._pages = {}
self._num_pages = None
self._num_items = None
def _load_pagination_info(self):
data = self.client._get(self._url_for_page(1))
self._num_pages = data['pagination']['pages']
self._num_items = data['pagination']['items']
def _url_for_page(self, page):
base_qs = {
'page': page,
'per_page': self._per_page,
}
if self._sort_key is not None:
base_qs.update({
'sort': self._sort_key,
'sort_order': self._sort_order,
})
base_qs.update(self._filters)
return update_qs(self.url, base_qs)
def sort(self, key, order='asc'):
        if order not in ('asc', 'desc'):
raise ValueError("Order must be one of 'asc', 'desc'")
self._sort_key = key
self._sort_order = order
self._invalidate()
return self
def filter(self, **kwargs):
self._filters = kwargs
self._invalidate()
return self
@property
def pages(self):
if self._num_pages is None:
self._load_pagination_info()
return self._num_pages
@property
def count(self):
if self._num_items is None:
self._load_pagination_info()
return self._num_items
def page(self, index):
        if index not in self._pages:
data = self.client._get(self._url_for_page(index))
self._pages[index] = [
self._transform(item) for item in data[self._list_key]
]
return self._pages[index]
def _transform(self, item):
return item
def __getitem__(self, index):
page_index = index / self.per_page + 1
offset = index % self.per_page
try:
page = self.page(page_index)
except HTTPError, e:
if e.status_code == 404:
raise IndexError(e.msg)
else:
raise
return page[offset]
def __len__(self):
return self.count
def __iter__(self):
for i in xrange(1, self.pages + 1):
page = self.page(i)
for item in page:
yield item
class PaginatedList(BasePaginatedResponse):
"""A paginated list of objects of a particular class."""
def __init__(self, client, url, key, class_):
super(PaginatedList, self).__init__(client, url)
self._list_key = key
self.class_ = class_
def _transform(self, item):
return self.class_(self.client, item)
class Wantlist(PaginatedList):
def add(self, release, notes=None, notes_public=None, rating=None):
release_id = release.id if isinstance(release, Release) else release
data = {
'release_id': release_id,
'notes': notes,
'notes_public': notes_public,
'rating': rating,
}
self.client._put(self.url + '/' + str(release_id), omit_none(data))
self._invalidate()
def remove(self, release):
release_id = release.id if isinstance(release, Release) else release
self.client._delete(self.url + '/' + str(release_id))
self._invalidate()
class OrderMessagesList(PaginatedList):
def add(self, message=None, status=None, email_buyer=True, email_seller=False):
data = {
'message': message,
'status': status,
'email_buyer': email_buyer,
'email_seller': email_seller,
}
self.client._post(self.url, omit_none(data))
self._invalidate()
class MixedPaginatedList(BasePaginatedResponse):
"""A paginated list of objects identified by their type parameter."""
def __init__(self, client, url, key):
super(MixedPaginatedList, self).__init__(client, url)
self._list_key = key
def _transform(self, item):
# In some cases, we want to map the 'title' key we get back in search
# results to 'name'. This way, you can repr() a page of search results
# without making 50 requests.
if item['type'] in ('label', 'artist'):
item['name'] = item['title']
return CLASS_MAP[item['type']](self.client, item)
class Artist(PrimaryAPIObject):
id = SimpleField()
name = SimpleField()
real_name = SimpleField(key='realname')
profile = SimpleField()
data_quality = SimpleField()
name_variations = SimpleField(key='namevariations')
url = SimpleField('uri')
urls = SimpleField()
aliases = ListField('Artist')
members = ListField('Artist')
groups = ListField('Artist')
def __init__(self, client, dict_):
super(Artist, self).__init__(client, dict_)
self.data['resource_url'] = client._base_url + '/artists/%d' % dict_['id']
@property
def releases(self):
return MixedPaginatedList(self.client, self.fetch('releases_url'), 'releases')
def __repr__(self):
return '<Artist %r %r>' % (self.id, self.name)
class Release(PrimaryAPIObject):
id = SimpleField()
title = SimpleField()
year = SimpleField()
thumb = SimpleField()
data_quality = SimpleField()
status = SimpleField()
genres = SimpleField()
country = SimpleField()
notes = SimpleField()
formats = SimpleField()
url = SimpleField('uri')
videos = ListField('Video')
tracklist = ListField('Track')
artists = ListField('Artist')
credits = ListField('Artist', key='extraartists')
labels = ListField('Label')
companies = ListField('Label')
def __init__(self, client, dict_):
super(Release, self).__init__(client, dict_)
self.data['resource_url'] = client._base_url + '/releases/%d' % dict_['id']
@property
def master(self):
master_id = self.fetch('master_id')
if master_id:
return Master(self.client, {'id': master_id})
else:
return None
def __repr__(self):
return '<Release %r %r>' % (self.id, self.title)
class Master(PrimaryAPIObject):
id = SimpleField()
title = SimpleField()
data_quality = SimpleField()
styles = SimpleField()
genres = SimpleField()
images = SimpleField()
url = SimpleField('uri')
videos = ListField('Video')
tracklist = ListField('Track')
main_release = ObjectField('Release', as_id=True)
versions = ObjectCollection('Release')
def __init__(self, client, dict_):
super(Master, self).__init__(client, dict_)
self.data['resource_url'] = client._base_url + '/masters/%d' % dict_['id']
def __repr__(self):
return '<Master %r %r>' % (self.id, self.title)
class Label(PrimaryAPIObject):
id = SimpleField()
name = SimpleField()
profile = SimpleField()
urls = SimpleField()
images = SimpleField()
contact_info = SimpleField()
data_quality = SimpleField()
url = SimpleField('uri')
sublabels = ListField('Label')
parent_label = ObjectField('Label', optional=True)
releases = ObjectCollection('Release')
def __init__(self, client, dict_):
super(Label, self).__init__(client, dict_)
self.data['resource_url'] = client._base_url + '/labels/%d' % dict_['id']
def __repr__(self):
return '<Label %r %r>' % (self.id, self.name)
class User(PrimaryAPIObject):
id = SimpleField()
username = SimpleField()
releases_contributed = SimpleField()
num_collection = SimpleField()
num_wantlist = SimpleField()
num_lists = SimpleField()
rank = SimpleField()
rating_avg = SimpleField()
url = SimpleField('uri')
name = SimpleField(writable=True)
profile = SimpleField(writable=True)
location = SimpleField(writable=True)
home_page = SimpleField(writable=True)
registered = SimpleField(transform=parse_timestamp)
inventory = ObjectCollection('Listing', key='listings', url_key='inventory_url')
wantlist = ObjectCollection('WantlistItem', key='wants', url_key='wantlist_url', list_class=Wantlist)
def __init__(self, client, dict_):
super(User, self).__init__(client, dict_)
self.data['resource_url'] = client._base_url + '/users/%s' % dict_['username']
@property
def orders(self):
return PaginatedList(self.client, self.client._base_url + '/marketplace/orders', 'orders', Order)
@property
def collection_folders(self):
resp = self.client._get(self.fetch('collection_folders_url'))
return [CollectionFolder(self.client, d) for d in resp['folders']]
def __repr__(self):
return '<User %r %r>' % (self.id, self.username)
class WantlistItem(PrimaryAPIObject):
id = SimpleField()
rating = SimpleField(writable=True)
notes = SimpleField(writable=True)
notes_public = SimpleField(writable=True)
release = ObjectField('Release', key='basic_information')
def __init__(self, client, dict_):
super(WantlistItem, self).__init__(client, dict_)
def __repr__(self):
return '<WantlistItem %r %r>' % (self.id, self.release.title)
# TODO: folder_id should be a Folder object; needs folder_url
# TODO: notes should be first-order (somehow); needs resource_url
class CollectionItemInstance(PrimaryAPIObject):
id = SimpleField()
rating = SimpleField()
folder_id = SimpleField()
notes = SimpleField()
release = ObjectField('Release', key='basic_information')
def __init__(self, client, dict_):
super(CollectionItemInstance, self).__init__(client, dict_)
def __repr__(self):
return '<CollectionItemInstance %r %r>' % (self.id, self.release.title)
class CollectionFolder(PrimaryAPIObject):
id = SimpleField()
name = SimpleField()
count = SimpleField()
def __init__(self, client, dict_):
super(CollectionFolder, self).__init__(client, dict_)
@property
def releases(self):
# TODO: Needs releases_url
return PaginatedList(self.client, self.fetch('resource_url') + '/releases', 'releases', CollectionItemInstance)
def __repr__(self):
return '<CollectionFolder %r %r>' % (self.id, self.name)
class Listing(PrimaryAPIObject):
id = SimpleField()
status = SimpleField()
allow_offers = SimpleField()
condition = SimpleField()
sleeve_condition = SimpleField()
ships_from = SimpleField()
comments = SimpleField()
audio = SimpleField()
url = SimpleField('uri')
price = ObjectField('Price')
release = ObjectField('Release')
seller = ObjectField('User')
posted = SimpleField(transform=parse_timestamp)
def __init__(self, client, dict_):
super(Listing, self).__init__(client, dict_)
self.data['resource_url'] = client._base_url + '/marketplace/listings/%d' % dict_['id']
def __repr__(self):
return '<Listing %r %r>' % (self.id, self.release.data['description'])
class Order(PrimaryAPIObject):
id = SimpleField()
next_status = SimpleField()
shipping_address = SimpleField()
additional_instructions = SimpleField()
url = SimpleField('uri')
status = SimpleField(writable=True)
fee = ObjectField('Price')
buyer = ObjectField('User')
seller = ObjectField('User')
created = SimpleField(transform=parse_timestamp)
last_activity = SimpleField(transform=parse_timestamp)
messages = ObjectCollection('OrderMessage', list_class=OrderMessagesList)
items = ListField('Listing')
def __init__(self, client, dict_):
super(Order, self).__init__(client, dict_)
self.data['resource_url'] = client._base_url + '/marketplace/orders/%s' % dict_['id']
# Setting shipping is a little weird -- you can't change the
# currency, and you use the 'shipping' key instead of 'value'
@property
def shipping(self):
return Price(self.client, self.fetch('shipping'))
@shipping.setter
def shipping(self, value):
self.changes['shipping'] = value
def __repr__(self):
return '<Order %r>' % self.id
class OrderMessage(SecondaryAPIObject):
subject = SimpleField()
message = SimpleField()
to = ObjectField('User')
order = ObjectField('Order')
timestamp = SimpleField(transform=parse_timestamp)
def __repr__(self):
return '<OrderMessage to:%r>' % self.to.username
class Track(SecondaryAPIObject):
duration = SimpleField()
position = SimpleField()
title = SimpleField()
artists = ListField('Artist')
credits = ListField('Artist', key='extraartists')
def __repr__(self):
return '<Track %r %r>' % (self.position, self.title)
class Price(SecondaryAPIObject):
currency = SimpleField()
value = SimpleField()
def __repr__(self):
return '<Price %r %r>' % (self.value, self.currency)
class Video(SecondaryAPIObject):
duration = SimpleField()
embed = SimpleField()
title = SimpleField()
description = SimpleField()
url = SimpleField('uri')
def __repr__(self):
return '<Video %r>' % (self.title)
CLASS_MAP = {
'artist': Artist,
'release': Release,
'master': Master,
'label': Label,
'price': Price,
'video': Video,
'track': Track,
'user': User,
'order': Order,
'listing': Listing,
'wantlistitem': WantlistItem,
'ordermessage': OrderMessage,
}
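# Usage sketch (hedged): how the Field placeholders above become descriptors
# via APIObjectMeta. 'Widget' is a hypothetical class for illustration only.
#
#   class Widget(PrimaryAPIObject):
#       id = SimpleField()
#       label = SimpleField(key='title')   # JSON key 'title' exposed as .label
#       tracks = ListField('Track')        # each item wrapped via CLASS_MAP
#
# Reading widget.label calls fetch('title'); a cache miss triggers refresh()
# from the object's resource_url before falling back to the default.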
|
hzlf/openbroadcast
|
website/tools/dgs2/discogs_client/models.py
|
Python
|
gpl-3.0
| 21,790
|
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible import constants as C
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
TRANSFERS_FILES = True
def run(self, tmp=None, task_vars=None):
''' handler for file transfer operations '''
if self._play_context.check_mode:
return dict(skipped=True, msg='check mode not supported for this module')
if not tmp:
tmp = self._make_tmp_path()
creates = self._task.args.get('creates')
if creates:
# do not run the command if the line contains creates=filename
# and the filename already exists. This allows idempotence
# of command executions.
result = self._execute_module(module_name='stat', module_args=dict(path=creates), task_vars=task_vars, tmp=tmp, persist_files=True)
stat = result.get('stat', None)
if stat and stat.get('exists', False):
return dict(skipped=True, msg=("skipped, since %s exists" % creates))
removes = self._task.args.get('removes')
if removes:
# do not run the command if the line contains removes=filename
# and the filename does not exist. This allows idempotence
# of command executions.
result = self._execute_module(module_name='stat', module_args=dict(path=removes), task_vars=task_vars, tmp=tmp, persist_files=True)
stat = result.get('stat', None)
if stat and not stat.get('exists', False):
return dict(skipped=True, msg=("skipped, since %s does not exist" % removes))
# the script name is the first item in the raw params, so we split it
# out now so we know the file name we need to transfer to the remote,
# and everything else is an argument to the script which we need later
# to append to the remote command
parts = self._task.args.get('_raw_params', '').strip().split()
source = parts[0]
args = ' '.join(parts[1:])
if self._task._role is not None:
source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
else:
source = self._loader.path_dwim(source)
# transfer the file to a remote tmp location
tmp_src = self._connection._shell.join_path(tmp, os.path.basename(source))
self._connection.put_file(source, tmp_src)
sudoable = True
# set file permissions, more permissive when the copy is done as a different user
if self._play_context.become and self._play_context.become_user != 'root':
chmod_mode = 'a+rx'
sudoable = False
else:
chmod_mode = '+rx'
self._remote_chmod(tmp, chmod_mode, tmp_src, sudoable=sudoable)
        # combine environment preparation and script execution into one ssh roundtrip
env_string = self._compute_environment_string()
script_cmd = ' '.join([env_string, tmp_src, args])
result = self._low_level_execute_command(cmd=script_cmd, tmp=None, sudoable=sudoable)
# clean up after
if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES:
self._remove_tmp_path(tmp)
result['changed'] = True
return result
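# Usage sketch (hedged), shown as comments since this section is Python-only:
# a playbook task exercising the 'creates' guard handled above, so the script
# is skipped once the marker file exists (paths are illustrative).
#
#   - name: run installer exactly once
#     script: files/install.sh --fast
#     args:
#       creates: /opt/app/.installed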
|
hnakamur/ansible
|
lib/ansible/plugins/action/script.py
|
Python
|
gpl-3.0
| 4,074
|
"""
Tri-Polar Grid Projected Plotting
=================================
This example demonstrates cell plots of data on the semi-structured ORCA2 model
grid.
First, the data is projected into the PlateCarree coordinate reference system.
Second, four pcolormesh plots are created from this projected dataset,
using different projections for the output image.
"""
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
import iris
import iris.analysis.cartography
import iris.plot as iplt
import iris.quickplot as qplt
def main():
# Load data
filepath = iris.sample_data_path("orca2_votemper.nc")
cube = iris.load_cube(filepath)
# Choose plot projections
projections = {}
projections["Mollweide"] = ccrs.Mollweide()
projections["PlateCarree"] = ccrs.PlateCarree()
projections["NorthPolarStereo"] = ccrs.NorthPolarStereo()
projections["Orthographic"] = ccrs.Orthographic(
central_longitude=-90, central_latitude=45
)
pcarree = projections["PlateCarree"]
# Transform cube to target projection
new_cube, extent = iris.analysis.cartography.project(
cube, pcarree, nx=400, ny=200
)
# Plot data in each projection
for name in sorted(projections):
fig = plt.figure()
fig.suptitle("ORCA2 Data Projected to {}".format(name))
# Set up axes and title
ax = plt.subplot(projection=projections[name])
# Set limits
ax.set_global()
# plot with Iris quickplot pcolormesh
qplt.pcolormesh(new_cube)
# Draw coastlines
ax.coastlines()
iplt.show()
if __name__ == "__main__":
main()
|
pp-mo/iris
|
docs/iris/example_code/General/orca_projection.py
|
Python
|
lgpl-3.0
| 1,647
|
#!/usr/bin/env python3
# Where to find explanation: https://www.geeksforgeeks.org/reverse-a-linked-list/
# Iterative approach:
# 1. Initialize three pointers: prev as NULL, curr as head and next as NULL.
# 2. Iterate through the linked list; in the loop, do the following.
# 3. Before changing next of current, store the next node.
# 4. Now change next of current. This is where the actual reversing happens.
# 5. Move prev and curr one step forward.
# Recursive approach:
# 1. Divide the list in two parts - the first node and the rest.
# 2. Call reverse on the rest of the list.
# 3. Link rest to first.
# 4. Fix the head pointer.
# Definition for singly-linked list.
class ListNode:
def __init__(self, val=0, next=None):
self.val = val
self.next = next
class Solution:
    def reverseList(self, head: ListNode) -> ListNode:
        # Recursive approach; check head before head.next so an empty list
        # does not raise AttributeError.
        if head is None or head.next is None:
            return head
        rest = self.reverseList(head.next)
        head.next.next = head
        head.next = None
        return rest
    def iterative_reverse_list(self, head: ListNode) -> ListNode:
        prv = None
        cur = head
        while cur is not None:
            nxt = cur.next  # store the next node before rewiring
            cur.next = prv  # the actual reversing happens here
            prv = cur
            cur = nxt
        return prv  # prv is the new head
def main():
    head = ListNode(1, ListNode(2, ListNode(3, ListNode(4, ListNode(5, None)))))
    h = Solution().iterative_reverse_list(head)
    # Walk the reversed list to show the result: 5 4 3 2 1
    while h is not None:
        print(h.val, end=' ')
        h = h.next
    print()
if __name__ == "__main__":
main()
|
fedusia/python
|
leetcode/revers-linked-list.py
|
Python
|
apache-2.0
| 1,497
|
# This file is part of Firemix.
#
# Copyright 2013-2016 Jonathan Evans <jon@craftyjon.com>
#
# Firemix is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Firemix is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Firemix. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
from lib.colors import hls_blend
from lib.transition import Transition
class Dissolve(Transition):
def __init__(self, app):
Transition.__init__(self, app)
def __str__(self):
return "Dissolve"
def render(self, start, end, progress, out):
hls_blend(start, end, out, progress, 'add', 1.0, 1.0)
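# Usage sketch (hedged): a halfway dissolve between two pixel buffers. The
# buffer shape is an assumption; hls_blend's signature is only as called above.
#
#   start = np.zeros((240, 3)); end = np.ones((240, 3))
#   out = np.empty_like(start)
#   Dissolve(app).render(start, end, 0.5, out)  # given a Firemix app instance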
|
Openlights/firemix
|
plugins/dissolve.py
|
Python
|
gpl-3.0
| 1,064
|
"""Platform to retrieve uptime for Home Assistant."""
import voluptuous as vol
from homeassistant.components.sensor import DEVICE_CLASS_TIMESTAMP, PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME, CONF_UNIT_OF_MEASUREMENT
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
DEFAULT_NAME = "Uptime"
PLATFORM_SCHEMA = vol.All(
cv.deprecated(CONF_UNIT_OF_MEASUREMENT),
PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_UNIT_OF_MEASUREMENT, default="days"): vol.All(
cv.string, vol.In(["minutes", "hours", "days", "seconds"])
),
}
),
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the uptime sensor platform."""
name = config.get(CONF_NAME)
async_add_entities([UptimeSensor(name)], True)
class UptimeSensor(Entity):
"""Representation of an uptime sensor."""
def __init__(self, name):
"""Initialize the uptime sensor."""
self._name = name
self._state = dt_util.now().isoformat()
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def device_class(self):
"""Return device class."""
return DEVICE_CLASS_TIMESTAMP
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def should_poll(self) -> bool:
"""Disable polling for this entity."""
return False
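# Configuration sketch (hedged), shown as comments; note that the
# unit_of_measurement option is accepted but deprecated per the schema above:
#
#   sensor:
#     - platform: uptime
#       name: Online since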
|
tboyce1/home-assistant
|
homeassistant/components/uptime/sensor.py
|
Python
|
apache-2.0
| 1,659
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 by YOUR NAME HERE
#
# This file is part of RoboComp
#
# RoboComp is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RoboComp is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with RoboComp. If not, see <http://www.gnu.org/licenses/>.
#
# \mainpage RoboComp::name
#
# \section intro_sec Introduction
#
# Some information about the component...
#
# \section interface_sec Interface
#
# Description of the interface provided...
#
# \section install_sec Installation
#
# \subsection install1_ssec Software dependencies
# Software dependencies....
#
# \subsection install2_ssec Compile and install
# How to compile/install the component...
#
# \section guide_sec User guide
#
# \subsection config_ssec Configuration file
#
# <p>
# The configuration file...
# </p>
#
# \subsection execution_ssec Execution
#
# Just: "${PATH_TO_BINARY}/name --Ice.Config=${PATH_TO_CONFIG_FILE}"
#
# \subsection running_ssec Once running
#
#
#
import sys, traceback, Ice, IceStorm, subprocess, threading, time, Queue, os
# Ctrl+c handling
import signal
signal.signal(signal.SIGINT, signal.SIG_DFL)
from PySide import *
from specificworker import *
ROBOCOMP = ''
try:
ROBOCOMP = os.environ['ROBOCOMP']
except:
pass
if len(ROBOCOMP)<1:
print 'ROBOCOMP environment variable not set! Exiting.'
sys.exit()
preStr = "-I"+ROBOCOMP+"/interfaces/ --all "+ROBOCOMP+"/interfaces/"
Ice.loadSlice(preStr+"CommonBehavior.ice")
import RoboCompCommonBehavior
Ice.loadSlice(preStr+"RCISMousePicker.ice")
import RoboCompRCISMousePicker
Ice.loadSlice(preStr+"TrajectoryRobot2D.ice")
import RoboCompTrajectoryRobot2D
class CommonBehaviorI(RoboCompCommonBehavior.CommonBehavior):
def __init__(self, _handler, _communicator):
self.handler = _handler
self.communicator = _communicator
def getFreq(self, current = None):
self.handler.getFreq()
def setFreq(self, freq, current = None):
self.handler.setFreq()
def timeAwake(self, current = None):
try:
return self.handler.timeAwake()
except:
print 'Problem getting timeAwake'
def killYourSelf(self, current = None):
self.handler.killYourSelf()
def getAttrList(self, current = None):
try:
return self.handler.getAttrList(self.communicator)
except:
print 'Problem getting getAttrList'
traceback.print_exc()
status = 1
return
if __name__ == '__main__':
app = QtGui.QApplication(sys.argv)
ic = Ice.initialize(sys.argv)
status = 0
mprx = {}
try:
# Remote object connection for TrajectoryRobot2D
try:
proxyString = ic.getProperties().getProperty('TrajectoryRobot2DProxy')
try:
basePrx = ic.stringToProxy(proxyString)
trajectoryrobot2d_proxy = RoboCompTrajectoryRobot2D.TrajectoryRobot2DPrx.checkedCast(basePrx)
mprx["TrajectoryRobot2DProxy"] = trajectoryrobot2d_proxy
except Ice.Exception:
print 'Cannot connect to the remote object (TrajectoryRobot2D)', proxyString
#traceback.print_exc()
status = 1
except Ice.Exception, e:
print e
print 'Cannot get TrajectoryRobot2DProxy property.'
status = 1
# Topic Manager
proxy = ic.getProperties().getProperty("TopicManager.Proxy")
obj = ic.stringToProxy(proxy)
topicManager = IceStorm.TopicManagerPrx.checkedCast(obj)
except:
traceback.print_exc()
status = 1
if status == 0:
worker = SpecificWorker(mprx)
RCISMousePicker_adapter = ic.createObjectAdapter("RCISMousePickerTopic")
rcismousepickerI_ = RCISMousePickerI(worker)
rcismousepicker_proxy = RCISMousePicker_adapter.addWithUUID(rcismousepickerI_).ice_oneway()
subscribeDone = False
while not subscribeDone:
try:
rcismousepicker_topic = topicManager.retrieve("RCISMousePicker")
subscribeDone = True
            except Ice.Exception, e:
print e
print "Error. Topic does not exist"
status = 0
qos = {}
rcismousepicker_topic.subscribeAndGetPublisher(qos, rcismousepicker_proxy)
RCISMousePicker_adapter.activate()
# adapter.add(CommonBehaviorI(<LOWER>I, ic), ic.stringToIdentity('commonbehavior'))
app.exec_()
if ic:
try:
ic.destroy()
except:
traceback.print_exc()
status = 1
|
robocomp/robocomp-ursus-rockin
|
components/trajtester/src/name.py
|
Python
|
gpl-3.0
| 4,582
|
from __future__ import absolute_import
from __future__ import unicode_literals
import sys
from distlib.wheel import ABI
from distlib.wheel import IMPVER
def is_py2_or_pypy():
return str is bytes or '__pypy__' in sys.builtin_module_names
def expected_wheel_name(fmt):
return fmt.format(IMPVER, 'none' if is_py2_or_pypy() else ABI)
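# Usage sketch (hedged): the format string below is a hypothetical wheel-name
# template whose {} slots take the implementation/version tag and the ABI tag.
#
#   expected_wheel_name('demo_pkg-1.0-{}-{}-linux_x86_64.whl')
#   # e.g. 'demo_pkg-1.0-cp27-none-linux_x86_64.whl' on CPython 2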
|
chriskuehl/pip-custom-platform
|
testing/util.py
|
Python
|
mit
| 344
|
from superset import security, sm
from .base_tests import SupersetTestCase
def get_perm_tuples(role_name):
perm_set = set()
for perm in sm.find_role(role_name).permissions:
perm_set.add((perm.permission.name, perm.view_menu.name))
return perm_set
class RolePermissionTests(SupersetTestCase):
"""Testing export import functionality for dashboards"""
def __init__(self, *args, **kwargs):
super(RolePermissionTests, self).__init__(*args, **kwargs)
def assert_can_read(self, view_menu, permissions_set):
self.assertIn(('can_show', view_menu), permissions_set)
self.assertIn(('can_list', view_menu), permissions_set)
def assert_can_write(self, view_menu, permissions_set):
self.assertIn(('can_add', view_menu), permissions_set)
self.assertIn(('can_download', view_menu), permissions_set)
self.assertIn(('can_delete', view_menu), permissions_set)
self.assertIn(('can_edit', view_menu), permissions_set)
def assert_cannot_write(self, view_menu, permissions_set):
self.assertNotIn(('can_add', view_menu), permissions_set)
self.assertNotIn(('can_download', view_menu), permissions_set)
self.assertNotIn(('can_delete', view_menu), permissions_set)
self.assertNotIn(('can_edit', view_menu), permissions_set)
self.assertNotIn(('can_save', view_menu), permissions_set)
def assert_can_all(self, view_menu, permissions_set):
self.assert_can_read(view_menu, permissions_set)
self.assert_can_write(view_menu, permissions_set)
def assert_cannot_gamma(self, perm_set):
self.assert_cannot_write('DruidColumnInlineView', perm_set)
def assert_can_gamma(self, perm_set):
self.assert_can_read('DatabaseAsync', perm_set)
self.assert_can_read('TableModelView', perm_set)
# make sure that user can create slices and dashboards
self.assert_can_all('SliceModelView', perm_set)
self.assert_can_all('DashboardModelView', perm_set)
self.assertIn(('can_add_slices', 'Superset'), perm_set)
self.assertIn(('can_copy_dash', 'Superset'), perm_set)
self.assertIn(('can_activity_per_day', 'Superset'), perm_set)
self.assertIn(('can_created_dashboards', 'Superset'), perm_set)
self.assertIn(('can_created_slices', 'Superset'), perm_set)
self.assertIn(('can_csv', 'Superset'), perm_set)
self.assertIn(('can_dashboard', 'Superset'), perm_set)
self.assertIn(('can_explore', 'Superset'), perm_set)
self.assertIn(('can_explore_json', 'Superset'), perm_set)
self.assertIn(('can_fave_dashboards', 'Superset'), perm_set)
self.assertIn(('can_fave_slices', 'Superset'), perm_set)
        self.assertIn(('can_save_dash', 'Superset'), perm_set)
        self.assertIn(('can_slice', 'Superset'), perm_set)
def assert_can_alpha(self, perm_set):
self.assert_can_all('SqlMetricInlineView', perm_set)
self.assert_can_all('TableColumnInlineView', perm_set)
self.assert_can_all('TableModelView', perm_set)
self.assert_can_all('DruidColumnInlineView', perm_set)
self.assert_can_all('DruidDatasourceModelView', perm_set)
self.assert_can_all('DruidMetricInlineView', perm_set)
self.assertIn(
('all_datasource_access', 'all_datasource_access'), perm_set)
self.assertIn(('muldelete', 'DruidDatasourceModelView'), perm_set)
def assert_cannot_alpha(self, perm_set):
self.assert_cannot_write('AccessRequestsModelView', perm_set)
self.assert_cannot_write('Queries', perm_set)
self.assert_cannot_write('RoleModelView', perm_set)
self.assert_cannot_write('UserDBModelView', perm_set)
def assert_can_admin(self, perm_set):
self.assert_can_all('DatabaseAsync', perm_set)
self.assert_can_all('DatabaseView', perm_set)
self.assert_can_all('DruidClusterModelView', perm_set)
self.assert_can_all('AccessRequestsModelView', perm_set)
self.assert_can_all('RoleModelView', perm_set)
self.assert_can_all('UserDBModelView', perm_set)
self.assertIn(('all_database_access', 'all_database_access'), perm_set)
        self.assertIn(('can_override_role_permissions', 'Superset'), perm_set)
        self.assertIn(('can_sync_druid_source', 'Superset'), perm_set)
        self.assertIn(('can_approve', 'Superset'), perm_set)
self.assertIn(('can_update_role', 'Superset'), perm_set)
def test_is_admin_only(self):
self.assertFalse(security.is_admin_only(
sm.find_permission_view_menu('can_show', 'TableModelView')))
self.assertFalse(security.is_admin_only(
sm.find_permission_view_menu(
'all_datasource_access', 'all_datasource_access')))
self.assertTrue(security.is_admin_only(
sm.find_permission_view_menu('can_delete', 'DatabaseView')))
self.assertTrue(security.is_admin_only(
sm.find_permission_view_menu(
'can_show', 'AccessRequestsModelView')))
self.assertTrue(security.is_admin_only(
sm.find_permission_view_menu(
'can_edit', 'UserDBModelView')))
self.assertTrue(security.is_admin_only(
sm.find_permission_view_menu(
'can_approve', 'Superset')))
self.assertTrue(security.is_admin_only(
sm.find_permission_view_menu(
'all_database_access', 'all_database_access')))
def test_is_alpha_only(self):
self.assertFalse(security.is_alpha_only(
sm.find_permission_view_menu('can_show', 'TableModelView')))
self.assertTrue(security.is_alpha_only(
sm.find_permission_view_menu('muldelete', 'TableModelView')))
self.assertTrue(security.is_alpha_only(
sm.find_permission_view_menu(
'all_datasource_access', 'all_datasource_access')))
self.assertTrue(security.is_alpha_only(
sm.find_permission_view_menu('can_edit', 'SqlMetricInlineView')))
self.assertTrue(security.is_alpha_only(
sm.find_permission_view_menu(
'can_delete', 'DruidMetricInlineView')))
def test_is_gamma_pvm(self):
self.assertTrue(security.is_gamma_pvm(
sm.find_permission_view_menu('can_show', 'TableModelView')))
def test_gamma_permissions(self):
self.assert_can_gamma(get_perm_tuples('Gamma'))
self.assert_cannot_gamma(get_perm_tuples('Gamma'))
        self.assert_cannot_alpha(get_perm_tuples('Gamma'))
def test_alpha_permissions(self):
self.assert_can_gamma(get_perm_tuples('Alpha'))
self.assert_can_alpha(get_perm_tuples('Alpha'))
self.assert_cannot_alpha(get_perm_tuples('Alpha'))
def test_admin_permissions(self):
self.assert_can_gamma(get_perm_tuples('Admin'))
self.assert_can_alpha(get_perm_tuples('Admin'))
self.assert_can_admin(get_perm_tuples('Admin'))
def test_sql_lab_permissions(self):
sql_lab_set = get_perm_tuples('sql_lab')
self.assertIn(('can_sql_json', 'Superset'), sql_lab_set)
self.assertIn(('can_csv', 'Superset'), sql_lab_set)
self.assertIn(('can_search_queries', 'Superset'), sql_lab_set)
self.assert_cannot_gamma(sql_lab_set)
self.assert_cannot_alpha(sql_lab_set)
def test_granter_permissions(self):
granter_set = get_perm_tuples('granter')
self.assertIn(('can_override_role_permissions', 'Superset'), granter_set)
self.assertIn(('can_approve', 'Superset'), granter_set)
self.assert_cannot_gamma(granter_set)
self.assert_cannot_alpha(granter_set)
|
alanmcruickshank/superset-dev
|
tests/security_tests.py
|
Python
|
apache-2.0
| 7,916